Diffstat (limited to 'src/tools/cargo')
-rw-r--r--src/tools/cargo/.github/ISSUE_TEMPLATE/bug_report.yml43
-rw-r--r--src/tools/cargo/.github/ISSUE_TEMPLATE/config.yml10
-rw-r--r--src/tools/cargo/.github/ISSUE_TEMPLATE/feature_request.yml35
-rw-r--r--src/tools/cargo/.github/ISSUE_TEMPLATE/tracking_issue.yml50
-rw-r--r--src/tools/cargo/.github/PULL_REQUEST_TEMPLATE.md43
-rw-r--r--src/tools/cargo/.github/workflows/audit.yml30
-rw-r--r--src/tools/cargo/.github/workflows/contrib.yml38
-rw-r--r--src/tools/cargo/.github/workflows/main.yml222
-rw-r--r--src/tools/cargo/.ignore6
-rw-r--r--src/tools/cargo/CHANGELOG.md3424
-rw-r--r--src/tools/cargo/CODE_OF_CONDUCT.md3
-rw-r--r--src/tools/cargo/CONTRIBUTING.md21
-rw-r--r--src/tools/cargo/Cargo.toml119
-rw-r--r--src/tools/cargo/LICENSE-APACHE201
-rw-r--r--src/tools/cargo/LICENSE-MIT23
-rw-r--r--src/tools/cargo/LICENSE-THIRD-PARTY1272
-rw-r--r--src/tools/cargo/README.md127
-rw-r--r--src/tools/cargo/benches/README.md124
-rw-r--r--src/tools/cargo/benches/benchsuite/Cargo.toml28
-rw-r--r--src/tools/cargo/benches/benchsuite/benches/resolve.rs146
-rw-r--r--src/tools/cargo/benches/benchsuite/benches/workspace_initialization.rs27
-rw-r--r--src/tools/cargo/benches/benchsuite/src/lib.rs197
-rw-r--r--src/tools/cargo/benches/capture/Cargo.toml12
-rw-r--r--src/tools/cargo/benches/capture/src/main.rs164
-rw-r--r--src/tools/cargo/benches/workspaces/cargo.tgz | Bin 0 -> 9967 bytes
-rw-r--r--src/tools/cargo/benches/workspaces/diem.tgz | Bin 0 -> 74635 bytes
-rw-r--r--src/tools/cargo/benches/workspaces/empty.tgz | Bin 0 -> 287 bytes
-rw-r--r--src/tools/cargo/benches/workspaces/gecko-dev.tgz | Bin 0 -> 49269 bytes
-rw-r--r--src/tools/cargo/benches/workspaces/rust-ws-inherit.tgz | Bin 0 -> 45123 bytes
-rw-r--r--src/tools/cargo/benches/workspaces/rust.tgz | Bin 0 -> 45064 bytes
-rw-r--r--src/tools/cargo/benches/workspaces/servo.tgz | Bin 0 -> 50573 bytes
-rw-r--r--src/tools/cargo/benches/workspaces/substrate.tgz | Bin 0 -> 92972 bytes
-rw-r--r--src/tools/cargo/benches/workspaces/tikv.tgz | Bin 0 -> 44476 bytes
-rw-r--r--src/tools/cargo/benches/workspaces/toml-rs.tgz | Bin 0 -> 1782 bytes
-rw-r--r--src/tools/cargo/build.rs70
-rwxr-xr-xsrc/tools/cargo/ci/clean-test-output.sh8
-rwxr-xr-xsrc/tools/cargo/ci/dump-environment.sh22
-rwxr-xr-xsrc/tools/cargo/ci/fetch-smoke-test.sh27
-rwxr-xr-xsrc/tools/cargo/ci/validate-man.sh26
-rw-r--r--src/tools/cargo/clippy.toml6
-rw-r--r--src/tools/cargo/crates/cargo-platform/Cargo.toml12
l---------src/tools/cargo/crates/cargo-platform/LICENSE-APACHE1
l---------src/tools/cargo/crates/cargo-platform/LICENSE-MIT1
-rw-r--r--src/tools/cargo/crates/cargo-platform/examples/matches.rs55
-rw-r--r--src/tools/cargo/crates/cargo-platform/src/cfg.rs319
-rw-r--r--src/tools/cargo/crates/cargo-platform/src/error.rs67
-rw-r--r--src/tools/cargo/crates/cargo-platform/src/lib.rs146
-rw-r--r--src/tools/cargo/crates/cargo-platform/tests/test_cfg.rs251
-rw-r--r--src/tools/cargo/crates/cargo-test-macro/Cargo.toml12
-rw-r--r--src/tools/cargo/crates/cargo-test-macro/src/lib.rs245
-rw-r--r--src/tools/cargo/crates/cargo-test-support/Cargo.toml32
-rw-r--r--src/tools/cargo/crates/cargo-test-support/build.rs7
-rw-r--r--src/tools/cargo/crates/cargo-test-support/containers/apache/Dockerfile26
-rw-r--r--src/tools/cargo/crates/cargo-test-support/containers/apache/bar/Cargo.toml4
-rw-r--r--src/tools/cargo/crates/cargo-test-support/containers/apache/bar/src/lib.rs1
-rw-r--r--src/tools/cargo/crates/cargo-test-support/containers/apache/httpd-cargo.conf12
-rw-r--r--src/tools/cargo/crates/cargo-test-support/containers/sshd/Dockerfile29
-rw-r--r--src/tools/cargo/crates/cargo-test-support/containers/sshd/bar/Cargo.toml4
-rw-r--r--src/tools/cargo/crates/cargo-test-support/containers/sshd/bar/src/lib.rs1
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/compare.rs784
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/containers.rs285
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/cross_compile.rs264
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/diff.rs174
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/git.rs256
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/install.rs29
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/lib.rs1424
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/paths.rs347
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/publish.rs245
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/registry.rs1635
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/tools.rs108
-rw-r--r--src/tools/cargo/crates/cargo-util/Cargo.toml28
l---------src/tools/cargo/crates/cargo-util/LICENSE-APACHE1
l---------src/tools/cargo/crates/cargo-util/LICENSE-MIT1
-rw-r--r--src/tools/cargo/crates/cargo-util/src/lib.rs18
-rw-r--r--src/tools/cargo/crates/cargo-util/src/paths.rs788
-rw-r--r--src/tools/cargo/crates/cargo-util/src/process_builder.rs689
-rw-r--r--src/tools/cargo/crates/cargo-util/src/process_error.rs200
-rw-r--r--src/tools/cargo/crates/cargo-util/src/read2.rs185
-rw-r--r--src/tools/cargo/crates/cargo-util/src/registry.rs45
-rw-r--r--src/tools/cargo/crates/cargo-util/src/sha256.rs53
-rw-r--r--src/tools/cargo/crates/crates-io/Cargo.toml21
l---------src/tools/cargo/crates/crates-io/LICENSE-APACHE1
l---------src/tools/cargo/crates/crates-io/LICENSE-MIT1
-rw-r--r--src/tools/cargo/crates/crates-io/lib.rs538
-rw-r--r--src/tools/cargo/crates/credential/README.md8
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential-1password/Cargo.toml12
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential-1password/src/main.rs314
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential-gnome-secret/Cargo.toml13
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential-gnome-secret/build.rs3
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential-gnome-secret/src/main.rs194
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential-macos-keychain/Cargo.toml11
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential-macos-keychain/src/main.rs50
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential-wincred/Cargo.toml11
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential-wincred/src/main.rs111
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential/Cargo.toml9
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential/README.md41
-rw-r--r--src/tools/cargo/crates/credential/cargo-credential/src/lib.rs86
-rw-r--r--src/tools/cargo/crates/home/CHANGELOG.md46
-rw-r--r--src/tools/cargo/crates/home/Cargo.toml20
l---------src/tools/cargo/crates/home/LICENSE-APACHE1
l---------src/tools/cargo/crates/home/LICENSE-MIT1
-rw-r--r--src/tools/cargo/crates/home/README.md27
-rw-r--r--src/tools/cargo/crates/home/src/env.rs106
-rw-r--r--src/tools/cargo/crates/home/src/lib.rs149
-rw-r--r--src/tools/cargo/crates/home/src/windows.rs66
-rw-r--r--src/tools/cargo/crates/mdman/Cargo.lock459
-rw-r--r--src/tools/cargo/crates/mdman/Cargo.toml17
-rw-r--r--src/tools/cargo/crates/mdman/README.md7
-rwxr-xr-xsrc/tools/cargo/crates/mdman/build-man.sh7
-rw-r--r--src/tools/cargo/crates/mdman/doc/mdman.md95
-rw-r--r--src/tools/cargo/crates/mdman/doc/out/mdman.1124
-rw-r--r--src/tools/cargo/crates/mdman/doc/out/mdman.md95
-rw-r--r--src/tools/cargo/crates/mdman/doc/out/mdman.txt91
-rw-r--r--src/tools/cargo/crates/mdman/src/format.rs20
-rw-r--r--src/tools/cargo/crates/mdman/src/format/man.rs436
-rw-r--r--src/tools/cargo/crates/mdman/src/format/md.rs112
-rw-r--r--src/tools/cargo/crates/mdman/src/format/text.rs605
-rw-r--r--src/tools/cargo/crates/mdman/src/hbs.rs215
-rw-r--r--src/tools/cargo/crates/mdman/src/lib.rs122
-rw-r--r--src/tools/cargo/crates/mdman/src/main.rs133
-rw-r--r--src/tools/cargo/crates/mdman/src/util.rs44
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare.rs48
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/formatting.1118
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/formatting.md95
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/formatting.txt84
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/links.145
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/links.md56
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/links.txt40
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/options.194
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/options.md77
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/options.txt57
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/tables.1108
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/tables.md35
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/tables.txt45
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/vars.79
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/vars.md7
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/expected/vars.txt6
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/formatting.md95
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/includes/links-include.md7
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/includes/options-common.md14
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/links.md49
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/options.md62
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/tables.md35
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare/vars.md7
-rw-r--r--src/tools/cargo/crates/mdman/tests/invalid.rs34
-rw-r--r--src/tools/cargo/crates/mdman/tests/invalid/nested.md6
-rw-r--r--src/tools/cargo/crates/mdman/tests/invalid/not-inside-options.md5
-rw-r--r--src/tools/cargo/crates/resolver-tests/Cargo.toml12
-rw-r--r--src/tools/cargo/crates/resolver-tests/src/lib.rs991
-rw-r--r--src/tools/cargo/crates/resolver-tests/tests/resolve.rs1562
-rw-r--r--src/tools/cargo/deny.toml273
-rwxr-xr-xsrc/tools/cargo/publish.py55
-rw-r--r--src/tools/cargo/src/bin/cargo/cli.rs566
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/add.rs362
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/bench.rs77
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/build.rs73
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/check.rs56
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/clean.rs37
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/config.rs55
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/doc.rs61
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/fetch.rs24
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/fix.rs92
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/generate_lockfile.rs17
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/git_checkout.rs14
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/help.rs147
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/init.rs22
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/install.rs198
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/locate_project.rs93
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/login.rs47
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/logout.rs16
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/metadata.rs56
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/mod.rs128
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/new.rs30
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/owner.rs51
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/package.rs62
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/pkgid.rs28
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/publish.rs55
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/read_manifest.rs20
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/remove.rs344
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/report.rs49
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/run.rs103
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/rustc.rs100
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/rustdoc.rs66
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/search.rs37
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/test.rs113
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/tree.rs305
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/uninstall.rs34
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/update.rs46
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/vendor.rs100
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/verify_project.rs26
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/version.rs16
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/yank.rs65
-rw-r--r--src/tools/cargo/src/bin/cargo/main.rs323
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/artifact.rs98
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/build_config.rs261
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/build_context/mod.rs180
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/build_context/target_info.rs1116
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/build_plan.rs163
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/compilation.rs452
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/compile_kind.rs200
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs724
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/context/mod.rs646
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/crate_type.rs115
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/custom_build.rs1060
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/fingerprint/dirty_reason.rs255
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs2191
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/future_incompat.rs521
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/job_queue/job.rs114
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/job_queue/job_state.rs197
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs1178
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/layout.rs242
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/links.rs61
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/lto.rs194
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/mod.rs1795
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/output_depinfo.rs172
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/rustdoc.rs272
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/standard_lib.rs253
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/timings.js470
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/timings.rs750
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/unit.rs251
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/unit_dependencies.rs1096
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/unit_graph.rs135
-rw-r--r--src/tools/cargo/src/cargo/core/dependency.rs642
-rw-r--r--src/tools/cargo/src/cargo/core/features.rs1197
-rw-r--r--src/tools/cargo/src/cargo/core/manifest.rs1014
-rw-r--r--src/tools/cargo/src/cargo/core/mod.rs32
-rw-r--r--src/tools/cargo/src/cargo/core/package.rs1218
-rw-r--r--src/tools/cargo/src/cargo/core/package_id.rs293
-rw-r--r--src/tools/cargo/src/cargo/core/package_id_spec.rs432
-rw-r--r--src/tools/cargo/src/cargo/core/profiles.rs1358
-rw-r--r--src/tools/cargo/src/cargo/core/registry.rs937
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/conflict_cache.rs225
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/context.rs441
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/dep_cache.rs579
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/encode.rs719
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/errors.rs439
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/features.rs924
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/mod.rs1152
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/resolve.rs407
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/types.rs381
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/version_prefs.rs189
-rw-r--r--src/tools/cargo/src/cargo/core/shell.rs630
-rw-r--r--src/tools/cargo/src/cargo/core/source/mod.rs340
-rw-r--r--src/tools/cargo/src/cargo/core/source/source_id.rs891
-rw-r--r--src/tools/cargo/src/cargo/core/summary.rs456
-rw-r--r--src/tools/cargo/src/cargo/core/workspace.rs1789
-rw-r--r--src/tools/cargo/src/cargo/lib.rs235
-rw-r--r--src/tools/cargo/src/cargo/macros.rs49
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_add/crate_spec.rs63
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_add/mod.rs971
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_clean.rs401
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_compile/compile_filter.rs309
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs924
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_compile/packages.rs220
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_compile/unit_generator.rs714
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_config.rs310
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_doc.rs75
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_fetch.rs79
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs257
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_install.rs911
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_new.rs880
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_output_metadata.rs344
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_package.rs986
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_pkgid.rs16
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_read_manifest.rs234
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_remove.rs65
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_run.rs106
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_test.rs439
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_uninstall.rs155
-rw-r--r--src/tools/cargo/src/cargo/ops/common_for_install_and_uninstall.rs717
-rw-r--r--src/tools/cargo/src/cargo/ops/fix.rs1021
-rw-r--r--src/tools/cargo/src/cargo/ops/lockfile.rs229
-rw-r--r--src/tools/cargo/src/cargo/ops/mod.rs90
-rw-r--r--src/tools/cargo/src/cargo/ops/registry.rs1250
-rw-r--r--src/tools/cargo/src/cargo/ops/resolve.rs866
-rw-r--r--src/tools/cargo/src/cargo/ops/tree/format/mod.rs135
-rw-r--r--src/tools/cargo/src/cargo/ops/tree/format/parse.rs123
-rw-r--r--src/tools/cargo/src/cargo/ops/tree/graph.rs685
-rw-r--r--src/tools/cargo/src/cargo/ops/tree/mod.rs451
-rw-r--r--src/tools/cargo/src/cargo/ops/vendor.rs426
-rw-r--r--src/tools/cargo/src/cargo/sources/config.rs317
-rw-r--r--src/tools/cargo/src/cargo/sources/directory.rs226
-rw-r--r--src/tools/cargo/src/cargo/sources/git/known_hosts.rs925
-rw-r--r--src/tools/cargo/src/cargo/sources/git/mod.rs10
-rw-r--r--src/tools/cargo/src/cargo/sources/git/oxide.rs355
-rw-r--r--src/tools/cargo/src/cargo/sources/git/source.rs295
-rw-r--r--src/tools/cargo/src/cargo/sources/git/utils.rs1366
-rw-r--r--src/tools/cargo/src/cargo/sources/mod.rs13
-rw-r--r--src/tools/cargo/src/cargo/sources/path.rs581
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/download.rs130
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/http_remote.rs854
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/index.rs864
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/local.rs157
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/mod.rs1014
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/remote.rs366
-rw-r--r--src/tools/cargo/src/cargo/sources/replaced.rs141
-rw-r--r--src/tools/cargo/src/cargo/util/auth.rs839
-rw-r--r--src/tools/cargo/src/cargo/util/canonical_url.rs75
-rw-r--r--src/tools/cargo/src/cargo/util/command_prelude.rs801
-rw-r--r--src/tools/cargo/src/cargo/util/config/de.rs633
-rw-r--r--src/tools/cargo/src/cargo/util/config/environment.rs189
-rw-r--r--src/tools/cargo/src/cargo/util/config/key.rs116
-rw-r--r--src/tools/cargo/src/cargo/util/config/mod.rs2647
-rw-r--r--src/tools/cargo/src/cargo/util/config/path.rs78
-rw-r--r--src/tools/cargo/src/cargo/util/config/target.rs246
-rw-r--r--src/tools/cargo/src/cargo/util/config/value.rs229
-rw-r--r--src/tools/cargo/src/cargo/util/counter.rs67
-rw-r--r--src/tools/cargo/src/cargo/util/cpu.rs244
-rw-r--r--src/tools/cargo/src/cargo/util/dependency_queue.rs255
-rw-r--r--src/tools/cargo/src/cargo/util/diagnostic_server.rs296
-rw-r--r--src/tools/cargo/src/cargo/util/errors.rs335
-rw-r--r--src/tools/cargo/src/cargo/util/flock.rs501
-rw-r--r--src/tools/cargo/src/cargo/util/graph.rs178
-rw-r--r--src/tools/cargo/src/cargo/util/hasher.rs24
-rw-r--r--src/tools/cargo/src/cargo/util/hex.rs31
-rw-r--r--src/tools/cargo/src/cargo/util/important_paths.rs45
-rw-r--r--src/tools/cargo/src/cargo/util/interning.rs182
-rw-r--r--src/tools/cargo/src/cargo/util/into_url.rs30
-rw-r--r--src/tools/cargo/src/cargo/util/into_url_with_base.rs50
-rw-r--r--src/tools/cargo/src/cargo/util/io.rs51
-rw-r--r--src/tools/cargo/src/cargo/util/job.rs149
-rw-r--r--src/tools/cargo/src/cargo/util/lev_distance.rs93
-rw-r--r--src/tools/cargo/src/cargo/util/lockserver.rs171
-rw-r--r--src/tools/cargo/src/cargo/util/machine_message.rs105
-rw-r--r--src/tools/cargo/src/cargo/util/mod.rs242
-rw-r--r--src/tools/cargo/src/cargo/util/network/mod.rs38
-rw-r--r--src/tools/cargo/src/cargo/util/network/retry.rs249
-rw-r--r--src/tools/cargo/src/cargo/util/network/sleep.rs103
-rw-r--r--src/tools/cargo/src/cargo/util/profile.rs93
-rw-r--r--src/tools/cargo/src/cargo/util/progress.rs531
-rw-r--r--src/tools/cargo/src/cargo/util/queue.rs82
-rw-r--r--src/tools/cargo/src/cargo/util/restricted_names.rs99
-rw-r--r--src/tools/cargo/src/cargo/util/rustc.rs366
-rw-r--r--src/tools/cargo/src/cargo/util/semver_ext.rs146
-rw-r--r--src/tools/cargo/src/cargo/util/to_semver.rs33
-rw-r--r--src/tools/cargo/src/cargo/util/toml/mod.rs3303
-rw-r--r--src/tools/cargo/src/cargo/util/toml/targets.rs969
-rw-r--r--src/tools/cargo/src/cargo/util/toml_mut/dependency.rs1132
-rw-r--r--src/tools/cargo/src/cargo/util/toml_mut/manifest.rs540
-rw-r--r--src/tools/cargo/src/cargo/util/toml_mut/mod.rs13
-rw-r--r--src/tools/cargo/src/cargo/util/vcs.rs104
-rw-r--r--src/tools/cargo/src/cargo/util/workspace.rs130
-rw-r--r--src/tools/cargo/src/cargo/version.rs80
-rw-r--r--src/tools/cargo/src/doc/README.md71
-rw-r--r--src/tools/cargo/src/doc/book.toml8
-rwxr-xr-xsrc/tools/cargo/src/doc/build-man.sh31
-rw-r--r--src/tools/cargo/src/doc/contrib/README.md12
-rw-r--r--src/tools/cargo/src/doc/contrib/book.toml17
-rw-r--r--src/tools/cargo/src/doc/contrib/src/SUMMARY.md20
-rw-r--r--src/tools/cargo/src/doc/contrib/src/design.md101
-rw-r--r--src/tools/cargo/src/doc/contrib/src/implementation/architecture.md5
-rw-r--r--src/tools/cargo/src/doc/contrib/src/implementation/console.md58
-rw-r--r--src/tools/cargo/src/doc/contrib/src/implementation/debugging.md26
-rw-r--r--src/tools/cargo/src/doc/contrib/src/implementation/filesystem.md21
-rw-r--r--src/tools/cargo/src/doc/contrib/src/implementation/index.md6
-rw-r--r--src/tools/cargo/src/doc/contrib/src/implementation/subcommands.md25
-rw-r--r--src/tools/cargo/src/doc/contrib/src/index.md29
-rw-r--r--src/tools/cargo/src/doc/contrib/src/issues.md109
-rw-r--r--src/tools/cargo/src/doc/contrib/src/process/index.md132
-rw-r--r--src/tools/cargo/src/doc/contrib/src/process/release.md164
-rw-r--r--src/tools/cargo/src/doc/contrib/src/process/unstable.md105
-rw-r--r--src/tools/cargo/src/doc/contrib/src/process/working-on-cargo.md172
-rw-r--r--src/tools/cargo/src/doc/contrib/src/tests/crater.md122
-rw-r--r--src/tools/cargo/src/doc/contrib/src/tests/index.md20
-rw-r--r--src/tools/cargo/src/doc/contrib/src/tests/profiling.md40
-rw-r--r--src/tools/cargo/src/doc/contrib/src/tests/running.md78
-rw-r--r--src/tools/cargo/src/doc/contrib/src/tests/writing.md341
-rw-r--r--src/tools/cargo/src/doc/man/cargo-add.md176
-rw-r--r--src/tools/cargo/src/doc/man/cargo-bench.md172
-rw-r--r--src/tools/cargo/src/doc/man/cargo-build.md116
-rw-r--r--src/tools/cargo/src/doc/man/cargo-check.md99
-rw-r--r--src/tools/cargo/src/doc/man/cargo-clean.md88
-rw-r--r--src/tools/cargo/src/doc/man/cargo-doc.md129
-rw-r--r--src/tools/cargo/src/doc/man/cargo-fetch.md65
-rw-r--r--src/tools/cargo/src/doc/man/cargo-fix.md182
-rw-r--r--src/tools/cargo/src/doc/man/cargo-generate-lockfile.md49
-rw-r--r--src/tools/cargo/src/doc/man/cargo-help.md26
-rw-r--r--src/tools/cargo/src/doc/man/cargo-init.md51
-rw-r--r--src/tools/cargo/src/doc/man/cargo-install.md231
-rw-r--r--src/tools/cargo/src/doc/man/cargo-locate-project.md66
-rw-r--r--src/tools/cargo/src/doc/man/cargo-login.md51
-rw-r--r--src/tools/cargo/src/doc/man/cargo-logout.md57
-rw-r--r--src/tools/cargo/src/doc/man/cargo-metadata.md353
-rw-r--r--src/tools/cargo/src/doc/man/cargo-new.md46
-rw-r--r--src/tools/cargo/src/doc/man/cargo-owner.md81
-rw-r--r--src/tools/cargo/src/doc/man/cargo-package.md138
-rw-r--r--src/tools/cargo/src/doc/man/cargo-pkgid.md89
-rw-r--r--src/tools/cargo/src/doc/man/cargo-publish.md117
-rw-r--r--src/tools/cargo/src/doc/man/cargo-remove.md94
-rw-r--r--src/tools/cargo/src/doc/man/cargo-report.md42
-rw-r--r--src/tools/cargo/src/doc/man/cargo-run.md115
-rw-r--r--src/tools/cargo/src/doc/man/cargo-rustc.md145
-rw-r--r--src/tools/cargo/src/doc/man/cargo-rustdoc.md116
-rw-r--r--src/tools/cargo/src/doc/man/cargo-search.md52
-rw-r--r--src/tools/cargo/src/doc/man/cargo-test.md199
-rw-r--r--src/tools/cargo/src/doc/man/cargo-tree.md268
-rw-r--r--src/tools/cargo/src/doc/man/cargo-uninstall.md63
-rw-r--r--src/tools/cargo/src/doc/man/cargo-update.md97
-rw-r--r--src/tools/cargo/src/doc/man/cargo-vendor.md93
-rw-r--r--src/tools/cargo/src/doc/man/cargo-verify-project.md58
-rw-r--r--src/tools/cargo/src/doc/man/cargo-version.md41
-rw-r--r--src/tools/cargo/src/doc/man/cargo-yank.md71
-rw-r--r--src/tools/cargo/src/doc/man/cargo.md238
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-add.txt243
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-bench.txt434
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-build.txt378
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-check.txt363
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-clean.txt172
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-doc.txt325
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-fetch.txt153
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-fix.txt434
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-generate-lockfile.txt128
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-help.txt23
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-init.txt136
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-install.txt399
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-locate-project.txt119
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-login.txt111
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-logout.txt115
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-metadata.txt441
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-new.txt131
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-owner.txt146
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-package.txt280
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-pkgid.txt170
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-publish.txt246
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-remove.txt155
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-report.txt34
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-run.txt277
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-rustc.txt385
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-rustdoc.txt340
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-search.txt108
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-test.txt466
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-tree.txt394
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-uninstall.txt120
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-update.txt166
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-vendor.txt162
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-verify-project.txt131
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-version.txt32
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-yank.txt135
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo.txt298
-rw-r--r--src/tools/cargo/src/doc/man/includes/description-install-root.md7
-rw-r--r--src/tools/cargo/src/doc/man/includes/description-one-target.md4
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-display.md24
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-future-incompat.md6
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-ignore-rust-version.md4
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-index.md3
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-jobs.md7
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-keep-going.md5
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-locked.md25
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-manifest-path.md4
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-message-format.md21
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-new.md39
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-profile-legacy-check.md10
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-profile.md4
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-registry.md6
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-release.md4
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-target-dir.md13
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-target-triple.md16
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-targets-bin-auto-built.md8
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-targets-lib-bin.md12
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-targets.md57
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-test.md14
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-timings.md16
-rw-r--r--src/tools/cargo/src/doc/man/includes/options-token.md11
-rw-r--r--src/tools/cargo/src/doc/man/includes/section-environment.md4
-rw-r--r--src/tools/cargo/src/doc/man/includes/section-exit-status.md4
-rw-r--r--src/tools/cargo/src/doc/man/includes/section-features.md26
-rw-r--r--src/tools/cargo/src/doc/man/includes/section-options-common.md39
-rw-r--r--src/tools/cargo/src/doc/man/includes/section-options-package.md13
-rw-r--r--src/tools/cargo/src/doc/man/includes/section-package-selection.md42
-rw-r--r--src/tools/cargo/src/doc/semver-check/Cargo.toml10
-rw-r--r--src/tools/cargo/src/doc/semver-check/src/main.rs289
-rw-r--r--src/tools/cargo/src/doc/src/SUMMARY.md93
-rw-r--r--src/tools/cargo/src/doc/src/appendix/git-authentication.md96
-rw-r--r--src/tools/cargo/src/doc/src/appendix/glossary.md274
-rw-r--r--src/tools/cargo/src/doc/src/commands/build-commands.md13
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-add.md283
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-bench.md508
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-build.md446
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-check.md427
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-clean.md205
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-doc.md392
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-fetch.md175
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-fix.md505
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-generate-lockfile.md149
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-help.md26
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-init.md162
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-install.md452
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-locate-project.md144
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-login.md130
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-logout.md136
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-metadata.md479
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-new.md157
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-owner.md176
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-package.md334
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-pkgid.md190
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-publish.md300
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-remove.md194
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-report.md43
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-run.md337
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-rustc.md445
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-rustdoc.md410
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-search.md134
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-test.md545
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-tree.md436
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-uninstall.md144
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-update.md197
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-vendor.md194
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-verify-project.md154
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-version.md42
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-yank.md164
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo.md335
-rw-r--r--src/tools/cargo/src/doc/src/commands/general-commands.md4
-rw-r--r--src/tools/cargo/src/doc/src/commands/index.md6
-rw-r--r--src/tools/cargo/src/doc/src/commands/manifest-commands.md11
-rw-r--r--src/tools/cargo/src/doc/src/commands/package-commands.md6
-rw-r--r--src/tools/cargo/src/doc/src/commands/publishing-commands.md7
-rw-r--r--src/tools/cargo/src/doc/src/faq.md261
-rw-r--r--src/tools/cargo/src/doc/src/getting-started/first-steps.md82
-rw-r--r--src/tools/cargo/src/doc/src/getting-started/index.md9
-rw-r--r--src/tools/cargo/src/doc/src/getting-started/installation.md38
-rw-r--r--src/tools/cargo/src/doc/src/guide/build-cache.md108
-rw-r--r--src/tools/cargo/src/doc/src/guide/cargo-home.md93
-rw-r--r--src/tools/cargo/src/doc/src/guide/cargo-toml-vs-cargo-lock.md107
-rw-r--r--src/tools/cargo/src/doc/src/guide/continuous-integration.md125
-rw-r--r--src/tools/cargo/src/doc/src/guide/creating-a-new-project.md97
-rw-r--r--src/tools/cargo/src/doc/src/guide/dependencies.md93
-rw-r--r--src/tools/cargo/src/doc/src/guide/index.md15
-rw-r--r--src/tools/cargo/src/doc/src/guide/project-layout.md61
-rw-r--r--src/tools/cargo/src/doc/src/guide/tests.md44
-rw-r--r--src/tools/cargo/src/doc/src/guide/why-cargo-exists.md65
-rw-r--r--src/tools/cargo/src/doc/src/guide/working-on-an-existing-project.md24
-rw-r--r--src/tools/cargo/src/doc/src/images/Cargo-Logo-Small.png | Bin 0 -> 58168 bytes
-rw-r--r--src/tools/cargo/src/doc/src/images/auth-level-acl.png | Bin 0 -> 90300 bytes
-rw-r--r--src/tools/cargo/src/doc/src/images/build-info.png | Bin 0 -> 28150 bytes
-rw-r--r--src/tools/cargo/src/doc/src/images/build-unit-time.png | Bin 0 -> 27728 bytes
-rw-r--r--src/tools/cargo/src/doc/src/images/cargo-concurrency-over-time.png | Bin 0 -> 31220 bytes
-rw-r--r--src/tools/cargo/src/doc/src/images/org-level-acl.png | Bin 0 -> 76572 bytes
-rw-r--r--src/tools/cargo/src/doc/src/images/winapi-features.svg3
-rw-r--r--src/tools/cargo/src/doc/src/index.md49
-rw-r--r--src/tools/cargo/src/doc/src/reference/build-script-examples.md506
-rw-r--r--src/tools/cargo/src/doc/src/reference/build-scripts.md485
-rw-r--r--src/tools/cargo/src/doc/src/reference/cargo-targets.md389
-rw-r--r--src/tools/cargo/src/doc/src/reference/config.md1219
-rw-r--r--src/tools/cargo/src/doc/src/reference/environment-variables.md418
-rw-r--r--src/tools/cargo/src/doc/src/reference/external-tools.md290
-rw-r--r--src/tools/cargo/src/doc/src/reference/features-examples.md187
-rw-r--r--src/tools/cargo/src/doc/src/reference/features.md521
-rw-r--r--src/tools/cargo/src/doc/src/reference/future-incompat-report.md37
-rw-r--r--src/tools/cargo/src/doc/src/reference/index.md26
-rw-r--r--src/tools/cargo/src/doc/src/reference/manifest.md632
-rw-r--r--src/tools/cargo/src/doc/src/reference/overriding-dependencies.md359
-rw-r--r--src/tools/cargo/src/doc/src/reference/pkgid-spec.md67
-rw-r--r--src/tools/cargo/src/doc/src/reference/profiles.md469
-rw-r--r--src/tools/cargo/src/doc/src/reference/publishing.md284
-rw-r--r--src/tools/cargo/src/doc/src/reference/registries.md151
-rw-r--r--src/tools/cargo/src/doc/src/reference/registry-index.md293
-rw-r--r--src/tools/cargo/src/doc/src/reference/registry-web-api.md358
-rw-r--r--src/tools/cargo/src/doc/src/reference/resolver.md558
-rw-r--r--src/tools/cargo/src/doc/src/reference/running-a-registry.md20
-rw-r--r--src/tools/cargo/src/doc/src/reference/semver.md1404
-rw-r--r--src/tools/cargo/src/doc/src/reference/source-replacement.md130
-rw-r--r--src/tools/cargo/src/doc/src/reference/specifying-dependencies.md513
-rw-r--r--src/tools/cargo/src/doc/src/reference/timings.md66
-rw-r--r--src/tools/cargo/src/doc/src/reference/unstable.md1473
-rw-r--r--src/tools/cargo/src/doc/src/reference/workspaces.md255
-rw-r--r--src/tools/cargo/src/doc/theme/favicon.png | Bin 0 -> 15086 bytes
-rw-r--r--src/tools/cargo/src/doc/theme/head.hbs5
-rw-r--r--src/tools/cargo/src/etc/_cargo468
-rw-r--r--src/tools/cargo/src/etc/cargo.bashcomp.sh287
-rw-r--r--src/tools/cargo/src/etc/man/cargo-add.1329
-rw-r--r--src/tools/cargo/src/etc/man/cargo-bench.1538
-rw-r--r--src/tools/cargo/src/etc/man/cargo-build.1467
-rw-r--r--src/tools/cargo/src/etc/man/cargo-check.1448
-rw-r--r--src/tools/cargo/src/etc/man/cargo-clean.1215
-rw-r--r--src/tools/cargo/src/etc/man/cargo-doc.1398
-rw-r--r--src/tools/cargo/src/etc/man/cargo-fetch.1179
-rw-r--r--src/tools/cargo/src/etc/man/cargo-fix.1545
-rw-r--r--src/tools/cargo/src/etc/man/cargo-generate-lockfile.1158
-rw-r--r--src/tools/cargo/src/etc/man/cargo-help.134
-rw-r--r--src/tools/cargo/src/etc/man/cargo-init.1171
-rw-r--r--src/tools/cargo/src/etc/man/cargo-install.1515
-rw-r--r--src/tools/cargo/src/etc/man/cargo-locate-project.1150
-rw-r--r--src/tools/cargo/src/etc/man/cargo-login.1135
-rw-r--r--src/tools/cargo/src/etc/man/cargo-logout.1147
-rw-r--r--src/tools/cargo/src/etc/man/cargo-metadata.1483
-rw-r--r--src/tools/cargo/src/etc/man/cargo-new.1166
-rw-r--r--src/tools/cargo/src/etc/man/cargo-owner.1197
-rw-r--r--src/tools/cargo/src/etc/man/cargo-package.1351
-rw-r--r--src/tools/cargo/src/etc/man/cargo-pkgid.1243
-rw-r--r--src/tools/cargo/src/etc/man/cargo-publish.1301
-rw-r--r--src/tools/cargo/src/etc/man/cargo-remove.1204
-rw-r--r--src/tools/cargo/src/etc/man/cargo-report.148
-rw-r--r--src/tools/cargo/src/etc/man/cargo-run.1344
-rw-r--r--src/tools/cargo/src/etc/man/cargo-rustc.1477
-rw-r--r--src/tools/cargo/src/etc/man/cargo-rustdoc.1416
-rw-r--r--src/tools/cargo/src/etc/man/cargo-search.1138
-rw-r--r--src/tools/cargo/src/etc/man/cargo-test.1581
-rw-r--r--src/tools/cargo/src/etc/man/cargo-tree.1507
-rw-r--r--src/tools/cargo/src/etc/man/cargo-uninstall.1161
-rw-r--r--src/tools/cargo/src/etc/man/cargo-update.1218
-rw-r--r--src/tools/cargo/src/etc/man/cargo-vendor.1209
-rw-r--r--src/tools/cargo/src/etc/man/cargo-verify-project.1168
-rw-r--r--src/tools/cargo/src/etc/man/cargo-version.152
-rw-r--r--src/tools/cargo/src/etc/man/cargo-yank.1169
-rw-r--r--src/tools/cargo/src/etc/man/cargo.1396
-rw-r--r--src/tools/cargo/tests/build-std/main.rs229
-rw-r--r--src/tools/cargo/tests/internal.rs107
-rw-r--r--src/tools/cargo/tests/testsuite/advanced_env.rs35
-rw-r--r--src/tools/cargo/tests/testsuite/alt_registry.rs1496
-rw-r--r--src/tools/cargo/tests/testsuite/artifact_dep.rs2901
-rw-r--r--src/tools/cargo/tests/testsuite/bad_config.rs1514
-rw-r--r--src/tools/cargo/tests/testsuite/bad_manifest_path.rs386
-rw-r--r--src/tools/cargo/tests/testsuite/bench.rs1673
-rw-r--r--src/tools/cargo/tests/testsuite/binary_name.rs301
-rw-r--r--src/tools/cargo/tests/testsuite/build.rs6409
-rw-r--r--src/tools/cargo/tests/testsuite/build_plan.rs222
-rw-r--r--src/tools/cargo/tests/testsuite/build_script.rs5168
-rw-r--r--src/tools/cargo/tests/testsuite/build_script_env.rs303
-rw-r--r--src/tools/cargo/tests/testsuite/build_script_extra_link_arg.rs376
-rw-r--r--src/tools/cargo/tests/testsuite/cache_messages.rs488
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add-basic.in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add-basic.in/src/lib.rs0
l---------src/tools/cargo/tests/testsuite/cargo_add/add_basic/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_basic/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_basic/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_basic/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_basic/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/add_multiple/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_multiple/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_multiple/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_multiple/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_multiple/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/stderr.log18
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/build/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/mod.rs28
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/out/Cargo.toml12
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/out/dependency/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/stderr.log4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/stderr.log5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/default_features/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/default_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/default_features/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/default_features/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/default_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/in/Cargo.toml11
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/out/Cargo.toml11
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/Cargo.toml14
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/dependency/Cargo.toml14
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/stderr.log10
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/dev/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/out/Cargo.toml12
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/out/dependency/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/stderr.log4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/dry_run/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dry_run/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dry_run/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dry_run/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/dry_run/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/features/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/features_empty/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_empty/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_empty/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_empty/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_empty/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_preserve/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_preserve/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_preserve/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_preserve/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_preserve/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_preserve/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/features_unknown/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_unknown/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_unknown/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_unknown/stderr.log5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_unknown/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/stderr.log4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/git/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git/mod.rs34
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/git_branch/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_branch/mod.rs37
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_branch/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_branch/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_branch/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/mod.rs29
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/git_dev/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_dev/mod.rs34
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_dev/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_dev/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_dev/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/mod.rs34
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/stderr.log4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/mod.rs74
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/stderr.log5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/mod.rs39
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/stderr.log4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/mod.rs34
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_registry/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_registry/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_registry/mod.rs40
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_registry/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_registry/stderr.log6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_registry/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/git_rev/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_rev/mod.rs36
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_rev/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_rev/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_rev/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/git_tag/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_tag/mod.rs36
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_tag/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_tag/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/git_tag/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/stderr.log9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/mod.rs28
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/stderr.log12
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/mod.rs34
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/mod.rs23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/mod.rs23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/mod.rs23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency-alt/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/stderr.log12
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/invalid_path/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path/stderr.log10
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/primary/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/out/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/out/primary/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/stderr.log4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/list_features/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/dependency/Cargo.toml13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/optional/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/optional/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/dependency/Cargo.toml13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/Cargo.toml13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/mod.rs30
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/dependency/Cargo.toml13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/locked_changed/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/locked_changed/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/locked_changed/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/locked_changed/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/locked_changed/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/Cargo.lock16
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/Cargo.lock17
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/out/Cargo.lock23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/mod.rs31
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/dependency/Cargo.toml14
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/mod.rs23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/dependency/Cargo.toml14
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/stderr.log10
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/mod.rs203
l---------src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/namever/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/namever/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/namever/out/Cargo.toml10
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/namever/stderr.log4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/namever/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/no_args/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_args/mod.rs24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_args/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_args/stderr.log8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_args/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/no_default_features/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_default_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_default_features/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_default_features/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_default_features/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/no_optional/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_optional/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_optional/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_optional/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_optional/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/optional/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/optional/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/optional/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/optional/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/optional/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/primary/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/mod.rs23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/dependency/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/mod.rs27
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/stderr.log8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/dependency/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/stderr.log4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/dependency/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/stderr.log4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/Cargo.toml13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/out/Cargo.toml13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stderr.log8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/dependency/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/stderr.log4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/primary/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/mod.rs34
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/primary/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/Cargo.toml14
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/dependency/Cargo.toml14
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/stderr.log10
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path/in/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path/in/primary/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path/out/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path/out/primary/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/primary/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_dev/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_dev/out/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_dev/out/primary/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_dev/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_dev/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/primary/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/out/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/out/primary/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency/Cargo.toml14
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional/Cargo.toml7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/dependency/Cargo.toml14
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/primary/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/out/dependency/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/out/primary/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/out/Cargo.toml10
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/out/Cargo.toml10
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/quiet/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/quiet/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/quiet/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/quiet/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/quiet/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/registry/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/registry/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/registry/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/registry/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/registry/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/registry/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/rename/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/rename/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/rename/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/rename/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/rename/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/require_weak/in/Cargo.toml11
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/require_weak/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/require_weak/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/require_weak/out/Cargo.toml11
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/require_weak/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/require_weak/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/in/Cargo.toml13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/out/Cargo.toml14
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/target/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/target/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/target/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/target/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/target/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/target_cfg/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/target_cfg/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/target_cfg/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/target_cfg/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/target_cfg/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/Cargo.toml20
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/mod.rs23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/dependency/Cargo.toml20
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/vers/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/vers/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/vers/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/vers/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/vers/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_name/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_name/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_name/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/primary/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_alias_config.rs434
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_command.rs535
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_config.rs520
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_env_config.rs181
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_features.rs714
l---------src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/out/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/build/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/build/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/build/out/Cargo.toml21
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/build/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/build/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/dev/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/dev/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/dev/out/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/dev/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/dev/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/dry_run/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/dry_run/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/dry_run/out/Cargo.toml24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/dry_run/out/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/dry_run/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/dry_run/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/mod.rs72
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/in/Cargo.toml36
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/in/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/out/Cargo.toml32
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/my-package/Cargo.toml26
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/my-package/src/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/my-package/Cargo.toml25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/my-package/src/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/out/Cargo.toml24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/out/Cargo.toml24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-a/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-a/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-b/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-b/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/out/Cargo.toml24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/out/Cargo.toml24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/out/Cargo.toml33
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/out/Cargo.toml33
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/mod.rs88
l---------src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/out/Cargo.toml22
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/out/Cargo.toml20
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/no_arg/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/no_arg/mod.rs24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/no_arg/out/Cargo.toml24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/no_arg/stderr.log6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/no_arg/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/offline/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/offline/mod.rs32
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/offline/out/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/offline/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/offline/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/out/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/out/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/package/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/package/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/package/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-a/Cargo.toml22
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-a/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-b/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-b/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/package/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/package/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove-basic.in/Cargo.toml24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove-basic.in/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-a/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-a/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-b/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-b/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove-target.in/Cargo.toml33
l---------src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/out/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/target/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target/out/Cargo.toml30
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/target_build/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target_build/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target_build/out/Cargo.toml30
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target_build/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target_build/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_remove/target_dev/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target_dev/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target_dev/out/Cargo.toml30
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock58
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.toml24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/src/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock51
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.toml23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/src/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/my-package/Cargo.toml24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/my-package/src/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/Cargo.toml2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/my-package/Cargo.toml21
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/my-package/src/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/Cargo.toml30
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/my-member/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/my-member/src/main.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/Cargo.toml24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/my-member/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/my-member/src/main.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-other-package/Cargo.toml22
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-other-package/src/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-package/Cargo.toml24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-package/src/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/mod.rs25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-other-package/Cargo.toml22
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-other-package/src/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-package/Cargo.toml21
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-package/src/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_targets.rs68
-rw-r--r--src/tools/cargo/tests/testsuite/cfg.rs515
-rw-r--r--src/tools/cargo/tests/testsuite/check.rs1521
-rw-r--r--src/tools/cargo/tests/testsuite/check_cfg.rs588
-rw-r--r--src/tools/cargo/tests/testsuite/clean.rs675
-rw-r--r--src/tools/cargo/tests/testsuite/collisions.rs550
-rw-r--r--src/tools/cargo/tests/testsuite/concurrent.rs507
-rw-r--r--src/tools/cargo/tests/testsuite/config.rs1596
-rw-r--r--src/tools/cargo/tests/testsuite/config_cli.rs564
-rw-r--r--src/tools/cargo/tests/testsuite/config_include.rs285
-rw-r--r--src/tools/cargo/tests/testsuite/corrupt_git.rs159
-rw-r--r--src/tools/cargo/tests/testsuite/credential_process.rs504
-rw-r--r--src/tools/cargo/tests/testsuite/cross_compile.rs1342
-rw-r--r--src/tools/cargo/tests/testsuite/cross_publish.rs122
-rw-r--r--src/tools/cargo/tests/testsuite/custom_target.rs250
-rw-r--r--src/tools/cargo/tests/testsuite/death.rs101
-rw-r--r--src/tools/cargo/tests/testsuite/dep_info.rs600
-rw-r--r--src/tools/cargo/tests/testsuite/direct_minimal_versions.rs236
-rw-r--r--src/tools/cargo/tests/testsuite/directory.rs774
-rw-r--r--src/tools/cargo/tests/testsuite/doc.rs2503
-rw-r--r--src/tools/cargo/tests/testsuite/docscrape.rs637
-rw-r--r--src/tools/cargo/tests/testsuite/edition.rs124
-rw-r--r--src/tools/cargo/tests/testsuite/error.rs19
-rw-r--r--src/tools/cargo/tests/testsuite/features.rs2084
-rw-r--r--src/tools/cargo/tests/testsuite/features2.rs2553
-rw-r--r--src/tools/cargo/tests/testsuite/features_namespaced.rs1215
-rw-r--r--src/tools/cargo/tests/testsuite/fetch.rs135
-rw-r--r--src/tools/cargo/tests/testsuite/fix.rs1855
-rw-r--r--src/tools/cargo/tests/testsuite/freshness.rs2816
-rw-r--r--src/tools/cargo/tests/testsuite/future_incompat_report.rs391
-rw-r--r--src/tools/cargo/tests/testsuite/generate_lockfile.rs230
-rw-r--r--src/tools/cargo/tests/testsuite/git.rs3702
-rw-r--r--src/tools/cargo/tests/testsuite/git_auth.rs437
-rw-r--r--src/tools/cargo/tests/testsuite/git_gc.rs117
-rw-r--r--src/tools/cargo/tests/testsuite/glob_targets.rs539
-rw-r--r--src/tools/cargo/tests/testsuite/help.rs219
-rw-r--r--src/tools/cargo/tests/testsuite/https.rs152
-rw-r--r--src/tools/cargo/tests/testsuite/inheritable_workspace_fields.rs1717
l---------src/tools/cargo/tests/testsuite/init/auto_git/in1
-rw-r--r--src/tools/cargo/tests/testsuite/init/auto_git/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/auto_git/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/auto_git/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/auto_git/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/auto_git/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/in/src/main.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/mod.rs21
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/src/main.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/in/main.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/Cargo.toml12
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/main.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/in/src/main.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/mod.rs21
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/src/main.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/in/case.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/Cargo.toml12
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/case.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/in/src/case.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/Cargo.toml12
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/src/case.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/in/main.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/Cargo.toml12
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/main.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/both_lib_and_bin/mod.rs19
-rw-r--r--src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/case.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/mod.rs18
-rw-r--r--src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/case.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/mod.rs21
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/Cargo.toml16
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/case.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/in/case.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/mod.rs21
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/Cargo.toml12
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/case.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/in/case.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/mod.rs21
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/Cargo.toml12
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/case.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/empty_dir/.keep0
-rw-r--r--src/tools/cargo/tests/testsuite/init/empty_dir/mod.rs7
l---------src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/in1
-rw-r--r--src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/mod.rs21
-rw-r--r--src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/src/main.rs3
-rw-r--r--src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/formats_source/in/rustfmt.toml1
-rw-r--r--src/tools/cargo/tests/testsuite/init/formats_source/mod.rs29
-rw-r--r--src/tools/cargo/tests/testsuite/init/formats_source/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/formats_source/out/rustfmt.toml1
-rw-r--r--src/tools/cargo/tests/testsuite/init/formats_source/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/formats_source/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/formats_source/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/fossil_autodetect/in/.fossil/.keep0
-rw-r--r--src/tools/cargo/tests/testsuite/init/fossil_autodetect/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/clean-glob2
-rw-r--r--src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/ignore-glob2
-rw-r--r--src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/fossil_autodetect/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/fossil_autodetect/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/git_autodetect/mod.rs24
-rw-r--r--src/tools/cargo/tests/testsuite/init/git_autodetect/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/git_autodetect/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/git_autodetect/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/git_autodetect/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/in/rustfmt.toml1
-rw-r--r--src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/rustfmt.toml1
-rw-r--r--src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/in/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/mod.rs21
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/Cargo.toml12
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/in/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/mod.rs21
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/Cargo.toml12
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/invalid_dir_name/mod.rs21
-rw-r--r--src/tools/cargo/tests/testsuite/init/invalid_dir_name/stderr.log8
-rw-r--r--src/tools/cargo/tests/testsuite/init/invalid_dir_name/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/in/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/Cargo.toml12
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_src/in/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_src/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/mercurial_autodetect/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/mod.rs42
-rw-r--r--src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/case.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/case.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/main.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stderr.log4
-rw-r--r--src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/no_filename/mod.rs16
-rw-r--r--src/tools/cargo/tests/testsuite/init/no_filename/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/no_filename/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/path_contains_separator/in/.keep0
-rw-r--r--src/tools/cargo/tests/testsuite/init/path_contains_separator/mod.rs26
-rw-r--r--src/tools/cargo/tests/testsuite/init/path_contains_separator/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/path_contains_separator/out/src/main.rs3
-rw-r--r--src/tools/cargo/tests/testsuite/init/path_contains_separator/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/init/path_contains_separator/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/pijul_autodetect/in/.pijul/.keep0
-rw-r--r--src/tools/cargo/tests/testsuite/init/pijul_autodetect/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/.ignore2
-rw-r--r--src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/pijul_autodetect/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/pijul_autodetect/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/reserved_name/mod.rs21
-rw-r--r--src/tools/cargo/tests/testsuite/init/reserved_name/stderr.log8
-rw-r--r--src/tools/cargo/tests/testsuite/init/reserved_name/stdout.log0
l---------src/tools/cargo/tests/testsuite/init/simple_bin/in1
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_bin/mod.rs29
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_bin/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_bin/out/src/main.rs3
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_bin/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_bin/stdout.log0
l---------src/tools/cargo/tests/testsuite/init/simple_git/in1
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_git/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_git/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_git/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_git/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_git/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/mod.rs28
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stdout.log0
l---------src/tools/cargo/tests/testsuite/init/simple_hg/in1
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_hg/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_hg/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_hg/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_hg/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_hg/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stdout.log0
l---------src/tools/cargo/tests/testsuite/init/simple_lib/in1
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_lib/mod.rs29
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_lib/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_lib/out/src/lib.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_lib/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/simple_lib/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/unknown_flags/mod.rs15
-rw-r--r--src/tools/cargo/tests/testsuite/init/unknown_flags/stderr.log7
-rw-r--r--src/tools/cargo/tests/testsuite/init/unknown_flags/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/init/with_argument/in/foo/.keep0
-rw-r--r--src/tools/cargo/tests/testsuite/init/with_argument/mod.rs21
-rw-r--r--src/tools/cargo/tests/testsuite/init/with_argument/out/foo/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/init/with_argument/out/foo/src/main.rs3
-rw-r--r--src/tools/cargo/tests/testsuite/init/with_argument/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/init/with_argument/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/install.rs2289
-rw-r--r--src/tools/cargo/tests/testsuite/install_upgrade.rs862
-rw-r--r--src/tools/cargo/tests/testsuite/jobserver.rs250
-rw-r--r--src/tools/cargo/tests/testsuite/list_availables.rs232
-rw-r--r--src/tools/cargo/tests/testsuite/local_registry.rs528
-rw-r--r--src/tools/cargo/tests/testsuite/locate_project.rs76
-rw-r--r--src/tools/cargo/tests/testsuite/lockfile_compat.rs890
-rw-r--r--src/tools/cargo/tests/testsuite/login.rs404
-rw-r--r--src/tools/cargo/tests/testsuite/logout.rs104
-rw-r--r--src/tools/cargo/tests/testsuite/lto.rs850
-rw-r--r--src/tools/cargo/tests/testsuite/main.rs146
-rw-r--r--src/tools/cargo/tests/testsuite/member_discovery.rs44
-rw-r--r--src/tools/cargo/tests/testsuite/member_errors.rs164
-rw-r--r--src/tools/cargo/tests/testsuite/message_format.rs133
-rw-r--r--src/tools/cargo/tests/testsuite/messages.rs144
-rw-r--r--src/tools/cargo/tests/testsuite/metabuild.rs771
-rw-r--r--src/tools/cargo/tests/testsuite/metadata.rs4192
-rw-r--r--src/tools/cargo/tests/testsuite/minimal_versions.rs38
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/alloc/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/alloc/src/lib.rs11
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/compiler_builtins/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/compiler_builtins/src/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/core/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/core/src/lib.rs9
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/panic_unwind/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/panic_unwind/src/lib.rs5
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/proc_macro/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/proc_macro/src/lib.rs11
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-alloc/Cargo.toml11
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-alloc/lib.rs3
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-core/Cargo.toml11
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-core/lib.rs3
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-std/Cargo.toml11
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-std/lib.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/std/Cargo.toml11
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/std/src/lib.rs12
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/test/Cargo.toml18
-rw-r--r--src/tools/cargo/tests/testsuite/mock-std/library/test/src/lib.rs10
-rw-r--r--src/tools/cargo/tests/testsuite/multitarget.rs231
-rw-r--r--src/tools/cargo/tests/testsuite/net_config.rs74
-rw-r--r--src/tools/cargo/tests/testsuite/new.rs560
-rw-r--r--src/tools/cargo/tests/testsuite/offline.rs728
-rw-r--r--src/tools/cargo/tests/testsuite/old_cargos.rs679
-rw-r--r--src/tools/cargo/tests/testsuite/out_dir.rs317
-rw-r--r--src/tools/cargo/tests/testsuite/owner.rs192
-rw-r--r--src/tools/cargo/tests/testsuite/package.rs2764
-rw-r--r--src/tools/cargo/tests/testsuite/package_features.rs704
-rw-r--r--src/tools/cargo/tests/testsuite/patch.rs2645
-rw-r--r--src/tools/cargo/tests/testsuite/path.rs1139
-rw-r--r--src/tools/cargo/tests/testsuite/paths.rs226
-rw-r--r--src/tools/cargo/tests/testsuite/pkgid.rs128
-rw-r--r--src/tools/cargo/tests/testsuite/plugins.rs421
-rw-r--r--src/tools/cargo/tests/testsuite/proc_macro.rs560
-rw-r--r--src/tools/cargo/tests/testsuite/profile_config.rs519
-rw-r--r--src/tools/cargo/tests/testsuite/profile_custom.rs731
-rw-r--r--src/tools/cargo/tests/testsuite/profile_overrides.rs515
-rw-r--r--src/tools/cargo/tests/testsuite/profile_targets.rs674
-rw-r--r--src/tools/cargo/tests/testsuite/profiles.rs744
-rw-r--r--src/tools/cargo/tests/testsuite/progress.rs159
-rw-r--r--src/tools/cargo/tests/testsuite/pub_priv.rs199
-rw-r--r--src/tools/cargo/tests/testsuite/publish.rs2951
-rw-r--r--src/tools/cargo/tests/testsuite/publish_lockfile.rs592
-rw-r--r--src/tools/cargo/tests/testsuite/read_manifest.rs206
-rw-r--r--src/tools/cargo/tests/testsuite/registry.rs3406
-rw-r--r--src/tools/cargo/tests/testsuite/registry_auth.rs519
-rw-r--r--src/tools/cargo/tests/testsuite/rename_deps.rs391
-rw-r--r--src/tools/cargo/tests/testsuite/replace.rs1300
-rw-r--r--src/tools/cargo/tests/testsuite/required_features.rs1452
-rw-r--r--src/tools/cargo/tests/testsuite/run.rs1509
-rw-r--r--src/tools/cargo/tests/testsuite/rust_version.rs194
-rw-r--r--src/tools/cargo/tests/testsuite/rustc.rs794
-rw-r--r--src/tools/cargo/tests/testsuite/rustc_info_cache.rs186
-rw-r--r--src/tools/cargo/tests/testsuite/rustdoc.rs252
-rw-r--r--src/tools/cargo/tests/testsuite/rustdoc_extern_html.rs426
-rw-r--r--src/tools/cargo/tests/testsuite/rustdocflags.rs155
-rw-r--r--src/tools/cargo/tests/testsuite/rustflags.rs1673
-rw-r--r--src/tools/cargo/tests/testsuite/search.rs192
-rw-r--r--src/tools/cargo/tests/testsuite/shell_quoting.rs37
-rw-r--r--src/tools/cargo/tests/testsuite/source_replacement.rs250
-rw-r--r--src/tools/cargo/tests/testsuite/ssh.rs592
-rw-r--r--src/tools/cargo/tests/testsuite/standard_lib.rs657
-rw-r--r--src/tools/cargo/tests/testsuite/test.rs4820
-rw-r--r--src/tools/cargo/tests/testsuite/timings.rs53
-rw-r--r--src/tools/cargo/tests/testsuite/tool_paths.rs402
-rw-r--r--src/tools/cargo/tests/testsuite/tree.rs2150
-rw-r--r--src/tools/cargo/tests/testsuite/tree_graph_features.rs362
-rw-r--r--src/tools/cargo/tests/testsuite/unit_graph.rs233
-rw-r--r--src/tools/cargo/tests/testsuite/update.rs832
-rw-r--r--src/tools/cargo/tests/testsuite/vendor.rs1152
-rw-r--r--src/tools/cargo/tests/testsuite/verify_project.rs73
-rw-r--r--src/tools/cargo/tests/testsuite/version.rs54
-rw-r--r--src/tools/cargo/tests/testsuite/warn_on_failure.rs111
-rw-r--r--src/tools/cargo/tests/testsuite/weak_dep_features.rs632
-rw-r--r--src/tools/cargo/tests/testsuite/workspaces.rs2531
-rw-r--r--src/tools/cargo/tests/testsuite/yank.rs202
-rw-r--r--src/tools/cargo/triagebot.toml320
1947 files changed, 271845 insertions, 0 deletions
diff --git a/src/tools/cargo/.github/ISSUE_TEMPLATE/bug_report.yml b/src/tools/cargo/.github/ISSUE_TEMPLATE/bug_report.yml
new file mode 100644
index 000000000..f465e1240
--- /dev/null
+++ b/src/tools/cargo/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -0,0 +1,43 @@
+name: Bug Report
+description: Create a report to help us improve
+labels: ["C-bug"]
+body:
+ - type: markdown
+ attributes:
+ value: Thanks for filing a 🐛 bug report 😄!
+ - type: textarea
+ id: problem
+ attributes:
+ label: Problem
+ description: >
+ Please provide a clear and concise description of what the bug is,
+ including what currently happens and what you expected to happen.
+ validations:
+ required: true
+ - type: textarea
+ id: steps
+ attributes:
+ label: Steps
+ description: Please list the steps to reproduce the bug.
+ placeholder: |
+ 1.
+ 2.
+ 3.
+ - type: textarea
+ id: possible-solutions
+ attributes:
+ label: Possible Solution(s)
+ description: >
+ Not obligatory, but suggest a fix/reason for the bug,
+ or ideas for how to implement the addition or change.
+ - type: textarea
+ id: notes
+ attributes:
+ label: Notes
+ description: Provide any additional notes that might be helpful.
+ - type: textarea
+ id: version
+ attributes:
+ label: Version
+ description: Please paste the output of running `cargo version --verbose`.
+ render: text
diff --git a/src/tools/cargo/.github/ISSUE_TEMPLATE/config.yml b/src/tools/cargo/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 000000000..ed4289847
--- /dev/null
+++ b/src/tools/cargo/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,10 @@
+contact_links:
+ - name: Question
+ url: https://users.rust-lang.org
+ about: >
+ Got a question about Cargo? Ask the community on the user forum.
+ - name: Inspiring Idea
+ url: https://internals.rust-lang.org/c/tools-and-infrastructure/cargo
+ about: >
+ Need more discussion on your next big idea?
+ Reach out to the community on the internals forum.
diff --git a/src/tools/cargo/.github/ISSUE_TEMPLATE/feature_request.yml b/src/tools/cargo/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 000000000..bae2469d1
--- /dev/null
+++ b/src/tools/cargo/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,35 @@
+name: Feature Request
+description: Suggest an idea for enhancing Cargo
+labels: ["C-feature-request"]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ Thanks for filing a 🙋 feature request 😄!
+
+ If the feature request is relatively small and already has a possible solution, this might be the place for you.
+
+ If you are brewing a big feature that needs feedback from the community, [the internal forum] is the best fit, especially for pre-RFCs. You can also talk the idea over with other developers in the [#t-cargo Zulip stream].
+
+ [the internal forum]: https://internals.rust-lang.org/c/tools-and-infrastructure/cargo/15
+ [#t-cargo Zulip stream]: https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo
+ - type: textarea
+ id: problem
+ attributes:
+ label: Problem
+ description: >
+ Please provide a clear description of your use case and the problem
+ this feature request is trying to solve.
+ validations:
+ required: true
+ - type: textarea
+ id: solution
+ attributes:
+ label: Proposed Solution
+ description: >
+ Please provide a clear and concise description of what you want to happen.
+ - type: textarea
+ id: notes
+ attributes:
+ label: Notes
+ description: Provide any additional context or information that might be helpful.
diff --git a/src/tools/cargo/.github/ISSUE_TEMPLATE/tracking_issue.yml b/src/tools/cargo/.github/ISSUE_TEMPLATE/tracking_issue.yml
new file mode 100644
index 000000000..d687db4e5
--- /dev/null
+++ b/src/tools/cargo/.github/ISSUE_TEMPLATE/tracking_issue.yml
@@ -0,0 +1,50 @@
+name: Tracking Issue
+description: A tracking issue for an accepted feature or RFC in Cargo.
+title: "Tracking Issue for _FEATURE_NAME_"
+labels: ["C-tracking-issue"]
+body:
+ - type: markdown
+ attributes:
+ value: >
+ Thank you for creating a tracking issue! Tracking issues are for tracking an
+ accepted feature or RFC from implementation to stabilization. Please do not
+ file a tracking issue until the feature or RFC has been approved.
+ - type: textarea
+ id: summary
+ attributes:
+ label: Summary
+ description: Please provide a very brief summary of the feature.
+ value: |
+ RFC: [#NNNN](https://github.com/rust-lang/rfcs/pull/NNNN) <!-- If this is an RFC -->
+ Original issue: #NNNN <!-- if there is a related issue that spawned this feature -->
+ Implementation: #NNNN <!-- link to the PR that implemented this feature if applicable -->
+ Documentation: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#my-feature
+
+ Please enter a short, one-sentence description here.
+ validations:
+ required: true
+ - type: textarea
+ id: unresolved
+ attributes:
+ label: Unresolved Issues
+ description: List issues that have not yet been resolved.
+ placeholder: |
+ * [ ] Make a list of any known implementation or design issues.
+ - type: textarea
+ id: future
+ attributes:
+ label: Future Extensions
+ description: >
+ An optional section where you can mention ways in which the feature may be
+ extended in the future, but which this feature is explicitly not
+ intended to address.
+ - type: textarea
+ id: about
+ attributes:
+ label: About tracking issues
+ description: Please include this notice in the issue.
+ value: |
+ Tracking issues are used to record the overall progress of implementation.
+ They are also used as hubs connecting to other relevant issues, e.g., bugs or open design questions.
+ A tracking issue is however *not* meant for large scale discussion, questions, or bug reports about a feature.
+ Instead, open a dedicated issue for the specific matter and add the relevant feature gate label.
diff --git a/src/tools/cargo/.github/PULL_REQUEST_TEMPLATE.md b/src/tools/cargo/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 000000000..141ac5ed4
--- /dev/null
+++ b/src/tools/cargo/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,43 @@
+<!-- homu-ignore:start -->
+<!--
+NOTICE: Due to limited review capacity, the Cargo team is not accepting new
+features or major changes at this time. Please consult with the team before
+opening a new PR. Only issues that have been explicitly marked as accepted
+will be reviewed.
+
+Thanks for submitting a pull request 🎉! Here are some tips for you:
+
+* If this is your first contribution, read "Cargo Contribution Guide":
+ https://doc.crates.io/contrib/
+* Run `cargo fmt --all` to format your code changes.
+* Small commits and pull requests are always preferable and easy to review.
+* If your idea is large and needs feedback from the community, read how:
+ https://doc.crates.io/contrib/process/#working-on-large-features
+* Cargo takes care of compatibility. Read our design principles:
+ https://doc.crates.io/contrib/design.html
+* When changing help text of cargo commands, follow the steps to generate docs:
+ https://github.com/rust-lang/cargo/tree/master/src/doc#building-the-man-pages
+* If your PR is not finished, set it as "draft" PR or add "WIP" in its title.
+* It's ok to use the CI resources to test your PR, but please don't abuse them.
+
+### What does this PR try to resolve?
+
+Explain the motivation behind this change.
+A clear overview along with an in-depth explanation are helpful.
+
+You can use `Fixes #<issue number>` to associate this PR to an existing issue.
+
+### How should we test and review this PR?
+
+Demonstrate how you tested this change and guide reviewers through your PR.
+With a smooth review process, a pull request usually gets reviewed more quickly.
+
+If you don't know how to write and run your tests, please read the guide:
+https://doc.crates.io/contrib/tests
+
+### Additional information
+
+Other information you want to mention in this PR, such as prior art,
+future extensions, an unresolved problem, or a TODO list.
+-->
+<!-- homu-ignore:end -->
diff --git a/src/tools/cargo/.github/workflows/audit.yml b/src/tools/cargo/.github/workflows/audit.yml
new file mode 100644
index 000000000..14e35b7b3
--- /dev/null
+++ b/src/tools/cargo/.github/workflows/audit.yml
@@ -0,0 +1,30 @@
+name: Security audit
+
+permissions:
+ contents: read
+
+on:
+ pull_request:
+ paths:
+ - '**/Cargo.toml'
+ - '**/Cargo.lock'
+ push:
+ branches:
+ - master
+
+jobs:
+ cargo_deny:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ checks:
+ - advisories
+ - bans licenses sources
+ steps:
+ - uses: actions/checkout@v3
+ - uses: EmbarkStudios/cargo-deny-action@v1
+ # Prevent sudden announcement of a new advisory from failing ci:
+ continue-on-error: ${{ matrix.checks == 'advisories' }}
+ with:
+ command: check ${{ matrix.checks }}
+ rust-version: stable
diff --git a/src/tools/cargo/.github/workflows/contrib.yml b/src/tools/cargo/.github/workflows/contrib.yml
new file mode 100644
index 000000000..bbd4a7ef7
--- /dev/null
+++ b/src/tools/cargo/.github/workflows/contrib.yml
@@ -0,0 +1,38 @@
+name: Contrib Deploy
+on:
+ push:
+ branches:
+ - master
+
+permissions:
+ contents: read
+
+jobs:
+ deploy:
+ permissions:
+ contents: write # for Git to git push
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ - name: Install mdbook
+ run: |
+ mkdir mdbook
+ curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.27/mdbook-v0.4.27-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook
+ echo `pwd`/mdbook >> $GITHUB_PATH
+ - name: Deploy docs
+ run: |
+ cd src/doc/contrib
+ mdbook build
+ git worktree add gh-pages gh-pages
+ git config user.name "Deploy from CI"
+ git config user.email ""
+ cd gh-pages
+ # Delete the ref to avoid keeping history.
+ git update-ref -d refs/heads/gh-pages
+ rm -rf contrib
+ mv ../book contrib
+ git add contrib
+ git commit -m "Deploy $GITHUB_SHA to gh-pages"
+ git push --force
diff --git a/src/tools/cargo/.github/workflows/main.yml b/src/tools/cargo/.github/workflows/main.yml
new file mode 100644
index 000000000..400e725e5
--- /dev/null
+++ b/src/tools/cargo/.github/workflows/main.yml
@@ -0,0 +1,222 @@
+name: CI
+on:
+ push:
+ branches-ignore: [master]
+ pull_request:
+ branches: ['*']
+
+defaults:
+ run:
+ shell: bash
+
+permissions:
+ contents: read
+
+env:
+ CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
+
+jobs:
+ # Check Code style quickly by running `rustfmt` over all code
+ rustfmt:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - run: rustup update stable && rustup default stable
+ - run: rustup component add rustfmt
+ - run: cargo fmt --all --check
+ - run: |
+ for manifest in `find crates benches/benchsuite benches/capture -name Cargo.toml`
+ do
+ echo check fmt for $manifest
+ cargo fmt --all --manifest-path $manifest --check
+ done
+
+ # Ensure there are no clippy warnings
+ clippy:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - run: rustup update stable && rustup default stable
+ - run: rustup component add clippy
+ # Only check cargo lib for now
+ - run: cargo clippy -p cargo --lib -- -D warnings
+
+ test:
+ runs-on: ${{ matrix.os }}
+ env:
+ CARGO_PROFILE_DEV_DEBUG: 1
+ CARGO_PROFILE_TEST_DEBUG: 1
+ CARGO_INCREMENTAL: 0
+ CARGO_PUBLIC_NETWORK_TESTS: 1
+ # Deny warnings on CI to avoid warnings getting into the codebase.
+ RUSTFLAGS: -D warnings
+ strategy:
+ matrix:
+ include:
+ - name: Linux x86_64 stable
+ os: ubuntu-latest
+ rust: stable
+ other: i686-unknown-linux-gnu
+ - name: Linux x86_64 beta
+ os: ubuntu-latest
+ rust: beta
+ other: i686-unknown-linux-gnu
+ - name: Linux x86_64 nightly
+ os: ubuntu-latest
+ rust: nightly
+ other: i686-unknown-linux-gnu
+ - name: macOS x86_64 stable
+ os: macos-latest
+ rust: stable
+ other: x86_64-apple-ios
+ - name: macOS x86_64 nightly
+ os: macos-latest
+ rust: nightly
+ other: x86_64-apple-ios
+ - name: Windows x86_64 MSVC stable
+ os: windows-latest
+ rust: stable-msvc
+ other: i686-pc-windows-msvc
+ - name: Windows x86_64 gnu nightly # runs out of space while trying to link the test suite
+ os: windows-latest
+ rust: nightly-gnu
+ other: i686-pc-windows-gnu
+ name: Tests ${{ matrix.name }}
+ steps:
+ - uses: actions/checkout@v3
+ - name: Dump Environment
+ run: ci/dump-environment.sh
+ - name: Update Rustup (temporary workaround)
+ run: rustup self update
+ shell: bash
+ if: startsWith(matrix.os, 'windows')
+ - run: rustup update --no-self-update ${{ matrix.rust }} && rustup default ${{ matrix.rust }}
+ - run: rustup target add ${{ matrix.other }}
+ - run: rustup component add rustc-dev llvm-tools-preview rust-docs
+ if: startsWith(matrix.rust, 'nightly')
+ - run: sudo apt update -y && sudo apt install gcc-multilib libsecret-1-0 libsecret-1-dev -y
+ if: matrix.os == 'ubuntu-latest'
+ - run: rustup component add rustfmt || echo "rustfmt not available"
+ - name: Configure extra test environment
+ run: echo CARGO_CONTAINER_TESTS=1 >> $GITHUB_ENV
+ if: matrix.os == 'ubuntu-latest'
+
+ - run: cargo test
+ - name: Clear intermediate test output
+ run: ci/clean-test-output.sh
+ - name: gitoxide tests (all git-related tests)
+ run: cargo test git
+ env:
+ __CARGO_USE_GITOXIDE_INSTEAD_OF_GIT2: 1
+ # The testsuite generates a huge amount of data, and fetch-smoke-test was
+ # running out of disk space.
+ - name: Clear test output
+ run: ci/clean-test-output.sh
+ - name: Check operability of rustc invocation with argfile
+ env:
+ __CARGO_TEST_FORCE_ARGFILE: 1
+ run: |
+ # This only tests `cargo fix` because fix-proxy-mode involves some of the most
+ # complicated subprocess management in Cargo.
+ cargo test --test testsuite -- fix::
+ - run: cargo test --manifest-path crates/cargo-test-support/Cargo.toml
+ env:
+ CARGO_TARGET_DIR: target
+ - run: cargo test -p cargo-platform
+ - run: cargo test -p cargo-util
+ - run: cargo test --manifest-path crates/home/Cargo.toml
+ - run: cargo test --manifest-path crates/mdman/Cargo.toml
+ - run: cargo build --manifest-path crates/credential/cargo-credential-1password/Cargo.toml
+ - run: cargo build --manifest-path crates/credential/cargo-credential-gnome-secret/Cargo.toml
+ if: matrix.os == 'ubuntu-latest'
+ - run: cargo build --manifest-path crates/credential/cargo-credential-macos-keychain/Cargo.toml
+ if: matrix.os == 'macos-latest'
+ - run: cargo build --manifest-path crates/credential/cargo-credential-wincred/Cargo.toml
+ if: matrix.os == 'windows-latest'
+ - name: Check benchmarks
+ env:
+ # Share the target dir to try to cache a few build-time deps.
+ CARGO_TARGET_DIR: target
+ run: |
+ # This only tests one benchmark since it can take over 10 minutes to
+ # download all workspaces.
+ cargo test --manifest-path benches/benchsuite/Cargo.toml --all-targets -- cargo
+ cargo check --manifest-path benches/capture/Cargo.toml
+ # The testsuite generates a huge amount of data, and fetch-smoke-test was
+ # running out of disk space.
+ - name: Clear benchmark output
+ run: ci/clean-test-output.sh
+ - name: Fetch smoke test
+ run: ci/fetch-smoke-test.sh
+
+ resolver:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - run: rustup update stable && rustup default stable
+ - run: cargo test --manifest-path crates/resolver-tests/Cargo.toml
+
+ test_gitoxide:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - run: rustup update --no-self-update stable && rustup default stable
+ - run: rustup target add i686-unknown-linux-gnu
+ - run: sudo apt update -y && sudo apt install gcc-multilib libsecret-1-0 libsecret-1-dev -y
+ - run: rustup component add rustfmt || echo "rustfmt not available"
+ - run: cargo test
+ env:
+ __CARGO_USE_GITOXIDE_INSTEAD_OF_GIT2: 1
+
+ build_std:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - run: rustup update nightly && rustup default nightly
+ - run: rustup component add rust-src
+ - run: cargo build
+ - run: cargo test --test build-std
+ env:
+ CARGO_RUN_BUILD_STD_TESTS: 1
+ docs:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - run: rustup update nightly && rustup default nightly
+ - run: rustup update stable
+ - run: rustup component add rust-docs
+ - run: ci/validate-man.sh
+ # This requires rustfmt, use stable.
+ - run: cd src/doc/semver-check && cargo +stable run
+ - run: |
+ mkdir mdbook
+ curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.27/mdbook-v0.4.27-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook
+ echo `pwd`/mdbook >> $GITHUB_PATH
+ - run: cargo doc --document-private-items --no-deps
+ env:
+ RUSTDOCFLAGS: -D warnings
+ - run: cd src/doc && mdbook build --dest-dir ../../target/doc
+ - run: |
+ cd src/doc
+ curl -sSLo linkcheck.sh \
+ https://raw.githubusercontent.com/rust-lang/rust/master/src/tools/linkchecker/linkcheck.sh
+ sh linkcheck.sh --all cargo
+
+ success:
+ permissions:
+ contents: none
+ name: bors build finished
+ needs: [docs, rustfmt, test, resolver, build_std, test_gitoxide]
+ runs-on: ubuntu-latest
+ if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto-cargo'"
+ steps:
+ - run: echo ok
+ failure:
+ permissions:
+ contents: none
+ name: bors build finished
+ needs: [docs, rustfmt, test, resolver, build_std]
+ runs-on: ubuntu-latest
+ if: "!success() && github.event_name == 'push' && github.ref == 'refs/heads/auto-cargo'"
+ steps:
+ - run: exit 1
diff --git a/src/tools/cargo/.ignore b/src/tools/cargo/.ignore
new file mode 100644
index 000000000..65290aeab
--- /dev/null
+++ b/src/tools/cargo/.ignore
@@ -0,0 +1,6 @@
+# Output generated from src/doc/man
+#
+# The goal is to help people find the right file to edit
+src/doc/man/generated_txt
+src/doc/src/commands
+src/etc/man
diff --git a/src/tools/cargo/CHANGELOG.md b/src/tools/cargo/CHANGELOG.md
new file mode 100644
index 000000000..a3d488d92
--- /dev/null
+++ b/src/tools/cargo/CHANGELOG.md
@@ -0,0 +1,3424 @@
+# Changelog
+
+## Cargo 1.70 (2023-06-01)
+[9880b408...HEAD](https://github.com/rust-lang/cargo/compare/9880b408...HEAD)
+
+### Added
+
+- The `CARGO_PKG_README` environment variable is now set to the path to the
+ README file when compiling a crate.
+ [#11645](https://github.com/rust-lang/cargo/pull/11645)
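+ For illustration, a minimal `Cargo.toml` sketch (package name and version are
+ placeholders) showing the `readme` field this variable reflects:
+ ```toml
+ [package]
+ name = "my-crate"      # hypothetical package
+ version = "0.1.0"
+ readme = "README.md"   # CARGO_PKG_README is set to this file's path at compile time
+ ```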
+- Cargo now displays richer information when a Cargo target fails to compile.
+ [#11636](https://github.com/rust-lang/cargo/pull/11636)
+
+### Changed
+
+- 🎉 The `sparse` protocol is now the default protocol for crates.io!
+ ([RFC 2789](https://github.com/rust-lang/rfcs/blob/master/text/2789-sparse-index.md))
+ ([docs](https://doc.rust-lang.org/nightly/cargo/reference/registries.html#registry-protocols))
+ [#11791](https://github.com/rust-lang/cargo/pull/11791)
+ [#11783](https://github.com/rust-lang/cargo/pull/11783)
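+ As a minimal sketch (assuming the usual `.cargo/config.toml` location), the
+ protocol can still be chosen explicitly:
+ ```toml
+ [registries.crates-io]
+ protocol = "sparse"   # or "git" to keep using the full index clone
+ ```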
+
+### Fixed
+
+- Removed duplicate possible values in the `--charset` option of `cargo tree`.
+ [#11785](https://github.com/rust-lang/cargo/pull/11785)
+- Fixed `CARGO_CFG_` vars for configs defined both with and without value.
+ [#11790](https://github.com/rust-lang/cargo/pull/11790)
+- Fixed `cargo add` entering an endless loop when the added dependency has cyclic features.
+ [#11805](https://github.com/rust-lang/cargo/pull/11805)
+- Don't panic when [`patch`] involved in dependency resolution results in a conflict.
+ [#11770](https://github.com/rust-lang/cargo/pull/11770)
+
+### Nightly only
+
+- Added `-Zdirect-minimal-versions`. This behaves like `-Zminimal-versions` but
+ only for direct dependencies.
+ ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#direct-minimal-versions))
+ [#11688](https://github.com/rust-lang/cargo/pull/11688)
+- Added `-Zgitoxide`, which switches all `git fetch` operations in Cargo to
+ the `gitoxide` crate. This is still an MVP but could improve performance
+ by up to 2x.
+ ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#gitoxide))
+ [#11448](https://github.com/rust-lang/cargo/pull/11448)
+ [#11800](https://github.com/rust-lang/cargo/pull/11800)
+- Removed `-Zjobserver-per-rustc`. Its rustc counterpart never landed.
+ [#11764](https://github.com/rust-lang/cargo/pull/11764)
+
+### Documentation
+
+- Cleaned-up unstable documentation.
+ [#11793](https://github.com/rust-lang/cargo/pull/11793)
+- Enhanced the documentation of timing report with graphs.
+ [#11798](https://github.com/rust-lang/cargo/pull/11798)
+
+### Internal
+
+- Switched to `sha2` crate for SHA256 calculation.
+ [#11795](https://github.com/rust-lang/cargo/pull/11795)
+ [#11807](https://github.com/rust-lang/cargo/pull/11807)
+- Updated to `base64` v0.21.0.
+ [#11796](https://github.com/rust-lang/cargo/pull/11796)
+- Integrated `cargo-deny` into Cargo's own CI pipeline.
+ [#11761](https://github.com/rust-lang/cargo/pull/11761)
+
+## Cargo 1.69 (2023-04-20)
+[985d561f...rust-1.69.0](https://github.com/rust-lang/cargo/compare/985d561f...rust-1.69.0)
+
+### Added
+
+- Cargo now suggests `cargo fix` or `cargo clippy --fix`
+ when compilation warnings are auto-fixable.
+ [#11558](https://github.com/rust-lang/cargo/pull/11558)
+- Cargo now suggests `cargo add` if you try to install a library crate.
+ [#11410](https://github.com/rust-lang/cargo/pull/11410)
+- Cargo now sets the `CARGO_BIN_NAME` environment variable for binary examples as well.
+ [#11705](https://github.com/rust-lang/cargo/pull/11705)
+
+### Changed
+
+- ❗ When `default-features` is set to false for a workspace dependency,
+ and an inherited dependency of a member has `default-features = true`,
+ Cargo will enable the default features of that dependency (see the sketch
+ after this list).
+ [#11409](https://github.com/rust-lang/cargo/pull/11409)
+- ❗ `CARGO_HOME` is now denied in the `[env]` configuration table. Cargo itself
+ doesn't pick up this value, but recursive calls to cargo will. We consider it
+ wrong behavior to pass it only to recursive invocations.
+ [#11644](https://github.com/rust-lang/cargo/pull/11644)
+- ❗ Debuginfo for build dependencies is now off if not explicitly set. This is
+ expected to improve the overall build time.
+ [#11252](https://github.com/rust-lang/cargo/pull/11252)
+- Cargo now emits an error when a crates.io token contains invalid (non-alphanumeric) characters.
+ [#11600](https://github.com/rust-lang/cargo/pull/11600)
+- `cargo add` now checks only the order of `[dependencies]`
+ without considering `[dependencies.*]`.
+ [#11612](https://github.com/rust-lang/cargo/pull/11612)
+- Cargo now respects the new jobserver IPC style in GNU Make 4.4, by updating
+ its dependency `jobserver`.
+ [#11767](https://github.com/rust-lang/cargo/pull/11767)
+- `cargo install` now reports required features when no binary meets its requirements.
+ [#11647](https://github.com/rust-lang/cargo/pull/11647)
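
To illustrate the `default-features` change noted above, here is a minimal sketch (not part of the upstream changelog; the crate name and versions are hypothetical):

```toml
# Cargo.toml (workspace root)
[workspace.dependencies]
serde = { version = "1", default-features = false }

# member/Cargo.toml
[dependencies]
# As of 1.69, asking for default features here takes effect even though
# the workspace definition disabled them.
serde = { workspace = true, default-features = true }
```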
+
+### Fixed
+
+- The `.dwp` DWARF package file is now uplifted next to the executable so that
+ debuggers can locate it.
+ [#11572](https://github.com/rust-lang/cargo/pull/11572)
+- Fixed build scripts triggering recompiles when a `rerun-if-changed` points to
+ a directory whose mtime is not preserved by the filesystem.
+ [#11613](https://github.com/rust-lang/cargo/pull/11613)
+- Fixed panics when using dependencies from `[workspace.dependencies]`
+ for `[patch]`. This usage is not supposed to be supported.
+ [#11565](https://github.com/rust-lang/cargo/pull/11565)
+ [#11630](https://github.com/rust-lang/cargo/pull/11630)
+- Fixed `cargo report` saving the same future-incompat reports multiple times.
+ [#11648](https://github.com/rust-lang/cargo/pull/11648)
+- Fixed the incorrect inference of a directory ending with `.rs` as a file.
+ [#11678](https://github.com/rust-lang/cargo/pull/11678)
+- Fixed the `.cargo-ok` file being truncated wrongly, which prevented a dependency from being used.
+ [#11665](https://github.com/rust-lang/cargo/pull/11665)
+ [#11724](https://github.com/rust-lang/cargo/pull/11724)
+
+### Nightly only
+
+- `-Zrustdoc-scrape-examples` now fails with a bad build script.
+ [#11694](https://github.com/rust-lang/cargo/pull/11694)
+- Updated the 1Password credential manager integration to the version 2 CLI.
+ [#11692](https://github.com/rust-lang/cargo/pull/11692)
+- Emit an error message for transitive artifact dependencies with targets the
+ package doesn't directly interact with.
+ [#11643](https://github.com/rust-lang/cargo/pull/11643)
+- Added `-C` flag for changing current dir before build starts.
+ [#10952](https://github.com/rust-lang/cargo/pull/10952)
+
+### Documentation
+
+- Clarified the difference between `CARGO_CRATE_NAME` and `CARGO_PKG_NAME`.
+ [#11576](https://github.com/rust-lang/cargo/pull/11576)
+- Added links to the Target section of the glossary for occurrences of "target triple".
+ [#11603](https://github.com/rust-lang/cargo/pull/11603)
+- Described how the current resolver sometimes duplicates dependencies.
+ [#11604](https://github.com/rust-lang/cargo/pull/11604)
+- Added a note about verifying your email address on crates.io.
+ [#11620](https://github.com/rust-lang/cargo/pull/11620)
+- Mentioned the current default value in the `publish.timeout` docs.
+ [#11652](https://github.com/rust-lang/cargo/pull/11652)
+- More doc comments for `cargo::core::compiler` modules.
+ [#11669](https://github.com/rust-lang/cargo/pull/11669)
+ [#11703](https://github.com/rust-lang/cargo/pull/11703)
+ [#11711](https://github.com/rust-lang/cargo/pull/11711)
+ [#11758](https://github.com/rust-lang/cargo/pull/11758)
+- Added more guidance on how to implement unstable features.
+ [#11675](https://github.com/rust-lang/cargo/pull/11675)
+- Fixed unstable chapter layout for `codegen-backend`.
+ [#11676](https://github.com/rust-lang/cargo/pull/11676)
+- Added a link to the LTO docs.
+ [#11701](https://github.com/rust-lang/cargo/pull/11701)
+- Added documentation for the configuration discovery of `cargo install`
+ to the man pages.
+ [#11763](https://github.com/rust-lang/cargo/pull/11763)
+- Documented `-F` flag as an alias for `--features` in `cargo add`.
+ [#11774](https://github.com/rust-lang/cargo/pull/11774)
+
+### Internal
+
+- Disable network SSH tests on Windows.
+ [#11610](https://github.com/rust-lang/cargo/pull/11610)
+- Made some blocking tests non-blocking.
+ [#11650](https://github.com/rust-lang/cargo/pull/11650)
+- Deny warnings in CI, not locally.
+ [#11699](https://github.com/rust-lang/cargo/pull/11699)
+- Re-export `cargo_new::NewProjectKind` as public.
+ [#11700](https://github.com/rust-lang/cargo/pull/11700)
+- Sorted dependencies in alphabetical order.
+ [#11719](https://github.com/rust-lang/cargo/pull/11719)
+- Switched some tests from `build` to `check`.
+ [#11725](https://github.com/rust-lang/cargo/pull/11725)
+- Consolidated how Cargo reads environment variables internally.
+ [#11727](https://github.com/rust-lang/cargo/pull/11727)
+ [#11754](https://github.com/rust-lang/cargo/pull/11754)
+- Fixed tests with nondeterministic ordering
+ [#11766](https://github.com/rust-lang/cargo/pull/11766)
+- Added a test to verify the intermediate artifacts persist in the temp directory.
+ [#11771](https://github.com/rust-lang/cargo/pull/11771)
+- Updated cross test instructions for aarch64-apple-darwin.
+ [#11663](https://github.com/rust-lang/cargo/pull/11663)
+- Updated to `toml` v0.6 and `toml_edit` v0.18 for TOML manipulations.
+ [#11618](https://github.com/rust-lang/cargo/pull/11618)
+- Updated to `clap` v4.1.3.
+ [#11619](https://github.com/rust-lang/cargo/pull/11619)
+- Replaced `winapi` with `windows-sys` crate for Windows bindings.
+ [#11656](https://github.com/rust-lang/cargo/pull/11656)
+- Reused `url` crate for percent encoding instead of `percent-encoding`.
+ [#11750](https://github.com/rust-lang/cargo/pull/11750)
+- Cargo contributors can benefit from smart punctuation when writing
+ documentation, e.g., `---` is auto-converted into an em dash.
+ ([docs](https://rust-lang.github.io/mdBook/format/markdown.html#smart-punctuation))
+ [#11646](https://github.com/rust-lang/cargo/pull/11646)
+ [#11715](https://github.com/rust-lang/cargo/pull/11715)
+- Cargo's CI pipeline now covers macOS on nightly.
+ [#11712](https://github.com/rust-lang/cargo/pull/11712)
+- Re-enabled some clippy lints in Cargo itself.
+ [#11722](https://github.com/rust-lang/cargo/pull/11722)
+- Enabled sparse protocol in Cargo's CI.
+ [#11632](https://github.com/rust-lang/cargo/pull/11632)
+- Pull requests in Cargo now get autolabeled with `A-*` and `Command-*` labels.
+ [#11664](https://github.com/rust-lang/cargo/pull/11664)
+ [#11679](https://github.com/rust-lang/cargo/pull/11679)
+
+## Cargo 1.68.2 (2023-03-28)
+[115f3455...rust-1.68.0](https://github.com/rust-lang/cargo/compare/115f3455...rust-1.68.0)
+
+- Updated the GitHub RSA SSH host key bundled within cargo.
+ The key was [rotated by
+ GitHub](https://github.blog/2023-03-23-we-updated-our-rsa-ssh-host-key/) on
+ 2023-03-24 after the old one leaked.
+ [#11883](https://github.com/rust-lang/cargo/pull/11883)
+- Added support for SSH known hosts marker `@revoked`.
+ [#11635](https://github.com/rust-lang/cargo/pull/11635)
+- Marked the old GitHub RSA host key as revoked. This will prevent Cargo from
+ accepting the leaked key even when trusted by the system.
+ [#11889](https://github.com/rust-lang/cargo/pull/11889)
+
+## Cargo 1.68 (2023-03-09)
+[f6e737b1...rust-1.68.0](https://github.com/rust-lang/cargo/compare/f6e737b1...rust-1.68.0)
+
+### Added
+
+- 🎉 The new "sparse" protocol has been stabilized.
+ It should provide a significant performance improvement when accessing crates.io.
+ ([RFC 2789](https://github.com/rust-lang/rfcs/blob/master/text/2789-sparse-index.md))
+ ([docs](https://doc.rust-lang.org/nightly/cargo/reference/registries.html#registry-protocols))
+ [#11224](https://github.com/rust-lang/cargo/pull/11224)
+ [#11480](https://github.com/rust-lang/cargo/pull/11480)
+ [#11733](https://github.com/rust-lang/cargo/pull/11733)
+ [#11756](https://github.com/rust-lang/cargo/pull/11756)
+- 🎉 The `home` crate is now a subcrate in the `rust-lang/cargo` repository. Welcome!
+ [#11359](https://github.com/rust-lang/cargo/pull/11359)
+ [#11481](https://github.com/rust-lang/cargo/pull/11481)
+- Long diagnostic messages can now be truncated to be more readable.
+ [#11494](https://github.com/rust-lang/cargo/pull/11494)
+- Shows the progress of the crates.io index update even when `net.git-fetch-with-cli` is enabled.
+ [#11579](https://github.com/rust-lang/cargo/pull/11579)
+- `cargo build --verbose` tells you more about why it recompiles.
+ [#11407](https://github.com/rust-lang/cargo/pull/11407)
+- Cargo's file locking mechanism now supports Solaris by using `fcntl`.
+ [#11439](https://github.com/rust-lang/cargo/pull/11439)
+ [#11474](https://github.com/rust-lang/cargo/pull/11474)
+- Added a new SemVer compatibility rule explaining the expectations around diagnostic lints
+ [#11596](https://github.com/rust-lang/cargo/pull/11596)
+- `cargo vendor` generates a different source replacement entry
+ for each revision from the same git repository.
+ [#10690](https://github.com/rust-lang/cargo/pull/10690)
+- Cargo contributors can relabel issues via triagebot.
+ [doc](https://github.com/rust-lang/triagebot/wiki/Labeling)
+ [#11498](https://github.com/rust-lang/cargo/pull/11498)
+- Cargo contributors can write tests in containers.
+ [#11583](https://github.com/rust-lang/cargo/pull/11583)
+
+### Changed
+
+- Cargo now saves credentials to `.cargo/credentials.toml` by default.
+ If `.cargo/credentials` exists, Cargo writes to it instead for backward compatibility.
+ [#11533](https://github.com/rust-lang/cargo/pull/11533)
+- To prevent sensitive data from being logged, Cargo introduces a new wrapper type internally.
+ [#11545](https://github.com/rust-lang/cargo/pull/11545)
+- Several documentation improvements.
+ [#11475](https://github.com/rust-lang/cargo/pull/11475)
+ [#11504](https://github.com/rust-lang/cargo/pull/11504)
+ [#11516](https://github.com/rust-lang/cargo/pull/11516)
+ [#11517](https://github.com/rust-lang/cargo/pull/11517)
+ [#11568](https://github.com/rust-lang/cargo/pull/11568)
+ [#11586](https://github.com/rust-lang/cargo/pull/11586)
+ [#11592](https://github.com/rust-lang/cargo/pull/11592)
+
+### Fixed
+
+- ❗ `cargo package` and `cargo publish` now respect the workspace's `Cargo.lock`.
+ This is the expected behavior but was previously overlooked.
+ [#11477](https://github.com/rust-lang/cargo/pull/11477)
+- Fixed `cargo vendor` failing on resolving git dependencies inherited from a workspace.
+ [#11414](https://github.com/rust-lang/cargo/pull/11414)
+- `cargo install` can now correctly install the root package when `workspace.default-members` is specified.
+ [#11067](https://github.com/rust-lang/cargo/pull/11067)
+- Fixed panic on target specific dependency errors.
+ [#11541](https://github.com/rust-lang/cargo/pull/11541)
+- Shows `--help` if there is no man page for a subcommand.
+ [#11473](https://github.com/rust-lang/cargo/pull/11473)
+- Setting `target.cfg(…).rustflags` shouldn't erase `build.rustdocflags`.
+ [#11323](https://github.com/rust-lang/cargo/pull/11323)
+- Unsupported `profile.split-debuginfo` options are now ignored,
+ which previously made Cargo fail to compile on certain platforms.
+ [#11347](https://github.com/rust-lang/cargo/pull/11347)
+ [#11633](https://github.com/rust-lang/cargo/pull/11633)
+- Don't panic in a Windows headless session with really long file names.
+ [#11759](https://github.com/rust-lang/cargo/pull/11759)
+
+### Nightly only
+
+- Implemented initial support for asymmetric token authentication for registries.
+ ([RFC 3231](https://github.com/rust-lang/rfcs/blob/master/text/3231-cargo-asymmetric-tokens.md))
+ ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#registry-auth))
+ [#10771](https://github.com/rust-lang/cargo/pull/10771)
+- Do not error for `auth-required: true` without `-Z sparse-registry`
+ [#11661](https://github.com/rust-lang/cargo/pull/11661)
+- Supports `codegen-backend` and `rustflags` in profiles defined in config files.
+ [#11562](https://github.com/rust-lang/cargo/pull/11562)
+- Suggests `cargo clippy --fix` when warnings/errors could be fixed with clippy.
+ [#11399](https://github.com/rust-lang/cargo/pull/11399)
+- Fixed artifact deps not working when the specified `target` field coexists with `optional = true`.
+ [#11434](https://github.com/rust-lang/cargo/pull/11434)
+- Make Cargo distinguish `Unit`s with and without artifact targets.
+ [#11478](https://github.com/rust-lang/cargo/pull/11478)
+- `cargo metadata` supports artifact dependencies.
+ [#11550](https://github.com/rust-lang/cargo/pull/11550)
+- Allows builds of some crates to fail during optional doc-scraping.
+ [#11450](https://github.com/rust-lang/cargo/pull/11450)
+- Added a warning if potentially-scrapable examples are skipped due to dev-dependencies.
+ [#11503](https://github.com/rust-lang/cargo/pull/11503)
+- Don't scrape examples from library targets by default.
+ [#11499](https://github.com/rust-lang/cargo/pull/11499)
+- Fixed examples of proc-macro crates being scraped for examples.
+ [#11497](https://github.com/rust-lang/cargo/pull/11497)
+
+## Cargo 1.67 (2023-01-26)
+[7e484fc1...rust-1.67.0](https://github.com/rust-lang/cargo/compare/7e484fc1...rust-1.67.0)
+
+### Added
+
+- `cargo remove` now cleans up the referenced dependency in the root
+ workspace manifest, as well as in the `profile`, `patch`, and `replace`
+ sections, after a successful removal of a dependency.
+ [#11194](https://github.com/rust-lang/cargo/pull/11194)
+ [#11242](https://github.com/rust-lang/cargo/pull/11242)
+ [#11351](https://github.com/rust-lang/cargo/pull/11351)
+- `cargo package` and `cargo publish` now report total and compressed crate size
+ after packaging.
+ [#11270](https://github.com/rust-lang/cargo/pull/11270)
+
+### Changed
+
+- ❗ Cargo now reuses the value of `$CARGO` if it's already set in the environment,
+ and forwards the value when executing external subcommands and build scripts.
+ [#11285](https://github.com/rust-lang/cargo/pull/11285)
+- ❗ Cargo now emits an error when running `cargo update --precise` without a `-p` flag.
+ [#11349](https://github.com/rust-lang/cargo/pull/11349)
+- ❗ Cargo now emits an error if there are multiple registries in the configuration with the same index URL.
+ [#10592](https://github.com/rust-lang/cargo/pull/10592)
+- Cargo is now aware of the compression ratio when extracting crate files.
+ This relaxes the hard size limit introduced in 1.64.0 to mitigate zip bomb attacks.
+ [#11337](https://github.com/rust-lang/cargo/pull/11337)
+- Cargo now errors out when running `cargo fix` on a git repo with uncommitted changes.
+ [#11400](https://github.com/rust-lang/cargo/pull/11400)
+- Cargo now warns when `cargo tree -i <spec>` cannot find any package.
+ [#11377](https://github.com/rust-lang/cargo/pull/11377)
+- Cargo now warns when running `cargo new/init` and the `PATH` env separator
+ is in the project path.
+ [#11318](https://github.com/rust-lang/cargo/pull/11318)
+- Better error messages when multiple packages were found and
+ `cargo add/remove` gets confused.
+ [#11186](https://github.com/rust-lang/cargo/pull/11186)
+ [#11375](https://github.com/rust-lang/cargo/pull/11375)
+- A better error message when running `cargo init` and existing ignore files aren't UTF-8.
+ [#11321](https://github.com/rust-lang/cargo/pull/11321)
+- A better error message for `cargo install .`.
+ [#11401](https://github.com/rust-lang/cargo/pull/11401)
+- A better warning when the same file path is found in multiple build targets.
+ [#11299](https://github.com/rust-lang/cargo/pull/11299)
+- Updated the internal HTTP library libcurl with various fixes and updates.
+ [#11307](https://github.com/rust-lang/cargo/pull/11307)
+ [#11326](https://github.com/rust-lang/cargo/pull/11326)
+
+### Fixed
+
+- Fixed `cargo clean` so that it removes fingerprints and build script
+ artifacts of only the requested package.
+ [#10621](https://github.com/rust-lang/cargo/pull/10621)
+- Fixed `cargo install --index` not working when config `registry.default` is set.
+ [#11302](https://github.com/rust-lang/cargo/pull/11302)
+- Fixed git2 safe-directory being accidentally disabled when no network configuration was found.
+ [#11366](https://github.com/rust-lang/cargo/pull/11366)
+- Migrated from the `atty` crate to resolve a potential soundness issue.
+ [#11420](https://github.com/rust-lang/cargo/pull/11420)
+- Cleans up stale git temp files left behind when libgit2 indexing is interrupted.
+ [#11308](https://github.com/rust-lang/cargo/pull/11308)
+
+### Nightly only
+
+- Suggests `cargo fix` when some compilation warnings/errors can be auto-fixed.
+ [#10989](https://github.com/rust-lang/cargo/pull/10989)
+ [#11368](https://github.com/rust-lang/cargo/pull/11368)
+- Changed `rustdoc-scrape-examples` to be a target-level configuration.
+ [#10343](https://github.com/rust-lang/cargo/pull/10343)
+ [#11425](https://github.com/rust-lang/cargo/pull/11425)
+ [#11430](https://github.com/rust-lang/cargo/pull/11430)
+ [#11445](https://github.com/rust-lang/cargo/pull/11445)
+- Propagates change of artifact bin dependency to its parent fingerprint.
+ [#11353](https://github.com/rust-lang/cargo/pull/11353)
+- Fixed `wait-for-publish` to work with sparse registry.
+ [#11356](https://github.com/rust-lang/cargo/pull/11356)
+ [#11327](https://github.com/rust-lang/cargo/pull/11327)
+ [#11388](https://github.com/rust-lang/cargo/pull/11388)
+- Stores the `sparse+` prefix in the `SourceId` for sparse registries
+ [#11387](https://github.com/rust-lang/cargo/pull/11387)
+ [#11403](https://github.com/rust-lang/cargo/pull/11403)
+- Implemented alternative registry authentication support.
+ ([RFC 3139](https://github.com/rust-lang/rfcs/blob/master/text/3139-cargo-alternative-registry-auth.md))
+ ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#registry-auth))
+ [#10592](https://github.com/rust-lang/cargo/pull/10592)
+- Added documentation of config option `registries.crates-io.protocol`.
+ [#11350](https://github.com/rust-lang/cargo/pull/11350)
+
+## Cargo 1.66.1 (2023-01-10)
+
+### Fixed
+- [CVE-2022-46176](https://github.com/rust-lang/cargo/security/advisories/GHSA-r5w3-xm58-jv6j):
+ Added validation of SSH host keys for git URLs.
+ See [the docs](https://doc.rust-lang.org/cargo/appendix/git-authentication.html#ssh-known-hosts) for more information on how to configure the known host keys.
+
+## Cargo 1.66 (2022-12-15)
+[08250398...rust-1.66.0](https://github.com/rust-lang/cargo/compare/08250398...rust-1.66.0)
+
+### Added
+
+- 🎉 Added `cargo remove` command for removing dependencies from `Cargo.toml`.
+ [docs](https://doc.rust-lang.org/nightly/cargo/commands/cargo-remove.html)
+ [#11059](https://github.com/rust-lang/cargo/pull/11059)
+ [#11099](https://github.com/rust-lang/cargo/pull/11099)
+ [#11193](https://github.com/rust-lang/cargo/pull/11193)
+ [#11204](https://github.com/rust-lang/cargo/pull/11204)
+ [#11227](https://github.com/rust-lang/cargo/pull/11227)
+- Added support for git dependencies having git submodules with relative paths.
+ [#11106](https://github.com/rust-lang/cargo/pull/11106)
+- Cargo now sends requests with an `Accept-Encoding` header to registries.
+ [#11292](https://github.com/rust-lang/cargo/pull/11292)
+- Cargo now forwards non-UTF8 arguments to external subcommands.
+ [#11118](https://github.com/rust-lang/cargo/pull/11118)
+
+### Changed
+
+- ❗ Disambiguate source replacements from various angles.
+ [RFC-3289](https://github.com/rust-lang/rfcs/blob/master/text/3289-source_replacement_ambiguity.md)
+ [#10907](https://github.com/rust-lang/cargo/pull/10907)
+ - When the crates-io source is replaced, the user is required to specify which registry to use with `--registry <NAME>` when performing an API operation.
+ - Publishing to source-replaced crates.io is no longer permitted using the crates.io token (`registry.token`).
+ - In source replacement, the `replace-with` key can reference the name of an alternative registry in the `[registries]` table (see the sketch after this list).
+- ❗ `cargo publish` now blocks until it sees the published package in the index.
+ [#11062](https://github.com/rust-lang/cargo/pull/11062)
+ [#11210](https://github.com/rust-lang/cargo/pull/11210)
+ [#11216](https://github.com/rust-lang/cargo/pull/11216)
+ [#11255](https://github.com/rust-lang/cargo/pull/11255)
+- Cargo now uses the clap v4 library for command-line argument parsing.
+ [#11116](https://github.com/rust-lang/cargo/pull/11116)
+ [#11119](https://github.com/rust-lang/cargo/pull/11119)
+ [#11159](https://github.com/rust-lang/cargo/pull/11159)
+ [#11190](https://github.com/rust-lang/cargo/pull/11190)
+ [#11239](https://github.com/rust-lang/cargo/pull/11239)
+ [#11280](https://github.com/rust-lang/cargo/pull/11280)
+- Cargo now only warns on a user-defined alias shadowing an external command.
+ [#11170](https://github.com/rust-lang/cargo/pull/11170)
+- Several documentation improvements.
+ [#10770](https://github.com/rust-lang/cargo/pull/10770)
+ [#10938](https://github.com/rust-lang/cargo/pull/10938)
+ [#11082](https://github.com/rust-lang/cargo/pull/11082)
+ [#11093](https://github.com/rust-lang/cargo/pull/11093)
+ [#11157](https://github.com/rust-lang/cargo/pull/11157)
+ [#11185](https://github.com/rust-lang/cargo/pull/11185)
+ [#11207](https://github.com/rust-lang/cargo/pull/11207)
+ [#11219](https://github.com/rust-lang/cargo/pull/11219)
+ [#11240](https://github.com/rust-lang/cargo/pull/11240)
+ [#11241](https://github.com/rust-lang/cargo/pull/11241)
+ [#11282](https://github.com/rust-lang/cargo/pull/11282)
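
A minimal sketch of the source-replacement rule mentioned above, where `replace-with` names an entry from the `[registries]` table (the mirror name and URL are hypothetical):

```toml
# .cargo/config.toml
[registries.my-mirror]
index = "https://mirror.example.com/index"

[source.crates-io]
# `replace-with` may now reference a name defined in [registries].
replace-with = "my-mirror"
```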
+
+### Fixed
+
+- ❗ A config file loaded via `cargo --config <file>` now takes priority over
+ environment variables. This is the documented behavior, but the old
+ implementation accidentally got it wrong.
+ [#11077](https://github.com/rust-lang/cargo/pull/11077)
+- ❗ Cargo collects rustflags in `target.cfg(…).rustflags` more correctly
+ and warns if that's not enough for convergence.
+ [#11114](https://github.com/rust-lang/cargo/pull/11114)
+- Final artifacts not removed by the linker are now removed before a compilation starts.
+ [#11122](https://github.com/rust-lang/cargo/pull/11122)
+- `cargo add` now reports unknown features in a more discoverable manner.
+ [#11098](https://github.com/rust-lang/cargo/pull/11098)
+- Cargo now reports command-aliasing failures with more error context.
+ [#11087](https://github.com/rust-lang/cargo/pull/11087)
+- A better error message when `cargo login` prompt receives empty input.
+ [#11145](https://github.com/rust-lang/cargo/pull/11145)
+- A better error message for fields with wrong types
+ where workspace inheritance is supported.
+ [#11113](https://github.com/rust-lang/cargo/pull/11113)
+- A better error message when mixing feature syntax `dep:` with `/`.
+ [#11172](https://github.com/rust-lang/cargo/pull/11172)
+- A better error message when publishing but `package.publish` is `false`
+ in the manifest.
+ [#11280](https://github.com/rust-lang/cargo/pull/11280)
+
+### Nightly only
+
+- Added a new config option, `publish.timeout`, behind `-Zpublish-timeout` (see the sketch after this list).
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#publish-timeout)
+ [#11230](https://github.com/rust-lang/cargo/pull/11230)
+- Added retry support to sparse registries.
+ [#11069](https://github.com/rust-lang/cargo/pull/11069)
+- Fixed sparse registry lockfile URLs containing `registry+sparse+`.
+ [#11177](https://github.com/rust-lang/cargo/pull/11177)
+- Added a new config option, `registries.crates-io.protocol`,
+ for controlling the crates.io protocol.
+ [#11215](https://github.com/rust-lang/cargo/pull/11215)
+- Removed `sparse+` prefix for index.crates.io.
+ [#11247](https://github.com/rust-lang/cargo/pull/11247)
+- Fixed publishing with a dependency on a sparse registry.
+ [#11268](https://github.com/rust-lang/cargo/pull/11268)
+- Fixed confusing error messages when using `-Zsparse-registry`.
+ [#11283](https://github.com/rust-lang/cargo/pull/11283)
+- Fixed 410 gone response handling for sparse registries.
+ [#11286](https://github.com/rust-lang/cargo/pull/11286)
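
A sketch of the unstable `publish.timeout` option mentioned above (requires `-Zpublish-timeout`; the value shown is arbitrary):

```toml
# .cargo/config.toml
[publish]
# Seconds that `cargo publish` blocks while waiting for the package
# to appear in the index.
timeout = 120
```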
+
+## Cargo 1.65 (2022-11-03)
+[4fd148c4...rust-1.65.0](https://github.com/rust-lang/cargo/compare/4fd148c4...rust-1.65.0)
+
+### Added
+
+- External subcommands can now inherit jobserver file descriptors from Cargo.
+ [#10511](https://github.com/rust-lang/cargo/pull/10511)
+- Added API documentation for private items in cargo-the-library. See
+ <https://doc.rust-lang.org/nightly/nightly-rustc/cargo>.
+ [#11019](https://github.com/rust-lang/cargo/pull/11019)
+
+### Changed
+
+- Cargo now stops adding its bin path to `PATH` if it's already there.
+ [#11023](https://github.com/rust-lang/cargo/pull/11023)
+- Improved the performance of Cargo build scheduling
+ by sorting the queue of pending jobs.
+ [#11032](https://github.com/rust-lang/cargo/pull/11032)
+- Improved the performance of fetching git dependencies from GitHub even
+ when using a partial hash in the `rev` field.
+ [#10807](https://github.com/rust-lang/cargo/pull/10807)
+- Cargo now uses git2 v0.15 and libgit2-sys v0.14,
+ which bring several compatibility fixes with git's new behaviors.
+ [#11004](https://github.com/rust-lang/cargo/pull/11004)
+- Registry index files are cached in a more granular way based on content hash.
+ [#11044](https://github.com/rust-lang/cargo/pull/11044)
+- Cargo now uses the standard library's `std::thread::scope` instead of the
+ `crossbeam` crate for spawning scoped threads.
+ [#10977](https://github.com/rust-lang/cargo/pull/10977)
+- Cargo now uses the standard library's `available_parallelism` instead of the
+ `num_cpus` crate for determining the default parallelism.
+ [#10969](https://github.com/rust-lang/cargo/pull/10969)
+- Cargo now guides you on how to resolve the issue when it reports that a
+ `rust-version` requirement is not satisfied.
+ [#10891](https://github.com/rust-lang/cargo/pull/10891)
+- Cargo now tells you more about possible causes and how to fix it
+ when a subcommand cannot be found.
+ [#10924](https://github.com/rust-lang/cargo/pull/10924)
+- Cargo now lists available target names when a given Cargo target cannot be found.
+ [#10999](https://github.com/rust-lang/cargo/pull/10999)
+- `cargo update` now warns if `--precise` is given without `--package` flag.
+ This will become a hard error after a transition period.
+ [#10988](https://github.com/rust-lang/cargo/pull/10988)
+ [#11011](https://github.com/rust-lang/cargo/pull/11011)
+- `cargo bench` and `cargo test` now report a more precise test execution error
+ right after a test fails.
+ [#11028](https://github.com/rust-lang/cargo/pull/11028)
+- `cargo add` now tells you for which version the features are added.
+ [#11075](https://github.com/rust-lang/cargo/pull/11075)
+- Called out that non-ASCII crate names are no longer supported by Rust.
+ [#11017](https://github.com/rust-lang/cargo/pull/11017)
+- Enhanced the error message when a manifest field is expected to be
+ an array but a string is used.
+ [#10944](https://github.com/rust-lang/cargo/pull/10944)
+
+### Fixed
+
+- Removed the restriction on file locking support on platforms other than Linux.
+ [#10975](https://github.com/rust-lang/cargo/pull/10975)
+- Fixed incorrect OS detection by bumping os_info to 3.5.0.
+ [#10943](https://github.com/rust-lang/cargo/pull/10943)
+- Scanning the package directory now ignores errors from broken
+ but excluded symlink files.
+ [#11008](https://github.com/rust-lang/cargo/pull/11008)
+- Fixed deadlock when build scripts are waiting for input on stdin.
+ [#11257](https://github.com/rust-lang/cargo/pull/11257)
+
+### Nightly only
+
+- The progress indicator for sparse registries is now more straightforward.
+ [#11068](https://github.com/rust-lang/cargo/pull/11068)
+
+## Cargo 1.64 (2022-09-22)
+[a5e08c47...rust-1.64.0](https://github.com/rust-lang/cargo/compare/a5e08c47...rust-1.64.0)
+
+### Added
+
+- 🎉 Packages can now inherit settings from the workspace so that the settings
+ can be centralized in one place. See
+ [`workspace.package`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#the-package-table)
+ and
+ [`workspace.dependencies`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#the-dependencies-table)
+ for more details on how to define these common settings (see the sketch after this list).
+ [#10859](https://github.com/rust-lang/cargo/pull/10859)
+- Added the
+ [`--crate-type`](https://doc.rust-lang.org/nightly/cargo/commands/cargo-rustc.html#option-cargo-rustc---crate-type)
+ flag to `cargo rustc` to override the crate type.
+ [#10838](https://github.com/rust-lang/cargo/pull/10838)
+- Cargo commands can now accept multiple `--target` flags to build for
+ multiple targets at once, and the
+ [`build.target`](https://doc.rust-lang.org/nightly/cargo/reference/config.html#buildtarget)
+ config option may now take an array of multiple targets.
+ [#10766](https://github.com/rust-lang/cargo/pull/10766)
+- The `--jobs` argument can now take a negative number to count backwards from
+ the max CPUs.
+ [#10844](https://github.com/rust-lang/cargo/pull/10844)
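
A minimal sketch of the workspace inheritance feature called out at the top of this list (package and dependency names are hypothetical):

```toml
# Cargo.toml (workspace root)
[workspace]
members = ["member"]

[workspace.package]
edition = "2021"
license = "MIT OR Apache-2.0"

[workspace.dependencies]
anyhow = "1"

# member/Cargo.toml
[package]
name = "member"
version = "0.1.0"
edition.workspace = true     # inherited from [workspace.package]
license.workspace = true

[dependencies]
anyhow.workspace = true      # inherited from [workspace.dependencies]
```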
+
+### Changed
+- Bash completion of `cargo install --path` now supports path completion.
+ [#10798](https://github.com/rust-lang/cargo/pull/10798)
+- Significantly improved the performance of fetching git dependencies from GitHub
+ when using a hash in the `rev` field.
+ [#10079](https://github.com/rust-lang/cargo/pull/10079)
+- Published packages will now include the resolver setting from the workspace
+ to ensure that they use the same resolver when used in isolation.
+ [#10911](https://github.com/rust-lang/cargo/pull/10911)
+ [#10961](https://github.com/rust-lang/cargo/pull/10961)
+ [#10970](https://github.com/rust-lang/cargo/pull/10970)
+- `cargo add` will now update `Cargo.lock`.
+ [#10902](https://github.com/rust-lang/cargo/pull/10902)
+- The path in the config output of `cargo vendor` now translates backslashes
+ to forward slashes so that the settings should work across platforms.
+ [#10668](https://github.com/rust-lang/cargo/pull/10668)
+- The
+ [`workspace.default-members`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#package-selection)
+ setting now allows a value of `"."` in a non-virtual workspace to refer to
+ the root package.
+ [#10784](https://github.com/rust-lang/cargo/pull/10784)
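
For the `workspace.default-members` entry above, a minimal sketch of a non-virtual workspace where `"."` refers to the root package (names are hypothetical):

```toml
# Cargo.toml (workspace root, which is itself a package)
[package]
name = "root"
version = "0.1.0"

[workspace]
members = ["tools/helper"]
# "." now refers to the root package itself.
default-members = ["."]
```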
+
+### Fixed
+
+- [CVE-2022-36113](https://github.com/rust-lang/cargo/security/advisories/GHSA-rfj2-q3h3-hm5j):
+ Extracting malicious crates can corrupt arbitrary files.
+ [#11089](https://github.com/rust-lang/cargo/pull/11089)
+ [#11088](https://github.com/rust-lang/cargo/pull/11088)
+- [CVE-2022-36114](https://github.com/rust-lang/cargo/security/advisories/GHSA-2hvr-h6gw-qrxp):
+ Extracting malicious crates can fill the file system.
+ [#11089](https://github.com/rust-lang/cargo/pull/11089)
+ [#11088](https://github.com/rust-lang/cargo/pull/11088)
+- The `os` output in `cargo --version --verbose` now supports more platforms.
+ [#10802](https://github.com/rust-lang/cargo/pull/10802)
+- Cached git checkouts will now be rebuilt if they are corrupted. This may
+ happen when using `net.git-fetch-with-cli` and interrupting the clone
+ process.
+ [#10829](https://github.com/rust-lang/cargo/pull/10829)
+- Fixed panic in `cargo add --offline`.
+ [#10817](https://github.com/rust-lang/cargo/pull/10817)
+
+
+### Nightly only
+- Fixed deserialization of unstable `check-cfg` in `config.toml`.
+ [#10799](https://github.com/rust-lang/cargo/pull/10799)
+
+
+## Cargo 1.63 (2022-08-11)
+[3f052d8e...rust-1.63.0](https://github.com/rust-lang/cargo/compare/3f052d8e...rust-1.63.0)
+
+### Added
+
+- 🎉 Added the `--config` CLI option to pass config options directly on the CLI.
+ [#10755](https://github.com/rust-lang/cargo/pull/10755)
+- The `CARGO_PKG_RUST_VERSION` environment variable is now set when compiling
+ a crate if the manifest has the `rust-version` field set.
+ [#10713](https://github.com/rust-lang/cargo/pull/10713)
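
For the `CARGO_PKG_RUST_VERSION` entry above, a minimal manifest sketch (the package name and versions are hypothetical):

```toml
[package]
name = "demo"
version = "0.1.0"
# With this field set, Cargo exports CARGO_PKG_RUST_VERSION while compiling the crate.
rust-version = "1.63"
```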
+
+
+### Changed
+- A warning is emitted when encountering multiple packages with the same name
+ in a git dependency. This will ignore packages with `publish=false`.
+ [#10701](https://github.com/rust-lang/cargo/pull/10701)
+ [#10767](https://github.com/rust-lang/cargo/pull/10767)
+- Change tracking now uses the contents of a `.json` target spec file instead
+ of its path. This should help avoid rebuilds if the path changes.
+ [#10746](https://github.com/rust-lang/cargo/pull/10746)
+- The `update=none` strategy configured in `.gitmodules` for a submodule of a
+ git dependency is now honored, and the submodule will not be fetched.
+- Crate files now use a more recent date (Jul 23, 2006 instead of Nov 29, 1973)
+ for deterministic behavior.
+ [#10720](https://github.com/rust-lang/cargo/pull/10720)
+- The initial template used for `cargo new` now includes a slightly more
+ realistic test structure that has `use super::*;` in the test module.
+ [#10706](https://github.com/rust-lang/cargo/pull/10706)
+- Updated the internal HTTP library libcurl with various small fixes and updates.
+ [#10696](https://github.com/rust-lang/cargo/pull/10696)
+
+### Fixed
+- Fix zsh completions for `cargo add` and `cargo locate-project`
+ [#10810](https://github.com/rust-lang/cargo/pull/10810)
+ [#10811](https://github.com/rust-lang/cargo/pull/10811)
+- Fixed `-p` being ignored with `cargo publish` in the root of a virtual
+ workspace. Some additional checks were also added to generate an error if
+ multiple packages were selected (previously it would pick the first one).
+ [#10677](https://github.com/rust-lang/cargo/pull/10677)
+- The human-readable executable name is no longer displayed for `cargo test`
+ when using JSON output.
+ [#10691](https://github.com/rust-lang/cargo/pull/10691)
+
+### Nightly only
+
+- Added `-Zcheck-cfg=output` to support build-scripts declaring their
+ supported set of `cfg` values with `cargo:rustc-check-cfg`.
+ [#10539](https://github.com/rust-lang/cargo/pull/10539)
+- `-Z sparse-registry` now uses https://index.crates.io/ when accessing crates-io.
+ [#10725](https://github.com/rust-lang/cargo/pull/10725)
+- Fixed formatting of `.workspace` key in `cargo add` for workspace inheritance.
+ [#10705](https://github.com/rust-lang/cargo/pull/10705)
+- Sparse HTTP registry URLs must now end with a `/`.
+ [#10698](https://github.com/rust-lang/cargo/pull/10698)
+- Fixed issue with `cargo add` and workspace inheritance of the `default-features` key.
+ [#10685](https://github.com/rust-lang/cargo/pull/10685)
+
+
+
+## Cargo 1.62 (2022-06-30)
+[1ef1e0a1...rust-1.62.0](https://github.com/rust-lang/cargo/compare/1ef1e0a1...rust-1.62.0)
+
+### Added
+
+- 🎉 Added the `cargo add` command for adding dependencies to `Cargo.toml` from
+ the command-line.
+ [docs](https://doc.rust-lang.org/nightly/cargo/commands/cargo-add.html)
+ [#10472](https://github.com/rust-lang/cargo/pull/10472)
+ [#10577](https://github.com/rust-lang/cargo/pull/10577)
+ [#10578](https://github.com/rust-lang/cargo/pull/10578)
+- Package ID specs now support `name@version` syntax in addition to the
+ previous `name:version` to align with the behavior in `cargo add` and other
+ tools. `cargo install` and `cargo yank` also now support this syntax so the
+ version does not need to be passed as a separate flag.
+ [#10582](https://github.com/rust-lang/cargo/pull/10582)
+ [#10650](https://github.com/rust-lang/cargo/pull/10650)
+ [#10597](https://github.com/rust-lang/cargo/pull/10597)
+- Added the CLI option `-F` as an alias of `--features`.
+ [#10576](https://github.com/rust-lang/cargo/pull/10576)
+- The `git` and `registry` directories in Cargo's home directory (usually
+ `~/.cargo`) are now marked as cache directories so that they are not
+ included in backups or content indexing (on Windows).
+ [#10553](https://github.com/rust-lang/cargo/pull/10553)
+- Added the `--version` flag to `cargo yank` to replace the `--vers` flag to
+ be consistent with `cargo install`.
+ [#10575](https://github.com/rust-lang/cargo/pull/10575)
+- Added automatic `@` argfile support, which will use "response files" if the
+ command-line to `rustc` exceeds the operating system's limit.
+ [#10546](https://github.com/rust-lang/cargo/pull/10546)
+- `cargo clean` now has a progress bar (if it takes longer than half a second).
+ [#10236](https://github.com/rust-lang/cargo/pull/10236)
+
+### Changed
+
+- `cargo install` no longer generates an error if no binaries were found
+ to install (such as missing required features).
+ [#10508](https://github.com/rust-lang/cargo/pull/10508)
+- `cargo test` now passes `--target` to `rustdoc` if the specified target is
+ the same as the host target.
+ [#10594](https://github.com/rust-lang/cargo/pull/10594)
+- `cargo doc` now automatically passes `-Arustdoc::private-intra-doc-links`
+ when documenting a binary (which automatically includes
+ `--document-private-items`). The
+ [`private-intra-doc-links`](https://doc.rust-lang.org/rustdoc/lints.html#private_intra_doc_links)
+ lint is only relevant when *not* documenting private items, which doesn't
+ apply to binaries.
+ [#10142](https://github.com/rust-lang/cargo/pull/10142)
+- The length of the short git hash in the `cargo --version` output is now
+ fixed to 9 characters. Previously the length was inconsistent between
+ different platforms.
+ [#10579](https://github.com/rust-lang/cargo/pull/10579)
+- Attempting to publish a package with a `Cargo.toml.orig` file will now
+ result in an error. The filename would otherwise conflict with the
+ automatically-generated file.
+ [#10551](https://github.com/rust-lang/cargo/pull/10551)
+
+### Fixed
+
+- The `build.dep-info-basedir` configuration setting now properly supports the
+ use of `..` in the path to refer to a parent directory.
+ [#10281](https://github.com/rust-lang/cargo/pull/10281)
+- Fixed regression in automatic detection of the default number of CPUs to use
+ on systems using cgroups v1.
+ [#10737](https://github.com/rust-lang/cargo/pull/10737)
+ [#10739](https://github.com/rust-lang/cargo/pull/10739)
+
+
+### Nightly only
+
+- `cargo fetch` now works with `-Zbuild-std` to fetch the standard library's dependencies.
+ [#10129](https://github.com/rust-lang/cargo/pull/10129)
+- Added support for workspace inheritance.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#workspace-inheritance)
+ [#10584](https://github.com/rust-lang/cargo/pull/10584)
+ [#10568](https://github.com/rust-lang/cargo/pull/10568)
+ [#10565](https://github.com/rust-lang/cargo/pull/10565)
+ [#10564](https://github.com/rust-lang/cargo/pull/10564)
+ [#10563](https://github.com/rust-lang/cargo/pull/10563)
+ [#10606](https://github.com/rust-lang/cargo/pull/10606)
+ [#10548](https://github.com/rust-lang/cargo/pull/10548)
+ [#10538](https://github.com/rust-lang/cargo/pull/10538)
+- Added `-Zcheck-cfg` which adds various forms of validating `cfg` expressions
+ for unknown names and values.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg)
+ [#10486](https://github.com/rust-lang/cargo/pull/10486)
+ [#10566](https://github.com/rust-lang/cargo/pull/10566)
+- The `--config` CLI option no longer allows setting a registry token.
+ [#10580](https://github.com/rust-lang/cargo/pull/10580)
+- Fixed issues with proc-macros and `-Z rustdoc-scrape-examples`.
+ [#10549](https://github.com/rust-lang/cargo/pull/10549)
+ [#10533](https://github.com/rust-lang/cargo/pull/10533)
+
+
+## Cargo 1.61 (2022-05-19)
+[ea2a21c9...rust-1.61.0](https://github.com/rust-lang/cargo/compare/ea2a21c9...rust-1.61.0)
+
+### Added
+
+### Changed
+
+- `cargo test --no-run` will now display the path to the test executables.
+ [#10346](https://github.com/rust-lang/cargo/pull/10346)
+- `cargo tree --duplicates` no longer reports dependencies that are shared
+ between the host and the target as duplicates.
+ [#10466](https://github.com/rust-lang/cargo/pull/10466)
+- Updated to the 1.4.2 release of libgit2 which brings in several fixes
+ [#10442](https://github.com/rust-lang/cargo/pull/10442)
+ [#10479](https://github.com/rust-lang/cargo/pull/10479)
+- `cargo vendor` no longer allows multiple values for `--sync`; you must pass
+ multiple `--sync` flags instead.
+ [#10448](https://github.com/rust-lang/cargo/pull/10448)
+- Warnings are now issued for manifest keys that mix both underscore
+ and dash variants (such as specifying both `proc_macro` and `proc-macro`).
+ [#10316](https://github.com/rust-lang/cargo/pull/10316)
+- Cargo now uses the standard library's `available_parallelism` instead of the
+ `num_cpus` crate for determining the default parallelism.
+ [#10427](https://github.com/rust-lang/cargo/pull/10427)
+- `cargo search` terms are now highlighted.
+ [#10425](https://github.com/rust-lang/cargo/pull/10425)
+
+### Fixed
+
+- Paths passed to VCS tools like `hg` are now added after `--` to avoid
+ conflict with VCS flags.
+ [#10483](https://github.com/rust-lang/cargo/pull/10483)
+- Fixed the `http.timeout` configuration value to actually work.
+ [#10456](https://github.com/rust-lang/cargo/pull/10456)
+- Fixed issues with `cargo rustc --crate-type` not working in some situations.
+ [#10388](https://github.com/rust-lang/cargo/pull/10388)
+
+### Nightly only
+
+- Added `-Z check-cfg-features` to enable compile-time checking of features
+ [#10408](https://github.com/rust-lang/cargo/pull/10408)
+- Added `-Z bindeps` to support binary artifact dependencies (RFC-3028)
+ [#9992](https://github.com/rust-lang/cargo/pull/9992)
+- `-Z multitarget` is now supported in the `build.target` config value with an array.
+ [#10473](https://github.com/rust-lang/cargo/pull/10473)
+- Added `--keep-going` flag which will continue compilation even if one crate
+ fails to compile.
+ [#10383](https://github.com/rust-lang/cargo/pull/10383)
+- Started work on inheriting manifest values in a workspace.
+ [#10497](https://github.com/rust-lang/cargo/pull/10497)
+ [#10517](https://github.com/rust-lang/cargo/pull/10517)
+- Added support for sparse HTTP registries.
+ [#10470](https://github.com/rust-lang/cargo/pull/10470)
+ [#10064](https://github.com/rust-lang/cargo/pull/10064)
+- Fixed panic when artifact target is used for `[target.'cfg(<target>)'.dependencies]`
+ [#10433](https://github.com/rust-lang/cargo/pull/10433)
+- Fixed host flags to pass to build scripts (`-Z target-applies-to-host`)
+ [#10395](https://github.com/rust-lang/cargo/pull/10395)
+- Added `-Z check-cfg-features` support for rustdoc
+ [#10428](https://github.com/rust-lang/cargo/pull/10428)
+
+
+## Cargo 1.60 (2022-04-07)
+[358e79fe...rust-1.60.0](https://github.com/rust-lang/cargo/compare/358e79fe...rust-1.60.0)
+
+### Added
+
+- 🎉 Added the `dep:` prefix in the `[features]` table to refer to an optional
+ dependency. This allows creating feature names with the same name as a
+ dependency, and allows for "hiding" optional dependencies so that they do
+ not implicitly expose a feature name.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/features.html#optional-dependencies)
+ [#10269](https://github.com/rust-lang/cargo/pull/10269)
+- 🎉 Added the `dep-name?/feature-name` syntax to the `[features]` table to
+ only enable the feature `feature-name` if the optional dependency `dep-name`
+ is already enabled by some other feature (see the combined sketch after this list).
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/features.html#dependency-features)
+ [#10269](https://github.com/rust-lang/cargo/pull/10269)
+- 🎉 Added `--timings` option to generate an HTML report about build timing,
+ concurrency, and CPU use.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/timings.html)
+ [#10245](https://github.com/rust-lang/cargo/pull/10245)
+- Added the `"v"` and `"features2"` fields to the registry index.
+ The `"v"` field provides a method for compatibility with future changes to the index.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/registries.html#index-format)
+ [#10269](https://github.com/rust-lang/cargo/pull/10269)
+- Added bash completion for `cargo clippy`
+ [#10347](https://github.com/rust-lang/cargo/pull/10347)
+- Added bash completion for `cargo report`
+ [#10295](https://github.com/rust-lang/cargo/pull/10295)
+- Added support to build scripts for `rustc-link-arg-tests`,
+ `rustc-link-arg-examples`, and `rustc-link-arg-benches`.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/build-scripts.html#outputs-of-the-build-script)
+ [#10274](https://github.com/rust-lang/cargo/pull/10274)
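
A combined sketch of the two new `[features]` syntaxes introduced above, using `serde` as a hypothetical optional dependency:

```toml
[dependencies]
serde = { version = "1", optional = true }

[features]
# `dep:serde` refers to the optional dependency without implicitly
# exposing a feature named after it.
serde = ["dep:serde"]
# `serde?/derive` enables serde's `derive` feature only if the optional
# `serde` dependency is already enabled by some other feature.
pretty = ["serde?/derive"]
```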
+
+### Changed
+
+- Cargo now uses the clap 3 library for command-line argument parsing.
+ [#10265](https://github.com/rust-lang/cargo/pull/10265)
+- The `build.pipelining` config option is now deprecated; pipelining will now
+ always be enabled.
+ [#10258](https://github.com/rust-lang/cargo/pull/10258)
+- `cargo new` will now generate a `.gitignore` which only ignores `Cargo.lock`
+ in the root of the repo, instead of any directory.
+ [#10379](https://github.com/rust-lang/cargo/pull/10379)
+- Improved startup time of bash completion.
+ [#10365](https://github.com/rust-lang/cargo/pull/10365)
+- The `--features` flag is now honored when used with the `--all-features`
+ flag, which allows enabling features from other packages.
+ [#10337](https://github.com/rust-lang/cargo/pull/10337)
+- Cargo now uses a different TOML parser. This should not introduce any
+ user-visible changes. This paves the way to support format-preserving
+ programmatic modification of TOML files for supporting `cargo add` and other
+ future enhancements.
+ [#10086](https://github.com/rust-lang/cargo/pull/10086)
+- Setting a library to emit both a `dylib` and `cdylib` is now an error, as
+ this combination is not supported.
+ [#10243](https://github.com/rust-lang/cargo/pull/10243)
+- `cargo --list` now includes the `help` command.
+ [#10300](https://github.com/rust-lang/cargo/pull/10300)
+
+### Fixed
+
+- Fixed running `cargo doc` on examples with dev-dependencies.
+ [#10341](https://github.com/rust-lang/cargo/pull/10341)
+- Fixed `cargo install --path` for a path that is relative to a directory
+ outside of the workspace in the current directory.
+ [#10335](https://github.com/rust-lang/cargo/pull/10335)
+- `cargo test TEST_FILTER` should no longer build binaries that are explicitly
+ disabled with `test = false`.
+ [#10305](https://github.com/rust-lang/cargo/pull/10305)
+- Fixed regression with `term.verbose` without `term.quiet`, and vice versa.
+ [#10429](https://github.com/rust-lang/cargo/pull/10429)
+ [#10436](https://github.com/rust-lang/cargo/pull/10436)
+
+### Nightly only
+
+- Added `rustflags` option to a profile definition.
+ [#10217](https://github.com/rust-lang/cargo/pull/10217)
+- Changed `--config` to only support dotted keys.
+ [#10176](https://github.com/rust-lang/cargo/pull/10176)
+- Fixed profile `rustflags` not being gated in profile overrides.
+ [#10411](https://github.com/rust-lang/cargo/pull/10411)
+ [#10413](https://github.com/rust-lang/cargo/pull/10413)
+
+## Cargo 1.59 (2022-02-24)
+[7f08ace4...rust-1.59.0](https://github.com/rust-lang/cargo/compare/7f08ace4...rust-1.59.0)
+
+### Added
+
+- 🎉 The `strip` option can now be specified in a profile to specify the
+ behavior for removing symbols and debug information from binaries.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#strip)
+ [#10088](https://github.com/rust-lang/cargo/pull/10088)
+ [#10376](https://github.com/rust-lang/cargo/pull/10376)
+- 🎉 Added future incompatible reporting.
+ This provides reporting for when a future change in `rustc` may cause a
+ package or any of its dependencies to stop building.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/future-incompat-report.html)
+ [#10165](https://github.com/rust-lang/cargo/pull/10165)
+- SSH authentication on Windows now supports ssh-agent.
+ [docs](https://doc.rust-lang.org/nightly/cargo/appendix/git-authentication.html#ssh-authentication)
+ [#10248](https://github.com/rust-lang/cargo/pull/10248)
+- Added `term.quiet` configuration option to enable the `--quiet` behavior
+ from a config file.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#termquiet)
+ [#10152](https://github.com/rust-lang/cargo/pull/10152)
+- Added `-r` CLI option as an alias for `--release`.
+ [#10133](https://github.com/rust-lang/cargo/pull/10133)
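
A minimal sketch of the `strip` profile option described at the top of this list:

```toml
[profile.release]
# Remove symbols from the final binary; "debuginfo" would strip only debug info.
strip = "symbols"
```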
+
+### Changed
+
+- Scanning the package directory should now be resilient to errors, such as
+ filesystem loops or access issues.
+ [#10188](https://github.com/rust-lang/cargo/pull/10188)
+ [#10214](https://github.com/rust-lang/cargo/pull/10214)
+ [#10286](https://github.com/rust-lang/cargo/pull/10286)
+- `cargo help <alias>` will now show the target of the alias.
+ [#10193](https://github.com/rust-lang/cargo/pull/10193)
+- Removed the deprecated `--host` CLI option.
+ [#10145](https://github.com/rust-lang/cargo/pull/10145)
+ [#10327](https://github.com/rust-lang/cargo/pull/10327)
+- Cargo should now report its version to always be in sync with `rustc`.
+ [#10178](https://github.com/rust-lang/cargo/pull/10178)
+- Added EOPNOTSUPP to ignored file locking errors, which is relevant to BSD
+ operating systems.
+ [#10157](https://github.com/rust-lang/cargo/pull/10157)
+
+### Fixed
+
+- macOS: Fixed an issue where running an executable would sporadically be
+ killed by the kernel (likely starting in macOS 12).
+ [#10196](https://github.com/rust-lang/cargo/pull/10196)
+- Fixed the `doc = false` setting so that it is honored in the `[lib]` definition
+ of a dependency.
+ [#10201](https://github.com/rust-lang/cargo/pull/10201)
+ [#10324](https://github.com/rust-lang/cargo/pull/10324)
+- The `"executable"` field in the JSON output was incorrectly including the
+ path to `index.html` when documenting a binary. It is now null.
+ [#10171](https://github.com/rust-lang/cargo/pull/10171)
+- Documenting a binary now waits for the package library to finish documenting
+ before starting. This fixes some race conditions if the binary has intra-doc
+ links to the library.
+ [#10172](https://github.com/rust-lang/cargo/pull/10172)
+- Fixed panic when displaying help text to a closed pipe.
+ [#10164](https://github.com/rust-lang/cargo/pull/10164)
+
+### Nightly only
+- Added the `--crate-type` flag to `cargo rustc`.
+ [#10093](https://github.com/rust-lang/cargo/pull/10093)
+
+
+## Cargo 1.58 (2022-01-13)
+[b2e52d7c...rust-1.58.0](https://github.com/rust-lang/cargo/compare/b2e52d7c...rust-1.58.0)
+
+### Added
+
+- Added `rust_version` field to package data in `cargo metadata`.
+ [#9967](https://github.com/rust-lang/cargo/pull/9967)
+- Added `--message-format` option to `cargo install`.
+ [#10107](https://github.com/rust-lang/cargo/pull/10107)
+
+### Changed
+
+- A warning is now shown when an alias shadows an external command.
+ [#10082](https://github.com/rust-lang/cargo/pull/10082)
+- Updated curl to 7.80.0.
+ [#10040](https://github.com/rust-lang/cargo/pull/10040)
+ [#10106](https://github.com/rust-lang/cargo/pull/10106)
+
+### Fixed
+
+- Doctests now include rustc-link-args from build scripts.
+ [#9916](https://github.com/rust-lang/cargo/pull/9916)
+- Fixed `cargo tree` entering an infinite loop with cyclical dev-dependencies.
+ Fixed an edge case where the resolver would fail to handle a cyclical dev-dependency with a feature.
+ [#10103](https://github.com/rust-lang/cargo/pull/10103)
+- Fixed `cargo clean -p` when the directory path contains glob characters.
+ [#10072](https://github.com/rust-lang/cargo/pull/10072)
+- Fixed debug builds of `cargo` which could panic when downloading a crate
+ when the server has a redirect with a non-empty body.
+ [#10048](https://github.com/rust-lang/cargo/pull/10048)
+
+### Nightly only
+
+- Make future-incompat-report output more user-friendly.
+ [#9953](https://github.com/rust-lang/cargo/pull/9953)
+- Added support to scrape code examples from the `examples` directory to be included in the documentation.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#scrape-examples)
+ [#9525](https://github.com/rust-lang/cargo/pull/9525)
+ [#10037](https://github.com/rust-lang/cargo/pull/10037)
+ [#10017](https://github.com/rust-lang/cargo/pull/10017)
+- Fixed `cargo report future-incompatibilities` to check stdout if it supports color.
+ [#10024](https://github.com/rust-lang/cargo/pull/10024)
+
+## Cargo 1.57 (2021-12-02)
+[18751dd3...rust-1.57.0](https://github.com/rust-lang/cargo/compare/18751dd3...rust-1.57.0)
+
+### Added
+
+- 🎉 Added custom named profiles. This also changes the `test` and `bench`
+ profiles to inherit their settings from `dev` and `release`, and Cargo will
+ now only use a single profile during a given command instead of using
+ different profiles for dependencies and Cargo targets (see the sketch after this list).
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#custom-profiles)
+ [#9943](https://github.com/rust-lang/cargo/pull/9943)
+- The `rev` option for a git dependency now supports git references that start
+ with `refs/`. An example where this can be used is to depend on a pull
+ request from a service like GitHub before it is merged.
+ [#9859](https://github.com/rust-lang/cargo/pull/9859)
+- Added `path_in_vcs` field to the `.cargo_vcs_info.json` file.
+ [docs](https://doc.rust-lang.org/nightly/cargo/commands/cargo-package.html#cargo_vcs_infojson-format)
+ [#9866](https://github.com/rust-lang/cargo/pull/9866)
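
A minimal sketch of a custom named profile as introduced above (the profile name is arbitrary); it would be selected with `cargo build --profile release-lto`:

```toml
[profile.release-lto]
inherits = "release"   # custom profiles must name a profile to inherit from
lto = true
```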
+
+### Changed
+
+- ❗ `RUSTFLAGS` is no longer set for build scripts. This change was made in
+ 1.55, but the release notes did not highlight this change. Build scripts
+ should use `CARGO_ENCODED_RUSTFLAGS` instead. See the
+ [documentation](https://doc.rust-lang.org/nightly/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts)
+ for more details.
+- The `cargo version` command now includes some extra information.
+ [#9968](https://github.com/rust-lang/cargo/pull/9968)
+- Updated libgit2 to 1.3 which brings in a number of fixes and changes to git
+ handling.
+ [#9963](https://github.com/rust-lang/cargo/pull/9963)
+ [#9988](https://github.com/rust-lang/cargo/pull/9988)
+- Shell completions now include shorthand b/r/c/d subcommands.
+ [#9951](https://github.com/rust-lang/cargo/pull/9951)
+- `cargo update --precise` now allows specifying a version without semver
+ metadata (the part after `+` in the version number).
+ [#9945](https://github.com/rust-lang/cargo/pull/9945)
+- zsh completions now complete `--example` names.
+ [#9939](https://github.com/rust-lang/cargo/pull/9939)
+- The progress bar now differentiates when building unit tests.
+ [#9934](https://github.com/rust-lang/cargo/pull/9934)
+- Some backwards-compatibility support for invalid TOML syntax has been removed.
+ [#9932](https://github.com/rust-lang/cargo/pull/9932)
+- Reverted the change from 1.55 that triggered an error for dependency
+ specifications that did not include any fields.
+ [#9911](https://github.com/rust-lang/cargo/pull/9911)
+
+### Fixed
+
+- Removed a log message (from `CARGO_LOG`) that may leak tokens.
+ [#9873](https://github.com/rust-lang/cargo/pull/9873)
+- `cargo fix` will now avoid writing fixes to the global registry cache.
+ [#9938](https://github.com/rust-lang/cargo/pull/9938)
+- Fixed `-Z help` CLI option when used with a shorthand alias (b/c/r/d).
+ [#9933](https://github.com/rust-lang/cargo/pull/9933)
+
+
+### Nightly only
+
+
+## Cargo 1.56 (2021-10-21)
+[cebef295...rust-1.56.0](https://github.com/rust-lang/cargo/compare/cebef295...rust-1.56.0)
+
+### Added
+
+- 🎉 Cargo now supports the 2021 edition.
+ More information may be found in the [edition
+ guide](https://doc.rust-lang.org/nightly/edition-guide/rust-2021/index.html).
+ [#9800](https://github.com/rust-lang/cargo/pull/9800)
+- 🎉 Added the
+ [`rust-version`](https://doc.rust-lang.org/nightly/cargo/reference/manifest.html#the-rust-version-field)
+ field to `Cargo.toml` to specify the minimum supported Rust version, and the
+ `--ignore-rust-version` command line option to override it.
+ [#9732](https://github.com/rust-lang/cargo/pull/9732)
+- Added the `[env]` table to config files to specify environment variables to
+ set (see the sketch after this list).
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#env)
+ [#9411](https://github.com/rust-lang/cargo/pull/9411)
+- `[patch]` tables may now be specified in config files.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#patch)
+ [#9839](https://github.com/rust-lang/cargo/pull/9839)
+- `cargo doc` now supports the `--example` and `--examples` flags.
+ [#9808](https://github.com/rust-lang/cargo/pull/9808)
+- 🎉 Build scripts can now pass additional linker arguments for binaries or all
+ linkable targets. [docs](https://doc.rust-lang.org/nightly/cargo/reference/build-scripts.html#outputs-of-the-build-script)
+ [#9557](https://github.com/rust-lang/cargo/pull/9557)
+- Added support for the `-p` flag for `cargo publish` to publish a specific
+ package in a workspace. `cargo package` also now supports `-p` and
+ `--workspace`.
+ [#9559](https://github.com/rust-lang/cargo/pull/9559)
+- Added documentation about third-party registries.
+ [#9830](https://github.com/rust-lang/cargo/pull/9830)
+- Added the `{sha256-checksum}` placeholder for URLs in a registry `config.json`.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/registries.html#index-format)
+ [#9801](https://github.com/rust-lang/cargo/pull/9801)
+- Added a warning when a dependency does not have a library.
+ [#9771](https://github.com/rust-lang/cargo/pull/9771)
+
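+A minimal `.cargo/config.toml` sketch of the `[env]` and `[patch]` additions
+above; the variable names and the patched crate are hypothetical. The new
+`rust-version` manifest field is a one-line addition to `[package]`, e.g.
+`rust-version = "1.56"`.
+
+```toml
+# .cargo/config.toml (sketch)
+[env]
+APP_GREETING = "hello"                              # plain value
+ASSET_DIR = { value = "assets", relative = true }   # resolved relative to this config file
+
+[patch.crates-io]
+example-crate = { path = "../example-crate" }       # hypothetical local override
+```
+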
+### Changed
+
+- Doc tests now support the `-q` flag to show terse test output.
+ [#9730](https://github.com/rust-lang/cargo/pull/9730)
+- Using `features` in a `[replace]` table now issues a warning, as they are ignored.
+ [#9681](https://github.com/rust-lang/cargo/pull/9681)
+- Changed so that only `wasm32-unknown-emscripten` executables are built
+ without a hash in the filename. Previously it was all `wasm32` targets.
+ Additionally, all `apple` binaries are now built with a hash in the
+ filename. This allows multiple copies to be cached at once, and matches the
+ behavior on other platforms (except `msvc`).
+ [#9653](https://github.com/rust-lang/cargo/pull/9653)
+- `cargo new` now generates sample code that doesn't trigger a warning with
+ clippy.
+ [#9796](https://github.com/rust-lang/cargo/pull/9796)
+- `cargo fix --edition` now only applies edition-specific lints.
+ [#9846](https://github.com/rust-lang/cargo/pull/9846)
+- Improved the resolver error message to include dependency requirements.
+ [#9827](https://github.com/rust-lang/cargo/pull/9827)
+- `cargo fix` now has more debug logging available with the `CARGO_LOG`
+ environment variable.
+ [#9831](https://github.com/rust-lang/cargo/pull/9831)
+- Changed `cargo fix --edition` to emit a warning instead of an error when the
+ package is already on the latest stable edition and running on stable.
+ [#9792](https://github.com/rust-lang/cargo/pull/9792)
+- `cargo install` will now determine all of the packages to install before
+ starting the installation, which should help report errors without leaving
+ a partial installation behind.
+ [#9793](https://github.com/rust-lang/cargo/pull/9793)
+- The resolver report for `cargo fix --edition` now includes differences for
+ dev-dependencies.
+ [#9803](https://github.com/rust-lang/cargo/pull/9803)
+- `cargo fix` will now show better diagnostics for abnormal errors from `rustc`.
+ [#9799](https://github.com/rust-lang/cargo/pull/9799)
+- Entries in `cargo --list` are now deduplicated.
+ [#9773](https://github.com/rust-lang/cargo/pull/9773)
+- Aliases are now included in `cargo --list`.
+ [#9764](https://github.com/rust-lang/cargo/pull/9764)
+
+### Fixed
+
+- Fixed panic with build-std of a proc-macro.
+ [#9834](https://github.com/rust-lang/cargo/pull/9834)
+- Fixed running `cargo` recursively from proc-macros while running `cargo fix`.
+ [#9818](https://github.com/rust-lang/cargo/pull/9818)
+- Return an error instead of a stack overflow for command alias loops.
+ [#9791](https://github.com/rust-lang/cargo/pull/9791)
+- Updated to curl 7.79.1, which will hopefully fix intermittent http2 errors.
+ [#9937](https://github.com/rust-lang/cargo/pull/9937)
+
+### Nightly only
+
+- Added `[future-incompat-report]` config section.
+ [#9774](https://github.com/rust-lang/cargo/pull/9774)
+- Fixed value-after-table error with custom named profiles.
+ [#9789](https://github.com/rust-lang/cargo/pull/9789)
+- Added the `different-binary-name` feature to support specifying a binary
+ name that is not a valid Rust identifier.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#different-binary-name)
+ [#9627](https://github.com/rust-lang/cargo/pull/9627)
+- Added a profile option to select the codegen backend.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#codegen-backend)
+ [#9118](https://github.com/rust-lang/cargo/pull/9118)
+
+
+## Cargo 1.55 (2021-09-09)
+[aa8b0929...rust-1.55.0](https://github.com/rust-lang/cargo/compare/aa8b0929...rust-1.55.0)
+
+### Added
+
+- The package definition in `cargo metadata` now includes the `"default_run"`
+ field from the manifest.
+ [#9550](https://github.com/rust-lang/cargo/pull/9550)
+- ❗ Build scripts now have access to the following environment variables:
+ `RUSTC_WRAPPER`, `RUSTC_WORKSPACE_WRAPPER`, `CARGO_ENCODED_RUSTFLAGS`.
+ `RUSTFLAGS` is no longer set for build scripts; they should use
+ `CARGO_ENCODED_RUSTFLAGS` instead.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts)
+ [#9601](https://github.com/rust-lang/cargo/pull/9601)
+- Added `cargo d` as an alias for `cargo doc`.
+ [#9680](https://github.com/rust-lang/cargo/pull/9680)
+- Added `{lib}` to the `cargo tree --format` option to display the library
+ name of a package.
+ [#9663](https://github.com/rust-lang/cargo/pull/9663)
+- Added `members_mut` method to the `Workspace` API.
+ [#9547](https://github.com/rust-lang/cargo/pull/9547)
+
+### Changed
+
+- If a build command does not match any targets when using the
+ `--all-targets`, `--bins`, `--tests`, `--examples`, or `--benches` flags, a
+ warning is now displayed to inform you that there were no matching targets.
+ [#9549](https://github.com/rust-lang/cargo/pull/9549)
+- The way `cargo init` detects whether or not existing source files represent
+ a binary or library has been changed to respect the command-line flags
+ instead of trying to guess which type it is.
+ [#9522](https://github.com/rust-lang/cargo/pull/9522)
+- Registry names are now displayed instead of registry URLs when possible.
+ [#9632](https://github.com/rust-lang/cargo/pull/9632)
+- Duplicate compiler diagnostics are no longer shown. This can often happen
+ with `cargo test` which builds multiple copies of the same code in parallel.
+ This also updates the warning summary to provide more context.
+ [#9675](https://github.com/rust-lang/cargo/pull/9675)
+- The output for warnings or errors is now improved to be leaner, cleaner, and
+ show more context.
+ [#9655](https://github.com/rust-lang/cargo/pull/9655)
+- Network send errors are now treated as "spurious" which means they will be retried.
+ [#9695](https://github.com/rust-lang/cargo/pull/9695)
+- Git keys (`branch`, `tag`, `rev`) on a non-git dependency are now an error.
+ Additionally, specifying both `git` and `path` is now an error.
+ [#9689](https://github.com/rust-lang/cargo/pull/9689)
+- Specifying a dependency without any keys is now an error.
+ [#9686](https://github.com/rust-lang/cargo/pull/9686)
+- The resolver now prefers to use `[patch]` table entries of dependencies when
+ possible.
+ [#9639](https://github.com/rust-lang/cargo/pull/9639)
+- Package name typo errors in dependencies are now displayed aligned with the
+ original to help make it easier to see the difference.
+ [#9665](https://github.com/rust-lang/cargo/pull/9665)
+- Windows platforms may now warn on environment variables that have the wrong case.
+ [#9654](https://github.com/rust-lang/cargo/pull/9654)
+- Using `features` in a `[patch]` table now issues a warning, as they are ignored.
+ [#9666](https://github.com/rust-lang/cargo/pull/9666)
+- The `target` directory is now excluded from content indexing on Windows.
+ [#9635](https://github.com/rust-lang/cargo/pull/9635)
+- When `Cargo.toml` is not found, the error message now detects if it was
+ misnamed with a lowercase `c` to suggest the correct form.
+ [#9607](https://github.com/rust-lang/cargo/pull/9607)
+- Building `diesel` with the new resolver displays a compatibility notice.
+ [#9602](https://github.com/rust-lang/cargo/pull/9602)
+- Updated the `opener` dependency, which handles opening a web browser; this
+ brings several changes, such as new behavior when run on WSL, and using
+ the system `xdg-open` on Linux.
+ [#9583](https://github.com/rust-lang/cargo/pull/9583)
+- Updated to libcurl 7.78.
+ [#9809](https://github.com/rust-lang/cargo/pull/9809)
+ [#9810](https://github.com/rust-lang/cargo/pull/9810)
+
+### Fixed
+
+- Fixed dep-info files including non-local build script paths.
+ [#9596](https://github.com/rust-lang/cargo/pull/9596)
+- Handle "jobs = 0" case in cargo config files
+ [#9584](https://github.com/rust-lang/cargo/pull/9584)
+- Implemented a warning for ignored trailing arguments after `--`.
+ [#9561](https://github.com/rust-lang/cargo/pull/9561)
+- Fixed rustc/rustdoc config values to be config-relative.
+ [#9566](https://github.com/rust-lang/cargo/pull/9566)
+- `cargo fix` now supports rustc's suggestions with multiple spans.
+ [#9567](https://github.com/rust-lang/cargo/pull/9567)
+- `cargo fix` now fixes each target serially instead of in parallel to avoid
+ problems with fixing the same file concurrently.
+ [#9677](https://github.com/rust-lang/cargo/pull/9677)
+- Changes to the target `linker` config value now trigger a rebuild.
+ [#9647](https://github.com/rust-lang/cargo/pull/9647)
+- Git unstaged deleted files are now ignored when using the `--allow-dirty`
+ flag with `cargo publish` or `cargo package`.
+ [#9645](https://github.com/rust-lang/cargo/pull/9645)
+
+### Nightly only
+
+- Enabled support for `cargo fix --edition` for 2021.
+ [#9588](https://github.com/rust-lang/cargo/pull/9588)
+- Several changes to named profiles.
+ [#9685](https://github.com/rust-lang/cargo/pull/9685)
+- Extended instructions on what to do when running `cargo fix --edition` on
+ the 2021 edition.
+ [#9694](https://github.com/rust-lang/cargo/pull/9694)
+- Multiple updates to error messages using nightly features to help better
+ explain the situation.
+ [#9657](https://github.com/rust-lang/cargo/pull/9657)
+- Adjusted the edition 2021 resolver diff report.
+ [#9649](https://github.com/rust-lang/cargo/pull/9649)
+- Fixed error using `cargo doc --open` with `doc.extern-map`.
+ [#9531](https://github.com/rust-lang/cargo/pull/9531)
+- Unified weak and namespaced features.
+ [#9574](https://github.com/rust-lang/cargo/pull/9574)
+- Various updates to future-incompatible reporting.
+ [#9606](https://github.com/rust-lang/cargo/pull/9606)
+- `[env]` entries in config files are not allowed to set environment variables
+ that Cargo itself sets.
+ [#9579](https://github.com/rust-lang/cargo/pull/9579)
+
+## Cargo 1.54 (2021-07-29)
+[4369396c...rust-1.54.0](https://github.com/rust-lang/cargo/compare/4369396c...rust-1.54.0)
+
+### Added
+
+- Fetching from a git repository (such as the crates.io index) now displays
+ the network transfer rate.
+ [#9395](https://github.com/rust-lang/cargo/pull/9395)
+- Added `--prune` option for `cargo tree` to limit what is displayed.
+ [#9520](https://github.com/rust-lang/cargo/pull/9520)
+- Added `--depth` option for `cargo tree` to limit what is displayed.
+ [#9499](https://github.com/rust-lang/cargo/pull/9499)
+- Added `cargo tree -e no-proc-macro` to hide procedural macro dependencies.
+ [#9488](https://github.com/rust-lang/cargo/pull/9488)
+- Added the `doc.browser` config option to set which browser to open with
+ `cargo doc --open` (see the sketch after this list).
+ [#9473](https://github.com/rust-lang/cargo/pull/9473)
+- Added `CARGO_TARGET_TMPDIR` environment variable set for integration tests &
+ benches. This provides a temporary or "scratch" directory in the `target`
+ directory for tests and benches to use.
+ [#9375](https://github.com/rust-lang/cargo/pull/9375)
+
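+A minimal sketch of the `doc.browser` option above; the browser command is
+hypothetical and may be any program found on `PATH`:
+
+```toml
+# .cargo/config.toml (sketch)
+[doc]
+browser = "firefox"   # used by `cargo doc --open`
+```
+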
+### Changed
+
+- `--features` CLI flags now provide typo suggestions with the new feature resolver.
+ [#9420](https://github.com/rust-lang/cargo/pull/9420)
+- Cargo now uses a new parser for SemVer versions. This should behave mostly
+ the same as before with some minor exceptions where invalid syntax for
+ version requirements is now rejected.
+ [#9508](https://github.com/rust-lang/cargo/pull/9508)
+- Mtime handling of published `.crate` packages has changed slightly to avoid
+ mtime values of 0. This was causing problems with lldb, which refused to
+ read those files.
+ [#9517](https://github.com/rust-lang/cargo/pull/9517)
+- Improved performance of git status check in `cargo package`.
+ [#9478](https://github.com/rust-lang/cargo/pull/9478)
+- `cargo new` with fossil now places the ignore settings in the new repository
+ instead of using `fossil settings` to set them globally. This also includes
+ several other cleanups to make it more consistent with other VCS
+ configurations.
+ [#9469](https://github.com/rust-lang/cargo/pull/9469)
+- A warning is now displayed when `rustc-cdylib-link-arg` applies transitively,
+ as this was not intended and may become an error in the future.
+ [#9563](https://github.com/rust-lang/cargo/pull/9563)
+
+### Fixed
+
+- Fixed `package.exclude` in `Cargo.toml` using inverted exclusions
+ (`!somefile`) when not in a git repository or when vendoring a dependency.
+ [#9186](https://github.com/rust-lang/cargo/pull/9186)
+- Dep-info files now adjust build script `rerun-if-changed` paths to be
+ absolute paths.
+ [#9421](https://github.com/rust-lang/cargo/pull/9421)
+- Fixed a bug where, with `resolver = "1"`, a non-virtual package allowed
+ unknown features.
+ [#9437](https://github.com/rust-lang/cargo/pull/9437)
+- Fixed an issue with the index cache mishandling versions that only
+ differed in build metadata (such as `110.0.0` and `110.0.0+1.1.0f`).
+ [#9476](https://github.com/rust-lang/cargo/pull/9476)
+- Fixed `cargo install` with a semver metadata version.
+ [#9467](https://github.com/rust-lang/cargo/pull/9467)
+
+### Nightly only
+
+- Added the `report` subcommand, and changed `cargo
+ describe-future-incompatibilities` to `cargo report
+ future-incompatibilities`.
+ [#9438](https://github.com/rust-lang/cargo/pull/9438)
+- Added a `[host]` table to the config files to be able to set build flags for
+ the host target. Also added `target-applies-to-host` to control how the
+ `[target]` tables behave.
+ [#9322](https://github.com/rust-lang/cargo/pull/9322)
+- Added some validation to build script `rustc-link-arg-*` instructions to
+ return an error if the target doesn't exist.
+ [#9523](https://github.com/rust-lang/cargo/pull/9523)
+- Added `cargo:rustc-link-arg-bin` instruction for build scripts.
+ [#9486](https://github.com/rust-lang/cargo/pull/9486)
+
+
+## Cargo 1.53 (2021-06-17)
+[90691f2b...rust-1.53.0](https://github.com/rust-lang/cargo/compare/90691f2b...rust-1.53.0)
+
+### Added
+
+### Changed
+- 🔥 Cargo now supports git repositories where the default `HEAD` branch is not
+ "master". This also includes a switch to the version 3 `Cargo.lock` format
+ which can handle default branches correctly.
+ [#9133](https://github.com/rust-lang/cargo/pull/9133)
+ [#9397](https://github.com/rust-lang/cargo/pull/9397)
+ [#9384](https://github.com/rust-lang/cargo/pull/9384)
+ [#9392](https://github.com/rust-lang/cargo/pull/9392)
+- 🔥 macOS targets now default to `unpacked` split-debuginfo.
+ [#9298](https://github.com/rust-lang/cargo/pull/9298)
+- ❗ The `authors` field is no longer included in `Cargo.toml` for new
+ projects.
+ [#9282](https://github.com/rust-lang/cargo/pull/9282)
+- `cargo update` may now work with the `--offline` flag.
+ [#9279](https://github.com/rust-lang/cargo/pull/9279)
+- `cargo doc` will now erase the `doc` directory when switching between
+ different toolchain versions. There are shared, unversioned files (such as
+ the search index) that can become broken when using different versions.
+ [#8640](https://github.com/rust-lang/cargo/pull/8640)
+ [#9404](https://github.com/rust-lang/cargo/pull/9404)
+- Improved error messages when path dependency/workspace member is missing.
+ [#9368](https://github.com/rust-lang/cargo/pull/9368)
+
+### Fixed
+- Fixed `cargo doc` detecting if the documentation needs to be rebuilt when
+ changing some settings such as features.
+ [#9419](https://github.com/rust-lang/cargo/pull/9419)
+- `cargo doc` now deletes the output directory for the package before running
+ rustdoc to clear out any stale files.
+ [#9419](https://github.com/rust-lang/cargo/pull/9419)
+- Fixed the `-C metadata` value to always include all information for all
+ builds. Previously, in some situations, the hash only included the package
+ name and version. This fixes some issues, such as incremental builds with
+ split-debuginfo on macOS corrupting the incremental cache in some cases.
+ [#9418](https://github.com/rust-lang/cargo/pull/9418)
+- Fixed man pages not working on Windows if `man` is in `PATH`.
+ [#9378](https://github.com/rust-lang/cargo/pull/9378)
+- The `rustc` cache is now aware of `RUSTC_WRAPPER` and `RUSTC_WORKSPACE_WRAPPER`.
+ [#9348](https://github.com/rust-lang/cargo/pull/9348)
+- Track the `CARGO` environment variable in the rebuild fingerprint if the
+ code uses `env!("CARGO")`.
+ [#9363](https://github.com/rust-lang/cargo/pull/9363)
+
+### Nightly only
+- Fixed config includes not working.
+ [#9299](https://github.com/rust-lang/cargo/pull/9299)
+- Emit note when `--future-incompat-report` had nothing to report.
+ [#9263](https://github.com/rust-lang/cargo/pull/9263)
+- Error messages for nightly feature flags (like `-Z` and `cargo-features`)
+ now provide more information.
+ [#9290](https://github.com/rust-lang/cargo/pull/9290)
+- Added the ability to set the target for an individual package in `Cargo.toml`.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#per-package-target)
+ [#9030](https://github.com/rust-lang/cargo/pull/9030)
+- Fixed build-std updating the index on every build.
+ [#9393](https://github.com/rust-lang/cargo/pull/9393)
+- `-Z help` now displays all the `-Z` options.
+ [#9369](https://github.com/rust-lang/cargo/pull/9369)
+- Added `-Zallow-features` to specify which nightly features are allowed to be used.
+ [#9283](https://github.com/rust-lang/cargo/pull/9283)
+- Added `cargo config` subcommand.
+ [#9302](https://github.com/rust-lang/cargo/pull/9302)
+
+## Cargo 1.52 (2021-05-06)
+[34170fcd...rust-1.52.0](https://github.com/rust-lang/cargo/compare/34170fcd...rust-1.52.0)
+
+### Added
+- Added the `"manifest_path"` field to JSON messages for a package.
+ [#9022](https://github.com/rust-lang/cargo/pull/9022)
+ [#9247](https://github.com/rust-lang/cargo/pull/9247)
+
+### Changed
+- Build scripts are now forbidden from setting `RUSTC_BOOTSTRAP` on stable.
+ [#9181](https://github.com/rust-lang/cargo/pull/9181)
+ [#9385](https://github.com/rust-lang/cargo/pull/9385)
+- crates.io now supports SPDX 3.11 licenses.
+ [#9209](https://github.com/rust-lang/cargo/pull/9209)
+- An error is now reported if `CARGO_TARGET_DIR` is an empty string.
+ [#8939](https://github.com/rust-lang/cargo/pull/8939)
+- Doc tests now pass the `--message-format` flag into the test so that the
+ "short" format can now be used for doc tests.
+ [#9128](https://github.com/rust-lang/cargo/pull/9128)
+- `cargo test` now prints a clearer indicator of which target is currently running.
+ [#9195](https://github.com/rust-lang/cargo/pull/9195)
+- The `CARGO_TARGET_<TRIPLE>` environment variable will now issue a warning if
+ it is using lowercase letters.
+ [#9169](https://github.com/rust-lang/cargo/pull/9169)
+
+### Fixed
+- Fixed publication of packages with metadata and resolver fields in `Cargo.toml`.
+ [#9300](https://github.com/rust-lang/cargo/pull/9300)
+ [#9304](https://github.com/rust-lang/cargo/pull/9304)
+- Fixed the logic for determining `prefer-dynamic` for a dylib, which differed
+ between a workspace and a single package.
+ [#9252](https://github.com/rust-lang/cargo/pull/9252)
+- Fixed an issue where exclusive target-specific dependencies that overlapped
+ across dependency kinds (like regular and build-dependencies) would
+ incorrectly include the dependencies in both.
+ [#9255](https://github.com/rust-lang/cargo/pull/9255)
+- Fixed panic with certain styles of Package IDs when passed to the `-p` flag.
+ [#9188](https://github.com/rust-lang/cargo/pull/9188)
+- When running cargo with output not going to a TTY, and with the progress bar
+ and color force-enabled, the output will now correctly clear the progress
+ line.
+ [#9231](https://github.com/rust-lang/cargo/pull/9231)
+- Error instead of panic when JSON may contain non-utf8 paths.
+ [#9226](https://github.com/rust-lang/cargo/pull/9226)
+- Fixed a hang that can happen on broken stderr.
+ [#9201](https://github.com/rust-lang/cargo/pull/9201)
+- Fixed thin-local LTO not being disabled correctly when `lto=off` is set.
+ [#9182](https://github.com/rust-lang/cargo/pull/9182)
+
+### Nightly only
+- The `strip` profile option now supports `true` and `false` values.
+ [#9153](https://github.com/rust-lang/cargo/pull/9153)
+- `cargo fix --edition` now displays a report when switching to 2021 if the
+ new resolver changes features.
+ [#9268](https://github.com/rust-lang/cargo/pull/9268)
+- Added `[patch]` table support in `.cargo/config` files.
+ [#9204](https://github.com/rust-lang/cargo/pull/9204)
+- Added `cargo describe-future-incompatibilities` for generating a report on
+ dependencies that contain future-incompatible warnings.
+ [#8825](https://github.com/rust-lang/cargo/pull/8825)
+- Added easier support for testing the 2021 edition.
+ [#9184](https://github.com/rust-lang/cargo/pull/9184)
+- Switch the default resolver to "2" in the 2021 edition.
+ [#9184](https://github.com/rust-lang/cargo/pull/9184)
+- `cargo fix --edition` now supports 2021.
+ [#9184](https://github.com/rust-lang/cargo/pull/9184)
+- Added the `--print` flag to `cargo rustc`, which is passed along to `rustc`
+ to display the requested information.
+ [#9002](https://github.com/rust-lang/cargo/pull/9002)
+- Added `-Zdoctest-in-workspace` for changing the directory where doctests are
+ *run* versus where they are *compiled*.
+ [#9105](https://github.com/rust-lang/cargo/pull/9105)
+- Added support for an `[env]` section in `.cargo/config.toml` to set
+ environment variables when running cargo.
+ [#9175](https://github.com/rust-lang/cargo/pull/9175)
+- Added a schema field and `features2` field to the index.
+ [#9161](https://github.com/rust-lang/cargo/pull/9161)
+- Changes to JSON spec targets will now trigger a rebuild.
+ [#9223](https://github.com/rust-lang/cargo/pull/9223)
+
+## Cargo 1.51 (2021-03-25)
+[75d5d8cf...rust-1.51.0](https://github.com/rust-lang/cargo/compare/75d5d8cf...rust-1.51.0)
+
+### Added
+- 🔥 Added the `split-debuginfo` profile option (see the sketch after this list).
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#split-debuginfo)
+ [#9112](https://github.com/rust-lang/cargo/pull/9112)
+- Added the `path` field to `cargo metadata` for the package dependencies list
+ to show the path for "path" dependencies.
+ [#8994](https://github.com/rust-lang/cargo/pull/8994)
+- 🔥 Added a new feature resolver, and new CLI feature flag behavior. See the
+ new [features](https://doc.rust-lang.org/nightly/cargo/reference/features.html#feature-resolver-version-2)
+ and [resolver](https://doc.rust-lang.org/nightly/cargo/reference/resolver.html#feature-resolver-version-2)
+ documentation for the `resolver = "2"` option. See the
+ [CLI](https://doc.rust-lang.org/nightly/cargo/reference/features.html#command-line-feature-options)
+ and [resolver 2 CLI](https://doc.rust-lang.org/nightly/cargo/reference/features.html#resolver-version-2-command-line-flags)
+ options for the new CLI behavior. And, finally, see
+ [RFC 2957](https://github.com/rust-lang/rfcs/blob/master/text/2957-cargo-features2.md)
+ for a detailed look at what has changed.
+ [#8997](https://github.com/rust-lang/cargo/pull/8997)
+
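+A minimal manifest sketch combining the `split-debuginfo` option and the
+`resolver = "2"` opt-in described above; the package name is hypothetical:
+
+```toml
+# Cargo.toml (sketch)
+[package]
+name = "hello"
+version = "0.1.0"
+edition = "2018"
+resolver = "2"                 # opt in to the new feature resolver
+
+[profile.dev]
+split-debuginfo = "unpacked"   # the new macOS default
+```
+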
+### Changed
+- `cargo install --locked` now emits a warning if `Cargo.lock` is not found.
+ [#9108](https://github.com/rust-lang/cargo/pull/9108)
+- Unknown or ambiguous package IDs passed on the command-line now display
+ suggestions for the correct package ID.
+ [#9095](https://github.com/rust-lang/cargo/pull/9095)
+- Slightly optimized `cargo vendor`.
+ [#8937](https://github.com/rust-lang/cargo/pull/8937)
+ [#9131](https://github.com/rust-lang/cargo/pull/9131)
+ [#9132](https://github.com/rust-lang/cargo/pull/9132)
+
+### Fixed
+- Fixed environment variables and cfg settings emitted by a build script that
+ are set for `cargo test` and `cargo run` when the build script runs multiple
+ times during the same build session.
+ [#9122](https://github.com/rust-lang/cargo/pull/9122)
+- Fixed a panic with `cargo doc` and the new feature resolver. This also
+ introduces some heuristics to try to avoid path collisions with `rustdoc` by
+ only documenting one variant of a package if there are multiple (such as
+ multiple versions, or the same package shared for host and target
+ platforms).
+ [#9077](https://github.com/rust-lang/cargo/pull/9077)
+- Fixed a bug in Cargo's cyclic dep graph detection that caused a stack
+ overflow.
+ [#9075](https://github.com/rust-lang/cargo/pull/9075)
+- Fixed build script `links` environment variables (`DEP_*`) not showing up
+ for testing packages in some cases.
+ [#9065](https://github.com/rust-lang/cargo/pull/9065)
+- Fixed features being selected in a nondeterministic way for a specific
+ scenario when building an entire workspace with all targets with a
+ proc-macro in the workspace with `resolver="2"`.
+ [#9059](https://github.com/rust-lang/cargo/pull/9059)
+- Fixed Cargo to use the `http.proxy` setting from `~/.gitconfig`.
+ [#8986](https://github.com/rust-lang/cargo/pull/8986)
+- Fixed `--features pkg/feat` with the V1 resolver for non-member packages.
+ [#9275](https://github.com/rust-lang/cargo/pull/9275)
+ [#9277](https://github.com/rust-lang/cargo/pull/9277)
+- Fixed panic in `cargo doc` when there are colliding output filenames in a workspace.
+ [#9276](https://github.com/rust-lang/cargo/pull/9276)
+ [#9277](https://github.com/rust-lang/cargo/pull/9277)
+- Fixed `cargo install` from exiting with success if one of several packages
+ did not install successfully.
+ [#9185](https://github.com/rust-lang/cargo/pull/9185)
+ [#9196](https://github.com/rust-lang/cargo/pull/9196)
+- Fixed a panic with a doc collision orphan.
+ [#9142](https://github.com/rust-lang/cargo/pull/9142)
+ [#9196](https://github.com/rust-lang/cargo/pull/9196)
+
+### Nightly only
+- Removed the `publish-lockfile` unstable feature; it was stabilized without
+ the need for an explicit flag 1.5 years ago.
+ [#9092](https://github.com/rust-lang/cargo/pull/9092)
+- Added better diagnostics, help messages, and documentation for nightly
+ features (such as those passed with the `-Z` flag, or specified with
+ `cargo-features` in `Cargo.toml`).
+ [#9092](https://github.com/rust-lang/cargo/pull/9092)
+- Added support for Rust edition 2021.
+ [#8922](https://github.com/rust-lang/cargo/pull/8922)
+- Added support for the `rust-version` field in project metadata.
+ [#8037](https://github.com/rust-lang/cargo/pull/8037)
+- Added a schema field to the index.
+ [#9161](https://github.com/rust-lang/cargo/pull/9161)
+ [#9196](https://github.com/rust-lang/cargo/pull/9196)
+
+## Cargo 1.50 (2021-02-11)
+[8662ab42...rust-1.50.0](https://github.com/rust-lang/cargo/compare/8662ab42...rust-1.50.0)
+
+### Added
+- Added the `doc` field to `cargo metadata`, which indicates if a target is
+ documented.
+ [#8869](https://github.com/rust-lang/cargo/pull/8869)
+- Added `RUSTC_WORKSPACE_WRAPPER`, an alternate RUSTC wrapper that only runs
+ for the local workspace packages, and caches its artifacts independently of
+ non-wrapped builds.
+ [#8976](https://github.com/rust-lang/cargo/pull/8976)
+- Added `--workspace` to `cargo update` to update only the workspace members,
+ and not their dependencies. This is particularly useful if you update the
+ version in `Cargo.toml` and want to update `Cargo.lock` without running any
+ other commands.
+ [#8725](https://github.com/rust-lang/cargo/pull/8725)
+
+### Changed
+- `.crate` files uploaded to a registry are now built with reproducible
+ settings, so that the same `.crate` file created on different machines
+ should be identical.
+ [#8864](https://github.com/rust-lang/cargo/pull/8864)
+- Git dependencies that specify more than one of `branch`, `tag`, or `rev` are
+ now rejected (see the sketch after this list).
+ [#8984](https://github.com/rust-lang/cargo/pull/8984)
+- The `rerun-if-changed` build script directive can now point to a directory,
+ in which case Cargo will check if any file in that directory changes.
+ [#8973](https://github.com/rust-lang/cargo/pull/8973)
+- If Cargo cannot determine the username or email address, `cargo new` will no
+ longer fail, and instead create an empty authors list.
+ [#8912](https://github.com/rust-lang/cargo/pull/8912)
+- The progress bar width has been reduced to provide more room to display the
+ crates currently being built.
+ [#8892](https://github.com/rust-lang/cargo/pull/8892)
+- `cargo new` will now support `includeIf` directives in `.gitconfig` to match
+ the correct directory when determining the username and email address.
+ [#8886](https://github.com/rust-lang/cargo/pull/8886)
+
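+A sketch of the accepted form for a git dependency: at most one of `branch`,
+`tag`, or `rev` may be given (the repository here is hypothetical):
+
+```toml
+[dependencies]
+example-crate = { git = "https://github.com/owner/repo", branch = "main" }
+```
+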
+### Fixed
+- Fixed `cargo metadata` and `cargo tree` to only download packages for the
+ requested target.
+ [#8987](https://github.com/rust-lang/cargo/pull/8987)
+- Updated libgit2, which brings in many fixes, particularly fixing a zlib
+ error that occasionally appeared on 32-bit systems.
+ [#8998](https://github.com/rust-lang/cargo/pull/8998)
+- Fixed stack overflow with a circular dev-dependency that uses the `links`
+ field.
+ [#8969](https://github.com/rust-lang/cargo/pull/8969)
+- Fixed `cargo publish` failing on some filesystems, particularly 9p on WSL2.
+ [#8950](https://github.com/rust-lang/cargo/pull/8950)
+
+### Nightly only
+- Allow `resolver="1"` to specify the original feature resolution behavior.
+ [#8857](https://github.com/rust-lang/cargo/pull/8857)
+- Added `-Z extra-link-arg` which adds the `cargo:rustc-link-arg-bins`
+ and `cargo:rustc-link-arg` build script options.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#extra-link-arg)
+ [#8441](https://github.com/rust-lang/cargo/pull/8441)
+- Implemented external credential process support, and added `cargo logout`.
+ ([RFC 2730](https://github.com/rust-lang/rfcs/blob/master/text/2730-cargo-token-from-process.md))
+ ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#credential-process))
+ [#8934](https://github.com/rust-lang/cargo/pull/8934)
+- Fixed a panic with `-Zbuild-std` and no roots.
+ [#8942](https://github.com/rust-lang/cargo/pull/8942)
+- Set docs.rs as the default extern-map for crates.io
+ [#8877](https://github.com/rust-lang/cargo/pull/8877)
+
+## Cargo 1.49 (2020-12-31)
+[75615f8e...rust-1.49.0](https://github.com/rust-lang/cargo/compare/75615f8e...rust-1.49.0)
+
+### Added
+- Added `homepage` and `documentation` fields to `cargo metadata`.
+ [#8744](https://github.com/rust-lang/cargo/pull/8744)
+- Added the `CARGO_PRIMARY_PACKAGE` environment variable which is set when
+ running `rustc` if the package is one of the "root" packages selected on the
+ command line.
+ [#8758](https://github.com/rust-lang/cargo/pull/8758)
+- Added support for Unix-style glob patterns for package and target selection
+ flags on the command-line (such as `-p 'serde*'` or `--test '*'`).
+ [#8752](https://github.com/rust-lang/cargo/pull/8752)
+
+### Changed
+- Computed LTO flags are now included in the filename metadata hash so that
+ changes in LTO settings will independently cache build artifacts instead of
+ overwriting previous ones. This prevents rebuilds in some situations such as
+ switching between `cargo build` and `cargo test` in some circumstances.
+ [#8755](https://github.com/rust-lang/cargo/pull/8755)
+- `cargo tree` now displays `(proc-macro)` next to proc-macro packages.
+ [#8765](https://github.com/rust-lang/cargo/pull/8765)
+- Added a warning that the allowed characters for a feature name have been
+ restricted to letters, digits, `_`, `-`, and `+` to accommodate future
+ syntax changes. This is still a superset of the allowed syntax on crates.io,
+ which requires ASCII. This is intended to be changed to an error in the
+ future.
+ [#8814](https://github.com/rust-lang/cargo/pull/8814)
+- `-p` without a value will now print a list of workspace package names.
+ [#8808](https://github.com/rust-lang/cargo/pull/8808)
+- Added `.` (period) to the allowed feature name characters.
+ [#8932](https://github.com/rust-lang/cargo/pull/8932)
+ [#8943](https://github.com/rust-lang/cargo/pull/8943)
+
+### Fixed
+- Fixed building a library with both "dylib" and "rlib" crate types with LTO enabled.
+ [#8754](https://github.com/rust-lang/cargo/pull/8754)
+- Fixed paths in Cargo's dep-info files.
+ [#8819](https://github.com/rust-lang/cargo/pull/8819)
+- Fixed inconsistent source IDs in `cargo metadata` for git dependencies that
+ explicitly specify `branch="master"`.
+ [#8824](https://github.com/rust-lang/cargo/pull/8824)
+- Fixed re-extracting dependencies which contained a `.cargo-ok` file.
+ [#8835](https://github.com/rust-lang/cargo/pull/8835)
+
+### Nightly only
+- Fixed a panic with `cargo doc -Zfeatures=itarget` in some situations.
+ [#8777](https://github.com/rust-lang/cargo/pull/8777)
+- New implementation for namespaced features, using the syntax `dep:serde`.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#namespaced-features)
+ [#8799](https://github.com/rust-lang/cargo/pull/8799)
+- Added support for "weak" dependency features, using the syntax
+ `dep_name?/feat_name`, which will enable a feature for a dependency without
+ also enabling the dependency.
+ [#8818](https://github.com/rust-lang/cargo/pull/8818)
+- Fixed the new feature resolver downloading extra dependencies that weren't
+ strictly necessary.
+ [#8823](https://github.com/rust-lang/cargo/pull/8823)
+
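+A nightly-era sketch of the namespaced (`dep:`) and weak (`?/`) feature syntax
+described above; the feature names are hypothetical:
+
+```toml
+# Cargo.toml (sketch)
+[dependencies]
+serde = { version = "1.0", optional = true }
+
+[features]
+json = ["dep:serde"]               # refers to the optional dependency itself
+derive-support = ["serde?/derive"] # enables serde's `derive` feature only if serde is enabled elsewhere
+```
+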
+## Cargo 1.48 (2020-11-19)
+[51b66125...rust-1.48.0](https://github.com/rust-lang/cargo/compare/51b66125...rust-1.48.0)
+
+### Added
+- Added the `term.progress` configuration option to control when and how the
+ progress bar is displayed (see the sketch after this list).
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#termprogresswhen)
+ [#8165](https://github.com/rust-lang/cargo/pull/8165)
+- Added `--message-format plain` option to `cargo locate-project` to display
+ the project location without JSON to make it easier to use in a script.
+ [#8707](https://github.com/rust-lang/cargo/pull/8707)
+- Added `--workspace` option to `cargo locate-project` to display the path to
+ the workspace manifest.
+ [#8712](https://github.com/rust-lang/cargo/pull/8712)
+- A new contributor guide has been added for contributing to Cargo itself.
+ This is published at <https://rust-lang.github.io/cargo/contrib/>.
+ [#8715](https://github.com/rust-lang/cargo/pull/8715)
+- Zsh `--target` completion will now complete with the built-in rustc targets.
+ [#8740](https://github.com/rust-lang/cargo/pull/8740)
+
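+A minimal sketch of the `term.progress` option above; the width value is
+arbitrary:
+
+```toml
+# .cargo/config.toml (sketch)
+[term]
+progress = { when = "auto", width = 100 }   # "auto", "always", or "never"
+```
+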
+### Changed
+
+### Fixed
+- Fixed `cargo new` creating a fossil repository to properly ignore the `target` directory.
+ [#8671](https://github.com/rust-lang/cargo/pull/8671)
+- Don't show warnings about the workspace in the current directory when using `cargo install`
+ of a remote package.
+ [#8681](https://github.com/rust-lang/cargo/pull/8681)
+- Automatically reinitialize the index when an "Object not found" error is
+ encountered in the git repository.
+ [#8735](https://github.com/rust-lang/cargo/pull/8735)
+- Updated libgit2, which brings in several fixes for git repository handling.
+ [#8778](https://github.com/rust-lang/cargo/pull/8778)
+ [#8780](https://github.com/rust-lang/cargo/pull/8780)
+
+### Nightly only
+- Fixed `cargo install` so that it will ignore the `[unstable]` table in local config files.
+ [#8656](https://github.com/rust-lang/cargo/pull/8656)
+- Fixed nondeterministic behavior of the new feature resolver.
+ [#8701](https://github.com/rust-lang/cargo/pull/8701)
+- Fixed running `cargo test` on a proc-macro with the new feature resolver
+ under a specific combination of circumstances.
+ [#8742](https://github.com/rust-lang/cargo/pull/8742)
+
+## Cargo 1.47 (2020-10-08)
+[4f74d9b2...rust-1.47.0](https://github.com/rust-lang/cargo/compare/4f74d9b2...rust-1.47.0)
+
+### Added
+- `cargo doc` will now include the package's version in the left sidebar.
+ [#8509](https://github.com/rust-lang/cargo/pull/8509)
+- Added the `test` field to `cargo metadata` targets.
+ [#8478](https://github.com/rust-lang/cargo/pull/8478)
+- Cargo's man pages are now displayed via the `cargo help` command (such as
+ `cargo help build`).
+ [#8456](https://github.com/rust-lang/cargo/pull/8456)
+ [#8577](https://github.com/rust-lang/cargo/pull/8577)
+- Added new documentation chapters on [how dependency resolution
+ works](https://doc.rust-lang.org/nightly/cargo/reference/resolver.html) and
+ [SemVer
+ compatibility](https://doc.rust-lang.org/nightly/cargo/reference/semver.html),
+ along with suggestions on how to version your project and work with
+ dependencies.
+ [#8609](https://github.com/rust-lang/cargo/pull/8609)
+
+### Changed
+- The comments added to `.gitignore` when it is modified have been tweaked to
+ add some spacing.
+ [#8476](https://github.com/rust-lang/cargo/pull/8476)
+- `cargo metadata` output should now be sorted to be deterministic.
+ [#8489](https://github.com/rust-lang/cargo/pull/8489)
+- By default, build scripts and proc-macros are now built with `opt-level=0`
+ and the default codegen units, even in release mode.
+ [#8500](https://github.com/rust-lang/cargo/pull/8500)
+- `workspace.default-members` is now filtered by `workspace.exclude`.
+ [#8485](https://github.com/rust-lang/cargo/pull/8485)
+- `workspace.members` globs now ignore non-directory paths.
+ [#8511](https://github.com/rust-lang/cargo/pull/8511)
+- git zlib errors now trigger a retry.
+ [#8520](https://github.com/rust-lang/cargo/pull/8520)
+- "http" class git errors now trigger a retry.
+ [#8553](https://github.com/rust-lang/cargo/pull/8553)
+- git dependencies now override the `core.autocrlf` git configuration value to
+ ensure they behave consistently across platforms, particularly when
+ vendoring git dependencies on Windows.
+ [#8523](https://github.com/rust-lang/cargo/pull/8523)
+- If `Cargo.lock` needs to be updated, then it will be automatically
+ transitioned to the new V2 format. This format removes the `[metadata]`
+ table, and should be easier to merge changes in source control systems. This
+ format was introduced in 1.38, and made the default for new projects in
+ 1.41.
+ [#8554](https://github.com/rust-lang/cargo/pull/8554)
+- Added preparation for support of git repositories with a non-"master"
+ default branch. Actual support will arrive in a future version. This
+ introduces some warnings:
+ - Warn if a git dependency does not specify a branch, and the default branch
+ on the repository is not "master". In the future, Cargo will fetch the
+ default branch. In this scenario, the branch should be explicitly
+ specified.
+ - Warn if a workspace has multiple dependencies to the same git repository,
+ one without a `branch` and one with `branch="master"`. Dependencies should
+ all use one form or the other.
+ [#8522](https://github.com/rust-lang/cargo/pull/8522)
+- Warnings are now issued if a `required-features` entry lists a feature that
+ does not exist.
+ [#7950](https://github.com/rust-lang/cargo/pull/7950)
+- Built-in aliases are now included in `cargo --list`.
+ [#8542](https://github.com/rust-lang/cargo/pull/8542)
+- `cargo install` with a specific version that has been yanked will now
+ display an error message that it has been yanked, instead of "could not
+ find".
+ [#8565](https://github.com/rust-lang/cargo/pull/8565)
+- `cargo publish` with a package that has the `publish` field set to a single
+ registry, and no `--registry` flag has been given, will now publish to that
+ registry instead of generating an error.
+ [#8571](https://github.com/rust-lang/cargo/pull/8571)
+
+### Fixed
+- Fixed an issue where, if a project directory was moved and one of the
+ build scripts did not use the `rerun-if-changed` directive, that build
+ script was rebuilt when it shouldn't have been.
+ [#8497](https://github.com/rust-lang/cargo/pull/8497)
+- Console colors should now work on Windows 7 and 8.
+ [#8540](https://github.com/rust-lang/cargo/pull/8540)
+- The `CARGO_TARGET_{triplet}_RUNNER` environment variable will now correctly
+ override the config file instead of trying to merge the commands.
+ [#8629](https://github.com/rust-lang/cargo/pull/8629)
+- Fixed LTO with doctests.
+ [#8657](https://github.com/rust-lang/cargo/pull/8657)
+ [#8658](https://github.com/rust-lang/cargo/pull/8658)
+
+### Nightly only
+- Added support for `-Z terminal-width` which tells `rustc` the width of the
+ terminal so that it can format diagnostics better.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#terminal-width)
+ [#8427](https://github.com/rust-lang/cargo/pull/8427)
+- Added the ability to configure `-Z` unstable flags in config files via the
+ `[unstable]` table (see the sketch after this list).
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html)
+ [#8393](https://github.com/rust-lang/cargo/pull/8393)
+- Added `-Z build-std-features` flag to set features for the standard library.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std-features)
+ [#8490](https://github.com/rust-lang/cargo/pull/8490)
+
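+A nightly-only sketch of the `[unstable]` config table described above; the
+flag shown is one example and requires a nightly toolchain:
+
+```toml
+# .cargo/config.toml (sketch)
+[unstable]
+build-std = ["std", "panic_abort"]
+```
+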
+## Cargo 1.46 (2020-08-27)
+[9fcb8c1d...rust-1.46.0](https://github.com/rust-lang/cargo/compare/9fcb8c1d...rust-1.46.0)
+
+### Added
+- The `dl` key in `config.json` of a registry index now supports the
+ replacement markers `{prefix}` and `{lowerprefix}` to allow spreading crates
+ across directories similar to how the index itself is structured.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/registries.html#index-format)
+ [#8267](https://github.com/rust-lang/cargo/pull/8267)
+- Added new environment variables that are set during compilation:
+ - `CARGO_CRATE_NAME`: The name of the crate being built.
+ - `CARGO_BIN_NAME`: The name of the executable binary (if this is a binary crate).
+ - `CARGO_PKG_LICENSE`: The `license` field from the manifest.
+ - `CARGO_PKG_LICENSE_FILE`: The `license-file` field from the manifest.
+ [#8270](https://github.com/rust-lang/cargo/pull/8270)
+ [#8325](https://github.com/rust-lang/cargo/pull/8325)
+ [#8387](https://github.com/rust-lang/cargo/pull/8387)
+- If the value for `readme` is not specified in `Cargo.toml`, it is now
+ automatically inferred from the existence of a file named `README`,
+ `README.md`, or `README.txt`. This can be suppressed by setting
+ `readme = false`.
+ [#8277](https://github.com/rust-lang/cargo/pull/8277)
+- `cargo install` now supports the `--index` flag to install directly from an index.
+ [#8344](https://github.com/rust-lang/cargo/pull/8344)
+- Added the `metadata` table to the `workspace` definition in `Cargo.toml`.
+ This can be used for arbitrary data similar to the `package.metadata` table
+ (see the sketch after this list).
+ [#8323](https://github.com/rust-lang/cargo/pull/8323)
+- Added the `--target-dir` flag to `cargo install` to set the target directory.
+ [#8391](https://github.com/rust-lang/cargo/pull/8391)
+- Changes to environment variables used by the
+ [`env!`](https://doc.rust-lang.org/std/macro.env.html) or
+ [`option_env!`](https://doc.rust-lang.org/std/macro.option_env.html) macros
+ are now automatically detected to trigger a rebuild.
+ [#8421](https://github.com/rust-lang/cargo/pull/8421)
+- The `target` directory now includes the `CACHEDIR.TAG` file which is used by
+ some tools to exclude the directory from backups.
+ [#8378](https://github.com/rust-lang/cargo/pull/8378)
+- Added docs about rustup's `+toolchain` syntax.
+ [#8455](https://github.com/rust-lang/cargo/pull/8455)
+
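+A minimal sketch of the new `workspace.metadata` table described above; the
+tool name and keys are hypothetical. README inference added in the same
+release can be disabled per package with `readme = false`.
+
+```toml
+# Cargo.toml (sketch, workspace root)
+[workspace]
+members = ["crates/*"]
+
+[workspace.metadata.some-tool]
+config-key = "value"   # arbitrary data, ignored by Cargo itself
+```
+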
+### Changed
+- A warning is now displayed if a git dependency includes a `#` fragment in
+ the URL. This was potentially confusing because Cargo itself displays git
+ URLs with this syntax, but it does not have any meaning outside of the
+ `Cargo.lock` file, and would not work properly.
+ [#8297](https://github.com/rust-lang/cargo/pull/8297)
+- Various optimizations and fixes for bitcode embedding and LTO.
+ [#8349](https://github.com/rust-lang/cargo/pull/8349)
+- Reduced the amount of data fetched for git dependencies. If Cargo knows the
+ branch or tag to fetch, it will now only fetch that branch or tag instead of
+ all branches and tags.
+ [#8363](https://github.com/rust-lang/cargo/pull/8363)
+- Enhanced git fetch error messages.
+ [#8409](https://github.com/rust-lang/cargo/pull/8409)
+- `.crate` files are now generated with GNU tar format instead of UStar, which
+ supports longer file names.
+ [#8453](https://github.com/rust-lang/cargo/pull/8453)
+
+### Fixed
+- Fixed a rare situation where an update to `Cargo.lock` failed once, but then
+ subsequent runs allowed it to proceed.
+ [#8274](https://github.com/rust-lang/cargo/pull/8274)
+- Removed assertion that Windows dylibs must have a `.dll` extension. Some
+ custom JSON spec targets may change the extension.
+ [#8310](https://github.com/rust-lang/cargo/pull/8310)
+- Updated libgit2, which brings in a fix for zlib errors for some remote
+ git servers like googlesource.com.
+ [#8320](https://github.com/rust-lang/cargo/pull/8320)
+- Fixed the GitHub fast-path check for up-to-date git dependencies on
+ non-master branches.
+ [#8363](https://github.com/rust-lang/cargo/pull/8363)
+- Fixed an issue where enabling a feature with `pkg/feature` syntax, when `pkg`
+ is both an optional dependency and a dev-dependency and the dev-dependency
+ appears before the optional normal dependency in the registry summary, would
+ not activate the optional dependency.
+ [#8395](https://github.com/rust-lang/cargo/pull/8395)
+- Fixed `clean -p` deleting the build directory if there is a test named
+ `build`.
+ [#8398](https://github.com/rust-lang/cargo/pull/8398)
+- Fixed indentation of multi-line Cargo error messages.
+ [#8409](https://github.com/rust-lang/cargo/pull/8409)
+- Fixed issue where the automatic inclusion of the `--document-private-items`
+ flag for rustdoc would override any flags passed to the `cargo rustdoc`
+ command.
+ [#8449](https://github.com/rust-lang/cargo/pull/8449)
+- Cargo will now include a version in the hash of the fingerprint directories
+ to support backwards-incompatible changes to the fingerprint structure.
+ [#8473](https://github.com/rust-lang/cargo/pull/8473)
+ [#8488](https://github.com/rust-lang/cargo/pull/8488)
+
+### Nightly only
+- Added `-Zrustdoc-map` feature which provides external mappings for rustdoc
+ (such as https://docs.rs/ links).
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#rustdoc-map)
+ [#8287](https://github.com/rust-lang/cargo/pull/8287)
+- Fixed feature calculation when a proc-macro is declared in `Cargo.toml` with
+ an underscore (like `proc_macro = true`).
+ [#8319](https://github.com/rust-lang/cargo/pull/8319)
+- Added support for setting `-Clinker` with `-Zdoctest-xcompile`.
+ [#8359](https://github.com/rust-lang/cargo/pull/8359)
+- Fixed setting the `strip` profile field in config files.
+ [#8454](https://github.com/rust-lang/cargo/pull/8454)
+
+## Cargo 1.45 (2020-07-16)
+[ebda5065e...rust-1.45.0](https://github.com/rust-lang/cargo/compare/ebda5065...rust-1.45.0)
+
+### Added
+
+### Changed
+- Changed official documentation to recommend `.cargo/config.toml` filenames
+ (with the `.toml` extension). `.toml` extension support was added in 1.39.
+ [#8121](https://github.com/rust-lang/cargo/pull/8121)
+- The `registry.index` config value is no longer allowed (it has been
+ deprecated for 4 years).
+ [#7973](https://github.com/rust-lang/cargo/pull/7973)
+- An error is generated if both `--index` and `--registry` are passed
+ (previously `--index` was silently ignored).
+ [#7973](https://github.com/rust-lang/cargo/pull/7973)
+- The `registry.token` config value is no longer used with the `--index` flag.
+ This is intended to avoid potentially leaking the crates.io token to another
+ registry.
+ [#7973](https://github.com/rust-lang/cargo/pull/7973)
+- Added a warning if `registry.token` is used with source replacement. It is
+ intended this will be an error in future versions.
+ [#7973](https://github.com/rust-lang/cargo/pull/7973)
+- Windows GNU targets now copy `.dll.a` import library files for DLL crate
+ types to the output directory.
+ [#8141](https://github.com/rust-lang/cargo/pull/8141)
+- Dylibs for all dependencies are now unconditionally copied to the output
+ directory. Some obscure scenarios can cause an old dylib to be referenced
+ between builds, and this ensures that all the latest copies are used.
+ [#8139](https://github.com/rust-lang/cargo/pull/8139)
+- `package.exclude` can now match directory names. If a directory is
+ specified, the entire directory will be excluded, and Cargo will not attempt
+ to inspect it further. Previously Cargo would try to check every file in the
+ directory which could cause problems if the directory contained unreadable
+ files.
+ [#8095](https://github.com/rust-lang/cargo/pull/8095)
+- When packaging with `cargo publish` or `cargo package`, Cargo can use git to
+ guide its decision on which files to include. Previously this git-based
+ logic required a `Cargo.toml` file to exist at the root of the repository.
+ This is no longer required, so Cargo will now use git-based guidance even if
+ there is not a `Cargo.toml` in the root of the repository.
+ [#8095](https://github.com/rust-lang/cargo/pull/8095)
+- While unpacking a crate on Windows, if it fails to write a file because the
+ file is a reserved Windows filename (like "aux.rs"), Cargo will display an
+ extra message to explain why it failed.
+ [#8136](https://github.com/rust-lang/cargo/pull/8136)
+- Failures to set mtime on files are now ignored. Some filesystems did not
+ support this.
+ [#8185](https://github.com/rust-lang/cargo/pull/8185)
+- Certain classes of git errors will now recommend enabling
+ `net.git-fetch-with-cli`.
+ [#8166](https://github.com/rust-lang/cargo/pull/8166)
+- When doing an LTO build, Cargo will now instruct rustc not to perform
+ codegen when possible. This may result in a faster build and use less disk
+ space. Additionally, for non-LTO builds, Cargo will instruct rustc to not
+ embed LLVM bitcode in libraries, which should decrease their size.
+ [#8192](https://github.com/rust-lang/cargo/pull/8192)
+ [#8226](https://github.com/rust-lang/cargo/pull/8226)
+ [#8254](https://github.com/rust-lang/cargo/pull/8254)
+- The implementation for `cargo clean -p` has been rewritten so that it can
+ more accurately remove the files for a specific package.
+ [#8210](https://github.com/rust-lang/cargo/pull/8210)
+- The way Cargo computes the outputs from a build has been rewritten to be
+ more complete and accurate. Newly tracked files will be displayed in JSON
+ messages, and may be uplifted to the output directory in some cases. Some of
+ the changes from this are:
+
+ - `.exp` export files on Windows MSVC dynamic libraries are now tracked.
+ - Proc-macros on Windows track import/export files.
+ - All targets (like tests, etc.) that generate separate debug files
+ (pdb/dSYM) are tracked.
+ - Added .map files for wasm32-unknown-emscripten.
+ - macOS dSYM directories are tracked for all dynamic libraries
+ (dylib/cdylib/proc-macro) and for build scripts.
+
+ There are a variety of other changes as a consequence of this:
+
+ - Binary examples on Windows MSVC with a hyphen will now show up twice in
+ the examples directory (`foo_bar.exe` and `foo-bar.exe`). Previously Cargo
+ just renamed the file instead of hard-linking it.
+ - Example libraries now follow the same rules for hyphen/underscore
+ translation as normal libs (they will now use underscores).
+
+ [#8210](https://github.com/rust-lang/cargo/pull/8210)
+- Cargo attempts to scrub any secrets from the debug log for HTTP debugging.
+ [#8222](https://github.com/rust-lang/cargo/pull/8222)
+- Context has been added to many of Cargo's filesystem operations, so that
+ error messages now provide more information, such as the path that caused
+ the problem.
+ [#8232](https://github.com/rust-lang/cargo/pull/8232)
+- Several commands now ignore the error if stdout or stderr is closed while it
+ is running. For example `cargo install --list | grep -q cargo-fuzz` would
+ previously sometimes panic because `grep -q` may close stdout before the
+ command finishes. Regular builds continue to fail if stdout or stderr is
+ closed, matching the behavior of many other build systems.
+ [#8236](https://github.com/rust-lang/cargo/pull/8236)
+- If `cargo install` is given an exact version, like `--version=1.2.3`, it
+ will now avoid updating the index if that version is already installed, and
+ exit quickly indicating it is already installed.
+ [#8022](https://github.com/rust-lang/cargo/pull/8022)
+- Changes to the `[patch]` section will now attempt to automatically update
+ `Cargo.lock` to the new version. It should now also provide better error
+ messages for the rare cases where it is unable to automatically update.
+ [#8248](https://github.com/rust-lang/cargo/pull/8248)
+
+### Fixed
+- Fixed copying Windows `.pdb` files to the output directory when the filename
+ contained dashes.
+ [#8123](https://github.com/rust-lang/cargo/pull/8123)
+- Fixed an error where Cargo would fail when determining whether a package is
+ inside a git repository if any of its ancestor paths is a symlink.
+ [#8186](https://github.com/rust-lang/cargo/pull/8186)
+- Fixed `cargo update` with an unused `[patch]` so that it does not get
+ stuck and refuse to update.
+ [#8243](https://github.com/rust-lang/cargo/pull/8243)
+- Fixed a situation where Cargo would hang if stderr is closed, and the
+ compiler generated a large number of messages.
+ [#8247](https://github.com/rust-lang/cargo/pull/8247)
+- Fixed backtraces on macOS not showing filenames or line numbers. As a
+ consequence of this, binary executables on apple targets do not include a
+ hash in the filename in Cargo's cache. This means Cargo can only track one
+ copy, so if you switch features or rustc versions, Cargo will need to
+ rebuild the executable.
+ [#8329](https://github.com/rust-lang/cargo/pull/8329)
+ [#8335](https://github.com/rust-lang/cargo/pull/8335)
+- Fixed fingerprinting when using lld on Windows with a dylib. Cargo was
+ erroneously thinking the dylib was never fresh.
+ [#8290](https://github.com/rust-lang/cargo/pull/8290)
+ [#8335](https://github.com/rust-lang/cargo/pull/8335)
+
+### Nightly only
+- Fixed passing the full path for `--target` to `rustdoc` when using JSON spec
+ targets.
+ [#8094](https://github.com/rust-lang/cargo/pull/8094)
+- The `-Cembed-bitcode=no` flag passed to rustc has been renamed to
+  `-Cbitcode-in-rlib=no`.
+ [#8134](https://github.com/rust-lang/cargo/pull/8134)
+- Added new `resolver` field to `Cargo.toml` to opt-in to the new feature
+ resolver.
+ [#8129](https://github.com/rust-lang/cargo/pull/8129)
+- `-Zbuild-std` no longer treats std dependencies as "local". This means that
+ it won't use incremental compilation for those dependencies, removes them
+ from dep-info files, and caps lints at "allow".
+ [#8177](https://github.com/rust-lang/cargo/pull/8177)
+- Added `-Zmultitarget` which allows multiple `--target` flags to build the
+ same thing for multiple targets at once.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#multitarget)
+ [#8167](https://github.com/rust-lang/cargo/pull/8167)
+- Added `strip` option to the profile to remove symbols and debug information.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-strip-option)
+ [#8246](https://github.com/rust-lang/cargo/pull/8246)
+- Fixed panic with `cargo tree --target=all -Zfeatures=all`.
+ [#8269](https://github.com/rust-lang/cargo/pull/8269)
+
+## Cargo 1.44 (2020-06-04)
+[bda50510...rust-1.44.0](https://github.com/rust-lang/cargo/compare/bda50510...rust-1.44.0)
+
+### Added
+- 🔥 Added the `cargo tree` command.
+ [docs](https://doc.rust-lang.org/nightly/cargo/commands/cargo-tree.html)
+ [#8062](https://github.com/rust-lang/cargo/pull/8062)
+- Added warnings if a package has Windows-restricted filenames (like `nul`,
+ `con`, `aux`, `prn`, etc.).
+ [#7959](https://github.com/rust-lang/cargo/pull/7959)
+- Added a `"build-finished"` JSON message when compilation is complete so that
+ tools can detect when they can stop listening for JSON messages with
+ commands like `cargo run` or `cargo test`.
+ [#8069](https://github.com/rust-lang/cargo/pull/8069)
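+
+  For illustration, a tool driving `cargo test --message-format=json` might
+  stop parsing build messages once this message appears; the sketch below is
+  hypothetical and uses a plain substring check in place of a real JSON
+  parser:
+
+  ```rust
+  use std::io::{BufRead, BufReader};
+  use std::process::{Command, Stdio};
+
+  fn main() -> std::io::Result<()> {
+      // Spawn Cargo and capture the JSON messages it prints on stdout.
+      let mut child = Command::new("cargo")
+          .args(&["test", "--message-format=json"])
+          .stdout(Stdio::piped())
+          .spawn()?;
+      let stdout = child.stdout.take().expect("stdout was piped");
+      for line in BufReader::new(stdout).lines() {
+          // A real tool would parse each line as JSON; a substring check
+          // keeps this sketch dependency-free.
+          if line?.contains("\"reason\":\"build-finished\"") {
+              break; // compilation is done; later output comes from the tests
+          }
+      }
+      child.wait()?;
+      Ok(())
+  }
+  ```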
+
+### Changed
+- Valid package names are now restricted to Unicode XID identifiers. This is
+ mostly the same as before, except package names cannot start with a number
+ or `-`.
+ [#7959](https://github.com/rust-lang/cargo/pull/7959)
+- `cargo new` and `cargo init` will now warn about or reject certain
+  additional package names (reserved Windows names, reserved Cargo
+  directories, non-ASCII names, conflicting std names like `core`, etc.).
+ [#7959](https://github.com/rust-lang/cargo/pull/7959)
+- Tests are no longer hard-linked into the output directory (`target/debug/`).
+ This ensures tools will have access to debug symbols and execute tests in
+ the same way as Cargo. Tools should use JSON messages to discover the path
+ to the executable.
+ [#7965](https://github.com/rust-lang/cargo/pull/7965)
+- Updating git submodules now displays an "Updating" message for each
+ submodule.
+ [#7989](https://github.com/rust-lang/cargo/pull/7989)
+- File modification times are now preserved when extracting a `.crate` file.
+ This reverses the change made in 1.40 where the mtime was not preserved.
+ [#7935](https://github.com/rust-lang/cargo/pull/7935)
+- Build script warnings are now displayed separately when the build script
+ fails.
+ [#8017](https://github.com/rust-lang/cargo/pull/8017)
+- Removed the `git-checkout` subcommand.
+ [#8040](https://github.com/rust-lang/cargo/pull/8040)
+- The progress bar is now enabled for all unix platforms. Previously it was
+ only Linux, macOS, and FreeBSD.
+ [#8054](https://github.com/rust-lang/cargo/pull/8054)
+- Artifacts generated by pre-release versions of `rustc` now share the same
+ filenames. This means that changing nightly versions will not leave stale
+ files in the build directory.
+ [#8073](https://github.com/rust-lang/cargo/pull/8073)
+- Invalid package names are rejected when using renamed dependencies.
+ [#8090](https://github.com/rust-lang/cargo/pull/8090)
+- A certain class of HTTP/2 errors is now treated as "spurious" and will be
+  retried.
+- Allow `cargo package --list` to succeed, even if there are other validation
+  errors (such as a `Cargo.lock` generation problem or missing dependencies).
+ [#8175](https://github.com/rust-lang/cargo/pull/8175)
+ [#8215](https://github.com/rust-lang/cargo/pull/8215)
+
+### Fixed
+- Cargo no longer buffers excessive amounts of compiler output in memory.
+ [#7838](https://github.com/rust-lang/cargo/pull/7838)
+- Symbolic links in git repositories now work on Windows.
+ [#7996](https://github.com/rust-lang/cargo/pull/7996)
+- Fixed an issue where `profile.dev` was not loaded from a config file with
+ `cargo test` when the `dev` profile was not defined in `Cargo.toml`.
+ [#8012](https://github.com/rust-lang/cargo/pull/8012)
+- When a binary is built as an implicit dependency of an integration test,
+ it now checks `dep_name/feature_name` syntax in `required-features` correctly.
+ [#8020](https://github.com/rust-lang/cargo/pull/8020)
+- Fixed an issue where Cargo would not detect that an executable (such as an
+ integration test) needs to be rebuilt when the previous build was
+ interrupted with Ctrl-C.
+ [#8087](https://github.com/rust-lang/cargo/pull/8087)
+- Protect against some (unknown) situations where Cargo could panic when the
+ system monotonic clock doesn't appear to be monotonic.
+ [#8114](https://github.com/rust-lang/cargo/pull/8114)
+- Fixed panic with `cargo clean -p` if the package has a build script.
+ [#8216](https://github.com/rust-lang/cargo/pull/8216)
+
+### Nightly only
+- Fixed panic with new feature resolver and required-features.
+ [#7962](https://github.com/rust-lang/cargo/pull/7962)
+- Added `RUSTC_WORKSPACE_WRAPPER` environment variable, which provides a way
+ to wrap `rustc` for workspace members only, and affects the filename hash so
+ that artifacts produced by the wrapper are cached separately. This usage can
+ be seen on nightly clippy with `cargo clippy -Zunstable-options`.
+ [#7533](https://github.com/rust-lang/cargo/pull/7533)
+- Added `--unit-graph` CLI option to display Cargo's internal dependency graph
+ as JSON.
+ [#7977](https://github.com/rust-lang/cargo/pull/7977)
+- Changed `-Zbuild_dep` to `-Zhost_dep`, and added proc-macros to the feature
+ decoupling logic.
+ [#8003](https://github.com/rust-lang/cargo/pull/8003)
+ [#8028](https://github.com/rust-lang/cargo/pull/8028)
+- Fixed `--crate-version` being automatically passed to rustdoc when the flag
+  is already present in `RUSTDOCFLAGS`.
+ [#8014](https://github.com/rust-lang/cargo/pull/8014)
+- Fixed panic with `-Zfeatures=dev_dep` and `check --profile=test`.
+ [#8027](https://github.com/rust-lang/cargo/pull/8027)
+- Fixed panic with `-Zfeatures=itarget` with certain host dependencies.
+ [#8048](https://github.com/rust-lang/cargo/pull/8048)
+- Added support for `-Cembed-bitcode=no`, which provides a performance boost
+ and disk-space usage reduction for non-LTO builds.
+ [#8066](https://github.com/rust-lang/cargo/pull/8066)
+- `-Zpackage-features` has been extended with several changes intended to make
+ it easier to select features on the command-line in a workspace.
+ [#8074](https://github.com/rust-lang/cargo/pull/8074)
+
+## Cargo 1.43 (2020-04-23)
+[9d32b7b0...rust-1.43.0](https://github.com/rust-lang/cargo/compare/9d32b7b0...rust-1.43.0)
+
+### Added
+- 🔥 Profiles may now be specified in config files (and environment variables).
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#profile)
+ [#7823](https://github.com/rust-lang/cargo/pull/7823)
+- ❗ Added `CARGO_BIN_EXE_<name>` environment variable when building
+ integration tests. This variable contains the path to any `[[bin]]` targets
+ in the package. Integration tests should use the `env!` macro to determine
+ the path to a binary to execute.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates)
+ [#7697](https://github.com/rust-lang/cargo/pull/7697)
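+
+  For illustration, assuming a package with a `[[bin]]` target named `foo`
+  (a hypothetical name), an integration test could locate and run the binary
+  like this:
+
+  ```rust
+  // tests/cli.rs
+  use std::process::Command;
+
+  #[test]
+  fn binary_runs() {
+      // Cargo sets CARGO_BIN_EXE_<name> while compiling integration tests,
+      // so `env!` resolves the path at compile time.
+      let exe = env!("CARGO_BIN_EXE_foo");
+      let status = Command::new(exe)
+          .arg("--help")
+          .status()
+          .expect("failed to spawn the binary");
+      assert!(status.success());
+  }
+  ```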
+
+### Changed
+- `cargo install --git` now honors workspaces in a git repository. This allows
+ workspace settings, like `[patch]`, `[replace]`, or `[profile]` to be used.
+ [#7768](https://github.com/rust-lang/cargo/pull/7768)
+- `cargo new` will now run `rustfmt` on the new files to pick up rustfmt
+ settings like `tab_spaces` so that the new file matches the user's preferred
+ indentation settings.
+ [#7827](https://github.com/rust-lang/cargo/pull/7827)
+- Environment variables printed with "very verbose" output (`-vv`) are now
+ consistently sorted.
+ [#7877](https://github.com/rust-lang/cargo/pull/7877)
+- Debug logging for fingerprint rebuild-detection now includes more information.
+ [#7888](https://github.com/rust-lang/cargo/pull/7888)
+ [#7890](https://github.com/rust-lang/cargo/pull/7890)
+ [#7952](https://github.com/rust-lang/cargo/pull/7952)
+- Added a warning during publish if the `license-file` doesn't exist.
+ [#7905](https://github.com/rust-lang/cargo/pull/7905)
+- The `license-file` file is automatically included during publish, even if it
+ is not explicitly listed in the `include` list or is in a location outside
+ of the root of the package.
+ [#7905](https://github.com/rust-lang/cargo/pull/7905)
+- `CARGO_CFG_DEBUG_ASSERTIONS` and `CARGO_CFG_PROC_MACRO` are no longer set
+ when running a build script. These were inadvertently set in the past, but
+ had no meaning as they were always true. Additionally, `cfg(proc-macro)`
+ is no longer supported in a `target` expression.
+ [#7943](https://github.com/rust-lang/cargo/pull/7943)
+ [#7970](https://github.com/rust-lang/cargo/pull/7970)
+
+### Fixed
+- Global command-line flags now work with aliases (like `cargo -v b`).
+ [#7837](https://github.com/rust-lang/cargo/pull/7837)
+- Required-features using dependency syntax (like `renamed_dep/feat_name`) now
+ handle renamed dependencies correctly.
+ [#7855](https://github.com/rust-lang/cargo/pull/7855)
+- Fixed a rare situation where a build script run multiple times during the
+  same build would lose the output of its first execution; Cargo now keeps
+  the results of each run separate.
+ [#7857](https://github.com/rust-lang/cargo/pull/7857)
+- Fixed incorrect interpretation of environment variable
+ `CARGO_TARGET_*_RUNNER=true` as a boolean. Also improved related env var
+ error messages.
+ [#7891](https://github.com/rust-lang/cargo/pull/7891)
+- Updated internal libgit2 library, bringing various fixes to git support.
+ [#7939](https://github.com/rust-lang/cargo/pull/7939)
+- `cargo package` / `cargo publish` should no longer buffer the entire
+ contents of each file in memory.
+ [#7946](https://github.com/rust-lang/cargo/pull/7946)
+- Ignore more invalid `Cargo.toml` files in a git dependency. Cargo currently
+ walks the entire repo to find the requested package. Certain invalid
+ manifests were already skipped, and now it should skip all of them.
+ [#7947](https://github.com/rust-lang/cargo/pull/7947)
+
+### Nightly only
+- Added `build.out-dir` config variable to set the output directory.
+ [#7810](https://github.com/rust-lang/cargo/pull/7810)
+- Added `-Zjobserver-per-rustc` feature to support improved performance for
+ parallel rustc.
+ [#7731](https://github.com/rust-lang/cargo/pull/7731)
+- Fixed filename collision with `build-std` and crates like `cc`.
+ [#7860](https://github.com/rust-lang/cargo/pull/7860)
+- `-Ztimings` will now save its report even if there is an error.
+ [#7872](https://github.com/rust-lang/cargo/pull/7872)
+- Updated `--config` command-line flag to support taking a path to a config
+ file to load.
+ [#7901](https://github.com/rust-lang/cargo/pull/7901)
+- Added new feature resolver.
+ [#7820](https://github.com/rust-lang/cargo/pull/7820)
+- Rustdoc docs now automatically include the version of the package in the
+ side bar (requires `-Z crate-versions` flag).
+ [#7903](https://github.com/rust-lang/cargo/pull/7903)
+
+## Cargo 1.42 (2020-03-12)
+[0bf7aafe...rust-1.42.0](https://github.com/rust-lang/cargo/compare/0bf7aafe...rust-1.42.0)
+
+### Added
+- Added documentation on git authentication.
+ [#7658](https://github.com/rust-lang/cargo/pull/7658)
+- Bitbucket Pipeline badges are now supported on crates.io.
+ [#7663](https://github.com/rust-lang/cargo/pull/7663)
+- `cargo vendor` now accepts the `--versioned-dirs` option to force it to
+ always include the version number in each package's directory name.
+ [#7631](https://github.com/rust-lang/cargo/pull/7631)
+- The `proc_macro` crate is now automatically added to the extern prelude for
+ proc-macro packages. This means that `extern crate proc_macro;` is no longer
+ necessary for proc-macros.
+ [#7700](https://github.com/rust-lang/cargo/pull/7700)
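+
+  As a minimal sketch of what this enables (the macro name and body are
+  hypothetical), a proc-macro source file can now import from `proc_macro`
+  directly:
+
+  ```rust
+  // src/lib.rs of a crate with `proc-macro = true`; no
+  // `extern crate proc_macro;` line is needed anymore.
+  use proc_macro::TokenStream;
+
+  #[proc_macro]
+  pub fn passthrough(input: TokenStream) -> TokenStream {
+      // Returns the input tokens unchanged.
+      input
+  }
+  ```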
+
+### Changed
+- Emit a warning if `debug_assertions`, `test`, `proc_macro`, or `feature=` is
+ used in a `cfg()` expression.
+ [#7660](https://github.com/rust-lang/cargo/pull/7660)
+- Large update to the Cargo documentation, adding new chapters on Cargo
+ targets, workspaces, and features.
+ [#7733](https://github.com/rust-lang/cargo/pull/7733)
+- Windows: `.lib` DLL import libraries are now copied next to the dll for all
+ Windows MSVC targets. Previously it was only supported for
+ `pc-windows-msvc`. This adds DLL support for `uwp-windows-msvc` targets.
+ [#7758](https://github.com/rust-lang/cargo/pull/7758)
+- The `ar` field in the `[target]` configuration is no longer read. It has
+ been ignored for over 4 years.
+ [#7763](https://github.com/rust-lang/cargo/pull/7763)
+- Bash completion file simplified and updated for latest changes.
+ [#7789](https://github.com/rust-lang/cargo/pull/7789)
+- Credentials are now only loaded when needed, instead of on every Cargo
+  command.
+ [#7774](https://github.com/rust-lang/cargo/pull/7774)
+
+### Fixed
+- Removed `--offline` empty index check, which was a false positive in some
+ cases.
+ [#7655](https://github.com/rust-lang/cargo/pull/7655)
+- Files and directories starting with a `.` can now be included in a package
+  by adding them to the `include` list.
+ [#7680](https://github.com/rust-lang/cargo/pull/7680)
+- Fixed `cargo login` removing alternative registry tokens when previous
+ entries existed in the credentials file.
+ [#7708](https://github.com/rust-lang/cargo/pull/7708)
+- Fixed `cargo vendor` from panicking when used with alternative registries.
+ [#7718](https://github.com/rust-lang/cargo/pull/7718)
+- Fixed incorrect explanation in the fingerprint debug log message.
+ [#7749](https://github.com/rust-lang/cargo/pull/7749)
+- A `[source]` that is defined multiple times will now result in an error.
+ Previously it was randomly picking a source, which could cause
+ non-deterministic behavior.
+ [#7751](https://github.com/rust-lang/cargo/pull/7751)
+- `dep_kinds` in `cargo metadata` are now de-duplicated.
+ [#7756](https://github.com/rust-lang/cargo/pull/7756)
+- Fixed packaging where `Cargo.lock` was listed in `.gitignore` in a
+ subdirectory inside a git repository. Previously it was assuming
+ `Cargo.lock` was at the root of the repo.
+ [#7779](https://github.com/rust-lang/cargo/pull/7779)
+- Partial file transfer errors will now cause an automatic retry.
+ [#7788](https://github.com/rust-lang/cargo/pull/7788)
+- Linux: Fixed panic if CPU iowait stat decreases.
+ [#7803](https://github.com/rust-lang/cargo/pull/7803)
+- Fixed using the wrong sysroot for detecting host compiler settings when
+ `--sysroot` is passed in via `RUSTFLAGS`.
+ [#7798](https://github.com/rust-lang/cargo/pull/7798)
+
+### Nightly only
+- `build-std` now uses `--extern` instead of `--sysroot` to find sysroot
+ packages.
+ [#7699](https://github.com/rust-lang/cargo/pull/7699)
+- Added `--config` command-line option to set config settings.
+ [#7649](https://github.com/rust-lang/cargo/pull/7649)
+- Added `include` config setting which allows including another config file.
+ [#7649](https://github.com/rust-lang/cargo/pull/7649)
+- Profiles in config files now support any named profile. Previously it was
+ limited to dev/release.
+ [#7750](https://github.com/rust-lang/cargo/pull/7750)
+
+## Cargo 1.41 (2020-01-30)
+[5da4b4d4...rust-1.41.0](https://github.com/rust-lang/cargo/compare/5da4b4d4...rust-1.41.0)
+
+### Added
+- 🔥 Cargo now uses a new `Cargo.lock` file format. This new format should
+  support easier merges in source control systems. Projects using the old
+  format will continue to use it; only new `Cargo.lock` files will use the
+  new format.
+ [#7579](https://github.com/rust-lang/cargo/pull/7579)
+- 🔥 `cargo install` will now upgrade already installed packages instead of
+ failing.
+ [#7560](https://github.com/rust-lang/cargo/pull/7560)
+- 🔥 Profile overrides have been added. This allows overriding profiles for
+ individual dependencies or build scripts. See [the
+ documentation](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#overrides)
+ for more.
+ [#7591](https://github.com/rust-lang/cargo/pull/7591)
+- Added new documentation for build scripts.
+ [#7565](https://github.com/rust-lang/cargo/pull/7565)
+- Added documentation for Cargo's JSON output.
+ [#7595](https://github.com/rust-lang/cargo/pull/7595)
+- Significant expansion of config and environment variable documentation.
+ [#7650](https://github.com/rust-lang/cargo/pull/7650)
+- Add back support for `BROWSER` environment variable for `cargo doc --open`.
+ [#7576](https://github.com/rust-lang/cargo/pull/7576)
+- Added `kind` and `platform` for dependencies in `cargo metadata`.
+ [#7132](https://github.com/rust-lang/cargo/pull/7132)
+- The `OUT_DIR` value is now included in the `build-script-executed` JSON message.
+ [#7622](https://github.com/rust-lang/cargo/pull/7622)
+
+### Changed
+- `cargo doc` will now document private items in binaries by default.
+ [#7593](https://github.com/rust-lang/cargo/pull/7593)
+- Subcommand typo suggestions now include aliases.
+ [#7486](https://github.com/rust-lang/cargo/pull/7486)
+- Tweak how the "already existing..." comment is added to `.gitignore`.
+ [#7570](https://github.com/rust-lang/cargo/pull/7570)
+- Ignore `cargo login` text when it is copied and pasted along with the token.
+ [#7588](https://github.com/rust-lang/cargo/pull/7588)
+- Windows: Ignore errors for locking files when not supported by the filesystem.
+ [#7602](https://github.com/rust-lang/cargo/pull/7602)
+- Remove `**/*.rs.bk` from `.gitignore`.
+ [#7647](https://github.com/rust-lang/cargo/pull/7647)
+
+### Fixed
+- Fix unused warnings for some keys in the `build` config section.
+ [#7575](https://github.com/rust-lang/cargo/pull/7575)
+- Linux: Don't panic when parsing `/proc/stat`.
+ [#7580](https://github.com/rust-lang/cargo/pull/7580)
+- Don't show canonical path in `cargo vendor`.
+ [#7629](https://github.com/rust-lang/cargo/pull/7629)
+
+### Nightly only
+
+
+## Cargo 1.40 (2019-12-19)
+[1c6ec66d...5da4b4d4](https://github.com/rust-lang/cargo/compare/1c6ec66d...5da4b4d4)
+
+### Added
+- Added `http.ssl-version` config option to control the version of TLS,
+ along with min/max versions.
+ [#7308](https://github.com/rust-lang/cargo/pull/7308)
+- 🔥 Compiler warnings are now cached on disk. If a build generates warnings,
+ re-running the build will now re-display the warnings.
+ [#7450](https://github.com/rust-lang/cargo/pull/7450)
+- Added `--filter-platform` option to `cargo metadata` to narrow the nodes
+ shown in the resolver graph to only packages included for the given target
+ triple.
+ [#7376](https://github.com/rust-lang/cargo/pull/7376)
+
+### Changed
+- Cargo's "platform" `cfg` parsing has been extracted into a separate crate
+ named `cargo-platform`.
+ [#7375](https://github.com/rust-lang/cargo/pull/7375)
+- Dependencies extracted into Cargo's cache no longer preserve mtimes to
+ reduce syscall overhead.
+ [#7465](https://github.com/rust-lang/cargo/pull/7465)
+- Windows: EXE files no longer include a metadata hash in the filename.
+ This helps with debuggers correlating the filename with the PDB file.
+ [#7400](https://github.com/rust-lang/cargo/pull/7400)
+- Wasm32: `.wasm` files are no longer treated as an "executable", allowing
+ `cargo test` and `cargo run` to work properly with the generated `.js` file.
+ [#7476](https://github.com/rust-lang/cargo/pull/7476)
+- crates.io now supports SPDX 3.6 licenses.
+ [#7481](https://github.com/rust-lang/cargo/pull/7481)
+- Improved cyclic dependency error message.
+ [#7470](https://github.com/rust-lang/cargo/pull/7470)
+- Bare `cargo clean` no longer locks the package cache.
+ [#7502](https://github.com/rust-lang/cargo/pull/7502)
+- `cargo publish` now allows dev-dependencies without a version key to be
+ published. A git or path-only dev-dependency will be removed from the
+ package manifest before uploading.
+ [#7333](https://github.com/rust-lang/cargo/pull/7333)
+- `--features` and `--no-default-features` in the root of a virtual workspace
+ will now generate an error instead of being ignored.
+ [#7507](https://github.com/rust-lang/cargo/pull/7507)
+- Generated files (like `Cargo.toml` and `Cargo.lock`) in a package archive
+ now have their timestamp set to the current time instead of the epoch.
+ [#7523](https://github.com/rust-lang/cargo/pull/7523)
+- The `-Z` flag parser is now more strict, rejecting more invalid syntax.
+ [#7531](https://github.com/rust-lang/cargo/pull/7531)
+
+### Fixed
+- Fixed an issue where a package with an `include` field, `Cargo.lock` listed
+  in `.gitignore`, a binary or example target, and a `Cargo.lock` present in
+  the current project would fail to publish, complaining that the
+  `Cargo.lock` was dirty.
+ [#7448](https://github.com/rust-lang/cargo/pull/7448)
+- Fixed a panic in a particular combination of `[patch]` entries.
+ [#7452](https://github.com/rust-lang/cargo/pull/7452)
+- Windows: Better error message when `cargo test` or `rustc` crashes in an
+ abnormal way, such as a signal or seg fault.
+ [#7535](https://github.com/rust-lang/cargo/pull/7535)
+
+### Nightly only
+- The `mtime-on-use` feature may now be enabled via the
+ `unstable.mtime_on_use` config option.
+ [#7411](https://github.com/rust-lang/cargo/pull/7411)
+- Added support for named profiles.
+ [#6989](https://github.com/rust-lang/cargo/pull/6989)
+- Added `-Zpanic-abort-tests` to allow building and running tests with the
+ "abort" panic strategy.
+ [#7460](https://github.com/rust-lang/cargo/pull/7460)
+- Changed `build-std` to use `--sysroot`.
+ [#7421](https://github.com/rust-lang/cargo/pull/7421)
+- Various fixes and enhancements to `-Ztimings`.
+ [#7395](https://github.com/rust-lang/cargo/pull/7395)
+ [#7398](https://github.com/rust-lang/cargo/pull/7398)
+ [#7397](https://github.com/rust-lang/cargo/pull/7397)
+ [#7403](https://github.com/rust-lang/cargo/pull/7403)
+ [#7428](https://github.com/rust-lang/cargo/pull/7428)
+ [#7429](https://github.com/rust-lang/cargo/pull/7429)
+- The syntax for profile overrides has been changed to
+  `[profile.dev.package.NAME]`.
+ [#7504](https://github.com/rust-lang/cargo/pull/7504)
+- Fixed warnings for unused profile overrides in a workspace.
+ [#7536](https://github.com/rust-lang/cargo/pull/7536)
+
+## Cargo 1.39 (2019-11-07)
+[e853aa97...1c6ec66d](https://github.com/rust-lang/cargo/compare/e853aa97...1c6ec66d)
+
+### Added
+- Config files may now use the `.toml` filename extension.
+ [#7295](https://github.com/rust-lang/cargo/pull/7295)
+- The `--workspace` flag has been added as an alias for `--all` to help avoid
+ confusion about the meaning of "all".
+ [#7241](https://github.com/rust-lang/cargo/pull/7241)
+- The `publish` field has been added to `cargo metadata`.
+ [#7354](https://github.com/rust-lang/cargo/pull/7354)
+
+### Changed
+- Display more information if parsing the output from `rustc` fails.
+ [#7236](https://github.com/rust-lang/cargo/pull/7236)
+- TOML errors now show the column number.
+ [#7248](https://github.com/rust-lang/cargo/pull/7248)
+- `cargo vendor` no longer deletes files in the `vendor` directory that start
+  with a `.`.
+ [#7242](https://github.com/rust-lang/cargo/pull/7242)
+- `cargo fetch` will now show manifest warnings.
+ [#7243](https://github.com/rust-lang/cargo/pull/7243)
+- `cargo publish` will now check whether git submodules contain any
+  uncommitted changes.
+ [#7245](https://github.com/rust-lang/cargo/pull/7245)
+- In a build script, `cargo:rustc-flags` now allows `-l` and `-L` flags
+ without spaces.
+ [#7257](https://github.com/rust-lang/cargo/pull/7257)
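+
+  A minimal build-script sketch (the library name and search path are
+  hypothetical) using the now-accepted space-free spelling:
+
+  ```rust
+  // build.rs
+  fn main() {
+      // Previously a space was required after `-L` and `-l`; both forms now
+      // work.
+      println!("cargo:rustc-flags=-L/opt/example/lib -lexample");
+  }
+  ```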
+- When `cargo install` replaces an older version of a package it will now
+ delete any installed binaries that are no longer present in the newly
+ installed version.
+ [#7246](https://github.com/rust-lang/cargo/pull/7246)
+- A git dependency may now also specify a `version` key when published. The
+ `git` value will be stripped from the uploaded crate, matching the behavior
+ of `path` dependencies.
+ [#7237](https://github.com/rust-lang/cargo/pull/7237)
+- The behavior of workspace default-members has changed. The default-members
+ now only applies when running Cargo in the root of the workspace. Previously
+ it would always apply regardless of which directory Cargo is running in.
+ [#7270](https://github.com/rust-lang/cargo/pull/7270)
+- libgit2 has been updated, pulling in all upstream changes.
+ [#7275](https://github.com/rust-lang/cargo/pull/7275)
+- Bump `home` dependency for locating home directories.
+ [#7277](https://github.com/rust-lang/cargo/pull/7277)
+- zsh completions have been updated.
+ [#7296](https://github.com/rust-lang/cargo/pull/7296)
+- SSL connect errors are now retried.
+ [#7318](https://github.com/rust-lang/cargo/pull/7318)
+- The jobserver has been changed to acquire N tokens (instead of N-1), and
+ then immediately acquires the extra token. This was changed to accommodate
+ the `cc` crate on Windows to allow it to release its implicit token.
+ [#7344](https://github.com/rust-lang/cargo/pull/7344)
+- The scheduling algorithm for choosing which crate to build next has been
+ changed. It now chooses the crate with the greatest number of transitive
+ crates waiting on it. Previously it used a maximum topological depth.
+ [#7390](https://github.com/rust-lang/cargo/pull/7390)
+- RUSTFLAGS are no longer incorporated in the metadata and filename hash,
+ reversing the change from 1.33 that added it. This means that any change to
+ RUSTFLAGS will cause a recompile, and will not affect symbol munging.
+ [#7459](https://github.com/rust-lang/cargo/pull/7459)
+
+### Fixed
+- Git dependencies with submodules with shorthand SSH URLs (like
+ `git@github.com/user/repo.git`) should now work.
+ [#7238](https://github.com/rust-lang/cargo/pull/7238)
+- Handle broken symlinks when creating `.dSYM` symlinks on macOS.
+ [#7268](https://github.com/rust-lang/cargo/pull/7268)
+- Fixed issues with multiple versions of the same crate in a `[patch]` table.
+ [#7303](https://github.com/rust-lang/cargo/pull/7303)
+- Fixed issue with custom target `.json` files where a substring of the name
+ matches an unsupported crate type (like "bin").
+ [#7363](https://github.com/rust-lang/cargo/issues/7363)
+- Fixed issues with generating documentation for proc-macro crate types.
+ [#7159](https://github.com/rust-lang/cargo/pull/7159)
+- Fixed hang if Cargo panics within a build thread.
+ [#7366](https://github.com/rust-lang/cargo/pull/7366)
+- Fixed rebuild detection if a `build.rs` script issues different `rerun-if`
+ directives between builds. Cargo was erroneously causing a rebuild after the
+ change.
+ [#7373](https://github.com/rust-lang/cargo/pull/7373)
+- Properly handle canonical URLs for `[patch]` table entries. Previously the
+  patch would stop working after the first time it was used.
+ [#7368](https://github.com/rust-lang/cargo/pull/7368)
+- Fixed an issue where integration tests were waiting for the package binary
+ to finish building before starting their own build. They now may build
+ concurrently.
+ [#7394](https://github.com/rust-lang/cargo/pull/7394)
+- Fixed an accidental change in the previous release to how the
+  `--features a b` flag is interpreted, restoring the original behavior where
+  this is interpreted as `--features a` along with the argument `b` passed to
+  the command. To pass multiple features, quote them (`--features "a b"`),
+  separate them with commas, or use multiple `--features` flags.
+ [#7419](https://github.com/rust-lang/cargo/pull/7419)
+
+### Nightly only
+- Basic support for building the standard library directly from Cargo has been
+ added.
+ ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std))
+ [#7216](https://github.com/rust-lang/cargo/pull/7216)
+- Added `-Ztimings` feature to generate an HTML report on the time spent on
+ individual compilation steps. This also may output completion steps on the
+ console and JSON data.
+ ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#timings))
+ [#7311](https://github.com/rust-lang/cargo/pull/7311)
+- Added ability to cross-compile doctests.
+ ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile))
+ [#6892](https://github.com/rust-lang/cargo/pull/6892)
+
+## Cargo 1.38 (2019-09-26)
+[4c1fa54d...23ef9a4e](https://github.com/rust-lang/cargo/compare/4c1fa54d...23ef9a4e)
+
+### Added
+- 🔥 Cargo build pipelining has been enabled by default to leverage more idle CPU
+ parallelism during builds.
+ [#7143](https://github.com/rust-lang/cargo/pull/7143)
+- The `--message-format` option to Cargo can now be specified multiple times and
+ accepts a comma-separated list of values. In addition to the previous values
+ it also now accepts `json-diagnostic-short` and
+ `json-diagnostic-rendered-ansi` which configures the output coming from rustc
+ in `json` message mode.
+ [#7214](https://github.com/rust-lang/cargo/pull/7214)
+- Cirrus CI badges are now supported on crates.io.
+ [#7119](https://github.com/rust-lang/cargo/pull/7119)
+- A new format for `Cargo.lock` has been introduced. This new format is
+ intended to avoid source-control merge conflicts more often, and to
+ generally make it safer to merge changes. This new format is *not* enabled
+ at this time, though Cargo will use it if it sees it. At some point in the
+ future, it is intended that this will become the default.
+ [#7070](https://github.com/rust-lang/cargo/pull/7070)
+- Progress bar support added for FreeBSD.
+ [#7222](https://github.com/rust-lang/cargo/pull/7222)
+
+### Changed
+- The `-q` flag will no longer suppress the root error message for an error
+ from Cargo itself.
+ [#7116](https://github.com/rust-lang/cargo/pull/7116)
+- The Cargo Book is now published with mdbook 0.3 providing a number of
+ formatting fixes and improvements.
+ [#7140](https://github.com/rust-lang/cargo/pull/7140)
+- The `--features` command-line flag can now be specified multiple times.
+  The features from all of the flags are joined together.
+ [#7084](https://github.com/rust-lang/cargo/pull/7084)
+- Package include/exclude glob-vs-gitignore warnings have been removed.
+ Packages may now use gitignore-style matching without producing any
+ warnings.
+ [#7170](https://github.com/rust-lang/cargo/pull/7170)
+- Cargo now shows the command and output when parsing `rustc` output fails
+ when querying `rustc` for information like `cfg` values.
+ [#7185](https://github.com/rust-lang/cargo/pull/7185)
+- `cargo package`/`cargo publish` now allows a symbolic link to a git
+ submodule to include that submodule.
+ [#6817](https://github.com/rust-lang/cargo/pull/6817)
+- Improved the error message when a version requirement does not
+ match any versions, but there are pre-release versions available.
+ [#7191](https://github.com/rust-lang/cargo/pull/7191)
+
+### Fixed
+- Fixed using the wrong directory when updating git repositories when using
+ the `git-fetch-with-cli` config option, and the `GIT_DIR` environment
+ variable is set. This may happen when running cargo from git callbacks.
+ [#7082](https://github.com/rust-lang/cargo/pull/7082)
+- Fixed dep-info files being overwritten for targets that have separate debug
+ outputs. For example, binaries on `-apple-` targets with `.dSYM` directories
+ would overwrite the `.d` file.
+ [#7057](https://github.com/rust-lang/cargo/pull/7057)
+- Fix `[patch]` table not preserving "one major version per source" rule.
+ [#7118](https://github.com/rust-lang/cargo/pull/7118)
+- Ignore `--remap-path-prefix` flags for the metadata hash in the `cargo
+ rustc` command. This was causing the remap settings to inadvertently affect
+ symbol names.
+ [#7134](https://github.com/rust-lang/cargo/pull/7134)
+- Fixed cycle detection in `[patch]` dependencies.
+ [#7174](https://github.com/rust-lang/cargo/pull/7174)
+- Fixed `cargo new` leaving behind a symlink on Windows when `core.symlinks`
+ git config is true. Also adds a number of fixes and updates from upstream
+ libgit2.
+ [#7176](https://github.com/rust-lang/cargo/pull/7176)
+- macOS: Fixed setting the flag to mark the `target` directory to be excluded
+ from backups.
+ [#7192](https://github.com/rust-lang/cargo/pull/7192)
+- Fixed `cargo fix` panicking under some situations involving multi-byte
+ characters.
+ [#7221](https://github.com/rust-lang/cargo/pull/7221)
+
+### Nightly only
+- Added `cargo fix --clippy` which will apply machine-applicable fixes from
+ Clippy.
+ [#7069](https://github.com/rust-lang/cargo/pull/7069)
+- Added `-Z binary-dep-depinfo` flag to add change tracking for binary
+ dependencies like the standard library.
+ [#7137](https://github.com/rust-lang/cargo/pull/7137)
+ [#7219](https://github.com/rust-lang/cargo/pull/7219)
+- `cargo clippy-preview` will always run, even if no changes have been made.
+ [#7157](https://github.com/rust-lang/cargo/pull/7157)
+- Fixed exponential blowup when using `CARGO_BUILD_PIPELINING`.
+ [#7062](https://github.com/rust-lang/cargo/pull/7062)
+- Fixed passing args to clippy in `cargo clippy-preview`.
+ [#7162](https://github.com/rust-lang/cargo/pull/7162)
+
+## Cargo 1.37 (2019-08-15)
+[c4fcfb72...9edd0891](https://github.com/rust-lang/cargo/compare/c4fcfb72...9edd0891)
+
+### Added
+- Added `doctest` field to `cargo metadata` to determine if a target's
+ documentation is tested.
+ [#6953](https://github.com/rust-lang/cargo/pull/6953)
+ [#6965](https://github.com/rust-lang/cargo/pull/6965)
+- 🔥 The [`cargo
+ vendor`](https://doc.rust-lang.org/nightly/cargo/commands/cargo-vendor.html)
+ command is now built-in to Cargo. This command may be used to create a local
+ copy of the sources of all dependencies.
+ [#6869](https://github.com/rust-lang/cargo/pull/6869)
+- 🔥 The "publish lockfile" feature is now stable. This feature will
+ automatically include the `Cargo.lock` file when a package is published if
+ it contains a binary executable target. By default, Cargo will ignore
+ `Cargo.lock` when installing a package. To force Cargo to use the
+ `Cargo.lock` file included in the published package, use `cargo install
+ --locked`. This may be useful to ensure that `cargo install` consistently
+ reproduces the same result. It may also be useful when a semver-incompatible
+ change is accidentally published to a dependency, providing a way to fall
+ back to a version that is known to work.
+ [#7026](https://github.com/rust-lang/cargo/pull/7026)
+- 🔥 The `default-run` feature has been stabilized. This feature allows you to
+ specify which binary executable to run by default with `cargo run` when a
+ package includes multiple binaries. Set the `default-run` key in the
+ `[package]` table in `Cargo.toml` to the name of the binary to use by
+ default.
+ [#7056](https://github.com/rust-lang/cargo/pull/7056)
+
+### Changed
+- `cargo package` now verifies that build scripts do not create empty
+ directories.
+ [#6973](https://github.com/rust-lang/cargo/pull/6973)
+- A warning is now issued if `cargo doc` generates duplicate outputs, which
+ causes files to be randomly stomped on. This may happen for a variety of
+ reasons (renamed dependencies, multiple versions of the same package,
+ packages with renamed libraries, etc.). This is a known bug, which needs
+ more work to handle correctly.
+ [#6998](https://github.com/rust-lang/cargo/pull/6998)
+- Enabling a dependency's feature with `--features foo/bar` will no longer
+ compile the current crate with the `foo` feature if `foo` is not an optional
+ dependency.
+ [#7010](https://github.com/rust-lang/cargo/pull/7010)
+- If `--remap-path-prefix` is passed via RUSTFLAGS, it will no longer affect
+ the filename metadata hash.
+ [#6966](https://github.com/rust-lang/cargo/pull/6966)
+- libgit2 has been updated to 0.28.2, which Cargo uses to access git
+ repositories. This brings in hundreds of changes and fixes since it was last
+ updated in November.
+ [#7018](https://github.com/rust-lang/cargo/pull/7018)
+- Cargo now supports absolute paths in the dep-info files generated by rustc.
+ This is laying the groundwork for [tracking
+ binaries](https://github.com/rust-lang/rust/pull/61727), such as libstd, for
+ rebuild detection. (Note: this contains a known bug.)
+ [#7030](https://github.com/rust-lang/cargo/pull/7030)
+
+### Fixed
+- Fixed how zsh completions fetch the list of commands.
+ [#6956](https://github.com/rust-lang/cargo/pull/6956)
+- "+ debuginfo" is no longer printed in the build summary when `debug` is set
+ to 0.
+ [#6971](https://github.com/rust-lang/cargo/pull/6971)
+- Fixed `cargo doc` so that an example configured with `doc = true` is
+  documented correctly.
+ [#7023](https://github.com/rust-lang/cargo/pull/7023)
+- Don't fail if a read-only lock cannot be acquired in CARGO_HOME. This helps
+ when CARGO_HOME doesn't exist, but `--locked` is used which means CARGO_HOME
+ is not needed.
+ [#7149](https://github.com/rust-lang/cargo/pull/7149)
+- Reverted a change in 1.35 which released jobserver tokens when Cargo blocked
+ on a lock file. It caused a deadlock in some situations.
+ [#7204](https://github.com/rust-lang/cargo/pull/7204)
+
+### Nightly only
+- Added [compiler message
+ caching](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#cache-messages).
+ The `-Z cache-messages` flag makes cargo cache the compiler output so that
+ future runs can redisplay previous warnings.
+ [#6933](https://github.com/rust-lang/cargo/pull/6933)
+- `-Z mtime-on-use` no longer touches intermediate artifacts.
+ [#7050](https://github.com/rust-lang/cargo/pull/7050)
+
+## Cargo 1.36 (2019-07-04)
+[6f3e9c36...c4fcfb72](https://github.com/rust-lang/cargo/compare/6f3e9c36...c4fcfb72)
+
+### Added
+- Added more detailed documentation on target auto-discovery.
+ [#6898](https://github.com/rust-lang/cargo/pull/6898)
+- 🔥 Stabilize the `--offline` flag which allows using cargo without a network
+ connection.
+ [#6934](https://github.com/rust-lang/cargo/pull/6934)
+ [#6871](https://github.com/rust-lang/cargo/pull/6871)
+
+### Changed
+- `publish = ["crates-io"]` may be added to the manifest to restrict
+ publishing to crates.io only.
+ [#6838](https://github.com/rust-lang/cargo/pull/6838)
+- macOS: Only include the default paths if `DYLD_FALLBACK_LIBRARY_PATH` is not
+ set. Also, remove `/lib` from the default set.
+ [#6856](https://github.com/rust-lang/cargo/pull/6856)
+- `cargo publish` will now exit early if the login token is not available.
+ [#6854](https://github.com/rust-lang/cargo/pull/6854)
+- HTTP/2 stream errors are now considered "spurious" and will cause a retry.
+ [#6861](https://github.com/rust-lang/cargo/pull/6861)
+- Setting a feature on a dependency where that feature points to a *required*
+ dependency is now an error. Previously it was a warning.
+ [#6860](https://github.com/rust-lang/cargo/pull/6860)
+- The `registry.index` config value now supports relative `file:` URLs.
+ [#6873](https://github.com/rust-lang/cargo/pull/6873)
+- macOS: The `.dSYM` directory is now symbolically linked next to example
+ binaries without the metadata hash so that debuggers can find it.
+ [#6891](https://github.com/rust-lang/cargo/pull/6891)
+- The default `Cargo.toml` template for new projects now includes a comment
+  providing a link to the documentation.
+ [#6881](https://github.com/rust-lang/cargo/pull/6881)
+- Some improvements to the wording of the crate download summary.
+ [#6916](https://github.com/rust-lang/cargo/pull/6916)
+ [#6920](https://github.com/rust-lang/cargo/pull/6920)
+- ✨ Changed `RUST_LOG` environment variable to `CARGO_LOG` so that user code
+ that uses the `log` crate will not display cargo's debug output.
+ [#6918](https://github.com/rust-lang/cargo/pull/6918)
+- `Cargo.toml` is now always included when packaging, even if it is not listed
+ in `package.include`.
+ [#6925](https://github.com/rust-lang/cargo/pull/6925)
+- Package include/exclude values now use gitignore patterns instead of glob
+ patterns. [#6924](https://github.com/rust-lang/cargo/pull/6924)
+- Provide a better error message when crates.io times out. Also improve error
+ messages with other HTTP response codes.
+ [#6936](https://github.com/rust-lang/cargo/pull/6936)
+
+### Performance
+- Resolver performance improvements for some cases.
+ [#6853](https://github.com/rust-lang/cargo/pull/6853)
+- Optimized how cargo reads the index JSON files by caching the results.
+ [#6880](https://github.com/rust-lang/cargo/pull/6880)
+ [#6912](https://github.com/rust-lang/cargo/pull/6912)
+ [#6940](https://github.com/rust-lang/cargo/pull/6940)
+- Various performance improvements.
+ [#6867](https://github.com/rust-lang/cargo/pull/6867)
+
+### Fixed
+- More carefully track the on-disk fingerprint information for dependencies.
+ This can help in some rare cases where the build is interrupted and
+ restarted. [#6832](https://github.com/rust-lang/cargo/pull/6832)
+- `cargo run` now correctly passes non-UTF8 arguments to the child process.
+ [#6849](https://github.com/rust-lang/cargo/pull/6849)
+- Fixed bash completion to run on bash 3.2, the stock version in macOS.
+ [#6905](https://github.com/rust-lang/cargo/pull/6905)
+- Various fixes and improvements to zsh completion.
+ [#6926](https://github.com/rust-lang/cargo/pull/6926)
+ [#6929](https://github.com/rust-lang/cargo/pull/6929)
+- Fix `cargo update` ignoring `-p` arguments if the `Cargo.lock` file was
+ missing.
+ [#6904](https://github.com/rust-lang/cargo/pull/6904)
+
+### Nightly only
+- Added [`-Z install-upgrade`
+ feature](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#install-upgrade)
+ to track details about installed crates and to update them if they are
+ out-of-date. [#6798](https://github.com/rust-lang/cargo/pull/6798)
+- Added the [`public-dependency`
+ feature](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#public-dependency)
+ which allows tracking public versus private dependencies.
+ [#6772](https://github.com/rust-lang/cargo/pull/6772)
+- Added build pipelining via the `build.pipelining` config
+ option (`CARGO_BUILD_PIPELINING` env var).
+ [#6883](https://github.com/rust-lang/cargo/pull/6883)
+- The `publish-lockfile` feature has had some significant changes. The default
+  is now `true`; the `Cargo.lock` will always be published for binary crates.
+ The `Cargo.lock` is now regenerated during publishing. `cargo install` now
+ ignores the `Cargo.lock` file by default, and requires `--locked` to use the
+ lock file. Warnings have been added if yanked dependencies are detected.
+ [#6840](https://github.com/rust-lang/cargo/pull/6840)
+
+## Cargo 1.35 (2019-05-23)
+[6789d8a0...6f3e9c36](https://github.com/rust-lang/cargo/compare/6789d8a0...6f3e9c36)
+
+### Added
+- Added the `rustc-cdylib-link-arg` key for build scripts to specify linker
+ arguments for cdylib crates.
+ [#6298](https://github.com/rust-lang/cargo/pull/6298)
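+
+  A minimal build-script sketch, assuming a crate with a `cdylib` target; the
+  soname value here is hypothetical:
+
+  ```rust
+  // build.rs
+  fn main() {
+      // This flag is passed to the linker only when building the cdylib.
+      println!("cargo:rustc-cdylib-link-arg=-Wl,-soname,libexample.so.1");
+  }
+  ```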
+
+### Changed
+- When passing a test filter, such as `cargo test foo`, don't build examples
+ (unless they set `test = true`).
+ [#6683](https://github.com/rust-lang/cargo/pull/6683)
+- Forward the `--quiet` flag from `cargo test` to the libtest harness so that
+ tests are actually quiet.
+ [#6358](https://github.com/rust-lang/cargo/pull/6358)
+- The verification step in `cargo package` that checks if any files are
+ modified is now stricter. It uses a hash of the contents instead of checking
+ filesystem mtimes. It also checks *all* files in the package.
+ [#6740](https://github.com/rust-lang/cargo/pull/6740)
+- Jobserver tokens are now released whenever Cargo blocks on a file lock.
+ [#6748](https://github.com/rust-lang/cargo/pull/6748)
+- Issue a warning for a previous bug in the TOML parser that allowed multiple
+ table headers with the same name.
+ [#6761](https://github.com/rust-lang/cargo/pull/6761)
+- Removed the `CARGO_PKG_*` environment variables from the metadata hash and
+ added them to the fingerprint instead. This means that when these values
+ change, stale artifacts are not left behind. Also added the "repository"
+ value to the fingerprint.
+ [#6785](https://github.com/rust-lang/cargo/pull/6785)
+- `cargo metadata` no longer shows a `null` field for a dependency without a
+ library in `resolve.nodes.deps`. The dependency is no longer shown.
+ [#6534](https://github.com/rust-lang/cargo/pull/6534)
+- `cargo new` will no longer include an email address in the `authors` field
+ if it is set to the empty string.
+ [#6802](https://github.com/rust-lang/cargo/pull/6802)
+- `cargo doc --open` now works when documenting multiple packages.
+ [#6803](https://github.com/rust-lang/cargo/pull/6803)
+- `cargo install --path P` now loads the `.cargo/config` file from the
+ directory P. [#6805](https://github.com/rust-lang/cargo/pull/6805)
+- Using semver metadata in a version requirement (such as `1.0.0+1234`) now
+ issues a warning that it is ignored.
+ [#6806](https://github.com/rust-lang/cargo/pull/6806)
+- `cargo install` now rejects certain combinations of flags where some flags
+ would have been ignored.
+ [#6801](https://github.com/rust-lang/cargo/pull/6801)
+- Resolver performance improvements for some cases.
+ [#6776](https://github.com/rust-lang/cargo/pull/6776)
+
+### Fixed
+- Fixed running separate commands (such as `cargo build` then `cargo test`)
+ where the second command could use stale results from a build script.
+ [#6720](https://github.com/rust-lang/cargo/pull/6720)
+- Fixed `cargo fix` not working properly when a `.gitignore` file matched the
+  root package directory.
+ [#6767](https://github.com/rust-lang/cargo/pull/6767)
+- Fixed accidentally compiling a lib multiple times if `panic=unwind` was set
+ in a profile. [#6781](https://github.com/rust-lang/cargo/pull/6781)
+- Paths to JSON files in `build.target` config value are now canonicalized to
+ fix building dependencies.
+ [#6778](https://github.com/rust-lang/cargo/pull/6778)
+- Fixed re-running a build script if its compilation was interrupted (such as
+ if it is killed). [#6782](https://github.com/rust-lang/cargo/pull/6782)
+- Fixed `cargo new` initializing a fossil repo.
+ [#6792](https://github.com/rust-lang/cargo/pull/6792)
+- Fixed updating a git repo that has had a force push when using the
+  `git-fetch-with-cli` feature. `git-fetch-with-cli` also shows more error
+  information now when it fails.
+ [#6800](https://github.com/rust-lang/cargo/pull/6800)
+- `--example` binaries built for the WASM target are fixed to no longer
+ include a metadata hash in the filename, and are correctly emitted in the
+ `compiler-artifact` JSON message.
+ [#6812](https://github.com/rust-lang/cargo/pull/6812)
+
+### Nightly only
+- `cargo clippy-preview` is now a built-in cargo command.
+ [#6759](https://github.com/rust-lang/cargo/pull/6759)
+- The `build-override` profile setting now includes proc-macros and their
+ dependencies.
+ [#6811](https://github.com/rust-lang/cargo/pull/6811)
+- Optional and target dependencies now work better with `-Z offline`.
+ [#6814](https://github.com/rust-lang/cargo/pull/6814)
+
+## Cargo 1.34 (2019-04-11)
+[f099fe94...6789d8a0](https://github.com/rust-lang/cargo/compare/f099fe94...6789d8a0)
+
+### Added
+- 🔥 Stabilized support for [alternate
+ registries](https://doc.rust-lang.org/1.34.0/cargo/reference/registries.html).
+ [#6654](https://github.com/rust-lang/cargo/pull/6654)
+- Added documentation on using builds.sr.ht Continuous Integration with Cargo.
+ [#6565](https://github.com/rust-lang/cargo/pull/6565)
+- `Cargo.lock` now includes a comment at the top that it is `@generated`.
+ [#6548](https://github.com/rust-lang/cargo/pull/6548)
+- Azure DevOps badges are now supported.
+ [#6264](https://github.com/rust-lang/cargo/pull/6264)
+- Added a warning if `--exclude` flag specifies an unknown package.
+ [#6679](https://github.com/rust-lang/cargo/pull/6679)
+
+### Changed
+- `cargo test --doc --no-run` doesn't do anything, so it now displays an error
+ to that effect. [#6628](https://github.com/rust-lang/cargo/pull/6628)
+- Various updates to bash completion: add missing options and commands,
+ support libtest completions, use rustup for `--target` completion, fallback
+ to filename completion, fix editing the command line.
+ [#6644](https://github.com/rust-lang/cargo/pull/6644)
+- Publishing a crate with a `[patch]` section no longer generates an error.
+ The `[patch]` section is removed from the manifest before publishing.
+ [#6535](https://github.com/rust-lang/cargo/pull/6535)
+- `build.incremental = true` config value is now treated the same as
+ `CARGO_INCREMENTAL=1`, previously it was ignored.
+ [#6688](https://github.com/rust-lang/cargo/pull/6688)
+- Errors from a registry are now always displayed regardless of the HTTP
+ response code. [#6771](https://github.com/rust-lang/cargo/pull/6771)
+
+### Fixed
+- Fixed bash completion for `cargo run --example`.
+ [#6578](https://github.com/rust-lang/cargo/pull/6578)
+- Fixed a race condition when using a *local* registry and running multiple
+ cargo commands at the same time that build the same crate.
+ [#6591](https://github.com/rust-lang/cargo/pull/6591)
+- Fixed some flickering and excessive updates of the progress bar.
+ [#6615](https://github.com/rust-lang/cargo/pull/6615)
+- Fixed a hang when using a git credential helper that returns incorrect
+ credentials. [#6681](https://github.com/rust-lang/cargo/pull/6681)
+- Fixed resolving yanked crates with a local registry.
+ [#6750](https://github.com/rust-lang/cargo/pull/6750)
+
+### Nightly only
+- Added `-Z mtime-on-use` flag to cause the mtime to be updated on the
+ filesystem when a crate is used. This is intended to be able to track stale
+ artifacts in the future for cleaning up unused files.
+ [#6477](https://github.com/rust-lang/cargo/pull/6477)
+ [#6573](https://github.com/rust-lang/cargo/pull/6573)
+- Added experimental `-Z dual-proc-macros` to build proc macros for both the
+ host and the target.
+ [#6547](https://github.com/rust-lang/cargo/pull/6547)
+
+## Cargo 1.33 (2019-02-28)
+[8610973a...f099fe94](https://github.com/rust-lang/cargo/compare/8610973a...f099fe94)
+
+### Added
+- `compiler-artifact` JSON messages now include an `"executable"` key which
+ includes the path to the executable that was built.
+ [#6363](https://github.com/rust-lang/cargo/pull/6363)
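+
+  An illustrative sketch of a tool reading that key, assuming `serde_json` is
+  available as a dependency of the tool and that a plain `cargo build` is
+  being driven:
+
+  ```rust
+  use std::io::{BufRead, BufReader};
+  use std::process::{Command, Stdio};
+
+  fn main() -> std::io::Result<()> {
+      let mut child = Command::new("cargo")
+          .args(&["build", "--message-format=json"])
+          .stdout(Stdio::piped())
+          .spawn()?;
+      let stdout = child.stdout.take().expect("stdout was piped");
+      for line in BufReader::new(stdout).lines() {
+          // Skip any line that is not a valid JSON message.
+          let msg: serde_json::Value = match serde_json::from_str(&line?) {
+              Ok(v) => v,
+              Err(_) => continue,
+          };
+          if msg["reason"] == "compiler-artifact" {
+              if let Some(path) = msg["executable"].as_str() {
+                  println!("built executable: {path}");
+              }
+          }
+      }
+      child.wait()?;
+      Ok(())
+  }
+  ```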
+- The man pages have been rewritten, and are now published with the web
+ documentation. [#6405](https://github.com/rust-lang/cargo/pull/6405)
+- `cargo login` now displays a confirmation after saving the token.
+ [#6466](https://github.com/rust-lang/cargo/pull/6466)
+- A warning is now emitted if a `[patch]` entry does not match any package.
+ [#6470](https://github.com/rust-lang/cargo/pull/6470)
+- `cargo metadata` now includes the `links` key for a package.
+ [#6480](https://github.com/rust-lang/cargo/pull/6480)
+- "Very verbose" output with `-vv` now displays the environment variables that
+ cargo sets when it runs a process.
+ [#6492](https://github.com/rust-lang/cargo/pull/6492)
+- `--example`, `--bin`, `--bench`, or `--test` without an argument now lists
+ the available targets for those options.
+ [#6505](https://github.com/rust-lang/cargo/pull/6505)
+- Windows: If a process fails with an extended status exit code, a
+ human-readable name for the code is now displayed.
+ [#6532](https://github.com/rust-lang/cargo/pull/6532)
+- Added `--features`, `--no-default-features`, and `--all-features` flags to
+ the `cargo package` and `cargo publish` commands to use the given features
+ when verifying the package.
+ [#6453](https://github.com/rust-lang/cargo/pull/6453)
+
+### Changed
+- If `cargo fix` fails to compile the fixed code, the rustc errors are now
+ displayed on the console.
+ [#6419](https://github.com/rust-lang/cargo/pull/6419)
+- Hide the `--host` flag from `cargo login`, it is unused.
+ [#6466](https://github.com/rust-lang/cargo/pull/6466)
+- Build script fingerprints now include the rustc version.
+ [#6473](https://github.com/rust-lang/cargo/pull/6473)
+- macOS: Switched to setting `DYLD_FALLBACK_LIBRARY_PATH` instead of
+ `DYLD_LIBRARY_PATH`. [#6355](https://github.com/rust-lang/cargo/pull/6355)
+- `RUSTFLAGS` is now included in the metadata hash, meaning that changing
+ the flags will not overwrite previously built files.
+ [#6503](https://github.com/rust-lang/cargo/pull/6503)
+- When updating the crate graph, unrelated yanked crates were erroneously
+ removed. They are now kept at their original version if possible. This was
+ causing unrelated packages to be downgraded during `cargo update -p
+ somecrate`. [#5702](https://github.com/rust-lang/cargo/issues/5702)
+- TOML files now support the [0.5 TOML
+ syntax](https://github.com/toml-lang/toml/blob/master/CHANGELOG.md#050--2018-07-11).
+
+### Fixed
+- `cargo fix` will now ignore suggestions that modify multiple files.
+ [#6402](https://github.com/rust-lang/cargo/pull/6402)
+- `cargo fix` will now only fix one target at a time, to deal with targets
+ which share the same source files.
+ [#6434](https://github.com/rust-lang/cargo/pull/6434)
+- Fixed bash completion showing the list of cargo commands.
+ [#6461](https://github.com/rust-lang/cargo/issues/6461)
+- `cargo init` will now avoid creating duplicate entries in `.gitignore`
+ files. [#6521](https://github.com/rust-lang/cargo/pull/6521)
+- Builds now attempt to detect if a file is modified in the middle of a
+ compilation, allowing you to build again and pick up the new changes. This
+ is done by keeping track of when the compilation *starts* not when it
+ finishes. Also, [#5919](https://github.com/rust-lang/cargo/pull/5919) was
+ reverted, meaning that cargo does *not* treat equal filesystem mtimes as
+ requiring a rebuild. [#6484](https://github.com/rust-lang/cargo/pull/6484)
+
+### Nightly only
+- Allow using registry *names* in `[patch]` tables instead of just URLs.
+ [#6456](https://github.com/rust-lang/cargo/pull/6456)
+- `cargo metadata` added the `registry` key for dependencies.
+ [#6500](https://github.com/rust-lang/cargo/pull/6500)
+- Registry names are now restricted to the same style as
+ package names (alphanumeric, `-` and `_` characters).
+ [#6469](https://github.com/rust-lang/cargo/pull/6469)
+- `cargo login` now displays the `/me` URL from the registry config.
+ [#6466](https://github.com/rust-lang/cargo/pull/6466)
+- `cargo login --registry=NAME` now supports interactive input for the token.
+ [#6466](https://github.com/rust-lang/cargo/pull/6466)
+- Registries may now elide the `api` key from `config.json` to indicate they
+ do not support API access.
+ [#6466](https://github.com/rust-lang/cargo/pull/6466)
+- Fixed panic when using `--message-format=json` with metabuild.
+ [#6432](https://github.com/rust-lang/cargo/pull/6432)
+- Fixed detection of publishing to crates.io when using alternate registries.
+ [#6525](https://github.com/rust-lang/cargo/pull/6525)
+
+## Cargo 1.32 (2019-01-17)
+[339d9f9c...8610973a](https://github.com/rust-lang/cargo/compare/339d9f9c...8610973a)
+
+### Added
+- Registries may now display warnings after a successful publish.
+ [#6303](https://github.com/rust-lang/cargo/pull/6303)
+- Added a [glossary](https://doc.rust-lang.org/cargo/appendix/glossary.html)
+ to the documentation. [#6321](https://github.com/rust-lang/cargo/pull/6321)
+- Added the alias `c` for `cargo check`.
+ [#6218](https://github.com/rust-lang/cargo/pull/6218)
+
+### Changed
+- 🔥 HTTP/2 multiplexing is now enabled by default. The `http.multiplexing`
+ config value may be used to disable it.
+ [#6271](https://github.com/rust-lang/cargo/pull/6271)
+- Use ANSI escape sequences to clear lines instead of spaces.
+ [#6233](https://github.com/rust-lang/cargo/pull/6233)
+- Disable git templates when checking out git dependencies, since templates
+  can cause problems. [#6252](https://github.com/rust-lang/cargo/pull/6252)
+- Include the `--update-head-ok` git flag when using the
+ `net.git-fetch-with-cli` option. This can help prevent failures when
+ fetching some repositories.
+ [#6250](https://github.com/rust-lang/cargo/pull/6250)
+- When extracting a crate during the verification step of `cargo package`, the
+ filesystem mtimes are no longer set, which was failing on some rare
+ filesystems. [#6257](https://github.com/rust-lang/cargo/pull/6257)
+- `crate-type = ["proc-macro"]` is now treated the same as `proc-macro = true`
+ in `Cargo.toml`. [#6256](https://github.com/rust-lang/cargo/pull/6256)
+- An error is raised if `dependencies`, `features`, `target`, or `badges` is
+ set in a virtual workspace. Warnings are displayed if `replace` or `patch`
+ is used in a workspace member.
+ [#6276](https://github.com/rust-lang/cargo/pull/6276)
+- Improved performance of the resolver in some cases.
+ [#6283](https://github.com/rust-lang/cargo/pull/6283)
+ [#6366](https://github.com/rust-lang/cargo/pull/6366)
+- `.rmeta` files are no longer hard-linked into the base target directory
+ (`target/debug`). [#6292](https://github.com/rust-lang/cargo/pull/6292)
+- A warning is issued if multiple targets are built with the same output
+ filenames. [#6308](https://github.com/rust-lang/cargo/pull/6308)
+- When using `cargo build` (without `--release`), benchmarks are now built
+ using the "test" profile instead of "bench". This makes it easier to debug
+ benchmarks, and avoids confusing behavior.
+ [#6309](https://github.com/rust-lang/cargo/pull/6309)
+- User aliases may now override built-in aliases (`b`, `r`, `t`, and `c`).
+ [#6259](https://github.com/rust-lang/cargo/pull/6259)
+- Setting `autobins=false` now disables auto-discovery of inferred targets.
+ [#6329](https://github.com/rust-lang/cargo/pull/6329)
+- `cargo verify-project` will now fail on stable if the project uses unstable
+ features. [#6326](https://github.com/rust-lang/cargo/pull/6326)
+- Platform targets with an internal `.` within the name are now allowed.
+ [#6255](https://github.com/rust-lang/cargo/pull/6255)
+- `cargo clean --release` now only deletes the release directory.
+ [#6349](https://github.com/rust-lang/cargo/pull/6349)
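+
+For the alias change, a minimal `.cargo/config` sketch (the alias body is
+illustrative):
+
+```toml
+# A user-defined alias now takes precedence over the built-in `b` (= `build`).
+[alias]
+b = "build --release"
+```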
+
+### Fixed
+- Avoid adding extra angle brackets in email address for `cargo new`.
+ [#6243](https://github.com/rust-lang/cargo/pull/6243)
+- The progress bar is disabled if the CI environment variable is set.
+ [#6281](https://github.com/rust-lang/cargo/pull/6281)
+- Avoid retaining all rustc output in memory.
+ [#6289](https://github.com/rust-lang/cargo/pull/6289)
+- If JSON parsing fails, and rustc exits nonzero, don't lose the parse failure
+ message. [#6290](https://github.com/rust-lang/cargo/pull/6290)
+- Fixed renaming a project directory with build scripts.
+ [#6328](https://github.com/rust-lang/cargo/pull/6328)
+- Fixed `cargo run --example NAME` to work correctly if the example sets
+ `crate_type = ["bin"]`.
+ [#6330](https://github.com/rust-lang/cargo/pull/6330)
+- Fixed issue with `cargo package` git discovery being too aggressive. The
+  `--allow-dirty` flag now completely disables the git repo checks.
+ [#6280](https://github.com/rust-lang/cargo/pull/6280)
+- Fixed build change tracking for `[patch]` deps which resulted in `cargo
+ build` rebuilding when it shouldn't.
+ [#6493](https://github.com/rust-lang/cargo/pull/6493)
+
+### Nightly only
+- Allow usernames in registry URLs.
+ [#6242](https://github.com/rust-lang/cargo/pull/6242)
+- Added `"compile_mode"` key to the build-plan JSON structure to be able to
+ distinguish running a custom build script versus compiling the build script.
+ [#6331](https://github.com/rust-lang/cargo/pull/6331)
+- `--out-dir` no longer copies over build scripts.
+ [#6300](https://github.com/rust-lang/cargo/pull/6300)
+
+## Cargo 1.31 (2018-12-06)
+[36d96825...339d9f9c](https://github.com/rust-lang/cargo/compare/36d96825...339d9f9c)
+
+### Added
+- 🔥 Stabilized support for the 2018 edition.
+ [#5984](https://github.com/rust-lang/cargo/pull/5984)
+ [#5989](https://github.com/rust-lang/cargo/pull/5989)
+- 🔥 Added the ability to [rename
+ dependencies](https://doc.rust-lang.org/1.31.0/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml)
+ in Cargo.toml. [#6319](https://github.com/rust-lang/cargo/pull/6319)
+- 🔥 Added support for HTTP/2 pipelining and multiplexing. Set the
+ `http.multiplexing` config value to enable.
+ [#6005](https://github.com/rust-lang/cargo/pull/6005)
+- Added `http.debug` configuration value to debug HTTP connections. Use
+ `CARGO_HTTP_DEBUG=true RUST_LOG=cargo::ops::registry cargo build` to display
+ the debug information. [#6166](https://github.com/rust-lang/cargo/pull/6166)
+- `CARGO_PKG_REPOSITORY` environment variable is set with the repository value
+  from `Cargo.toml` when building.
+ [#6096](https://github.com/rust-lang/cargo/pull/6096)
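+
+A minimal sketch of two of the additions above; the crate names and the
+chosen values are placeholders:
+
+```toml
+# Cargo.toml — dependency renaming: the registry package `foo` is used as
+# `bar` in source code.
+[dependencies]
+bar = { package = "foo", version = "0.1" }
+
+# .cargo/config — opt in to HTTP/2 multiplexing and HTTP debug output.
+[http]
+multiplexing = true
+debug = true
+```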
+
+### Changed
+- `cargo test --doc` now rejects other flags instead of ignoring them.
+ [#6037](https://github.com/rust-lang/cargo/pull/6037)
+- `cargo install` ignores `~/.cargo/config`.
+ [#6026](https://github.com/rust-lang/cargo/pull/6026)
+- `cargo version --verbose` is now the same as `cargo -vV`.
+ [#6076](https://github.com/rust-lang/cargo/pull/6076)
+- Comments at the top of `Cargo.lock` are now preserved.
+ [#6181](https://github.com/rust-lang/cargo/pull/6181)
+- When building in "very verbose" mode (`cargo build -vv`), build script
+ output is prefixed with the package name and version, such as `[foo 0.0.1]`.
+ [#6164](https://github.com/rust-lang/cargo/pull/6164)
+- If `cargo fix --broken-code` fails to compile after fixes have been applied,
+ the files are no longer reverted and are left in their broken state.
+ [#6316](https://github.com/rust-lang/cargo/pull/6316)
+
+### Fixed
+- Windows: Pass Ctrl-C to the process with `cargo run`.
+ [#6004](https://github.com/rust-lang/cargo/pull/6004)
+- macOS: Fix bash completion.
+ [#6038](https://github.com/rust-lang/cargo/pull/6038)
+- Support arbitrary toolchain names when completing `+toolchain` in bash
+ completion. [#6038](https://github.com/rust-lang/cargo/pull/6038)
+- Fixed edge cases in the resolver, when backtracking on failed dependencies.
+ [#5988](https://github.com/rust-lang/cargo/pull/5988)
+- Fixed `cargo test --all-targets` running lib tests three times.
+ [#6039](https://github.com/rust-lang/cargo/pull/6039)
+- Fixed publishing renamed dependencies to crates.io.
+ [#5993](https://github.com/rust-lang/cargo/pull/5993)
+- Fixed `cargo install` on a git repo with multiple binaries.
+ [#6060](https://github.com/rust-lang/cargo/pull/6060)
+- Fixed deeply nested JSON emitted by rustc being lost.
+ [#6081](https://github.com/rust-lang/cargo/pull/6081)
+- Windows: Fix locking msys terminals to 60 characters.
+ [#6122](https://github.com/rust-lang/cargo/pull/6122)
+- Fixed renamed dependencies with dashes.
+ [#6140](https://github.com/rust-lang/cargo/pull/6140)
+- Fixed linking against the wrong dylib when the dylib existed in both
+ `target/debug` and `target/debug/deps`.
+ [#6167](https://github.com/rust-lang/cargo/pull/6167)
+- Fixed some unnecessary recompiles when `panic=abort` is used.
+ [#6170](https://github.com/rust-lang/cargo/pull/6170)
+
+### Nightly only
+- Added `--registry` flag to `cargo install`.
+ [#6128](https://github.com/rust-lang/cargo/pull/6128)
+- Added `registry.default` configuration value to specify the
+ default registry to use if `--registry` flag is not passed.
+ [#6135](https://github.com/rust-lang/cargo/pull/6135)
+- Added `--registry` flag to `cargo new` and `cargo init`.
+ [#6135](https://github.com/rust-lang/cargo/pull/6135)
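+
+A sketch of the default-registry configuration; the registry name and index
+URL are placeholders:
+
+```toml
+# .cargo/config
+[registries.my-registry]
+index = "https://example.com/git/index"
+
+# Used when --registry is not passed on the command line.
+[registry]
+default = "my-registry"
+```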
+
+## Cargo 1.30 (2018-10-25)
+[524a578d...36d96825](https://github.com/rust-lang/cargo/compare/524a578d...36d96825)
+
+### Added
+- 🔥 Added an animated progress bar that shows progress while building.
+ [#5995](https://github.com/rust-lang/cargo/pull/5995/)
+- Added `resolve.nodes.deps` key to `cargo metadata`, which includes more
+ information about resolved dependencies, and properly handles renamed
+ dependencies. [#5871](https://github.com/rust-lang/cargo/pull/5871)
+- When creating a package, provide more detail with `-v` when failing to
+ discover if files are dirty in a git repository. Also fix a problem with
+ discovery on Windows. [#5858](https://github.com/rust-lang/cargo/pull/5858)
+- Filters like `--bin`, `--test`, `--example`, `--bench`, or `--lib` can be
+ used in a workspace without selecting a specific package.
+ [#5873](https://github.com/rust-lang/cargo/pull/5873)
+- `cargo run` can be used in a workspace without selecting a specific package.
+ [#5877](https://github.com/rust-lang/cargo/pull/5877)
+- `cargo doc --message-format=json` now outputs JSON messages from rustdoc.
+ [#5878](https://github.com/rust-lang/cargo/pull/5878)
+- Added `--message-format=short` to show one-line messages.
+ [#5879](https://github.com/rust-lang/cargo/pull/5879)
+- Added `.cargo_vcs_info.json` file to `.crate` packages that captures the
+ current git hash. [#5886](https://github.com/rust-lang/cargo/pull/5886)
+- Added `net.git-fetch-with-cli` configuration option to use the `git`
+ executable to fetch repositories instead of using the built-in libgit2
+ library. [#5914](https://github.com/rust-lang/cargo/pull/5914)
+- Added `required-features` to `cargo metadata`.
+ [#5902](https://github.com/rust-lang/cargo/pull/5902)
+- `cargo uninstall` within a package will now uninstall that package.
+ [#5927](https://github.com/rust-lang/cargo/pull/5927)
+- Added `--allow-staged` flag to `cargo fix` to allow it to run if files are
+ staged in git. [#5943](https://github.com/rust-lang/cargo/pull/5943)
+- Added `net.low-speed-limit` config value, and also honor `net.timeout` for
+ http operations. [#5957](https://github.com/rust-lang/cargo/pull/5957)
+- Added `--edition` flag to `cargo new`.
+ [#5984](https://github.com/rust-lang/cargo/pull/5984)
+- Temporarily stabilized 2018 edition support for the duration of the beta.
+ [#5984](https://github.com/rust-lang/cargo/pull/5984)
+ [#5989](https://github.com/rust-lang/cargo/pull/5989)
+- Added support for `target.'cfg(…)'.runner` config value to specify the
+ run/test/bench runner for targets that use config expressions.
+ [#5959](https://github.com/rust-lang/cargo/pull/5959)
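+
+Several of the new configuration values live in `.cargo/config`; a sketch
+with placeholder values:
+
+```toml
+[net]
+git-fetch-with-cli = true   # shell out to the `git` executable instead of libgit2
+low-speed-limit = 10        # bytes/sec threshold, honored along with net.timeout
+
+# Runner used by `cargo run`/`test`/`bench` for matching targets.
+[target.'cfg(unix)']
+runner = "my-runner"
+```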
+
+### Changed
+- Windows: `cargo run` will not kill child processes when the main process
+ exits. [#5887](https://github.com/rust-lang/cargo/pull/5887)
+- Switched to the `opener` crate to open a web browser with `cargo doc
+ --open`. This should more reliably select the system-preferred browser on
+ all platforms. [#5888](https://github.com/rust-lang/cargo/pull/5888)
+- Equal file mtimes now cause a target to be rebuilt. Previously, a rebuild
+  was triggered only if files were strictly *newer* than the last build.
+ [#5919](https://github.com/rust-lang/cargo/pull/5919)
+- Ignore `build.target` config value when running `cargo install`.
+ [#5874](https://github.com/rust-lang/cargo/pull/5874)
+- Ignore `RUSTC_WRAPPER` for `cargo fix`.
+ [#5983](https://github.com/rust-lang/cargo/pull/5983)
+- Ignore empty `RUSTC_WRAPPER`.
+ [#5985](https://github.com/rust-lang/cargo/pull/5985)
+
+### Fixed
+- Fixed error when creating a package with an edition field in `Cargo.toml`.
+ [#5908](https://github.com/rust-lang/cargo/pull/5908)
+- More consistently use relative paths for path dependencies in a workspace.
+ [#5935](https://github.com/rust-lang/cargo/pull/5935)
+- `cargo fix` now always runs, even if it was run previously.
+ [#5944](https://github.com/rust-lang/cargo/pull/5944)
+- Windows: Attempt to more reliably detect terminal width. msys-based
+ terminals are forced to 60 characters wide.
+ [#6010](https://github.com/rust-lang/cargo/pull/6010)
+- Allow multiple target flags with `cargo doc --document-private-items`.
+  [#6022](https://github.com/rust-lang/cargo/pull/6022)
+
+### Nightly only
+- Added
+ [metabuild](https://doc.rust-lang.org/1.30.0/cargo/reference/unstable.html#metabuild).
+ [#5628](https://github.com/rust-lang/cargo/pull/5628)
diff --git a/src/tools/cargo/CODE_OF_CONDUCT.md b/src/tools/cargo/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..e3708bc48
--- /dev/null
+++ b/src/tools/cargo/CODE_OF_CONDUCT.md
@@ -0,0 +1,3 @@
+# The Rust Code of Conduct
+
+The Code of Conduct for this repository [can be found online](https://www.rust-lang.org/conduct.html).
diff --git a/src/tools/cargo/CONTRIBUTING.md b/src/tools/cargo/CONTRIBUTING.md
new file mode 100644
index 000000000..88ffbd3d0
--- /dev/null
+++ b/src/tools/cargo/CONTRIBUTING.md
@@ -0,0 +1,21 @@
+# Contributing to Cargo
+
+Contributing documentation has moved to the **[Cargo Contributor Guide]**.
+
+[Cargo Contributor Guide]: https://rust-lang.github.io/cargo/contrib/
+
+## Before hacking on Cargo
+
+We encourage people to discuss their design before hacking on code. Typically,
+you [file an issue] or start a thread on the [internals forum] before submitting
+a pull request. Please read [the process] of how features and bugs are managed
+in Cargo.
+
+**NOTICE: Due to limited review capacity, the Cargo team is not accepting new
+features or major changes at this time. Please consult with the team before
+opening a new PR. Only issues that have been explicitly marked as accepted
+will be reviewed.**
+
+[internals forum]: https://internals.rust-lang.org/c/tools-and-infrastructure/cargo
+[file an issue]: https://github.com/rust-lang/cargo/issues
+[the process]: https://doc.crates.io/contrib/process/index.html
diff --git a/src/tools/cargo/Cargo.toml b/src/tools/cargo/Cargo.toml
new file mode 100644
index 000000000..56c9827cb
--- /dev/null
+++ b/src/tools/cargo/Cargo.toml
@@ -0,0 +1,119 @@
+[package]
+name = "cargo"
+version = "0.71.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+homepage = "https://crates.io"
+repository = "https://github.com/rust-lang/cargo"
+documentation = "https://docs.rs/cargo"
+readme = "README.md"
+description = """
+Cargo, a package manager for Rust.
+"""
+
+[lib]
+name = "cargo"
+path = "src/cargo/lib.rs"
+
+[dependencies]
+anyhow = "1.0.47"
+base64 = "0.21.0"
+bytesize = "1.0"
+cargo-platform = { path = "crates/cargo-platform", version = "0.1.2" }
+cargo-util = { path = "crates/cargo-util", version = "0.2.4" }
+clap = "4.2.0"
+crates-io = { path = "crates/crates-io", version = "0.36.0" }
+curl = { version = "0.4.44", features = ["http2"] }
+curl-sys = "0.4.61"
+env_logger = "0.10.0"
+filetime = "0.2.9"
+flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] }
+git2 = "0.17.0"
+git2-curl = "0.18.0"
+gix = { version = "0.39.0", default-features = false, features = ["blocking-http-transport-curl", "progress-tree"] }
+gix-features-for-configuration-only = { version = "0.28.0", package = "gix-features", features = [ "parallel" ] }
+glob = "0.3.0"
+hex = "0.4"
+hmac = "0.12.1"
+home = "0.5"
+http-auth = { version = "0.1.6", default-features = false }
+humantime = "2.0.0"
+ignore = "0.4.7"
+im-rc = "15.0.0"
+indexmap = "1"
+is-terminal = "0.4.4"
+itertools = "0.10.0"
+jobserver = "0.1.26"
+lazy_static = "1.2.0"
+lazycell = "1.2.0"
+libc = "0.2"
+libgit2-sys = "0.15.0"
+log = "0.4.6"
+memchr = "2.1.3"
+opener = "0.5"
+openssl = { version = '0.10.11', optional = true }
+os_info = "3.5.0"
+pasetors = { version = "0.6.4", features = ["v3", "paserk", "std", "serde"] }
+pathdiff = "0.2"
+pretty_env_logger = { version = "0.4", optional = true }
+rand = "0.8.5"
+rustfix = "0.6.0"
+semver = { version = "1.0.3", features = ["serde"] }
+serde = { version = "1.0.123", features = ["derive"] }
+serde-value = "0.7.0"
+serde_ignored = "0.1.0"
+serde_json = { version = "1.0.30", features = ["raw_value"] }
+sha1 = "0.10.5"
+shell-escape = "0.1.4"
+strip-ansi-escapes = "0.1.0"
+tar = { version = "0.4.38", default-features = false }
+tempfile = "3.0"
+termcolor = "1.1"
+time = { version = "0.3", features = ["parsing", "formatting"]}
+toml = "0.7.0"
+toml_edit = "0.19.0"
+unicode-width = "0.1.5"
+unicode-xid = "0.2.0"
+url = "2.2.2"
+walkdir = "2.2"
+
+# A noop dependency that changes in the Rust repository; it's a bit of a hack.
+# See the `src/tools/rustc-workspace-hack/README.md` file in `rust-lang/rust`
+# for more information.
+rustc-workspace-hack = "1.0.0"
+
+[target.'cfg(windows)'.dependencies]
+fwdansi = "1.1.0"
+
+[target.'cfg(windows)'.dependencies.windows-sys]
+version = "0.45"
+features = [
+ "Win32_Foundation",
+ "Win32_Storage_FileSystem",
+ "Win32_System_Console",
+ "Win32_System_IO",
+ "Win32_System_Threading",
+ "Win32_System_JobObjects",
+ "Win32_Security",
+ "Win32_System_SystemServices"
+]
+
+[dev-dependencies]
+cargo-test-macro = { path = "crates/cargo-test-macro" }
+cargo-test-support = { path = "crates/cargo-test-support" }
+same-file = "1.0.6"
+snapbox = { version = "0.4.0", features = ["diff", "path"] }
+
+[build-dependencies]
+flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] }
+tar = { version = "0.4.38", default-features = false }
+
+[[bin]]
+name = "cargo"
+test = false
+doc = false
+
+[features]
+vendored-openssl = ["openssl/vendored"]
+vendored-libgit2 = ["libgit2-sys/vendored"]
+pretty-env-logger = ["pretty_env_logger"]
diff --git a/src/tools/cargo/LICENSE-APACHE b/src/tools/cargo/LICENSE-APACHE
new file mode 100644
index 000000000..c98d27d4f
--- /dev/null
+++ b/src/tools/cargo/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/LICENSE-2.0
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/src/tools/cargo/LICENSE-MIT b/src/tools/cargo/LICENSE-MIT
new file mode 100644
index 000000000..31aa79387
--- /dev/null
+++ b/src/tools/cargo/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/src/tools/cargo/LICENSE-THIRD-PARTY b/src/tools/cargo/LICENSE-THIRD-PARTY
new file mode 100644
index 000000000..8f83ab502
--- /dev/null
+++ b/src/tools/cargo/LICENSE-THIRD-PARTY
@@ -0,0 +1,1272 @@
+The Cargo source code itself does not bundle any third party libraries, but it
+depends on a number of libraries which carry their own copyright notices and
+license terms. These libraries are normally all statically linked into the
+binary distributions of Cargo:
+
+* OpenSSL - https://www.openssl.org/source/license.html
+
+ Copyright (c) 1998-2011 The OpenSSL Project. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+ 3. All advertising materials mentioning features or use of this
+ software must display the following acknowledgment:
+ "This product includes software developed by the OpenSSL Project
+ for use in the OpenSSL Toolkit. (https://www.openssl.org/)"
+
+ 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+ endorse or promote products derived from this software without
+ prior written permission. For written permission, please contact
+ openssl-core@openssl.org.
+
+ 5. Products derived from this software may not be called "OpenSSL"
+ nor may "OpenSSL" appear in their names without prior written
+ permission of the OpenSSL Project.
+
+ 6. Redistributions of any form whatsoever must retain the following
+ acknowledgment:
+ "This product includes software developed by the OpenSSL Project
+ for use in the OpenSSL Toolkit (https://www.openssl.org/)"
+
+ THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+ EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
+ ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ OF THE POSSIBILITY OF SUCH DAMAGE.
+ ====================================================================
+
+ This product includes cryptographic software written by Eric Young
+ (eay@cryptsoft.com). This product includes software written by Tim
+ Hudson (tjh@cryptsoft.com).
+
+ ---
+
+ Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ All rights reserved.
+
+ This package is an SSL implementation written
+ by Eric Young (eay@cryptsoft.com).
+ The implementation was written so as to conform with Netscapes SSL.
+
+ This library is free for commercial and non-commercial use as long as
+ the following conditions are aheared to. The following conditions
+ apply to all code found in this distribution, be it the RC4, RSA,
+ lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ included with this distribution is covered by the same copyright terms
+ except that the holder is Tim Hudson (tjh@cryptsoft.com).
+
+ Copyright remains Eric Young's, and as such any Copyright notices in
+ the code are not to be removed.
+ If this package is used in a product, Eric Young should be given attribution
+ as the author of the parts of the library used.
+ This can be in the form of a textual message at program startup or
+ in documentation (online or textual) provided with the package.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ 1. Redistributions of source code must retain the copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ 3. All advertising materials mentioning features or use of this software
+ must display the following acknowledgement:
+ "This product includes cryptographic software written by
+ Eric Young (eay@cryptsoft.com)"
+ The word 'cryptographic' can be left out if the rouines from the library
+ being used are not cryptographic related :-).
+ 4. If you include any Windows specific code (or a derivative thereof) from
+ the apps directory (application code) you must include an acknowledgement:
+ "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+
+ THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ SUCH DAMAGE.
+
+ The licence and distribution terms for any publically available version or
+ derivative of this code cannot be changed. i.e. this code cannot simply be
+ copied and put under another distribution licence
+ [including the GNU Public Licence.]
+
+* libgit2 - https://github.com/libgit2/libgit2/blob/master/COPYING
+
+ libgit2 is Copyright (C) the libgit2 contributors,
+ unless otherwise stated. See the AUTHORS file for details.
+
+ Note that the only valid version of the GPL as far as this project
+ is concerned is _this_ particular version of the license (ie v2, not
+ v2.2 or v3.x or whatever), unless explicitly otherwise stated.
+
+ ----------------------------------------------------------------------
+
+ LINKING EXCEPTION
+
+ In addition to the permissions in the GNU General Public License,
+ the authors give you unlimited permission to link the compiled
+ version of this library into combinations with other programs,
+ and to distribute those combinations without any restriction
+ coming from the use of this file. (The General Public License
+ restrictions do apply in other respects; for example, they cover
+ modification of the file, and distribution when not linked into
+ a combined executable.)
+
+ ----------------------------------------------------------------------
+
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+ 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+ freedom to share and change it. By contrast, the GNU General Public
+ License is intended to guarantee your freedom to share and change free
+ software--to make sure the software is free for all its users. This
+ General Public License applies to most of the Free Software
+ Foundation's software and to any other program whose authors commit to
+ using it. (Some other Free Software Foundation software is covered by
+ the GNU Library General Public License instead.) You can apply it to
+ your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+ price. Our General Public Licenses are designed to make sure that you
+ have the freedom to distribute copies of free software (and charge for
+ this service if you wish), that you receive source code or can get it
+ if you want it, that you can change the software or use pieces of it
+ in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+ anyone to deny you these rights or to ask you to surrender the rights.
+ These restrictions translate to certain responsibilities for you if you
+ distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+ gratis or for a fee, you must give the recipients all the rights that
+ you have. You must make sure that they, too, receive or can get the
+ source code. And you must show them these terms so they know their
+ rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+ (2) offer you this license which gives you legal permission to copy,
+ distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+ that everyone understands that there is no warranty for this free
+ software. If the software is modified by someone else and passed on, we
+ want its recipients to know that what they have is not the original, so
+ that any problems introduced by others will not reflect on the original
+ authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+ patents. We wish to avoid the danger that redistributors of a free
+ program will individually obtain patent licenses, in effect making the
+ program proprietary. To prevent this, we have made it clear that any
+ patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+ modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+ a notice placed by the copyright holder saying it may be distributed
+ under the terms of this General Public License. The "Program", below,
+ refers to any such program or work, and a "work based on the Program"
+ means either the Program or any derivative work under copyright law:
+ that is to say, a work containing the Program or a portion of it,
+ either verbatim or with modifications and/or translated into another
+ language. (Hereinafter, translation is included without limitation in
+ the term "modification".) Each licensee is addressed as "you".
+
+ Activities other than copying, distribution and modification are not
+ covered by this License; they are outside its scope. The act of
+ running the Program is not restricted, and the output from the Program
+ is covered only if its contents constitute a work based on the
+ Program (independent of having been made by running the Program).
+ Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+ source code as you receive it, in any medium, provided that you
+ conspicuously and appropriately publish on each copy an appropriate
+ copyright notice and disclaimer of warranty; keep intact all the
+ notices that refer to this License and to the absence of any warranty;
+ and give any other recipients of the Program a copy of this License
+ along with the Program.
+
+ You may charge a fee for the physical act of transferring a copy, and
+ you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+ of it, thus forming a work based on the Program, and copy and
+ distribute such modifications or work under the terms of Section 1
+ above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+ These requirements apply to the modified work as a whole. If
+ identifiable sections of that work are not derived from the Program,
+ and can be reasonably considered independent and separate works in
+ themselves, then this License, and its terms, do not apply to those
+ sections when you distribute them as separate works. But when you
+ distribute the same sections as part of a whole which is a work based
+ on the Program, the distribution of the whole must be on the terms of
+ this License, whose permissions for other licensees extend to the
+ entire whole, and thus to each and every part regardless of who wrote it.
+
+ Thus, it is not the intent of this section to claim rights or contest
+ your rights to work written entirely by you; rather, the intent is to
+ exercise the right to control the distribution of derivative or
+ collective works based on the Program.
+
+ In addition, mere aggregation of another work not based on the Program
+ with the Program (or with a work based on the Program) on a volume of
+ a storage or distribution medium does not bring the other work under
+ the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+ under Section 2) in object code or executable form under the terms of
+ Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+ The source code for a work means the preferred form of the work for
+ making modifications to it. For an executable work, complete source
+ code means all the source code for all modules it contains, plus any
+ associated interface definition files, plus the scripts used to
+ control compilation and installation of the executable. However, as a
+ special exception, the source code distributed need not include
+ anything that is normally distributed (in either source or binary
+ form) with the major components (compiler, kernel, and so on) of the
+ operating system on which the executable runs, unless that component
+ itself accompanies the executable.
+
+ If distribution of executable or object code is made by offering
+ access to copy from a designated place, then offering equivalent
+ access to copy the source code from the same place counts as
+ distribution of the source code, even though third parties are not
+ compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+ except as expressly provided under this License. Any attempt
+ otherwise to copy, modify, sublicense or distribute the Program is
+ void, and will automatically terminate your rights under this License.
+ However, parties who have received copies, or rights, from you under
+ this License will not have their licenses terminated so long as such
+ parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+ signed it. However, nothing else grants you permission to modify or
+ distribute the Program or its derivative works. These actions are
+ prohibited by law if you do not accept this License. Therefore, by
+ modifying or distributing the Program (or any work based on the
+ Program), you indicate your acceptance of this License to do so, and
+ all its terms and conditions for copying, distributing or modifying
+ the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+ Program), the recipient automatically receives a license from the
+ original licensor to copy, distribute or modify the Program subject to
+ these terms and conditions. You may not impose any further
+ restrictions on the recipients' exercise of the rights granted herein.
+ You are not responsible for enforcing compliance by third parties to
+ this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+ infringement or for any other reason (not limited to patent issues),
+ conditions are imposed on you (whether by court order, agreement or
+ otherwise) that contradict the conditions of this License, they do not
+ excuse you from the conditions of this License. If you cannot
+ distribute so as to satisfy simultaneously your obligations under this
+ License and any other pertinent obligations, then as a consequence you
+ may not distribute the Program at all. For example, if a patent
+ license would not permit royalty-free redistribution of the Program by
+ all those who receive copies directly or indirectly through you, then
+ the only way you could satisfy both it and this License would be to
+ refrain entirely from distribution of the Program.
+
+ If any portion of this section is held invalid or unenforceable under
+ any particular circumstance, the balance of the section is intended to
+ apply and the section as a whole is intended to apply in other
+ circumstances.
+
+ It is not the purpose of this section to induce you to infringe any
+ patents or other property right claims or to contest validity of any
+ such claims; this section has the sole purpose of protecting the
+ integrity of the free software distribution system, which is
+ implemented by public license practices. Many people have made
+ generous contributions to the wide range of software distributed
+ through that system in reliance on consistent application of that
+ system; it is up to the author/donor to decide if he or she is willing
+ to distribute software through any other system and a licensee cannot
+ impose that choice.
+
+ This section is intended to make thoroughly clear what is believed to
+ be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+ certain countries either by patents or by copyrighted interfaces, the
+ original copyright holder who places the Program under this License
+ may add an explicit geographical distribution limitation excluding
+ those countries, so that distribution is permitted only in or among
+ countries not thus excluded. In such case, this License incorporates
+ the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+ of the General Public License from time to time. Such new versions will
+ be similar in spirit to the present version, but may differ in detail to
+ address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the Program
+ specifies a version number of this License which applies to it and "any
+ later version", you have the option of following the terms and conditions
+ either of that version or of any later version published by the Free
+ Software Foundation. If the Program does not specify a version number of
+ this License, you may choose any version ever published by the Free Software
+ Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+ programs whose distribution conditions are different, write to the author
+ to ask for permission. For software which is copyrighted by the Free
+ Software Foundation, write to the Free Software Foundation; we sometimes
+ make exceptions for this. Our decision will be guided by the two goals
+ of preserving the free status of all derivatives of our free software and
+ of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+ FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+ OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+ PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+ OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+ TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+ PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+ REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+ WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+ REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+ INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+ OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+ TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+ YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+ PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+ possible use to the public, the best way to achieve this is to make it
+ free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+ to attach them to the start of each source file to most effectively
+ convey the exclusion of warranty; and each file should have at least
+ the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+
+ Also add information on how to contact you by electronic and paper mail.
+
+ If the program is interactive, make it output a short notice like this
+ when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+ The hypothetical commands `show w' and `show c' should show the appropriate
+ parts of the General Public License. Of course, the commands you use may
+ be called something other than `show w' and `show c'; they could even be
+ mouse-clicks or menu items--whatever suits your program.
+
+ You should also get your employer (if you work as a programmer) or your
+ school, if any, to sign a "copyright disclaimer" for the program, if
+ necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+ <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+ This General Public License does not permit incorporating your program into
+ proprietary programs. If your program is a subroutine library, you may
+ consider it more useful to permit linking proprietary applications with the
+ library. If this is what you want to do, use the GNU Library General
+ Public License instead of this License.
+
+ ----------------------------------------------------------------------
+
+ The bundled ZLib code is licensed under the ZLib license:
+
+ Copyright (C) 1995-2010 Jean-loup Gailly and Mark Adler
+
+ This software is provided 'as-is', without any express or implied
+ warranty. In no event will the authors be held liable for any damages
+ arising from the use of this software.
+
+ Permission is granted to anyone to use this software for any purpose,
+ including commercial applications, and to alter it and redistribute it
+ freely, subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not
+ claim that you wrote the original software. If you use this software
+ in a product, an acknowledgment in the product documentation would be
+ appreciated but is not required.
+ 2. Altered source versions must be plainly marked as such, and must not be
+ misrepresented as being the original software.
+ 3. This notice may not be removed or altered from any source distribution.
+
+ Jean-loup Gailly Mark Adler
+ jloup@gzip.org madler@alumni.caltech.edu
+
+ ----------------------------------------------------------------------
+
+ The Clar framework is licensed under the MIT license:
+
+ Copyright (C) 2011 by Vicent Marti
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+
+ ----------------------------------------------------------------------
+
+ The regex library (deps/regex/) is licensed under the GNU LGPL
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ [This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+ freedom to share and change it. By contrast, the GNU General Public
+ Licenses are intended to guarantee your freedom to share and change
+ free software--to make sure the software is free for all its users.
+
+ This license, the Lesser General Public License, applies to some
+ specially designated software packages--typically libraries--of the
+ Free Software Foundation and other authors who decide to use it. You
+ can use it too, but we suggest you first think carefully about whether
+ this license or the ordinary General Public License is the better
+ strategy to use in any particular case, based on the explanations below.
+
+ When we speak of free software, we are referring to freedom of use,
+ not price. Our General Public Licenses are designed to make sure that
+ you have the freedom to distribute copies of free software (and charge
+ for this service if you wish); that you receive source code or can get
+ it if you want it; that you can change the software and use pieces of
+ it in new free programs; and that you are informed that you can do
+ these things.
+
+ To protect your rights, we need to make restrictions that forbid
+ distributors to deny you these rights or to ask you to surrender these
+ rights. These restrictions translate to certain responsibilities for
+ you if you distribute copies of the library or if you modify it.
+
+ For example, if you distribute copies of the library, whether gratis
+ or for a fee, you must give the recipients all the rights that we gave
+ you. You must make sure that they, too, receive or can get the source
+ code. If you link other code with the library, you must provide
+ complete object files to the recipients, so that they can relink them
+ with the library after making changes to the library and recompiling
+ it. And you must show them these terms so they know their rights.
+
+ We protect your rights with a two-step method: (1) we copyright the
+ library, and (2) we offer you this license, which gives you legal
+ permission to copy, distribute and/or modify the library.
+
+ To protect each distributor, we want to make it very clear that
+ there is no warranty for the free library. Also, if the library is
+ modified by someone else and passed on, the recipients should know
+ that what they have is not the original version, so that the original
+ author's reputation will not be affected by problems that might be
+ introduced by others.
+
+ Finally, software patents pose a constant threat to the existence of
+ any free program. We wish to make sure that a company cannot
+ effectively restrict the users of a free program by obtaining a
+ restrictive license from a patent holder. Therefore, we insist that
+ any patent license obtained for a version of the library must be
+ consistent with the full freedom of use specified in this license.
+
+ Most GNU software, including some libraries, is covered by the
+ ordinary GNU General Public License. This license, the GNU Lesser
+ General Public License, applies to certain designated libraries, and
+ is quite different from the ordinary General Public License. We use
+ this license for certain libraries in order to permit linking those
+ libraries into non-free programs.
+
+ When a program is linked with a library, whether statically or using
+ a shared library, the combination of the two is legally speaking a
+ combined work, a derivative of the original library. The ordinary
+ General Public License therefore permits such linking only if the
+ entire combination fits its criteria of freedom. The Lesser General
+ Public License permits more lax criteria for linking other code with
+ the library.
+
+ We call this license the "Lesser" General Public License because it
+ does Less to protect the user's freedom than the ordinary General
+ Public License. It also provides other free software developers Less
+ of an advantage over competing non-free programs. These disadvantages
+ are the reason we use the ordinary General Public License for many
+ libraries. However, the Lesser license provides advantages in certain
+ special circumstances.
+
+ For example, on rare occasions, there may be a special need to
+ encourage the widest possible use of a certain library, so that it becomes
+ a de-facto standard. To achieve this, non-free programs must be
+ allowed to use the library. A more frequent case is that a free
+ library does the same job as widely used non-free libraries. In this
+ case, there is little to gain by limiting the free library to free
+ software only, so we use the Lesser General Public License.
+
+ In other cases, permission to use a particular library in non-free
+ programs enables a greater number of people to use a large body of
+ free software. For example, permission to use the GNU C Library in
+ non-free programs enables many more people to use the whole GNU
+ operating system, as well as its variant, the GNU/Linux operating
+ system.
+
+ Although the Lesser General Public License is Less protective of the
+ users' freedom, it does ensure that the user of a program that is
+ linked with the Library has the freedom and the wherewithal to run
+ that program using a modified version of the Library.
+
+ The precise terms and conditions for copying, distribution and
+ modification follow. Pay close attention to the difference between a
+ "work based on the library" and a "work that uses the library". The
+ former contains code derived from the library, whereas the latter must
+ be combined with the library in order to run.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library or other
+ program which contains a notice placed by the copyright holder or
+ other authorized party saying it may be distributed under the terms of
+ this Lesser General Public License (also called "this License").
+ Each licensee is addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+ prepared so as to be conveniently linked with application programs
+ (which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+ which has been distributed under these terms. A "work based on the
+ Library" means either the Library or any derivative work under
+ copyright law: that is to say, a work containing the Library or a
+ portion of it, either verbatim or with modifications and/or translated
+ straightforwardly into another language. (Hereinafter, translation is
+ included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+ making modifications to it. For a library, complete source code means
+ all the source code for all modules it contains, plus any associated
+ interface definition files, plus the scripts used to control compilation
+ and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+ covered by this License; they are outside its scope. The act of
+ running a program using the Library is not restricted, and output from
+ such a program is covered only if its contents constitute a work based
+ on the Library (independent of the use of the Library in a tool for
+ writing it). Whether that is true depends on what the Library does
+ and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+ complete source code as you receive it, in any medium, provided that
+ you conspicuously and appropriately publish on each copy an
+ appropriate copyright notice and disclaimer of warranty; keep intact
+ all the notices that refer to this License and to the absence of any
+ warranty; and distribute a copy of this License along with the
+ Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+ and you may at your option offer warranty protection in exchange for a
+ fee.
+
+ 2. You may modify your copy or copies of the Library or any portion
+ of it, thus forming a work based on the Library, and copy and
+ distribute such modifications or work under the terms of Section 1
+ above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+ These requirements apply to the modified work as a whole. If
+ identifiable sections of that work are not derived from the Library,
+ and can be reasonably considered independent and separate works in
+ themselves, then this License, and its terms, do not apply to those
+ sections when you distribute them as separate works. But when you
+ distribute the same sections as part of a whole which is a work based
+ on the Library, the distribution of the whole must be on the terms of
+ this License, whose permissions for other licensees extend to the
+ entire whole, and thus to each and every part regardless of who wrote
+ it.
+
+ Thus, it is not the intent of this section to claim rights or contest
+ your rights to work written entirely by you; rather, the intent is to
+ exercise the right to control the distribution of derivative or
+ collective works based on the Library.
+
+ In addition, mere aggregation of another work not based on the Library
+ with the Library (or with a work based on the Library) on a volume of
+ a storage or distribution medium does not bring the other work under
+ the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+ License instead of this License to a given copy of the Library. To do
+ this, you must alter all the notices that refer to this License, so
+ that they refer to the ordinary GNU General Public License, version 2,
+ instead of to this License. (If a newer version than version 2 of the
+ ordinary GNU General Public License has appeared, then you can specify
+ that version instead if you wish.) Do not make any other change in
+ these notices.
+
+ Once this change is made in a given copy, it is irreversible for
+ that copy, so the ordinary GNU General Public License applies to all
+ subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+ the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+ derivative of it, under Section 2) in object code or executable form
+ under the terms of Sections 1 and 2 above provided that you accompany
+ it with the complete corresponding machine-readable source code, which
+ must be distributed under the terms of Sections 1 and 2 above on a
+ medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+ from a designated place, then offering equivalent access to copy the
+ source code from the same place satisfies the requirement to
+ distribute the source code, even though third parties are not
+ compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+ Library, but is designed to work with the Library by being compiled or
+ linked with it, is called a "work that uses the Library". Such a
+ work, in isolation, is not a derivative work of the Library, and
+ therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+ creates an executable that is a derivative of the Library (because it
+ contains portions of the Library), rather than a "work that uses the
+ library". The executable is therefore covered by this License.
+ Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+ that is part of the Library, the object code for the work may be a
+ derivative work of the Library even though the source code is not.
+ Whether this is true is especially significant if the work can be
+ linked without the Library, or if the work is itself a library. The
+ threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+ structure layouts and accessors, and small macros and small inline
+ functions (ten lines or less in length), then the use of the object
+ file is unrestricted, regardless of whether it is legally a derivative
+ work. (Executables containing this object code plus portions of the
+ Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+ distribute the object code for the work under the terms of Section 6.
+ Any executables containing that work also fall under Section 6,
+ whether or not they are linked directly with the Library itself.
+
+ 6. As an exception to the Sections above, you may also combine or
+ link a "work that uses the Library" with the Library to produce a
+ work containing portions of the Library, and distribute that work
+ under terms of your choice, provided that the terms permit
+ modification of the work for the customer's own use and reverse
+ engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+ Library is used in it and that the Library and its use are covered by
+ this License. You must supply a copy of this License. If the work
+ during execution displays copyright notices, you must include the
+ copyright notice for the Library among them, as well as a reference
+ directing the user to the copy of this License. Also, you must do one
+ of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (1) uses at run time a
+ copy of the library already present on the user's computer system,
+ rather than copying library functions into the executable, and (2)
+ will operate properly with a modified version of the library, if
+ the user installs one, as long as the modified version is
+ interface-compatible with the version that the work was made with.
+
+ c) Accompany the work with a written offer, valid for at
+ least three years, to give the same user the materials
+ specified in Subsection 6a, above, for a charge no more
+ than the cost of performing this distribution.
+
+ d) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ e) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+ Library" must include any data and utility programs needed for
+ reproducing the executable from it. However, as a special exception,
+ the materials to be distributed need not include anything that is
+ normally distributed (in either source or binary form) with the major
+ components (compiler, kernel, and so on) of the operating system on
+ which the executable runs, unless that component itself accompanies
+ the executable.
+
+ It may happen that this requirement contradicts the license
+ restrictions of other proprietary libraries that do not normally
+ accompany the operating system. Such a contradiction means you cannot
+ use both them and the Library together in an executable that you
+ distribute.
+
+ 7. You may place library facilities that are a work based on the
+ Library side-by-side in a single library together with other library
+ facilities not covered by this License, and distribute such a combined
+ library, provided that the separate distribution of the work based on
+ the Library and of the other library facilities is otherwise
+ permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+ the Library except as expressly provided under this License. Any
+ attempt otherwise to copy, modify, sublicense, link with, or
+ distribute the Library is void, and will automatically terminate your
+ rights under this License. However, parties who have received copies,
+ or rights, from you under this License will not have their licenses
+ terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+ signed it. However, nothing else grants you permission to modify or
+ distribute the Library or its derivative works. These actions are
+ prohibited by law if you do not accept this License. Therefore, by
+ modifying or distributing the Library (or any work based on the
+ Library), you indicate your acceptance of this License to do so, and
+ all its terms and conditions for copying, distributing or modifying
+ the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+ Library), the recipient automatically receives a license from the
+ original licensor to copy, distribute, link with or modify the Library
+ subject to these terms and conditions. You may not impose any further
+ restrictions on the recipients' exercise of the rights granted herein.
+ You are not responsible for enforcing compliance by third parties with
+ this License.
+
+ 11. If, as a consequence of a court judgment or allegation of patent
+ infringement or for any other reason (not limited to patent issues),
+ conditions are imposed on you (whether by court order, agreement or
+ otherwise) that contradict the conditions of this License, they do not
+ excuse you from the conditions of this License. If you cannot
+ distribute so as to satisfy simultaneously your obligations under this
+ License and any other pertinent obligations, then as a consequence you
+ may not distribute the Library at all. For example, if a patent
+ license would not permit royalty-free redistribution of the Library by
+ all those who receive copies directly or indirectly through you, then
+ the only way you could satisfy both it and this License would be to
+ refrain entirely from distribution of the Library.
+
+ If any portion of this section is held invalid or unenforceable under any
+ particular circumstance, the balance of the section is intended to apply,
+ and the section as a whole is intended to apply in other circumstances.
+
+ It is not the purpose of this section to induce you to infringe any
+ patents or other property right claims or to contest validity of any
+ such claims; this section has the sole purpose of protecting the
+ integrity of the free software distribution system which is
+ implemented by public license practices. Many people have made
+ generous contributions to the wide range of software distributed
+ through that system in reliance on consistent application of that
+ system; it is up to the author/donor to decide if he or she is willing
+ to distribute software through any other system and a licensee cannot
+ impose that choice.
+
+ This section is intended to make thoroughly clear what is believed to
+ be a consequence of the rest of this License.
+
+ 12. If the distribution and/or use of the Library is restricted in
+ certain countries either by patents or by copyrighted interfaces, the
+ original copyright holder who places the Library under this License may add
+ an explicit geographical distribution limitation excluding those countries,
+ so that distribution is permitted only in or among countries not thus
+ excluded. In such case, this License incorporates the limitation as if
+ written in the body of this License.
+
+ 13. The Free Software Foundation may publish revised and/or new
+ versions of the Lesser General Public License from time to time.
+ Such new versions will be similar in spirit to the present version,
+ but may differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the Library
+ specifies a version number of this License which applies to it and
+ "any later version", you have the option of following the terms and
+ conditions either of that version or of any later version published by
+ the Free Software Foundation. If the Library does not specify a
+ license version number, you may choose any version ever published by
+ the Free Software Foundation.
+
+ 14. If you wish to incorporate parts of the Library into other free
+ programs whose distribution conditions are incompatible with these,
+ write to the author to ask for permission. For software which is
+ copyrighted by the Free Software Foundation, write to the Free
+ Software Foundation; we sometimes make exceptions for this. Our
+ decision will be guided by the two goals of preserving the free status
+ of all derivatives of our free software and of promoting the sharing
+ and reuse of software generally.
+
+ NO WARRANTY
+
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+ WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+ EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+ OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+ KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+ LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+ THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+ WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+ AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+ FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+ CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+ LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+ RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+ FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+ SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Libraries
+
+ If you develop a new library, and you want it to be of the greatest
+ possible use to the public, we recommend making it free software that
+ everyone can redistribute and change. You can do so by permitting
+ redistribution under these terms (or, alternatively, under the terms of the
+ ordinary General Public License).
+
+ To apply these terms, attach the following notices to the library. It is
+ safest to attach them to the start of each source file to most effectively
+ convey the exclusion of warranty; and each file should have at least the
+ "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the library's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+ Also add information on how to contact you by electronic and paper mail.
+
+ You should also get your employer (if you work as a programmer) or your
+ school, if any, to sign a "copyright disclaimer" for the library, if
+ necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+ <signature of Ty Coon>, 1 April 1990
+ Ty Coon, President of Vice
+
+ That's all there is to it!
+
+ ----------------------------------------------------------------------
+
+* libssh2 - https://www.libssh2.org/license.html
+
+ Copyright (c) 2004-2007 Sara Golemon <sarag@libssh2.org>
+ Copyright (c) 2005,2006 Mikhail Gusarov <dottedmag@dottedmag.net>
+ Copyright (c) 2006-2007 The Written Word, Inc.
+ Copyright (c) 2007 Eli Fant <elifantu@mail.ru>
+ Copyright (c) 2009 Daniel Stenberg
+ Copyright (C) 2008, 2009 Simon Josefsson
+ All rights reserved.
+
+ Redistribution and use in source and binary forms,
+ with or without modification, are permitted provided
+ that the following conditions are met:
+
+ Redistributions of source code must retain the above
+ copyright notice, this list of conditions and the
+ following disclaimer.
+
+ Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials
+ provided with the distribution.
+
+ Neither the name of the copyright holder nor the names
+ of any other contributors may be used to endorse or
+ promote products derived from this software without
+ specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
+ OF SUCH DAMAGE.
+
+* libcurl - https://curl.haxx.se/docs/copyright.html
+
+ COPYRIGHT AND PERMISSION NOTICE
+
+ Copyright (c) 1996 - 2014, Daniel Stenberg, daniel@haxx.se.
+
+ All rights reserved.
+
+ Permission to use, copy, modify, and distribute this software for any
+ purpose with or without fee is hereby granted, provided that the above
+ copyright notice and this permission notice appear in all copies.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+ IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+ DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+ USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+ Except as contained in this notice, the name of a copyright holder shall not
+ be used in advertising or otherwise to promote the sale, use or other
+ dealings in this Software without prior written authorization of the
+ copyright holder.
+
+* flate2-rs - https://github.com/alexcrichton/flate2-rs/blob/master/LICENSE-MIT
+* link-config - https://github.com/alexcrichton/link-config/blob/master/LICENSE-MIT
+* openssl-static-sys - https://github.com/alexcrichton/openssl-static-sys/blob/master/LICENSE-MIT
+* toml-rs - https://github.com/alexcrichton/toml-rs/blob/master/LICENSE-MIT
+* libssh2-static-sys - https://github.com/alexcrichton/libssh2-static-sys/blob/master/LICENSE-MIT
+* git2-rs - https://github.com/alexcrichton/git2-rs/blob/master/LICENSE-MIT
+* tar-rs - https://github.com/alexcrichton/tar-rs/blob/master/LICENSE-MIT
+
+ Copyright (c) 2014 Alex Crichton
+
+ Permission is hereby granted, free of charge, to any
+ person obtaining a copy of this software and associated
+ documentation files (the "Software"), to deal in the
+ Software without restriction, including without
+ limitation the rights to use, copy, modify, merge,
+ publish, distribute, sublicense, and/or sell copies of
+ the Software, and to permit persons to whom the Software
+ is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice
+ shall be included in all copies or substantial portions
+ of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+ TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+ PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+ SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+ IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+
+* glob - https://github.com/rust-lang/glob/blob/master/LICENSE-MIT
+* semver - https://github.com/rust-lang/semver/blob/master/LICENSE-MIT
+
+ Copyright (c) 2014 The Rust Project Developers
+
+ Permission is hereby granted, free of charge, to any
+ person obtaining a copy of this software and associated
+ documentation files (the "Software"), to deal in the
+ Software without restriction, including without
+ limitation the rights to use, copy, modify, merge,
+ publish, distribute, sublicense, and/or sell copies of
+ the Software, and to permit persons to whom the Software
+ is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice
+ shall be included in all copies or substantial portions
+ of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+ TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+ PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+ SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+ IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+
+* rust-url - https://github.com/servo/rust-url/blob/master/LICENSE-MIT
+
+ Copyright (c) 2006-2009 Graydon Hoare
+ Copyright (c) 2009-2013 Mozilla Foundation
+
+ Permission is hereby granted, free of charge, to any
+ person obtaining a copy of this software and associated
+ documentation files (the "Software"), to deal in the
+ Software without restriction, including without
+ limitation the rights to use, copy, modify, merge,
+ publish, distribute, sublicense, and/or sell copies of
+ the Software, and to permit persons to whom the Software
+ is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice
+ shall be included in all copies or substantial portions
+ of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+ TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+ PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+ SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+ IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+
+* rust-encoding - https://github.com/lifthrasiir/rust-encoding/blob/master/LICENSE.txt
+
+ The MIT License (MIT)
+
+ Copyright (c) 2013, Kang Seonghoon.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+
+* curl-rust - https://github.com/carllerche/curl-rust/blob/master/LICENSE
+
+ Copyright (c) 2014 Carl Lerche
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+
+* docopt.rs - https://github.com/docopt/docopt.rs/blob/master/UNLICENSE
+
+ This is free and unencumbered software released into the public domain.
+
+ Anyone is free to copy, modify, publish, use, compile, sell, or
+ distribute this software, either in source code form or as a compiled
+ binary, for any purpose, commercial or non-commercial, and by any
+ means.
+
+ In jurisdictions that recognize copyright laws, the author or authors
+ of this software dedicate any and all copyright interest in the
+ software to the public domain. We make this dedication for the benefit
+ of the public at large and to the detriment of our heirs and
+ successors. We intend this dedication to be an overt act of
+ relinquishment in perpetuity of all present and future rights to this
+ software under copyright law.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+ OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+
+ For more information, please refer to <https://unlicense.org/>
+
diff --git a/src/tools/cargo/README.md b/src/tools/cargo/README.md
new file mode 100644
index 000000000..423555e62
--- /dev/null
+++ b/src/tools/cargo/README.md
@@ -0,0 +1,127 @@
+# Cargo
+
+Cargo downloads your Rust project’s dependencies and compiles your project.
+
+**To start using Cargo**, learn more at [The Cargo Book].
+
+**To start developing Cargo itself**, read the [Cargo Contributor Guide].
+
+[The Cargo Book]: https://doc.rust-lang.org/cargo/
+[Cargo Contributor Guide]: https://rust-lang.github.io/cargo/contrib/
+
+## Code Status
+
+[![CI](https://github.com/rust-lang/cargo/actions/workflows/main.yml/badge.svg?branch=auto-cargo)](https://github.com/rust-lang/cargo/actions/workflows/main.yml)
+
+Code documentation: https://docs.rs/cargo/
+
+## Installing Cargo
+
+Cargo is distributed by default with Rust, so if you've got `rustc` installed
+locally you probably also have `cargo` installed locally.
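+
+If you are not sure whether it is already available, a quick check is:
+
+```
+cargo --version
+```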
+
+## Compiling from Source
+
+### Requirements
+
+Cargo requires the following tools and packages to build:
+
+* `cargo` and `rustc`
+* A C compiler [for your platform](https://github.com/rust-lang/cc-rs#compile-time-requirements)
+* `git` (to clone this repository)
+
+**Other requirements:**
+
+The following are optional based on your platform and needs.
+
+* `pkg-config` — This is used to help locate system packages, such as `libssl` headers/libraries. This may not be required in all cases, such as using vendored OpenSSL, or on Windows.
+* OpenSSL — Only needed on Unix-like systems and only if the `vendored-openssl` Cargo feature is not used.
+
+  This requires the development headers, which can be obtained from the `libssl-dev` package on Ubuntu, `openssl-devel` with apk or yum, or the `openssl` package from Homebrew on macOS.
+
+ If using the `vendored-openssl` Cargo feature, then a static copy of OpenSSL will be built from source instead of using the system OpenSSL.
+ This may require additional tools such as `perl` and `make`.
+
+ On macOS, common installation directories from Homebrew, MacPorts, or pkgsrc will be checked. Otherwise it will fall back to `pkg-config`.
+
+ On Windows, the system-provided Schannel will be used instead.
+
+ LibreSSL is also supported.
+
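+For example, with the `vendored-openssl` feature mentioned above, a
+from-source build that does not rely on a system OpenSSL would look roughly
+like:
+
+```
+cargo build --release --features vendored-openssl
+```
+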
+**Optional system libraries:**
+
+The build will automatically use vendored versions of the following libraries. However, if they are provided by the system and can be found with `pkg-config`, then the system libraries will be used instead:
+
+* [`libcurl`](https://curl.se/libcurl/) — Used for network transfers.
+* [`libgit2`](https://libgit2.org/) — Used for fetching git dependencies.
+* [`libssh2`](https://www.libssh2.org/) — Used for SSH access to git repositories.
+* [`libz`](https://zlib.net/) (aka zlib) — Used for data compression.
+
+It is recommended to use the vendored versions as they are the versions that are tested to work with Cargo.
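+
+If you want to check whether one of these would be picked up from the system,
+querying `pkg-config` directly is a reasonable sanity check (the module name
+`libcurl` here is just an example):
+
+```
+pkg-config --exists libcurl && pkg-config --modversion libcurl
+```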
+
+### Compiling
+
+First, you'll want to check out this repository:
+
+```
+git clone https://github.com/rust-lang/cargo.git
+cd cargo
+```
+
+With `cargo` already installed, you can simply run:
+
+```
+cargo build --release
+```
+
+## Adding new subcommands to Cargo
+
+Cargo is designed to be extensible with new subcommands without having to modify
+Cargo itself. See [the Wiki page][third-party-subcommands] for more details and
+a list of known community-developed subcommands.
+
+[third-party-subcommands]: https://github.com/rust-lang/cargo/wiki/Third-party-cargo-subcommands
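+
+The discovery mechanism is name-based: any executable named `cargo-<name>` on
+`PATH` can be invoked as `cargo <name>`, and Cargo forwards the remaining
+arguments to it (including the subcommand name itself). As a minimal sketch,
+using the hypothetical name `cargo-hello`:
+
+```rust
+// main.rs of a hypothetical `cargo-hello` binary crate.
+// `cargo hello --flag` runs this program as `cargo-hello hello --flag`,
+// so the first two arguments (program path and subcommand name) are skipped.
+fn main() {
+    let args: Vec<String> = std::env::args().skip(2).collect();
+    println!("hello from a custom Cargo subcommand; extra args: {:?}", args);
+}
+```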
+
+
+## Releases
+
+Cargo releases coincide with Rust releases.
+High level release notes are available as part of [Rust's release notes][rel].
+Detailed release notes are available in this repo at [CHANGELOG.md].
+
+[rel]: https://github.com/rust-lang/rust/blob/master/RELEASES.md
+[CHANGELOG.md]: CHANGELOG.md
+
+## Reporting issues
+
+Found a bug? We'd love to know about it!
+
+Please report all issues on the GitHub [issue tracker][issues].
+
+[issues]: https://github.com/rust-lang/cargo/issues
+
+## Contributing
+
+See the **[Cargo Contributor Guide]** for a complete introduction
+to contributing to Cargo.
+
+## License
+
+Cargo is primarily distributed under the terms of both the MIT license
+and the Apache License (Version 2.0).
+
+See [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) for details.
+
+### Third party software
+
+This product includes software developed by the OpenSSL Project
+for use in the OpenSSL Toolkit (https://www.openssl.org/).
+
+In binary form, this product includes software that is licensed under the
+terms of the GNU General Public License, version 2, with a linking exception,
+which can be obtained from the [upstream repository][1].
+
+See [LICENSE-THIRD-PARTY](LICENSE-THIRD-PARTY) for details.
+
+[1]: https://github.com/libgit2/libgit2
+
diff --git a/src/tools/cargo/benches/README.md b/src/tools/cargo/benches/README.md
new file mode 100644
index 000000000..b4b8b190a
--- /dev/null
+++ b/src/tools/cargo/benches/README.md
@@ -0,0 +1,124 @@
+# Cargo Benchmarking
+
+This directory contains some benchmarks for cargo itself. This uses
+[Criterion] for running benchmarks. It is recommended to read the Criterion
+book to get familiar with how to use it. A basic usage would be:
+
+```sh
+cd benches/benchsuite
+cargo bench
+```
+
+The tests involve downloading the index and benchmarking against some
+real-world and artificial workspaces located in the [`workspaces`](workspaces)
+directory.
+
+**Beware** that the initial download can take a fairly long time (at least 10
+minutes, even on an extremely fast network) and require significant disk
+space (around 4.5GB). The benchsuite will cache the index and downloaded
+crates in the `target/tmp/bench` directory, so subsequent runs should be
+faster. You can (and probably should) specify individual benchmarks to run to
+narrow it down to a more reasonable set, for example:
+
+```sh
+cargo bench -- resolve_ws/rust
+```
+
+This will only download what's necessary for the rust-lang/rust workspace
+(which is about 330MB) and run the benchmarks against it (which should take
+about a minute). To get a list of all the benchmarks, run:
+
+```sh
+cargo bench -- --list
+```
+
+## Viewing reports
+
+The benchmarks display some basic information on the command-line while they
+run. A more complete HTML report can be found at
+`target/criterion/report/index.html` which contains links to all the
+benchmarks and summaries. Check out the Criterion book for more information on
+the extensive reporting capabilities.
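+
+For example, on most Linux desktops you might open the report with `xdg-open`
+(or `open` on macOS), using the path mentioned above:
+
+```sh
+xdg-open target/criterion/report/index.html
+```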
+
+## Comparing implementations
+
+Knowing the raw numbers can be useful, but what you're probably most
+interested in is checking if your changes help or hurt performance. To do
+that, you need to run the benchmarks multiple times.
+
+First, run the benchmarks from the master branch of cargo without any changes.
+To make it easier to compare, Criterion supports naming the baseline so that
+you can iterate on your code and compare against it multiple times.
+
+```sh
+cargo bench -- --save-baseline master
+```
+
+Now switch to the branch with your changes and re-run the benchmarks,
+comparing against the saved baseline:
+
+```sh
+cargo bench -- --baseline master
+```
+
+You can repeat the last command as you make changes to re-compare against the
+master baseline.
+
+Without the baseline arguments, it will compare against the last run, which
+can be helpful for comparing incremental changes.
+
+## Capturing workspaces
+
+The [`workspaces`](workspaces) directory contains a variety of workspaces
+intended to provide good exercises for benchmarks. Some of these are shadow
+copies of real-world workspaces. This
+is done with the tool in the [`capture`](capture) directory. The tool will
+copy `Cargo.lock` and all of the `Cargo.toml` files of the workspace members.
+It also adds an empty `lib.rs` so Cargo won't error, and sanitizes the
+`Cargo.toml` to some degree, removing unwanted elements. Finally, it
+compresses everything into a `tgz`.
+
+To run it, do:
+
+```sh
+cd benches/capture
+cargo run -- /path/to/workspace/foo
+```
+
+The resolver benchmarks also support the `CARGO_BENCH_WORKSPACES` environment
+variable, which you can point to a Cargo workspace if you want to try
+different workspaces. For example:
+
+```sh
+CARGO_BENCH_WORKSPACES=/path/to/some/workspace cargo bench
+```
+
+## TODO
+
+This is just a start for establishing a benchmarking suite for Cargo. There's
+a lot that can be added. Some ideas:
+
+* Fix the benchmarks so that the resolver setup doesn't run every iteration.
+* Benchmark [this section of
+ code](https://github.com/rust-lang/cargo/blob/a821e2cb24d7b6013433f069ab3bad53d160e100/src/cargo/ops/cargo_compile.rs#L470-L549)
+ which builds the unit graph. The performance there isn't great, and it would
+ be good to keep an eye on it. Unfortunately that would mean doing a bit of
+ work to make `generate_targets` publicly visible, and there is a bunch of
+ setup code that may need to be duplicated.
+* Benchmark the fingerprinting code.
+* Benchmark running the `cargo` executable. Running something like `cargo
+ build` or `cargo check` with everything "Fresh" would be a good end-to-end
+ exercise to measure the overall overhead of Cargo.
+* Benchmark pathological resolver scenarios. There might be some cases where
+ the resolver can spend a significant amount of time. It would be good to
+ identify if these exist, and create benchmarks for them. This may require
+ creating an artificial index, similar to the `resolver-tests`. This should
+ also consider scenarios where the resolver ultimately fails.
+* Benchmark without `Cargo.lock`. I'm not sure if this is particularly
+ valuable, since we are mostly concerned with incremental builds which will
+ always have a lock file.
+* Benchmark just
+ [`resolve::resolve`](https://github.com/rust-lang/cargo/blob/a821e2cb24d7b6013433f069ab3bad53d160e100/src/cargo/core/resolver/mod.rs#L122)
+ without anything else. This can help focus on just the resolver.
+
+[Criterion]: https://bheisler.github.io/criterion.rs/book/
diff --git a/src/tools/cargo/benches/benchsuite/Cargo.toml b/src/tools/cargo/benches/benchsuite/Cargo.toml
new file mode 100644
index 000000000..f4bc3583a
--- /dev/null
+++ b/src/tools/cargo/benches/benchsuite/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "benchsuite"
+version = "0.1.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+homepage = "https://github.com/rust-lang/cargo"
+repository = "https://github.com/rust-lang/cargo"
+documentation = "https://docs.rs/cargo-platform"
+description = "Benchmarking suite for Cargo."
+
+[dependencies]
+cargo = { path = "../.." }
+# Consider removing html_reports in 0.4 and switching to `cargo criterion`.
+criterion = { version = "0.3.5", features = ["html_reports"] }
+flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] }
+tar = { version = "0.4.38", default-features = false }
+url = "2.2.2"
+
+[lib]
+bench = false
+
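+# Criterion provides its own `main`, so the default libtest bench harness is disabled.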
+[[bench]]
+name = "resolve"
+harness = false
+
+[[bench]]
+name = "workspace_initialization"
+harness = false
diff --git a/src/tools/cargo/benches/benchsuite/benches/resolve.rs b/src/tools/cargo/benches/benchsuite/benches/resolve.rs
new file mode 100644
index 000000000..d03cd620e
--- /dev/null
+++ b/src/tools/cargo/benches/benchsuite/benches/resolve.rs
@@ -0,0 +1,146 @@
+use benchsuite::fixtures;
+use cargo::core::compiler::{CompileKind, RustcTargetData};
+use cargo::core::resolver::features::{FeatureOpts, FeatureResolver};
+use cargo::core::resolver::{CliFeatures, ForceAllTargets, HasDevUnits, ResolveBehavior};
+use cargo::core::{PackageIdSpec, Workspace};
+use cargo::ops::WorkspaceResolve;
+use cargo::Config;
+use criterion::{criterion_group, criterion_main, Criterion};
+use std::path::Path;
+
+struct ResolveInfo<'cfg> {
+ ws: Workspace<'cfg>,
+ requested_kinds: [CompileKind; 1],
+ target_data: RustcTargetData<'cfg>,
+ cli_features: CliFeatures,
+ specs: Vec<PackageIdSpec>,
+ has_dev_units: HasDevUnits,
+ force_all_targets: ForceAllTargets,
+ ws_resolve: WorkspaceResolve<'cfg>,
+}
+
+/// Helper for resolving a workspace. This will run the resolver once to
+/// download everything, and returns all the data structures that are used
+/// during resolution.
+fn do_resolve<'cfg>(config: &'cfg Config, ws_root: &Path) -> ResolveInfo<'cfg> {
+ let requested_kinds = [CompileKind::Host];
+ let ws = Workspace::new(&ws_root.join("Cargo.toml"), config).unwrap();
+ let target_data = RustcTargetData::new(&ws, &requested_kinds).unwrap();
+ let cli_features = CliFeatures::from_command_line(&[], false, true).unwrap();
+ let pkgs = cargo::ops::Packages::Default;
+ let specs = pkgs.to_package_id_specs(&ws).unwrap();
+ let has_dev_units = HasDevUnits::Yes;
+ let force_all_targets = ForceAllTargets::No;
+ // Do an initial run to download anything necessary so that it does
+ // not confuse criterion's warmup.
+ let ws_resolve = cargo::ops::resolve_ws_with_opts(
+ &ws,
+ &target_data,
+ &requested_kinds,
+ &cli_features,
+ &specs,
+ has_dev_units,
+ force_all_targets,
+ )
+ .unwrap();
+ ResolveInfo {
+ ws,
+ requested_kinds,
+ target_data,
+ cli_features,
+ specs,
+ has_dev_units,
+ force_all_targets,
+ ws_resolve,
+ }
+}
+
+/// Benchmark of the full `resolve_ws_with_opts` which runs the resolver
+/// twice, the feature resolver, and more. This is a major component of a
+/// regular cargo build.
+fn resolve_ws(c: &mut Criterion) {
+ let fixtures = fixtures!();
+ let mut group = c.benchmark_group("resolve_ws");
+ for (ws_name, ws_root) in fixtures.workspaces() {
+ let config = fixtures.make_config(&ws_root);
+ // The resolver info is initialized only once in a lazy fashion. This
+ // allows criterion to skip this workspace if the user passes a filter
+ // on the command-line (like `cargo bench -- resolve_ws/tikv`).
+ //
+ // Due to the way criterion works, it tends to only run the inner
+ // iterator once, and we don't want to call `do_resolve` in every
+ // "step", since that would just be some useless work.
+ let mut lazy_info = None;
+ group.bench_function(&ws_name, |b| {
+ let ResolveInfo {
+ ws,
+ requested_kinds,
+ target_data,
+ cli_features,
+ specs,
+ has_dev_units,
+ force_all_targets,
+ ..
+ } = lazy_info.get_or_insert_with(|| do_resolve(&config, &ws_root));
+ b.iter(|| {
+ cargo::ops::resolve_ws_with_opts(
+ ws,
+ target_data,
+ requested_kinds,
+ cli_features,
+ specs,
+ *has_dev_units,
+ *force_all_targets,
+ )
+ .unwrap();
+ })
+ });
+ }
+ group.finish();
+}
+
+/// Benchmark of the feature resolver.
+fn feature_resolver(c: &mut Criterion) {
+ let fixtures = fixtures!();
+ let mut group = c.benchmark_group("feature_resolver");
+ for (ws_name, ws_root) in fixtures.workspaces() {
+ let config = fixtures.make_config(&ws_root);
+ let mut lazy_info = None;
+ group.bench_function(&ws_name, |b| {
+ let ResolveInfo {
+ ws,
+ requested_kinds,
+ target_data,
+ cli_features,
+ specs,
+ has_dev_units,
+ ws_resolve,
+ ..
+ } = lazy_info.get_or_insert_with(|| do_resolve(&config, &ws_root));
+ b.iter(|| {
+ let feature_opts = FeatureOpts::new_behavior(ResolveBehavior::V2, *has_dev_units);
+ FeatureResolver::resolve(
+ ws,
+ target_data,
+ &ws_resolve.targeted_resolve,
+ &ws_resolve.pkg_set,
+ cli_features,
+ specs,
+ requested_kinds,
+ feature_opts,
+ )
+ .unwrap();
+ })
+ });
+ }
+ group.finish();
+}
+
+// Criterion complains about the measurement time being too small, but the
+// measurement time doesn't seem important to me; what is more important is
+// the number of iterations, which defaults to 100 and seems like a
+// reasonable default. Otherwise, the measurement time would need to be
+// changed per workspace. We wouldn't want to spend 60s on every workspace;
+// that would take too long and isn't necessary for the smaller workspaces.
+criterion_group!(benches, resolve_ws, feature_resolver);
+criterion_main!(benches);
diff --git a/src/tools/cargo/benches/benchsuite/benches/workspace_initialization.rs b/src/tools/cargo/benches/benchsuite/benches/workspace_initialization.rs
new file mode 100644
index 000000000..af68efe76
--- /dev/null
+++ b/src/tools/cargo/benches/benchsuite/benches/workspace_initialization.rs
@@ -0,0 +1,27 @@
+use benchsuite::fixtures;
+use cargo::core::Workspace;
+use criterion::{criterion_group, criterion_main, Criterion};
+
+fn workspace_initialization(c: &mut Criterion) {
+ let fixtures = fixtures!();
+ let mut group = c.benchmark_group("workspace_initialization");
+ for (ws_name, ws_root) in fixtures.workspaces() {
+ let config = fixtures.make_config(&ws_root);
+ // The resolver info is initialized only once in a lazy fashion. This
+ // allows criterion to skip this workspace if the user passes a filter
+ // on the command-line (like `cargo bench -- workspace_initialization/tikv`).
+ group.bench_function(ws_name, |b| {
+ b.iter(|| Workspace::new(&ws_root.join("Cargo.toml"), &config).unwrap())
+ });
+ }
+ group.finish();
+}
+
+// Criterion complains about the measurement time being too small, but the
+// measurement time doesn't seem important to me; what is more important is
+// the number of iterations, which defaults to 100 and seems like a
+// reasonable default. Otherwise, the measurement time would need to be
+// changed per workspace. We wouldn't want to spend 60s on every workspace;
+// that would take too long and isn't necessary for the smaller workspaces.
+criterion_group!(benches, workspace_initialization);
+criterion_main!(benches);
diff --git a/src/tools/cargo/benches/benchsuite/src/lib.rs b/src/tools/cargo/benches/benchsuite/src/lib.rs
new file mode 100644
index 000000000..e470a03b9
--- /dev/null
+++ b/src/tools/cargo/benches/benchsuite/src/lib.rs
@@ -0,0 +1,197 @@
+use cargo::Config;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use url::Url;
+
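+/// Convenience macro for building a [`Fixtures`] environment.
+///
+/// This expands `env!("CARGO_TARGET_TMPDIR")` at the call site, i.e. in the
+/// benchmark crate itself; Cargo only sets that variable when compiling
+/// integration tests and benchmarks.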
+#[macro_export]
+macro_rules! fixtures {
+ () => {
+ $crate::Fixtures::new(env!("CARGO_TARGET_TMPDIR"))
+ };
+}
+
+// This is an arbitrary commit that existed when I started. This helps
+// ensure consistent results. It can be updated if needed, but that can
+// make it harder to compare results with older versions of cargo.
+const CRATES_IO_COMMIT: &str = "85f7bfd61ea4fee08ec68c468762e886b2aebec6";
+
+pub struct Fixtures {
+ cargo_target_tmpdir: PathBuf,
+}
+
+impl Fixtures {
+ pub fn new(cargo_target_tmpdir: &str) -> Self {
+ let bench = Self {
+ cargo_target_tmpdir: PathBuf::from(cargo_target_tmpdir),
+ };
+ bench.create_home();
+ bench.create_target_dir();
+ bench.clone_index();
+ bench.unpack_workspaces();
+ bench
+ }
+
+ fn root(&self) -> PathBuf {
+ self.cargo_target_tmpdir.join("bench")
+ }
+
+ fn target_dir(&self) -> PathBuf {
+ let mut p = self.root();
+ p.push("target");
+ p
+ }
+
+ fn cargo_home(&self) -> PathBuf {
+ let mut p = self.root();
+ p.push("chome");
+ p
+ }
+
+ fn index(&self) -> PathBuf {
+ let mut p = self.root();
+ p.push("index");
+ p
+ }
+
+ fn workspaces_path(&self) -> PathBuf {
+ let mut p = self.root();
+ p.push("workspaces");
+ p
+ }
+
+ fn registry_url(&self) -> Url {
+ Url::from_file_path(self.index()).unwrap()
+ }
+
+ fn create_home(&self) {
+ let home = self.cargo_home();
+ if !home.exists() {
+ fs::create_dir_all(&home).unwrap();
+ }
+ fs::write(
+ home.join("config.toml"),
+ format!(
+ r#"
+ [source.crates-io]
+ replace-with = 'local-snapshot'
+
+ [source.local-snapshot]
+ registry = '{}'
+ "#,
+ self.registry_url()
+ ),
+ )
+ .unwrap();
+ }
+
+ fn create_target_dir(&self) {
+        // This is necessary to ensure the .rustc_info.json file is written;
+        // otherwise it would be skipped, and it is very expensive to create.
+ if !self.target_dir().exists() {
+ fs::create_dir_all(self.target_dir()).unwrap();
+ }
+ }
+
+ /// This clones crates.io at a specific point in time into tmp/index.
+ fn clone_index(&self) {
+ let index = self.index();
+ let maybe_git = |command: &str| {
+ let status = Command::new("git")
+ .current_dir(&index)
+ .args(command.split_whitespace().collect::<Vec<_>>())
+ .status()
+ .expect("git should be installed");
+ status.success()
+ };
+ let git = |command: &str| {
+ if !maybe_git(command) {
+ panic!("failed to run git command: {}", command);
+ }
+ };
+ if index.exists() {
+ if maybe_git(&format!(
+ "rev-parse -q --verify {}^{{commit}}",
+ CRATES_IO_COMMIT
+ )) {
+ // Already fetched.
+ return;
+ }
+ } else {
+ fs::create_dir_all(&index).unwrap();
+ git("init --bare");
+ git("remote add origin https://github.com/rust-lang/crates.io-index-archive");
+ }
+ git(&format!("fetch origin {}", CRATES_IO_COMMIT));
+ git("branch -f master FETCH_HEAD");
+ }
+
+ /// This unpacks the compressed workspace skeletons into tmp/workspaces.
+ fn unpack_workspaces(&self) {
+ let ws_dir = Path::new(env!("CARGO_MANIFEST_DIR"))
+ .parent()
+ .unwrap()
+ .join("workspaces");
+ let archives = fs::read_dir(ws_dir)
+ .unwrap()
+ .map(|e| e.unwrap().path())
+ .filter(|p| p.extension() == Some(std::ffi::OsStr::new("tgz")));
+ for archive in archives {
+ let name = archive.file_stem().unwrap();
+ let f = fs::File::open(&archive).unwrap();
+ let f = flate2::read::GzDecoder::new(f);
+ let dest = self.workspaces_path().join(&name);
+ if dest.exists() {
+ fs::remove_dir_all(&dest).unwrap();
+ }
+ let mut archive = tar::Archive::new(f);
+ archive.unpack(self.workspaces_path()).unwrap();
+ }
+ }
+
+ /// Vec of `(ws_name, ws_root)`.
+ pub fn workspaces(&self) -> Vec<(String, PathBuf)> {
+ // CARGO_BENCH_WORKSPACES can be used to override, otherwise it just uses
+ // the workspaces in the workspaces directory.
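+        // For example (hypothetical paths; use the platform's path-list
+        // separator since the value is parsed with `split_paths`):
+        // `CARGO_BENCH_WORKSPACES=/path/ws1:/path/ws2 cargo bench`.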
+ let mut ps: Vec<_> = match std::env::var_os("CARGO_BENCH_WORKSPACES") {
+ Some(s) => std::env::split_paths(&s).collect(),
+ None => fs::read_dir(self.workspaces_path())
+ .unwrap()
+ .map(|e| e.unwrap().path())
+ // These currently fail in most cases on Windows due to long
+ // filenames in the git checkouts.
+ .filter(|p| {
+ !(cfg!(windows)
+ && matches!(p.file_name().unwrap().to_str().unwrap(), "servo" | "tikv"))
+ })
+ .collect(),
+ };
+ // Sort so it is consistent.
+ ps.sort();
+ ps.into_iter()
+ .map(|p| (p.file_name().unwrap().to_str().unwrap().to_owned(), p))
+ .collect()
+ }
+
+ /// Creates a new Config.
+ pub fn make_config(&self, ws_root: &Path) -> Config {
+ let shell = cargo::core::Shell::new();
+ let mut config = Config::new(shell, ws_root.to_path_buf(), self.cargo_home());
+        // `configure` is needed to set the target_dir, which in turn is
+        // needed to write the .rustc_info.json file (which is very expensive
+        // to create).
+ config
+ .configure(
+ 0,
+ false,
+ None,
+ false,
+ false,
+ false,
+ &Some(self.target_dir()),
+ &[],
+ &[],
+ )
+ .unwrap();
+ config
+ }
+}
diff --git a/src/tools/cargo/benches/capture/Cargo.toml b/src/tools/cargo/benches/capture/Cargo.toml
new file mode 100644
index 000000000..25a901777
--- /dev/null
+++ b/src/tools/cargo/benches/capture/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "capture"
+version = "0.1.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+description = "Tool for capturing a real-world workspace for benchmarking."
+
+[dependencies]
+cargo_metadata = "0.14.0"
+flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] }
+tar = { version = "0.4.38", default-features = false }
+toml = "0.7.0"
diff --git a/src/tools/cargo/benches/capture/src/main.rs b/src/tools/cargo/benches/capture/src/main.rs
new file mode 100644
index 000000000..f6f02c4ba
--- /dev/null
+++ b/src/tools/cargo/benches/capture/src/main.rs
@@ -0,0 +1,164 @@
+//! This tool helps to capture the `Cargo.toml` files of a workspace.
+//!
+//! Run it by passing a list of workspaces to capture.
+//! Use the `-f` flag to allow it to overwrite existing captures.
+//! The workspace will be saved in a `.tgz` file in the `../workspaces` directory.
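+//!
+//! For example (the workspace path is hypothetical; run from this package's
+//! directory):
+//!
+//! ```text
+//! cargo run -- -f /path/to/some/workspace
+//! ```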
+
+use flate2::{Compression, GzBuilder};
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+fn main() {
+ let force = std::env::args().any(|arg| arg == "-f");
+ let dest = Path::new(env!("CARGO_MANIFEST_DIR"))
+ .parent()
+ .unwrap()
+ .join("workspaces");
+ if !dest.exists() {
+ panic!("expected {} to exist", dest.display());
+ }
+ for arg in std::env::args().skip(1).filter(|arg| !arg.starts_with("-")) {
+ let source_root = fs::canonicalize(arg).unwrap();
+ capture(&source_root, &dest, force);
+ }
+}
+
+fn capture(source_root: &Path, dest: &Path, force: bool) {
+ let name = Path::new(source_root.file_name().unwrap());
+ let mut dest_gz = PathBuf::from(dest);
+ dest_gz.push(name);
+ dest_gz.set_extension("tgz");
+ if dest_gz.exists() {
+ if !force {
+ panic!(
+ "dest {:?} already exists, use -f to force overwriting",
+ dest_gz
+ );
+ }
+ fs::remove_file(&dest_gz).unwrap();
+ }
+ let vcs_info = capture_vcs_info(source_root, force);
+ let dst = fs::File::create(&dest_gz).unwrap();
+ let encoder = GzBuilder::new()
+ .filename(format!("{}.tar", name.to_str().unwrap()))
+ .write(dst, Compression::best());
+ let mut ar = tar::Builder::new(encoder);
+ ar.mode(tar::HeaderMode::Deterministic);
+ if let Some(info) = &vcs_info {
+ add_ar_file(&mut ar, &name.join(".cargo_vcs_info.json"), info);
+ }
+
+ // Gather all local packages.
+ let metadata = cargo_metadata::MetadataCommand::new()
+ .manifest_path(source_root.join("Cargo.toml"))
+ .features(cargo_metadata::CargoOpt::AllFeatures)
+ .exec()
+ .expect("cargo_metadata failed");
+ let mut found_root = false;
+ for package in &metadata.packages {
+ if package.source.is_some() {
+ continue;
+ }
+ let manifest_path = package.manifest_path.as_std_path();
+ copy_manifest(&manifest_path, &mut ar, name, &source_root);
+ found_root |= manifest_path == source_root.join("Cargo.toml");
+ }
+ if !found_root {
+ // A virtual workspace.
+ let contents = fs::read_to_string(source_root.join("Cargo.toml")).unwrap();
+ assert!(!contents.contains("[package]"));
+ add_ar_file(&mut ar, &name.join("Cargo.toml"), &contents);
+ }
+ let lock = fs::read_to_string(source_root.join("Cargo.lock")).unwrap();
+ add_ar_file(&mut ar, &name.join("Cargo.lock"), &lock);
+ let encoder = ar.into_inner().unwrap();
+ encoder.finish().unwrap();
+ eprintln!("created {}", dest_gz.display());
+}
+
+fn copy_manifest<W: std::io::Write>(
+ manifest_path: &Path,
+ ar: &mut tar::Builder<W>,
+ name: &Path,
+ source_root: &Path,
+) {
+ let relative_path = manifest_path
+ .parent()
+ .unwrap()
+ .strip_prefix(source_root)
+ .expect("workspace member should be under workspace root");
+ let relative_path = name.join(relative_path);
+ let contents = fs::read_to_string(&manifest_path).unwrap();
+ let mut manifest: toml::Value = toml::from_str(&contents).unwrap();
+ let remove = |obj: &mut toml::Value, name| {
+ let table = obj.as_table_mut().unwrap();
+ if table.contains_key(name) {
+ table.remove(name);
+ }
+ };
+ remove(&mut manifest, "lib");
+ remove(&mut manifest, "bin");
+ remove(&mut manifest, "example");
+ remove(&mut manifest, "test");
+ remove(&mut manifest, "bench");
+ remove(&mut manifest, "profile");
+ if let Some(package) = manifest.get_mut("package") {
+ remove(package, "default-run");
+ }
+ let contents = toml::to_string(&manifest).unwrap();
+ add_ar_file(ar, &relative_path.join("Cargo.toml"), &contents);
+ add_ar_file(ar, &relative_path.join("src").join("lib.rs"), "");
+}
+
+fn add_ar_file<W: std::io::Write>(ar: &mut tar::Builder<W>, path: &Path, contents: &str) {
+ let mut header = tar::Header::new_gnu();
+ header.set_entry_type(tar::EntryType::file());
+ header.set_mode(0o644);
+ header.set_size(contents.len() as u64);
+ header.set_mtime(123456789);
+ header.set_cksum();
+ ar.append_data(&mut header, path, contents.as_bytes())
+ .unwrap();
+}
+
+fn capture_vcs_info(ws_root: &Path, force: bool) -> Option<String> {
+ let maybe_git = |command: &str| {
+ Command::new("git")
+ .current_dir(ws_root)
+ .args(command.split_whitespace().collect::<Vec<_>>())
+ .output()
+ .expect("git should be installed")
+ };
+ assert!(ws_root.join("Cargo.toml").exists());
+ let relative = maybe_git("ls-files --full-name Cargo.toml");
+ if !relative.status.success() {
+ if !force {
+ panic!("git repository not detected, use -f to force");
+ }
+ return None;
+ }
+ let p = Path::new(std::str::from_utf8(&relative.stdout).unwrap().trim());
+ let relative = p.parent().unwrap();
+ if !force {
+ let has_changes = !maybe_git("diff-index --quiet HEAD .").status.success();
+ if has_changes {
+ panic!("git repo appears to have changes, use -f to force, or clean the repo");
+ }
+ }
+ let commit = maybe_git("rev-parse HEAD");
+ assert!(commit.status.success());
+ let commit = std::str::from_utf8(&commit.stdout).unwrap().trim();
+ let remote = maybe_git("remote get-url origin");
+ assert!(remote.status.success());
+ let remote = std::str::from_utf8(&remote.stdout).unwrap().trim();
+ let info = format!(
+ "{{\n \"git\": {{\n \"sha1\": \"{}\",\n \"remote\": \"{}\"\n }},\
+ \n \"path_in_vcs\": \"{}\"\n}}\n",
+ commit,
+ remote,
+ relative.display()
+ );
+ eprintln!("recording vcs info:\n{}", info);
+ Some(info)
+}
diff --git a/src/tools/cargo/benches/workspaces/cargo.tgz b/src/tools/cargo/benches/workspaces/cargo.tgz
new file mode 100644
index 000000000..653aff982
--- /dev/null
+++ b/src/tools/cargo/benches/workspaces/cargo.tgz
Binary files differ
diff --git a/src/tools/cargo/benches/workspaces/diem.tgz b/src/tools/cargo/benches/workspaces/diem.tgz
new file mode 100644
index 000000000..e047c6cd0
--- /dev/null
+++ b/src/tools/cargo/benches/workspaces/diem.tgz
Binary files differ
diff --git a/src/tools/cargo/benches/workspaces/empty.tgz b/src/tools/cargo/benches/workspaces/empty.tgz
new file mode 100644
index 000000000..1a7d555b4
--- /dev/null
+++ b/src/tools/cargo/benches/workspaces/empty.tgz
Binary files differ
diff --git a/src/tools/cargo/benches/workspaces/gecko-dev.tgz b/src/tools/cargo/benches/workspaces/gecko-dev.tgz
new file mode 100644
index 000000000..e89c676b2
--- /dev/null
+++ b/src/tools/cargo/benches/workspaces/gecko-dev.tgz
Binary files differ
diff --git a/src/tools/cargo/benches/workspaces/rust-ws-inherit.tgz b/src/tools/cargo/benches/workspaces/rust-ws-inherit.tgz
new file mode 100644
index 000000000..6e7b6691f
--- /dev/null
+++ b/src/tools/cargo/benches/workspaces/rust-ws-inherit.tgz
Binary files differ
diff --git a/src/tools/cargo/benches/workspaces/rust.tgz b/src/tools/cargo/benches/workspaces/rust.tgz
new file mode 100644
index 000000000..74da4759b
--- /dev/null
+++ b/src/tools/cargo/benches/workspaces/rust.tgz
Binary files differ
diff --git a/src/tools/cargo/benches/workspaces/servo.tgz b/src/tools/cargo/benches/workspaces/servo.tgz
new file mode 100644
index 000000000..511164369
--- /dev/null
+++ b/src/tools/cargo/benches/workspaces/servo.tgz
Binary files differ
diff --git a/src/tools/cargo/benches/workspaces/substrate.tgz b/src/tools/cargo/benches/workspaces/substrate.tgz
new file mode 100644
index 000000000..81c3874f6
--- /dev/null
+++ b/src/tools/cargo/benches/workspaces/substrate.tgz
Binary files differ
diff --git a/src/tools/cargo/benches/workspaces/tikv.tgz b/src/tools/cargo/benches/workspaces/tikv.tgz
new file mode 100644
index 000000000..74add19b3
--- /dev/null
+++ b/src/tools/cargo/benches/workspaces/tikv.tgz
Binary files differ
diff --git a/src/tools/cargo/benches/workspaces/toml-rs.tgz b/src/tools/cargo/benches/workspaces/toml-rs.tgz
new file mode 100644
index 000000000..9acab1982
--- /dev/null
+++ b/src/tools/cargo/benches/workspaces/toml-rs.tgz
Binary files differ
diff --git a/src/tools/cargo/build.rs b/src/tools/cargo/build.rs
new file mode 100644
index 000000000..752221f8c
--- /dev/null
+++ b/src/tools/cargo/build.rs
@@ -0,0 +1,70 @@
+use flate2::{Compression, GzBuilder};
+use std::ffi::OsStr;
+use std::fs;
+use std::path::Path;
+use std::process::Command;
+
+fn main() {
+ commit_info();
+ compress_man();
+ // ALLOWED: Accessing environment during build time shouldn't be prohibited.
+ #[allow(clippy::disallowed_methods)]
+ let target = std::env::var("TARGET").unwrap();
+ println!("cargo:rustc-env=RUST_HOST_TARGET={target}");
+}
+
+fn compress_man() {
+ // ALLOWED: Accessing environment during build time shouldn't be prohibited.
+ #[allow(clippy::disallowed_methods)]
+ let out_path = Path::new(&std::env::var("OUT_DIR").unwrap()).join("man.tgz");
+ let dst = fs::File::create(out_path).unwrap();
+ let encoder = GzBuilder::new()
+ .filename("man.tar")
+ .write(dst, Compression::best());
+ let mut ar = tar::Builder::new(encoder);
+ ar.mode(tar::HeaderMode::Deterministic);
+
+ let mut add_files = |dir, extension| {
+ let mut files = fs::read_dir(dir)
+ .unwrap()
+ .map(|e| e.unwrap().path())
+ .collect::<Vec<_>>();
+ files.sort();
+ for path in files {
+ if path.extension() != Some(extension) {
+ continue;
+ }
+ println!("cargo:rerun-if-changed={}", path.display());
+ ar.append_path_with_name(&path, path.file_name().unwrap())
+ .unwrap();
+ }
+ };
+
+ add_files(Path::new("src/etc/man"), OsStr::new("1"));
+ add_files(Path::new("src/doc/man/generated_txt"), OsStr::new("txt"));
+ let encoder = ar.into_inner().unwrap();
+ encoder.finish().unwrap();
+}
+
+fn commit_info() {
+ if !Path::new(".git").exists() {
+ return;
+ }
+ let output = match Command::new("git")
+ .arg("log")
+ .arg("-1")
+ .arg("--date=short")
+ .arg("--format=%H %h %cd")
+ .arg("--abbrev=9")
+ .output()
+ {
+ Ok(output) if output.status.success() => output,
+ _ => return,
+ };
+ let stdout = String::from_utf8(output.stdout).unwrap();
+ let mut parts = stdout.split_whitespace();
+ let mut next = || parts.next().unwrap();
+ println!("cargo:rustc-env=CARGO_COMMIT_HASH={}", next());
+ println!("cargo:rustc-env=CARGO_COMMIT_SHORT_HASH={}", next());
+ println!("cargo:rustc-env=CARGO_COMMIT_DATE={}", next())
+}
diff --git a/src/tools/cargo/ci/clean-test-output.sh b/src/tools/cargo/ci/clean-test-output.sh
new file mode 100755
index 000000000..f1f2ec61c
--- /dev/null
+++ b/src/tools/cargo/ci/clean-test-output.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+# This script removes test and benchmark output and displays disk usage.
+
+set -euo pipefail
+
+df -h
+rm -rf target/tmp
+df -h
diff --git a/src/tools/cargo/ci/dump-environment.sh b/src/tools/cargo/ci/dump-environment.sh
new file mode 100755
index 000000000..b9b7ec56c
--- /dev/null
+++ b/src/tools/cargo/ci/dump-environment.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+# This script dumps information about the build environment to stdout.
+
+set -euo pipefail
+IFS=$'\n\t'
+
+echo "environment variables:"
+printenv | sort
+echo
+
+echo "disk usage:"
+df -h
+echo
+
+echo "CPU info:"
+if [[ "${OSTYPE}" = "darwin"* ]]; then
+ system_profiler SPHardwareDataType || true
+ sysctl hw || true
+else
+ cat /proc/cpuinfo || true
+ cat /proc/meminfo || true
+fi
diff --git a/src/tools/cargo/ci/fetch-smoke-test.sh b/src/tools/cargo/ci/fetch-smoke-test.sh
new file mode 100755
index 000000000..17993d1fd
--- /dev/null
+++ b/src/tools/cargo/ci/fetch-smoke-test.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+# This script builds with static curl, and verifies that fetching works.
+
+set -ex
+
+if [[ -z "$RUNNER_TEMP" ]]
+then
+ echo "RUNNER_TEMP must be set"
+ exit 1
+fi
+
+if [ ! -f Cargo.toml ]; then
+ echo "Must be run from root of project."
+ exit 1
+fi
+
+
+# Building openssl on Windows is a pain.
+if [[ $(rustc -Vv | grep host:) != *windows* ]]; then
+ FEATURES='vendored-openssl,curl-sys/static-curl,curl-sys/force-system-lib-on-osx'
+ export LIBZ_SYS_STATIC=1
+fi
+
+cargo build --features "$FEATURES"
+export CARGO_HOME=$RUNNER_TEMP/chome
+target/debug/cargo fetch
+rm -rf $CARGO_HOME
diff --git a/src/tools/cargo/ci/validate-man.sh b/src/tools/cargo/ci/validate-man.sh
new file mode 100755
index 000000000..92df49781
--- /dev/null
+++ b/src/tools/cargo/ci/validate-man.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+# This script validates that there aren't any changes to the man pages.
+
+set -e
+
+cd src/doc
+
+changes=$(git status --porcelain)
+if [ -n "$changes" ]
+then
+ echo "git directory must be clean before running this script."
+ exit 1
+fi
+
+./build-man.sh
+
+changes=$(git status --porcelain)
+if [ -n "$changes" ]
+then
+ echo "Detected changes in man pages:"
+ echo "$changes"
+ echo
+ echo "Please run './build-man.sh' in the src/doc directory to rebuild the"
+ echo "man pages, and commit the changes."
+ exit 1
+fi
diff --git a/src/tools/cargo/clippy.toml b/src/tools/cargo/clippy.toml
new file mode 100644
index 000000000..4f9be8f9b
--- /dev/null
+++ b/src/tools/cargo/clippy.toml
@@ -0,0 +1,6 @@
+disallowed-methods = [
+ { path = "std::env::var", reason = "Use `Config::get_env` instead. See rust-lang/cargo#11588" },
+ { path = "std::env::var_os", reason = "Use `Config::get_env_os` instead. See rust-lang/cargo#11588" },
+ { path = "std::env::vars", reason = "Not recommended to use in Cargo. See rust-lang/cargo#11588" },
+ { path = "std::env::vars_os", reason = "Not recommended to use in Cargo. See rust-lang/cargo#11588" },
+]
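+# Inside Cargo this typically means going through the `Config` accessors named
+# above (e.g. `config.get_env("SOME_VAR")`, a hypothetical call site) instead
+# of reading the process environment directly.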
diff --git a/src/tools/cargo/crates/cargo-platform/Cargo.toml b/src/tools/cargo/crates/cargo-platform/Cargo.toml
new file mode 100644
index 000000000..a5e51ee5d
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-platform/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "cargo-platform"
+version = "0.1.2"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+homepage = "https://github.com/rust-lang/cargo"
+repository = "https://github.com/rust-lang/cargo"
+documentation = "https://docs.rs/cargo-platform"
+description = "Cargo's representation of a target platform."
+
+[dependencies]
+serde = "1.0.82"
diff --git a/src/tools/cargo/crates/cargo-platform/LICENSE-APACHE b/src/tools/cargo/crates/cargo-platform/LICENSE-APACHE
new file mode 120000
index 000000000..1cd601d0a
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-platform/LICENSE-APACHE
@@ -0,0 +1 @@
+../../LICENSE-APACHE \ No newline at end of file
diff --git a/src/tools/cargo/crates/cargo-platform/LICENSE-MIT b/src/tools/cargo/crates/cargo-platform/LICENSE-MIT
new file mode 120000
index 000000000..b2cfbdc7b
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-platform/LICENSE-MIT
@@ -0,0 +1 @@
+../../LICENSE-MIT \ No newline at end of file
diff --git a/src/tools/cargo/crates/cargo-platform/examples/matches.rs b/src/tools/cargo/crates/cargo-platform/examples/matches.rs
new file mode 100644
index 000000000..9ad5d10dd
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-platform/examples/matches.rs
@@ -0,0 +1,55 @@
+//! This example demonstrates how to filter a Platform based on the current
+//! host target.
+
+use cargo_platform::{Cfg, Platform};
+use std::process::Command;
+use std::str::FromStr;
+
+static EXAMPLES: &[&str] = &[
+ "cfg(windows)",
+ "cfg(unix)",
+ "cfg(target_os=\"macos\")",
+ "cfg(target_os=\"linux\")",
+ "cfg(any(target_arch=\"x86\", target_arch=\"x86_64\"))",
+];
+
+fn main() {
+ let target = get_target();
+ let cfgs = get_cfgs();
+ println!("host target={} cfgs:", target);
+ for cfg in &cfgs {
+ println!(" {}", cfg);
+ }
+ let mut examples: Vec<&str> = EXAMPLES.iter().copied().collect();
+ examples.push(target.as_str());
+ for example in examples {
+ let p = Platform::from_str(example).unwrap();
+ println!("{:?} matches: {:?}", example, p.matches(&target, &cfgs));
+ }
+}
+
+fn get_target() -> String {
+ let output = Command::new("rustc")
+ .arg("-Vv")
+ .output()
+ .expect("rustc failed to run");
+ let stdout = String::from_utf8(output.stdout).unwrap();
+ for line in stdout.lines() {
+ if line.starts_with("host: ") {
+ return String::from(&line[6..]);
+ }
+ }
+ panic!("Failed to find host: {}", stdout);
+}
+
+fn get_cfgs() -> Vec<Cfg> {
+ let output = Command::new("rustc")
+ .arg("--print=cfg")
+ .output()
+ .expect("rustc failed to run");
+ let stdout = String::from_utf8(output.stdout).unwrap();
+ stdout
+ .lines()
+ .map(|line| Cfg::from_str(line).unwrap())
+ .collect()
+}
diff --git a/src/tools/cargo/crates/cargo-platform/src/cfg.rs b/src/tools/cargo/crates/cargo-platform/src/cfg.rs
new file mode 100644
index 000000000..c3ddb69bc
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-platform/src/cfg.rs
@@ -0,0 +1,319 @@
+use crate::error::{ParseError, ParseErrorKind::*};
+use std::fmt;
+use std::iter;
+use std::str::{self, FromStr};
+
+/// A cfg expression.
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum CfgExpr {
+ Not(Box<CfgExpr>),
+ All(Vec<CfgExpr>),
+ Any(Vec<CfgExpr>),
+ Value(Cfg),
+}
+
+/// A cfg value.
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum Cfg {
+ /// A named cfg value, like `unix`.
+ Name(String),
+ /// A key/value cfg pair, like `target_os = "linux"`.
+ KeyPair(String, String),
+}
+
+#[derive(PartialEq)]
+enum Token<'a> {
+ LeftParen,
+ RightParen,
+ Ident(&'a str),
+ Comma,
+ Equals,
+ String(&'a str),
+}
+
+#[derive(Clone)]
+struct Tokenizer<'a> {
+ s: iter::Peekable<str::CharIndices<'a>>,
+ orig: &'a str,
+}
+
+struct Parser<'a> {
+ t: Tokenizer<'a>,
+}
+
+impl FromStr for Cfg {
+ type Err = ParseError;
+
+ fn from_str(s: &str) -> Result<Cfg, Self::Err> {
+ let mut p = Parser::new(s);
+ let e = p.cfg()?;
+ if let Some(rest) = p.rest() {
+ return Err(ParseError::new(
+ p.t.orig,
+ UnterminatedExpression(rest.to_string()),
+ ));
+ }
+ Ok(e)
+ }
+}
+
+impl fmt::Display for Cfg {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Cfg::Name(ref s) => s.fmt(f),
+ Cfg::KeyPair(ref k, ref v) => write!(f, "{} = \"{}\"", k, v),
+ }
+ }
+}
+
+impl CfgExpr {
+    /// Utility function to check whether a key of the form `"cfg(..)"` matches the `target_cfg`.
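+    ///
+    /// For example, `CfgExpr::matches_key("cfg(unix)", &cfgs)` is `true` when
+    /// `cfgs` contains `Cfg::Name("unix".to_string())`.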
+ pub fn matches_key(key: &str, target_cfg: &[Cfg]) -> bool {
+ if key.starts_with("cfg(") && key.ends_with(')') {
+ let cfg = &key[4..key.len() - 1];
+
+ CfgExpr::from_str(cfg)
+ .ok()
+ .map(|ce| ce.matches(target_cfg))
+ .unwrap_or(false)
+ } else {
+ false
+ }
+ }
+
+ pub fn matches(&self, cfg: &[Cfg]) -> bool {
+ match *self {
+ CfgExpr::Not(ref e) => !e.matches(cfg),
+ CfgExpr::All(ref e) => e.iter().all(|e| e.matches(cfg)),
+ CfgExpr::Any(ref e) => e.iter().any(|e| e.matches(cfg)),
+ CfgExpr::Value(ref e) => cfg.contains(e),
+ }
+ }
+}
+
+impl FromStr for CfgExpr {
+ type Err = ParseError;
+
+ fn from_str(s: &str) -> Result<CfgExpr, Self::Err> {
+ let mut p = Parser::new(s);
+ let e = p.expr()?;
+ if let Some(rest) = p.rest() {
+ return Err(ParseError::new(
+ p.t.orig,
+ UnterminatedExpression(rest.to_string()),
+ ));
+ }
+ Ok(e)
+ }
+}
+
+impl fmt::Display for CfgExpr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ CfgExpr::Not(ref e) => write!(f, "not({})", e),
+ CfgExpr::All(ref e) => write!(f, "all({})", CommaSep(e)),
+ CfgExpr::Any(ref e) => write!(f, "any({})", CommaSep(e)),
+ CfgExpr::Value(ref e) => write!(f, "{}", e),
+ }
+ }
+}
+
+struct CommaSep<'a, T>(&'a [T]);
+
+impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ for (i, v) in self.0.iter().enumerate() {
+ if i > 0 {
+ write!(f, ", ")?;
+ }
+ write!(f, "{}", v)?;
+ }
+ Ok(())
+ }
+}
+
+impl<'a> Parser<'a> {
+ fn new(s: &'a str) -> Parser<'a> {
+ Parser {
+ t: Tokenizer {
+ s: s.char_indices().peekable(),
+ orig: s,
+ },
+ }
+ }
+
+ fn expr(&mut self) -> Result<CfgExpr, ParseError> {
+ match self.peek() {
+ Some(Ok(Token::Ident(op @ "all"))) | Some(Ok(Token::Ident(op @ "any"))) => {
+ self.t.next();
+ let mut e = Vec::new();
+ self.eat(&Token::LeftParen)?;
+ while !self.r#try(&Token::RightParen) {
+ e.push(self.expr()?);
+ if !self.r#try(&Token::Comma) {
+ self.eat(&Token::RightParen)?;
+ break;
+ }
+ }
+ if op == "all" {
+ Ok(CfgExpr::All(e))
+ } else {
+ Ok(CfgExpr::Any(e))
+ }
+ }
+ Some(Ok(Token::Ident("not"))) => {
+ self.t.next();
+ self.eat(&Token::LeftParen)?;
+ let e = self.expr()?;
+ self.eat(&Token::RightParen)?;
+ Ok(CfgExpr::Not(Box::new(e)))
+ }
+ Some(Ok(..)) => self.cfg().map(CfgExpr::Value),
+ Some(Err(..)) => Err(self.t.next().unwrap().err().unwrap()),
+ None => Err(ParseError::new(
+ self.t.orig,
+ IncompleteExpr("start of a cfg expression"),
+ )),
+ }
+ }
+
+ fn cfg(&mut self) -> Result<Cfg, ParseError> {
+ match self.t.next() {
+ Some(Ok(Token::Ident(name))) => {
+ let e = if self.r#try(&Token::Equals) {
+ let val = match self.t.next() {
+ Some(Ok(Token::String(s))) => s,
+ Some(Ok(t)) => {
+ return Err(ParseError::new(
+ self.t.orig,
+ UnexpectedToken {
+ expected: "a string",
+ found: t.classify(),
+ },
+ ))
+ }
+ Some(Err(e)) => return Err(e),
+ None => {
+ return Err(ParseError::new(self.t.orig, IncompleteExpr("a string")))
+ }
+ };
+ Cfg::KeyPair(name.to_string(), val.to_string())
+ } else {
+ Cfg::Name(name.to_string())
+ };
+ Ok(e)
+ }
+ Some(Ok(t)) => Err(ParseError::new(
+ self.t.orig,
+ UnexpectedToken {
+ expected: "identifier",
+ found: t.classify(),
+ },
+ )),
+ Some(Err(e)) => Err(e),
+ None => Err(ParseError::new(self.t.orig, IncompleteExpr("identifier"))),
+ }
+ }
+
+ fn peek(&mut self) -> Option<Result<Token<'a>, ParseError>> {
+ self.t.clone().next()
+ }
+
+ fn r#try(&mut self, token: &Token<'a>) -> bool {
+ match self.peek() {
+ Some(Ok(ref t)) if token == t => {}
+ _ => return false,
+ }
+ self.t.next();
+ true
+ }
+
+ fn eat(&mut self, token: &Token<'a>) -> Result<(), ParseError> {
+ match self.t.next() {
+ Some(Ok(ref t)) if token == t => Ok(()),
+ Some(Ok(t)) => Err(ParseError::new(
+ self.t.orig,
+ UnexpectedToken {
+ expected: token.classify(),
+ found: t.classify(),
+ },
+ )),
+ Some(Err(e)) => Err(e),
+ None => Err(ParseError::new(
+ self.t.orig,
+ IncompleteExpr(token.classify()),
+ )),
+ }
+ }
+
+ /// Returns the rest of the input from the current location.
+ fn rest(&self) -> Option<&str> {
+ let mut s = self.t.s.clone();
+ loop {
+ match s.next() {
+ Some((_, ' ')) => {}
+ Some((start, _ch)) => return Some(&self.t.orig[start..]),
+ None => return None,
+ }
+ }
+ }
+}
+
+impl<'a> Iterator for Tokenizer<'a> {
+ type Item = Result<Token<'a>, ParseError>;
+
+ fn next(&mut self) -> Option<Result<Token<'a>, ParseError>> {
+ loop {
+ match self.s.next() {
+ Some((_, ' ')) => {}
+ Some((_, '(')) => return Some(Ok(Token::LeftParen)),
+ Some((_, ')')) => return Some(Ok(Token::RightParen)),
+ Some((_, ',')) => return Some(Ok(Token::Comma)),
+ Some((_, '=')) => return Some(Ok(Token::Equals)),
+ Some((start, '"')) => {
+ while let Some((end, ch)) = self.s.next() {
+ if ch == '"' {
+ return Some(Ok(Token::String(&self.orig[start + 1..end])));
+ }
+ }
+ return Some(Err(ParseError::new(self.orig, UnterminatedString)));
+ }
+ Some((start, ch)) if is_ident_start(ch) => {
+ while let Some(&(end, ch)) = self.s.peek() {
+ if !is_ident_rest(ch) {
+ return Some(Ok(Token::Ident(&self.orig[start..end])));
+ } else {
+ self.s.next();
+ }
+ }
+ return Some(Ok(Token::Ident(&self.orig[start..])));
+ }
+ Some((_, ch)) => {
+ return Some(Err(ParseError::new(self.orig, UnexpectedChar(ch))));
+ }
+ None => return None,
+ }
+ }
+ }
+}
+
+fn is_ident_start(ch: char) -> bool {
+ ch == '_' || ch.is_ascii_alphabetic()
+}
+
+fn is_ident_rest(ch: char) -> bool {
+ is_ident_start(ch) || ch.is_ascii_digit()
+}
+
+impl<'a> Token<'a> {
+ fn classify(&self) -> &'static str {
+ match *self {
+ Token::LeftParen => "`(`",
+ Token::RightParen => "`)`",
+ Token::Ident(..) => "an identifier",
+ Token::Comma => "`,`",
+ Token::Equals => "`=`",
+ Token::String(..) => "a string",
+ }
+ }
+}
diff --git a/src/tools/cargo/crates/cargo-platform/src/error.rs b/src/tools/cargo/crates/cargo-platform/src/error.rs
new file mode 100644
index 000000000..bf4b35f27
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-platform/src/error.rs
@@ -0,0 +1,67 @@
+use std::fmt;
+
+#[derive(Debug)]
+pub struct ParseError {
+ kind: ParseErrorKind,
+ orig: String,
+}
+
+#[non_exhaustive]
+#[derive(Debug)]
+pub enum ParseErrorKind {
+ UnterminatedString,
+ UnexpectedChar(char),
+ UnexpectedToken {
+ expected: &'static str,
+ found: &'static str,
+ },
+ IncompleteExpr(&'static str),
+ UnterminatedExpression(String),
+ InvalidTarget(String),
+}
+
+impl fmt::Display for ParseError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(
+ f,
+ "failed to parse `{}` as a cfg expression: {}",
+ self.orig, self.kind
+ )
+ }
+}
+
+impl fmt::Display for ParseErrorKind {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ use ParseErrorKind::*;
+ match self {
+ UnterminatedString => write!(f, "unterminated string in cfg"),
+ UnexpectedChar(ch) => write!(
+ f,
+ "unexpected character `{}` in cfg, expected parens, a comma, \
+ an identifier, or a string",
+ ch
+ ),
+ UnexpectedToken { expected, found } => {
+ write!(f, "expected {}, found {}", expected, found)
+ }
+ IncompleteExpr(expected) => {
+ write!(f, "expected {}, but cfg expression ended", expected)
+ }
+ UnterminatedExpression(s) => {
+ write!(f, "unexpected content `{}` found after cfg expression", s)
+ }
+ InvalidTarget(s) => write!(f, "invalid target specifier: {}", s),
+ }
+ }
+}
+
+impl std::error::Error for ParseError {}
+
+impl ParseError {
+ pub fn new(orig: &str, kind: ParseErrorKind) -> ParseError {
+ ParseError {
+ kind,
+ orig: orig.to_string(),
+ }
+ }
+}
diff --git a/src/tools/cargo/crates/cargo-platform/src/lib.rs b/src/tools/cargo/crates/cargo-platform/src/lib.rs
new file mode 100644
index 000000000..0a3dcf1af
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-platform/src/lib.rs
@@ -0,0 +1,146 @@
+//! Platform definition used by Cargo.
+//!
+//! This defines a [`Platform`] type which is used in Cargo to specify a target platform.
+//! There are two kinds, a named target like `x86_64-apple-darwin`, and a "cfg expression"
+//! like `cfg(any(target_os = "macos", target_os = "ios"))`.
+//!
+//! See `examples/matches.rs` for an example of how to match against a `Platform`.
+//!
+//! [`Platform`]: enum.Platform.html
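+//!
+//! A minimal sketch of parsing and displaying a `Platform` (using only this
+//! crate's public API):
+//!
+//! ```
+//! use cargo_platform::Platform;
+//! use std::str::FromStr;
+//!
+//! let p = Platform::from_str("cfg(windows)").unwrap();
+//! assert_eq!(p.to_string(), "cfg(windows)");
+//! ```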
+
+use std::fmt;
+use std::str::FromStr;
+
+mod cfg;
+mod error;
+
+pub use cfg::{Cfg, CfgExpr};
+pub use error::{ParseError, ParseErrorKind};
+
+/// Platform definition.
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum Platform {
+ /// A named platform, like `x86_64-apple-darwin`.
+ Name(String),
+ /// A cfg expression, like `cfg(windows)`.
+ Cfg(CfgExpr),
+}
+
+impl Platform {
+ /// Returns whether the Platform matches the given target and cfg.
+ ///
+ /// The named target and cfg values should be obtained from `rustc`.
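+    ///
+    /// For example, `Platform::Name("x86_64-apple-darwin".to_string())` matches
+    /// only when `name` is exactly `"x86_64-apple-darwin"`, while a
+    /// `Platform::Cfg` value delegates to `CfgExpr::matches` against `cfg`.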
+ pub fn matches(&self, name: &str, cfg: &[Cfg]) -> bool {
+ match *self {
+ Platform::Name(ref p) => p == name,
+ Platform::Cfg(ref p) => p.matches(cfg),
+ }
+ }
+
+ fn validate_named_platform(name: &str) -> Result<(), ParseError> {
+ if let Some(ch) = name
+ .chars()
+ .find(|&c| !(c.is_alphanumeric() || c == '_' || c == '-' || c == '.'))
+ {
+ if name.chars().any(|c| c == '(') {
+ return Err(ParseError::new(
+ name,
+ ParseErrorKind::InvalidTarget(
+ "unexpected `(` character, cfg expressions must start with `cfg(`"
+ .to_string(),
+ ),
+ ));
+ }
+ return Err(ParseError::new(
+ name,
+ ParseErrorKind::InvalidTarget(format!(
+ "unexpected character {} in target name",
+ ch
+ )),
+ ));
+ }
+ Ok(())
+ }
+
+ pub fn check_cfg_attributes(&self, warnings: &mut Vec<String>) {
+ fn check_cfg_expr(expr: &CfgExpr, warnings: &mut Vec<String>) {
+ match *expr {
+ CfgExpr::Not(ref e) => check_cfg_expr(e, warnings),
+ CfgExpr::All(ref e) | CfgExpr::Any(ref e) => {
+ for e in e {
+ check_cfg_expr(e, warnings);
+ }
+ }
+ CfgExpr::Value(ref e) => match e {
+ Cfg::Name(name) => match name.as_str() {
+ "test" | "debug_assertions" | "proc_macro" =>
+ warnings.push(format!(
+ "Found `{}` in `target.'cfg(...)'.dependencies`. \
+ This value is not supported for selecting dependencies \
+ and will not work as expected. \
+ To learn more visit \
+ https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#platform-specific-dependencies",
+ name
+ )),
+ _ => (),
+ },
+ Cfg::KeyPair(name, _) => if name.as_str() == "feature" {
+ warnings.push(String::from(
+ "Found `feature = ...` in `target.'cfg(...)'.dependencies`. \
+ This key is not supported for selecting dependencies \
+ and will not work as expected. \
+ Use the [features] section instead: \
+ https://doc.rust-lang.org/cargo/reference/features.html"
+ ))
+ },
+ }
+ }
+ }
+
+ if let Platform::Cfg(cfg) = self {
+ check_cfg_expr(cfg, warnings);
+ }
+ }
+}
+
+impl serde::Serialize for Platform {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ self.to_string().serialize(s)
+ }
+}
+
+impl<'de> serde::Deserialize<'de> for Platform {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ let s = String::deserialize(deserializer)?;
+ FromStr::from_str(&s).map_err(serde::de::Error::custom)
+ }
+}
+
+impl FromStr for Platform {
+ type Err = ParseError;
+
+ fn from_str(s: &str) -> Result<Platform, ParseError> {
+ if s.starts_with("cfg(") && s.ends_with(')') {
+ let s = &s[4..s.len() - 1];
+ s.parse().map(Platform::Cfg)
+ } else {
+ Platform::validate_named_platform(s)?;
+ Ok(Platform::Name(s.to_string()))
+ }
+ }
+}
+
+impl fmt::Display for Platform {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Platform::Name(ref n) => n.fmt(f),
+ Platform::Cfg(ref e) => write!(f, "cfg({})", e),
+ }
+ }
+}
diff --git a/src/tools/cargo/crates/cargo-platform/tests/test_cfg.rs b/src/tools/cargo/crates/cargo-platform/tests/test_cfg.rs
new file mode 100644
index 000000000..dd99d9a79
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-platform/tests/test_cfg.rs
@@ -0,0 +1,251 @@
+use cargo_platform::{Cfg, CfgExpr, Platform};
+use std::fmt;
+use std::str::FromStr;
+
+macro_rules! c {
+ ($a:ident) => {
+ Cfg::Name(stringify!($a).to_string())
+ };
+ ($a:ident = $e:expr) => {
+ Cfg::KeyPair(stringify!($a).to_string(), $e.to_string())
+ };
+}
+
+macro_rules! e {
+ (any($($t:tt),*)) => (CfgExpr::Any(vec![$(e!($t)),*]));
+ (all($($t:tt),*)) => (CfgExpr::All(vec![$(e!($t)),*]));
+ (not($($t:tt)*)) => (CfgExpr::Not(Box::new(e!($($t)*))));
+ (($($t:tt)*)) => (e!($($t)*));
+ ($($t:tt)*) => (CfgExpr::Value(c!($($t)*)));
+}
+
+fn good<T>(s: &str, expected: T)
+where
+ T: FromStr + PartialEq + fmt::Debug,
+ T::Err: fmt::Display,
+{
+ let c = match T::from_str(s) {
+ Ok(c) => c,
+ Err(e) => panic!("failed to parse `{}`: {}", s, e),
+ };
+ assert_eq!(c, expected);
+}
+
+fn bad<T>(s: &str, err: &str)
+where
+ T: FromStr + fmt::Display,
+ T::Err: fmt::Display,
+{
+ let e = match T::from_str(s) {
+ Ok(cfg) => panic!("expected `{}` to not parse but got {}", s, cfg),
+ Err(e) => e.to_string(),
+ };
+ assert!(
+ e.contains(err),
+ "when parsing `{}`,\n\"{}\" not contained \
+ inside: {}",
+ s,
+ err,
+ e
+ );
+}
+
+#[test]
+fn cfg_syntax() {
+ good("foo", c!(foo));
+ good("_bar", c!(_bar));
+ good(" foo", c!(foo));
+ good(" foo ", c!(foo));
+ good(" foo = \"bar\"", c!(foo = "bar"));
+ good("foo=\"\"", c!(foo = ""));
+ good(" foo=\"3\" ", c!(foo = "3"));
+ good("foo = \"3 e\"", c!(foo = "3 e"));
+}
+
+#[test]
+fn cfg_syntax_bad() {
+ bad::<Cfg>("", "but cfg expression ended");
+ bad::<Cfg>(" ", "but cfg expression ended");
+ bad::<Cfg>("\t", "unexpected character");
+ bad::<Cfg>("7", "unexpected character");
+ bad::<Cfg>("=", "expected identifier");
+ bad::<Cfg>(",", "expected identifier");
+ bad::<Cfg>("(", "expected identifier");
+ bad::<Cfg>("foo (", "unexpected content `(` found after cfg expression");
+ bad::<Cfg>("bar =", "expected a string");
+ bad::<Cfg>("bar = \"", "unterminated string");
+ bad::<Cfg>(
+ "foo, bar",
+ "unexpected content `, bar` found after cfg expression",
+ );
+}
+
+#[test]
+fn cfg_expr() {
+ good("foo", e!(foo));
+ good("_bar", e!(_bar));
+ good(" foo", e!(foo));
+ good(" foo ", e!(foo));
+ good(" foo = \"bar\"", e!(foo = "bar"));
+ good("foo=\"\"", e!(foo = ""));
+ good(" foo=\"3\" ", e!(foo = "3"));
+ good("foo = \"3 e\"", e!(foo = "3 e"));
+
+ good("all()", e!(all()));
+ good("all(a)", e!(all(a)));
+ good("all(a, b)", e!(all(a, b)));
+ good("all(a, )", e!(all(a)));
+ good("not(a = \"b\")", e!(not(a = "b")));
+ good("not(all(a))", e!(not(all(a))));
+}
+
+#[test]
+fn cfg_expr_bad() {
+ bad::<CfgExpr>(" ", "but cfg expression ended");
+ bad::<CfgExpr>(" all", "expected `(`");
+ bad::<CfgExpr>("all(a", "expected `)`");
+ bad::<CfgExpr>("not", "expected `(`");
+ bad::<CfgExpr>("not(a", "expected `)`");
+ bad::<CfgExpr>("a = ", "expected a string");
+ bad::<CfgExpr>("all(not())", "expected identifier");
+ bad::<CfgExpr>(
+ "foo(a)",
+ "unexpected content `(a)` found after cfg expression",
+ );
+}
+
+#[test]
+fn cfg_matches() {
+ assert!(e!(foo).matches(&[c!(bar), c!(foo), c!(baz)]));
+ assert!(e!(any(foo)).matches(&[c!(bar), c!(foo), c!(baz)]));
+ assert!(e!(any(foo, bar)).matches(&[c!(bar)]));
+ assert!(e!(any(foo, bar)).matches(&[c!(foo)]));
+ assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
+ assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
+ assert!(e!(not(foo)).matches(&[c!(bar)]));
+ assert!(e!(not(foo)).matches(&[]));
+ assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(bar)]));
+ assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo), c!(bar)]));
+
+ assert!(!e!(foo).matches(&[]));
+ assert!(!e!(foo).matches(&[c!(bar)]));
+ assert!(!e!(foo).matches(&[c!(fo)]));
+ assert!(!e!(any(foo)).matches(&[]));
+ assert!(!e!(any(foo)).matches(&[c!(bar)]));
+ assert!(!e!(any(foo)).matches(&[c!(bar), c!(baz)]));
+ assert!(!e!(all(foo)).matches(&[c!(bar), c!(baz)]));
+ assert!(!e!(all(foo, bar)).matches(&[c!(bar)]));
+ assert!(!e!(all(foo, bar)).matches(&[c!(foo)]));
+ assert!(!e!(all(foo, bar)).matches(&[]));
+ assert!(!e!(not(bar)).matches(&[c!(bar)]));
+ assert!(!e!(not(bar)).matches(&[c!(baz), c!(bar)]));
+ assert!(!e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo)]));
+}
+
+#[test]
+fn bad_target_name() {
+ bad::<Platform>(
+ "any(cfg(unix), cfg(windows))",
+ "failed to parse `any(cfg(unix), cfg(windows))` as a cfg expression: \
+ invalid target specifier: unexpected `(` character, \
+ cfg expressions must start with `cfg(`",
+ );
+ bad::<Platform>(
+ "!foo",
+ "failed to parse `!foo` as a cfg expression: \
+ invalid target specifier: unexpected character ! in target name",
+ );
+}
+
+#[test]
+fn round_trip_platform() {
+ fn rt(s: &str) {
+ let p = Platform::from_str(s).unwrap();
+ let s2 = p.to_string();
+ let p2 = Platform::from_str(&s2).unwrap();
+ assert_eq!(p, p2);
+ }
+ rt("x86_64-apple-darwin");
+ rt("foo");
+ rt("cfg(windows)");
+ rt("cfg(target_os = \"windows\")");
+ rt(
+ "cfg(any(all(any(target_os = \"android\", target_os = \"linux\"), \
+ any(target_arch = \"aarch64\", target_arch = \"arm\", target_arch = \"powerpc64\", \
+ target_arch = \"x86\", target_arch = \"x86_64\")), \
+ all(target_os = \"freebsd\", target_arch = \"x86_64\")))",
+ );
+}
+
+#[test]
+fn check_cfg_attributes() {
+ fn ok(s: &str) {
+ let p = Platform::Cfg(s.parse().unwrap());
+ let mut warnings = Vec::new();
+ p.check_cfg_attributes(&mut warnings);
+ assert!(
+ warnings.is_empty(),
+ "Expected no warnings but got: {:?}",
+ warnings,
+ );
+ }
+
+ fn warn(s: &str, names: &[&str]) {
+ let p = Platform::Cfg(s.parse().unwrap());
+ let mut warnings = Vec::new();
+ p.check_cfg_attributes(&mut warnings);
+ assert_eq!(
+ warnings.len(),
+ names.len(),
+            "Expected warnings about {:?} but got {:?}",
+ names,
+ warnings,
+ );
+ for (name, warning) in names.iter().zip(warnings.iter()) {
+ assert!(
+ warning.contains(name),
+ "Expected warning about '{}' but got: {}",
+ name,
+ warning,
+ );
+ }
+ }
+
+ ok("unix");
+ ok("windows");
+ ok("any(not(unix), windows)");
+ ok("foo");
+
+ ok("target_arch = \"abc\"");
+ ok("target_feature = \"abc\"");
+ ok("target_os = \"abc\"");
+ ok("target_family = \"abc\"");
+ ok("target_env = \"abc\"");
+ ok("target_endian = \"abc\"");
+ ok("target_pointer_width = \"abc\"");
+ ok("target_vendor = \"abc\"");
+ ok("bar = \"def\"");
+
+ warn("test", &["test"]);
+ warn("debug_assertions", &["debug_assertions"]);
+ warn("proc_macro", &["proc_macro"]);
+ warn("feature = \"abc\"", &["feature"]);
+
+ warn("any(not(debug_assertions), windows)", &["debug_assertions"]);
+ warn(
+ "any(not(feature = \"def\"), target_arch = \"abc\")",
+ &["feature"],
+ );
+ warn(
+ "any(not(target_os = \"windows\"), proc_macro)",
+ &["proc_macro"],
+ );
+ warn(
+ "any(not(feature = \"windows\"), proc_macro)",
+ &["feature", "proc_macro"],
+ );
+ warn(
+ "all(not(debug_assertions), any(windows, proc_macro))",
+ &["debug_assertions", "proc_macro"],
+ );
+}
diff --git a/src/tools/cargo/crates/cargo-test-macro/Cargo.toml b/src/tools/cargo/crates/cargo-test-macro/Cargo.toml
new file mode 100644
index 000000000..04dafc028
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-macro/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "cargo-test-macro"
+version = "0.1.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+homepage = "https://github.com/rust-lang/cargo"
+repository = "https://github.com/rust-lang/cargo"
+documentation = "https://github.com/rust-lang/cargo"
+description = "Helper proc-macro for Cargo's testsuite."
+
+[lib]
+proc-macro = true
diff --git a/src/tools/cargo/crates/cargo-test-macro/src/lib.rs b/src/tools/cargo/crates/cargo-test-macro/src/lib.rs
new file mode 100644
index 000000000..aa06f477d
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-macro/src/lib.rs
@@ -0,0 +1,245 @@
+extern crate proc_macro;
+
+use proc_macro::*;
+use std::process::Command;
+use std::sync::Once;
+
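+/// Replacement for `#[test]` used throughout Cargo's testsuite.
+///
+/// A usage sketch (the rule and reason mirror the panic message below; the
+/// test body is hypothetical):
+///
+/// ```ignore
+/// #[cargo_test(nightly, reason = "needs -Z unstable-thing")]
+/// fn my_nightly_test() {
+///     // ...
+/// }
+/// ```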
+#[proc_macro_attribute]
+pub fn cargo_test(attr: TokenStream, item: TokenStream) -> TokenStream {
+ // Ideally these options would be embedded in the test itself. However, I
+ // find it very helpful to have the test clearly state whether or not it
+ // is ignored. It would be nice to have some kind of runtime ignore
+ // support (such as
+ // https://internals.rust-lang.org/t/pre-rfc-skippable-tests/14611).
+ //
+ // Unfortunately a big drawback here is that if the environment changes
+ // (such as the existence of the `git` CLI), this will not trigger a
+ // rebuild and the test will still be ignored. In theory, something like
+ // `tracked_env` or `tracked_path`
+ // (https://github.com/rust-lang/rust/issues/99515) could help with this,
+ // but they don't really handle the absence of files well.
+ let mut ignore = false;
+ let mut requires_reason = false;
+ let mut explicit_reason = None;
+ let mut implicit_reasons = Vec::new();
+ macro_rules! set_ignore {
+ ($predicate:expr, $($arg:tt)*) => {
+ let p = $predicate;
+ ignore |= p;
+ if p {
+ implicit_reasons.push(std::fmt::format(format_args!($($arg)*)));
+ }
+ };
+ }
+ let is_not_nightly = !version().1;
+ for rule in split_rules(attr) {
+ match rule.as_str() {
+ "build_std_real" => {
+ // Only run the "real" build-std tests on nightly and with an
+                // explicit opt-in (these generally only work on Linux, have
+                // some extra requirements, are slow, and can pollute the
+                // environment since they download dependencies).
+ set_ignore!(is_not_nightly, "requires nightly");
+ set_ignore!(
+ option_env!("CARGO_RUN_BUILD_STD_TESTS").is_none(),
+ "CARGO_RUN_BUILD_STD_TESTS must be set"
+ );
+ }
+ "build_std_mock" => {
+ // Only run the "mock" build-std tests on nightly and disable
+ // for windows-gnu which is missing object files (see
+ // https://github.com/rust-lang/wg-cargo-std-aware/issues/46).
+ set_ignore!(is_not_nightly, "requires nightly");
+ set_ignore!(
+ cfg!(all(target_os = "windows", target_env = "gnu")),
+ "does not work on windows-gnu"
+ );
+ }
+ "container_test" => {
+ // These tests must be opt-in because they require docker.
+ set_ignore!(
+ option_env!("CARGO_CONTAINER_TESTS").is_none(),
+ "CARGO_CONTAINER_TESTS must be set"
+ );
+ }
+ "public_network_test" => {
+ // These tests must be opt-in because they touch the public
+ // network. The use of these should be **EXTREMELY RARE**, and
+ // should only touch things which would nearly certainly work
+ // in CI (like github.com).
+ set_ignore!(
+ option_env!("CARGO_PUBLIC_NETWORK_TESTS").is_none(),
+ "CARGO_PUBLIC_NETWORK_TESTS must be set"
+ );
+ }
+ "nightly" => {
+ requires_reason = true;
+ set_ignore!(is_not_nightly, "requires nightly");
+ }
+ s if s.starts_with("requires_") => {
+ let command = &s[9..];
+ set_ignore!(!has_command(command), "{command} not installed");
+ }
+ s if s.starts_with(">=1.") => {
+ requires_reason = true;
+ let min_minor = s[4..].parse().unwrap();
+ let minor = version().0;
+ set_ignore!(minor < min_minor, "requires rustc 1.{minor} or newer");
+ }
+ s if s.starts_with("reason=") => {
+ explicit_reason = Some(s[7..].parse().unwrap());
+ }
+ s if s.starts_with("ignore_windows=") => {
+ set_ignore!(cfg!(windows), "{}", &s[16..s.len() - 1]);
+ }
+ _ => panic!("unknown rule {:?}", rule),
+ }
+ }
+ if requires_reason && explicit_reason.is_none() {
+ panic!(
+ "#[cargo_test] with a rule also requires a reason, \
+ such as #[cargo_test(nightly, reason = \"needs -Z unstable-thing\")]"
+ );
+ }
+
+ // Construct the appropriate attributes.
+ let span = Span::call_site();
+ let mut ret = TokenStream::new();
+ let add_attr = |ret: &mut TokenStream, attr_name, attr_input| {
+ ret.extend(Some(TokenTree::from(Punct::new('#', Spacing::Alone))));
+ let attr = TokenTree::from(Ident::new(attr_name, span));
+ let mut attr_stream: TokenStream = attr.into();
+ if let Some(input) = attr_input {
+ attr_stream.extend(input);
+ }
+ ret.extend(Some(TokenTree::from(Group::new(
+ Delimiter::Bracket,
+ attr_stream,
+ ))));
+ };
+ add_attr(&mut ret, "test", None);
+ if ignore {
+ let reason = explicit_reason
+ .or_else(|| {
+ (!implicit_reasons.is_empty())
+ .then(|| TokenTree::from(Literal::string(&implicit_reasons.join(", "))).into())
+ })
+ .map(|reason: TokenStream| {
+ let mut stream = TokenStream::new();
+ stream.extend(Some(TokenTree::from(Punct::new('=', Spacing::Alone))));
+ stream.extend(Some(reason));
+ stream
+ });
+ add_attr(&mut ret, "ignore", reason);
+ }
+
+ // Find where the function body starts, and add the boilerplate at the start.
+ for token in item {
+ let group = match token {
+ TokenTree::Group(g) => {
+ if g.delimiter() == Delimiter::Brace {
+ g
+ } else {
+ ret.extend(Some(TokenTree::Group(g)));
+ continue;
+ }
+ }
+ other => {
+ ret.extend(Some(other));
+ continue;
+ }
+ };
+
+ let mut new_body = to_token_stream(
+ r#"let _test_guard = {
+ let tmp_dir = option_env!("CARGO_TARGET_TMPDIR");
+ cargo_test_support::paths::init_root(tmp_dir)
+ };"#,
+ );
+
+ new_body.extend(group.stream());
+ ret.extend(Some(TokenTree::from(Group::new(
+ group.delimiter(),
+ new_body,
+ ))));
+ }
+
+ ret
+}
+
+fn split_rules(t: TokenStream) -> Vec<String> {
+ let tts: Vec<_> = t.into_iter().collect();
+ tts.split(|tt| match tt {
+ TokenTree::Punct(p) => p.as_char() == ',',
+ _ => false,
+ })
+ .filter(|parts| !parts.is_empty())
+ .map(|parts| {
+ parts
+ .into_iter()
+ .map(|part| part.to_string())
+ .collect::<String>()
+ })
+ .collect()
+}
+
+fn to_token_stream(code: &str) -> TokenStream {
+ code.parse().unwrap()
+}
+
+static mut VERSION: (u32, bool) = (0, false);
+
+fn version() -> &'static (u32, bool) {
+ static INIT: Once = Once::new();
+ INIT.call_once(|| {
+ let output = Command::new("rustc")
+ .arg("-V")
+ .output()
+ .expect("rustc should run");
+ let stdout = std::str::from_utf8(&output.stdout).expect("utf8");
+ let vers = stdout.split_whitespace().skip(1).next().unwrap();
+ let is_nightly = option_env!("CARGO_TEST_DISABLE_NIGHTLY").is_none()
+ && (vers.contains("-nightly") || vers.contains("-dev"));
+ let minor = vers.split('.').skip(1).next().unwrap().parse().unwrap();
+ unsafe { VERSION = (minor, is_nightly) }
+ });
+ unsafe { &VERSION }
+}
+
+fn has_command(command: &str) -> bool {
+ let output = match Command::new(command).arg("--version").output() {
+ Ok(output) => output,
+ Err(e) => {
+ // hg is not installed on GitHub macOS or certain constrained
+ // environments like Docker. Consider installing it if Cargo gains
+ // more hg support, but otherwise it isn't critical.
+ if is_ci() && command != "hg" {
+ panic!(
+ "expected command `{}` to be somewhere in PATH: {}",
+ command, e
+ );
+ }
+ return false;
+ }
+ };
+ if !output.status.success() {
+ panic!(
+ "expected command `{}` to be runnable, got error {}:\n\
+ stderr:{}\n\
+ stdout:{}\n",
+ command,
+ output.status,
+ String::from_utf8_lossy(&output.stderr),
+ String::from_utf8_lossy(&output.stdout)
+ );
+ }
+ true
+}
+
+/// Whether or not this is running in a Continuous Integration environment.
+fn is_ci() -> bool {
+ // Consider using `tracked_env` instead of option_env! when it is stabilized.
+ // `tracked_env` will handle changes, but not require rebuilding the macro
+ // itself like option_env does.
+ option_env!("CI").is_some() || option_env!("TF_BUILD").is_some()
+}
diff --git a/src/tools/cargo/crates/cargo-test-support/Cargo.toml b/src/tools/cargo/crates/cargo-test-support/Cargo.toml
new file mode 100644
index 000000000..91e6e4e34
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/Cargo.toml
@@ -0,0 +1,32 @@
+[package]
+name = "cargo-test-support"
+version = "0.1.0"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+
+[lib]
+doctest = false
+
+[dependencies]
+anyhow = "1.0.34"
+cargo-test-macro = { path = "../cargo-test-macro" }
+cargo-util = { path = "../cargo-util" }
+crates-io = { path = "../crates-io" }
+filetime = "0.2"
+flate2 = { version = "1.0", default-features = false, features = ["zlib"] }
+git2 = "0.17.0"
+glob = "0.3"
+itertools = "0.10.0"
+lazy_static = "1.0"
+pasetors = { version = "0.6.4", features = ["v3", "paserk", "std", "serde"] }
+serde = { version = "1.0.123", features = ["derive"] }
+serde_json = "1.0"
+snapbox = { version = "0.4.0", features = ["diff", "path"] }
+tar = { version = "0.4.38", default-features = false }
+termcolor = "1.1.2"
+time = { version = "0.3", features = ["parsing", "formatting"]}
+toml = "0.7.0"
+url = "2.2.2"
+
+[target.'cfg(windows)'.dependencies]
+windows-sys = { version = "0.45.0", features = ["Win32_Storage_FileSystem"] }
diff --git a/src/tools/cargo/crates/cargo-test-support/build.rs b/src/tools/cargo/crates/cargo-test-support/build.rs
new file mode 100644
index 000000000..478da7d99
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/build.rs
@@ -0,0 +1,7 @@
+fn main() {
+ println!(
+ "cargo:rustc-env=NATIVE_ARCH={}",
+ std::env::var("TARGET").unwrap()
+ );
+ println!("cargo:rerun-if-changed=build.rs");
+}
diff --git a/src/tools/cargo/crates/cargo-test-support/containers/apache/Dockerfile b/src/tools/cargo/crates/cargo-test-support/containers/apache/Dockerfile
new file mode 100644
index 000000000..872602410
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/containers/apache/Dockerfile
@@ -0,0 +1,26 @@
+FROM httpd:2.4-alpine
+
+RUN apk add --no-cache git git-daemon openssl
+
+COPY bar /repos/bar
+WORKDIR /repos/bar
+RUN git config --global user.email "testuser@example.com" &&\
+ git config --global user.name "Test User" &&\
+ git init -b master . &&\
+ git add Cargo.toml src &&\
+ git commit -m "Initial commit" &&\
+ mv .git ../bar.git &&\
+ cd ../bar.git &&\
+ git config --bool core.bare true &&\
+ rm -rf ../bar
+WORKDIR /
+
+EXPOSE 443
+
+WORKDIR /usr/local/apache2/conf
+COPY httpd-cargo.conf .
+RUN cat httpd-cargo.conf >> httpd.conf
+RUN openssl req -x509 -nodes -days 3650 -newkey rsa:2048 \
+ -keyout server.key -out server.crt \
+ -subj "/emailAddress=webmaster@example.com/C=US/ST=California/L=San Francisco/O=Rust/OU=Cargo/CN=127.0.0.1"
+WORKDIR /
diff --git a/src/tools/cargo/crates/cargo-test-support/containers/apache/bar/Cargo.toml b/src/tools/cargo/crates/cargo-test-support/containers/apache/bar/Cargo.toml
new file mode 100644
index 000000000..84fd5d89b
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/containers/apache/bar/Cargo.toml
@@ -0,0 +1,4 @@
+[package]
+name = "bar"
+version = "1.0.0"
+edition = "2021"
diff --git a/src/tools/cargo/crates/cargo-test-support/containers/apache/bar/src/lib.rs b/src/tools/cargo/crates/cargo-test-support/containers/apache/bar/src/lib.rs
new file mode 100644
index 000000000..ca74e3aec
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/containers/apache/bar/src/lib.rs
@@ -0,0 +1 @@
+// Intentionally blank.
diff --git a/src/tools/cargo/crates/cargo-test-support/containers/apache/httpd-cargo.conf b/src/tools/cargo/crates/cargo-test-support/containers/apache/httpd-cargo.conf
new file mode 100644
index 000000000..a4ba7d524
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/containers/apache/httpd-cargo.conf
@@ -0,0 +1,12 @@
+SetEnv GIT_PROJECT_ROOT /repos
+SetEnv GIT_HTTP_EXPORT_ALL
+ScriptAlias /repos /usr/libexec/git-core/git-http-backend/
+LoadModule cgid_module modules/mod_cgid.so
+
+<Files "git-http-backend">
+ Require all granted
+</Files>
+
+Include conf/extra/httpd-ssl.conf
+LoadModule ssl_module modules/mod_ssl.so
+LoadModule socache_shmcb_module modules/mod_socache_shmcb.so
diff --git a/src/tools/cargo/crates/cargo-test-support/containers/sshd/Dockerfile b/src/tools/cargo/crates/cargo-test-support/containers/sshd/Dockerfile
new file mode 100644
index 000000000..b52eefbad
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/containers/sshd/Dockerfile
@@ -0,0 +1,29 @@
+FROM alpine:3.17
+
+RUN apk add --no-cache openssh git
+RUN ssh-keygen -A
+
+RUN addgroup -S testuser && adduser -S testuser -G testuser -s /bin/ash
+# NOTE: Ideally the password should be set to *, but I am uncertain how to do
+# that in alpine. It shouldn't matter since PermitEmptyPasswords is "no".
+RUN passwd -u testuser
+
+RUN mkdir /repos && chown testuser /repos
+COPY --chown=testuser:testuser bar /repos/bar
+USER testuser
+WORKDIR /repos/bar
+RUN git config --global user.email "testuser@example.com" &&\
+ git config --global user.name "Test User" &&\
+ git init -b master . &&\
+ git add Cargo.toml src &&\
+ git commit -m "Initial commit" &&\
+ mv .git ../bar.git &&\
+ cd ../bar.git &&\
+ git config --bool core.bare true &&\
+ rm -rf ../bar
+WORKDIR /
+USER root
+
+EXPOSE 22
+
+ENTRYPOINT ["/usr/sbin/sshd", "-D", "-E", "/var/log/auth.log"]
diff --git a/src/tools/cargo/crates/cargo-test-support/containers/sshd/bar/Cargo.toml b/src/tools/cargo/crates/cargo-test-support/containers/sshd/bar/Cargo.toml
new file mode 100644
index 000000000..84fd5d89b
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/containers/sshd/bar/Cargo.toml
@@ -0,0 +1,4 @@
+[package]
+name = "bar"
+version = "1.0.0"
+edition = "2021"
diff --git a/src/tools/cargo/crates/cargo-test-support/containers/sshd/bar/src/lib.rs b/src/tools/cargo/crates/cargo-test-support/containers/sshd/bar/src/lib.rs
new file mode 100644
index 000000000..ca74e3aec
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/containers/sshd/bar/src/lib.rs
@@ -0,0 +1 @@
+// Intentionally blank.
diff --git a/src/tools/cargo/crates/cargo-test-support/src/compare.rs b/src/tools/cargo/crates/cargo-test-support/src/compare.rs
new file mode 100644
index 000000000..96ce52afc
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/src/compare.rs
@@ -0,0 +1,784 @@
+//! Routines for comparing and diffing output.
+//!
+//! # Patterns
+//!
+//! Many of these functions support special markup to assist with comparing
+//! text that may vary or is otherwise uninteresting for the test at hand. The
+//! supported patterns are:
+//!
+//! - `[..]` is a wildcard that matches 0 or more characters on the same line
+//! (similar to `.*` in a regex). It is non-greedy.
+//! - `[EXE]` optionally adds `.exe` on Windows (empty string on other
+//! platforms).
+//! - `[ROOT]` is the path to the test directory's root.
+//! - `[CWD]` is the working directory of the process that was run.
+//! - There is a wide range of substitutions (such as `[COMPILING]` or
+//! `[WARNING]`) that match cargo's "status" output and let you ignore the
+//! alignment. See the source of `substitute_macros` for a complete list
+//! of substitutions.
+//! - `[DIRTY-MSVC]` (only when the line starts with it) is replaced by
+//! `[DIRTY]` when `cfg(target_env = "msvc")`; otherwise the whole line is ignored.
+//! Tests that work around [issue 7358](https://github.com/rust-lang/cargo/issues/7358)
+//! can use this to avoid duplicating the `with_stderr` call like:
+//! `if cfg!(target_env = "msvc") {e.with_stderr("...[DIRTY]...");} else {e.with_stderr("...");}`.
+//!
+//! # Normalization
+//!
+//! In addition to the patterns described above, the strings are normalized
+//! to avoid unwanted differences. The normalizations are:
+//!
+//! - Raw tab characters are converted to the string `<tab>`. This is helpful
+//! so that raw tabs do not need to be written in the expected string, and
+//! to avoid confusion of tabs vs spaces.
+//! - Backslashes are converted to forward slashes to deal with Windows paths.
+//! This helps so that all tests can be written assuming forward slashes.
+//! Other heuristics are applied to try to ensure Windows-style paths aren't
+//! a problem.
+//! - Carriage returns are removed, which can help when running on Windows.
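+//!
+//! # Example
+//!
+//! A minimal sketch of how a test typically uses these patterns (the project
+//! `p` and its expected output are hypothetical):
+//!
+//! ```ignore
+//! p.cargo("build")
+//!     .with_stderr(
+//!         "\
+//! [COMPILING] foo v0.0.1 ([CWD])
+//! [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+//! ",
+//!     )
+//!     .run();
+//! ```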
+
+use crate::diff;
+use crate::paths;
+use anyhow::{bail, Context, Result};
+use serde_json::Value;
+use std::env;
+use std::fmt;
+use std::path::Path;
+use std::str;
+use url::Url;
+
+/// Default `snapbox` Assertions
+///
+/// # Snapshots
+///
+/// Updating of snapshots is controlled with the `SNAPSHOTS` environment variable:
+///
+/// - `skip`: do not run the tests
+/// - `ignore`: run the tests but ignore their failure
+/// - `verify`: run the tests
+/// - `overwrite`: update the snapshots based on the output of the tests
+///
+/// # Patterns
+///
+/// - `[..]` is a character wildcard, stopping at line breaks
+/// - `\n...\n` is a multi-line wildcard
+/// - `[EXE]` matches the exe suffix for the current platform
+/// - `[ROOT]` matches [`paths::root()`][crate::paths::root]
+/// - `[ROOTURL]` matches [`paths::root()`][crate::paths::root] as a URL
+///
+/// # Normalization
+///
+/// In addition to the patterns described above, text is normalized
+/// to avoid unwanted differences. The normalizations are:
+///
+/// - Backslashes are converted to forward slashes to deal with Windows paths.
+/// This helps so that all tests can be written assuming forward slashes.
+/// Other heuristics are applied to try to ensure Windows-style paths aren't
+/// a problem.
+/// - Carriage returns are removed, which can help when running on Windows.
+pub fn assert_ui() -> snapbox::Assert {
+ let root = paths::root();
+ // Use `from_file_path` instead of `from_dir_path` so the trailing slash is
+ // put in the user's output, rather than hidden in the variable.
+ let root_url = url::Url::from_file_path(&root).unwrap().to_string();
+ let root = root.display().to_string();
+
+ let mut subs = snapbox::Substitutions::new();
+ subs.extend([
+ (
+ "[EXE]",
+ std::borrow::Cow::Borrowed(std::env::consts::EXE_SUFFIX),
+ ),
+ ("[ROOT]", std::borrow::Cow::Owned(root)),
+ ("[ROOTURL]", std::borrow::Cow::Owned(root_url)),
+ ])
+ .unwrap();
+ snapbox::Assert::new()
+ .action_env(snapbox::DEFAULT_ACTION_ENV)
+ .substitutions(subs)
+}
+
+/// Normalizes the output so that it can be compared against the expected value.
+fn normalize_actual(actual: &str, cwd: Option<&Path>) -> String {
+ // It's easier to read tabs in outputs if they don't show up as literal
+ // hidden characters
+ let actual = actual.replace('\t', "<tab>");
+ if cfg!(windows) {
+ // Let's not deal with \r\n vs \n on windows...
+ let actual = actual.replace('\r', "");
+ normalize_windows(&actual, cwd)
+ } else {
+ actual
+ }
+}
+
+/// Normalizes the expected string so that it can be compared against the actual output.
+fn normalize_expected(expected: &str, cwd: Option<&Path>) -> String {
+ let expected = replace_dirty_msvc(expected);
+ let expected = substitute_macros(&expected);
+
+ if cfg!(windows) {
+ normalize_windows(&expected, cwd)
+ } else {
+ let expected = match cwd {
+ None => expected,
+ Some(cwd) => expected.replace("[CWD]", &cwd.display().to_string()),
+ };
+ let expected = expected.replace("[ROOT]", &paths::root().display().to_string());
+ expected
+ }
+}
+
+fn replace_dirty_msvc_impl(s: &str, is_msvc: bool) -> String {
+ if is_msvc {
+ s.replace("[DIRTY-MSVC]", "[DIRTY]")
+ } else {
+ use itertools::Itertools;
+
+ let mut new = s
+ .lines()
+ .filter(|it| !it.starts_with("[DIRTY-MSVC]"))
+ .join("\n");
+
+ if s.ends_with("\n") {
+ new.push_str("\n");
+ }
+
+ new
+ }
+}
+
+fn replace_dirty_msvc(s: &str) -> String {
+ replace_dirty_msvc_impl(s, cfg!(target_env = "msvc"))
+}
+
+/// Normalizes text for both actual and expected strings on Windows.
+fn normalize_windows(text: &str, cwd: Option<&Path>) -> String {
+ // Let's not deal with / vs \ (windows...)
+ let text = text.replace('\\', "/");
+
+ // Weirdness for paths on Windows extends beyond `/` vs `\` apparently.
+ // Namely paths like `c:\` and `C:\` are equivalent and that can cause
+ // issues. The return value of `env::current_dir()` may return a
+ // lowercase drive name, but we round-trip a lot of values through `Url`
+ // which will auto-uppercase the drive name. To just ignore this
+ // distinction we try to canonicalize as much as possible, taking all
+ // forms of a path and canonicalizing them to one.
+ let replace_path = |s: &str, path: &Path, with: &str| {
+ let path_through_url = Url::from_file_path(path).unwrap().to_file_path().unwrap();
+ let path1 = path.display().to_string().replace('\\', "/");
+ let path2 = path_through_url.display().to_string().replace('\\', "/");
+ s.replace(&path1, with)
+ .replace(&path2, with)
+ .replace(with, &path1)
+ };
+
+ let text = match cwd {
+ None => text,
+ Some(p) => replace_path(&text, p, "[CWD]"),
+ };
+
+ // Similar to cwd above, perform similar treatment to the root path
+ // which in theory all of our paths should otherwise get rooted at.
+ let root = paths::root();
+ let text = replace_path(&text, &root, "[ROOT]");
+
+ text
+}
+
+fn substitute_macros(input: &str) -> String {
+ let macros = [
+ ("[RUNNING]", " Running"),
+ ("[COMPILING]", " Compiling"),
+ ("[CHECKING]", " Checking"),
+ ("[COMPLETED]", " Completed"),
+ ("[CREATED]", " Created"),
+ ("[DOWNGRADING]", " Downgrading"),
+ ("[FINISHED]", " Finished"),
+ ("[ERROR]", "error:"),
+ ("[WARNING]", "warning:"),
+ ("[NOTE]", "note:"),
+ ("[HELP]", "help:"),
+ ("[DOCUMENTING]", " Documenting"),
+ ("[SCRAPING]", " Scraping"),
+ ("[FRESH]", " Fresh"),
+ ("[DIRTY]", " Dirty"),
+ ("[UPDATING]", " Updating"),
+ ("[ADDING]", " Adding"),
+ ("[REMOVING]", " Removing"),
+ ("[DOCTEST]", " Doc-tests"),
+ ("[PACKAGING]", " Packaging"),
+ ("[PACKAGED]", " Packaged"),
+ ("[DOWNLOADING]", " Downloading"),
+ ("[DOWNLOADED]", " Downloaded"),
+ ("[UPLOADING]", " Uploading"),
+ ("[UPLOADED]", " Uploaded"),
+ ("[VERIFYING]", " Verifying"),
+ ("[ARCHIVING]", " Archiving"),
+ ("[INSTALLING]", " Installing"),
+ ("[REPLACING]", " Replacing"),
+ ("[UNPACKING]", " Unpacking"),
+ ("[SUMMARY]", " Summary"),
+ ("[FIXED]", " Fixed"),
+ ("[FIXING]", " Fixing"),
+ ("[EXE]", env::consts::EXE_SUFFIX),
+ ("[IGNORED]", " Ignored"),
+ ("[INSTALLED]", " Installed"),
+ ("[REPLACED]", " Replaced"),
+ ("[BUILDING]", " Building"),
+ ("[LOGIN]", " Login"),
+ ("[LOGOUT]", " Logout"),
+ ("[YANK]", " Yank"),
+ ("[OWNER]", " Owner"),
+ ("[MIGRATING]", " Migrating"),
+ ("[EXECUTABLE]", " Executable"),
+ ("[SKIPPING]", " Skipping"),
+ ("[WAITING]", " Waiting"),
+ ("[PUBLISHED]", " Published"),
+ ];
+ let mut result = input.to_owned();
+ for &(pat, subst) in &macros {
+ result = result.replace(pat, subst)
+ }
+ result
+}
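+
+#[test]
+fn substitute_macros_examples() {
+    // Illustrative checks of a few substitutions that carry no alignment
+    // padding, so the exact replacement text is stable across platforms.
+    assert_eq!(substitute_macros("[ERROR] oops"), "error: oops");
+    assert_eq!(substitute_macros("[WARNING] hmm"), "warning: hmm");
+    assert_eq!(
+        substitute_macros("foo[EXE]"),
+        format!("foo{}", env::consts::EXE_SUFFIX)
+    );
+}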
+
+/// Compares one string against another, checking that they both match.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+///
+/// - `description` explains where the output is from (usually "stdout" or "stderr").
+/// - `other_output` is other output to display in the error (usually stdout or stderr).
+pub fn match_exact(
+ expected: &str,
+ actual: &str,
+ description: &str,
+ other_output: &str,
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(WildStr::new).collect();
+ let a: Vec<_> = actual.lines().map(WildStr::new).collect();
+ if e == a {
+ return Ok(());
+ }
+ let diff = diff::colored_diff(&e, &a);
+ bail!(
+ "{} did not match:\n\
+ {}\n\n\
+ other output:\n\
+ {}\n",
+ description,
+ diff,
+ other_output,
+ );
+}
+
+/// Convenience wrapper around [`match_exact`] which will panic on error.
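+///
+/// A small illustration (`build_stderr` is a hypothetical captured output):
+///
+/// ```ignore
+/// // `[..]` matches any run of characters within a single line.
+/// assert_match_exact("[FRESH] foo v1.0.0 ([..])", &build_stderr);
+/// ```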
+#[track_caller]
+pub fn assert_match_exact(expected: &str, actual: &str) {
+ if let Err(e) = match_exact(expected, actual, "", "", None) {
+ crate::panic_error("", e);
+ }
+}
+
+/// Checks that the given string contains the given lines, ignoring the order
+/// of the lines.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_unordered(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect();
+ let mut a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect();
+ // Match more-constrained lines first; in theory we'd need some sort of
+ // recursive match here. This handles the case where you expect
+ // "a\n[..]b" and two lines are printed out, "ab\na": technically we do
+ // match unordered, but a naive search fails to find this. This simple
+ // sort at least gets the test suite to pass for now, but we may need to
+ // get more fancy if tests start failing again.
+ a.sort_by_key(|s| s.line.len());
+ let mut changes = Vec::new();
+ let mut a_index = 0;
+ let mut failure = false;
+
+ use crate::diff::Change;
+ for (e_i, e_line) in e.into_iter().enumerate() {
+ match a.iter().position(|a_line| e_line == *a_line) {
+ Some(index) => {
+ let a_line = a.remove(index);
+ changes.push(Change::Keep(e_i, index, a_line));
+ a_index += 1;
+ }
+ None => {
+ failure = true;
+ changes.push(Change::Remove(e_i, e_line));
+ }
+ }
+ }
+ for unmatched in a {
+ failure = true;
+ changes.push(Change::Add(a_index, unmatched));
+ a_index += 1;
+ }
+ if failure {
+ bail!(
+ "Expected lines did not match (ignoring order):\n{}\n",
+ diff::render_colored_changes(&changes)
+ );
+ } else {
+ Ok(())
+ }
+}
+
+/// Checks that the given string contains the given contiguous lines
+/// somewhere.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_contains(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect();
+ let a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect();
+ if e.len() == 0 {
+ bail!("expected length must not be zero");
+ }
+ for window in a.windows(e.len()) {
+ if window == e {
+ return Ok(());
+ }
+ }
+ bail!(
+ "expected to find:\n\
+ {}\n\n\
+ did not find in output:\n\
+ {}",
+ expected,
+ actual
+ );
+}
+
+/// Checks that the given string does not contain the given contiguous lines
+/// anywhere.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_does_not_contain(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ if match_contains(expected, actual, cwd).is_ok() {
+ bail!(
+ "expected not to find:\n\
+ {}\n\n\
+ but found in output:\n\
+ {}",
+ expected,
+ actual
+ );
+ } else {
+ Ok(())
+ }
+}
+
+/// Checks that the given string contains the given contiguous lines
+/// somewhere, repeated exactly `number` times.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_contains_n(
+ expected: &str,
+ number: usize,
+ actual: &str,
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect();
+ let a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect();
+ if e.len() == 0 {
+ bail!("expected length must not be zero");
+ }
+ let matches = a.windows(e.len()).filter(|window| *window == e).count();
+ if matches == number {
+ Ok(())
+ } else {
+ bail!(
+ "expected to find {} occurrences of:\n\
+ {}\n\n\
+ but found {} matches in the output:\n\
+ {}",
+ number,
+ expected,
+ matches,
+ actual
+ )
+ }
+}
+
+/// Checks that the given string has a line that contains the given patterns,
+/// and that line also does not contain the `without` patterns.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+///
+/// See [`crate::Execs::with_stderr_line_without`] for an example and cautions
+/// against using.
+pub fn match_with_without(
+ actual: &str,
+ with: &[String],
+ without: &[String],
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let actual = normalize_actual(actual, cwd);
+ let norm = |s: &String| format!("[..]{}[..]", normalize_expected(s, cwd));
+ let with: Vec<_> = with.iter().map(norm).collect();
+ let without: Vec<_> = without.iter().map(norm).collect();
+ let with_wild: Vec<_> = with.iter().map(|w| WildStr::new(w)).collect();
+ let without_wild: Vec<_> = without.iter().map(|w| WildStr::new(w)).collect();
+
+ let matches: Vec<_> = actual
+ .lines()
+ .map(WildStr::new)
+ .filter(|line| with_wild.iter().all(|with| with == line))
+ .filter(|line| !without_wild.iter().any(|without| without == line))
+ .collect();
+ match matches.len() {
+ 0 => bail!(
+ "Could not find expected line in output.\n\
+ With contents: {:?}\n\
+ Without contents: {:?}\n\
+ Actual stderr:\n\
+ {}\n",
+ with,
+ without,
+ actual
+ ),
+ 1 => Ok(()),
+ _ => bail!(
+ "Found multiple matching lines, but only expected one.\n\
+ With contents: {:?}\n\
+ Without contents: {:?}\n\
+ Matching lines:\n\
+ {}\n",
+ with,
+ without,
+ itertools::join(matches, "\n")
+ ),
+ }
+}
+
+/// Checks that the given string of JSON objects match the given set of
+/// expected JSON objects.
+///
+/// See [`crate::Execs::with_json`] for more details.
+pub fn match_json(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ let (exp_objs, act_objs) = collect_json_objects(expected, actual)?;
+ if exp_objs.len() != act_objs.len() {
+ bail!(
+ "expected {} json lines, got {}, stdout:\n{}",
+ exp_objs.len(),
+ act_objs.len(),
+ actual
+ );
+ }
+ for (exp_obj, act_obj) in exp_objs.iter().zip(act_objs) {
+ find_json_mismatch(exp_obj, &act_obj, cwd)?;
+ }
+ Ok(())
+}
+
+/// Checks that the given string of JSON objects match the given set of
+/// expected JSON objects, ignoring their order.
+///
+/// See [`crate::Execs::with_json_contains_unordered`] for more details and
+/// cautions when using.
+pub fn match_json_contains_unordered(
+ expected: &str,
+ actual: &str,
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let (exp_objs, mut act_objs) = collect_json_objects(expected, actual)?;
+ for exp_obj in exp_objs {
+ match act_objs
+ .iter()
+ .position(|act_obj| find_json_mismatch(&exp_obj, act_obj, cwd).is_ok())
+ {
+ Some(index) => act_objs.remove(index),
+ None => {
+ bail!(
+ "Did not find expected JSON:\n\
+ {}\n\
+ Remaining available output:\n\
+ {}\n",
+ serde_json::to_string_pretty(&exp_obj).unwrap(),
+ itertools::join(
+ act_objs.iter().map(|o| serde_json::to_string(o).unwrap()),
+ "\n"
+ )
+ );
+ }
+ };
+ }
+ Ok(())
+}
+
+fn collect_json_objects(
+ expected: &str,
+ actual: &str,
+) -> Result<(Vec<serde_json::Value>, Vec<serde_json::Value>)> {
+ let expected_objs: Vec<_> = expected
+ .split("\n\n")
+ .map(|expect| {
+ expect
+ .parse()
+ .with_context(|| format!("failed to parse expected JSON object:\n{}", expect))
+ })
+ .collect::<Result<_>>()?;
+ let actual_objs: Vec<_> = actual
+ .lines()
+ .filter(|line| line.starts_with('{'))
+ .map(|line| {
+ line.parse()
+ .with_context(|| format!("failed to parse JSON object:\n{}", line))
+ })
+ .collect::<Result<_>>()?;
+ Ok((expected_objs, actual_objs))
+}
+
+/// Compares a JSON object for approximate equality.
+/// You can use the `[..]` wildcard in strings (useful for OS-dependent things
+/// such as paths). You can use a `"{...}"` string literal as a wildcard for
+/// arbitrary nested JSON, which is useful for parts of the object emitted by
+/// other programs (e.g., rustc) rather than Cargo itself.
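+///
+/// A sketch of an expected object (the values are illustrative):
+///
+/// ```ignore
+/// let expected = serde_json::json!({
+///     "reason": "compiler-message",
+///     // `"{...}"` accepts any nested JSON here.
+///     "message": "{...}",
+///     // `[..]` ignores the OS-dependent path prefix.
+///     "target": { "src_path": "[..]/src/lib.rs" },
+/// });
+/// find_json_mismatch(&expected, &actual, None)?;
+/// ```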
+pub fn find_json_mismatch(expected: &Value, actual: &Value, cwd: Option<&Path>) -> Result<()> {
+ match find_json_mismatch_r(expected, actual, cwd) {
+ Some((expected_part, actual_part)) => bail!(
+ "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n",
+ serde_json::to_string_pretty(expected).unwrap(),
+ serde_json::to_string_pretty(&actual).unwrap(),
+ serde_json::to_string_pretty(expected_part).unwrap(),
+ serde_json::to_string_pretty(actual_part).unwrap(),
+ ),
+ None => Ok(()),
+ }
+}
+
+fn find_json_mismatch_r<'a>(
+ expected: &'a Value,
+ actual: &'a Value,
+ cwd: Option<&Path>,
+) -> Option<(&'a Value, &'a Value)> {
+ use serde_json::Value::*;
+ match (expected, actual) {
+ (&Number(ref l), &Number(ref r)) if l == r => None,
+ (&Bool(l), &Bool(r)) if l == r => None,
+ (&String(ref l), _) if l == "{...}" => None,
+ (&String(ref l), &String(ref r)) => {
+ if match_exact(l, r, "", "", cwd).is_err() {
+ Some((expected, actual))
+ } else {
+ None
+ }
+ }
+ (&Array(ref l), &Array(ref r)) => {
+ if l.len() != r.len() {
+ return Some((expected, actual));
+ }
+
+ l.iter()
+ .zip(r.iter())
+ .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd))
+ .next()
+ }
+ (&Object(ref l), &Object(ref r)) => {
+ let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
+ if !same_keys {
+ return Some((expected, actual));
+ }
+
+ l.values()
+ .zip(r.values())
+ .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd))
+ .next()
+ }
+ (&Null, &Null) => None,
+ // Anything else, including mismatched types, is a mismatch. Note that the
+ // magic `"{...}"` wildcard for arbitrary sub-JSON is handled above.
+ _ => Some((expected, actual)),
+ }
+}
+
+/// A single line string that supports `[..]` wildcard matching.
+pub struct WildStr<'a> {
+ has_meta: bool,
+ line: &'a str,
+}
+
+impl<'a> WildStr<'a> {
+ pub fn new(line: &'a str) -> WildStr<'a> {
+ WildStr {
+ has_meta: line.contains("[..]"),
+ line,
+ }
+ }
+}
+
+impl<'a> PartialEq for WildStr<'a> {
+ fn eq(&self, other: &Self) -> bool {
+ match (self.has_meta, other.has_meta) {
+ (false, false) => self.line == other.line,
+ (true, false) => meta_cmp(self.line, other.line),
+ (false, true) => meta_cmp(other.line, self.line),
+ (true, true) => panic!("both lines cannot have [..]"),
+ }
+ }
+}
+
+fn meta_cmp(a: &str, mut b: &str) -> bool {
+ for (i, part) in a.split("[..]").enumerate() {
+ match b.find(part) {
+ Some(j) => {
+ if i == 0 && j != 0 {
+ return false;
+ }
+ b = &b[j + part.len()..];
+ }
+ None => return false,
+ }
+ }
+ b.is_empty() || a.ends_with("[..]")
+}
+
+impl fmt::Display for WildStr<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.line)
+ }
+}
+
+impl fmt::Debug for WildStr<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{:?}", self.line)
+ }
+}
+
+#[test]
+fn wild_str_cmp() {
+ for (a, b) in &[
+ ("a b", "a b"),
+ ("a[..]b", "a b"),
+ ("a[..]", "a b"),
+ ("[..]", "a b"),
+ ("[..]b", "a b"),
+ ] {
+ assert_eq!(WildStr::new(a), WildStr::new(b));
+ }
+ for (a, b) in &[("[..]b", "c"), ("b", "c"), ("b", "cb")] {
+ assert_ne!(WildStr::new(a), WildStr::new(b));
+ }
+}
+
+#[test]
+fn dirty_msvc() {
+ let case = |expected: &str, wild: &str, msvc: bool| {
+ assert_eq!(expected, &replace_dirty_msvc_impl(wild, msvc));
+ };
+
+ // no replacements
+ case("aa", "aa", false);
+ case("aa", "aa", true);
+
+ // with replacements
+ case(
+ "\
+[DIRTY] a",
+ "\
+[DIRTY-MSVC] a",
+ true,
+ );
+ case(
+ "",
+ "\
+[DIRTY-MSVC] a",
+ false,
+ );
+ case(
+ "\
+[DIRTY] a
+[COMPILING] a",
+ "\
+[DIRTY-MSVC] a
+[COMPILING] a",
+ true,
+ );
+ case(
+ "\
+[COMPILING] a",
+ "\
+[DIRTY-MSVC] a
+[COMPILING] a",
+ false,
+ );
+
+ // test trailing newline behavior
+ case(
+ "\
+A
+B
+", "\
+A
+B
+", true,
+ );
+
+ case(
+ "\
+A
+B
+", "\
+A
+B
+", false,
+ );
+
+ case(
+ "\
+A
+B", "\
+A
+B", true,
+ );
+
+ case(
+ "\
+A
+B", "\
+A
+B", false,
+ );
+
+ case(
+ "\
+[DIRTY] a
+",
+ "\
+[DIRTY-MSVC] a
+",
+ true,
+ );
+ case(
+ "\n",
+ "\
+[DIRTY-MSVC] a
+",
+ false,
+ );
+
+ case(
+ "\
+[DIRTY] a",
+ "\
+[DIRTY-MSVC] a",
+ true,
+ );
+ case(
+ "",
+ "\
+[DIRTY-MSVC] a",
+ false,
+ );
+}
diff --git a/src/tools/cargo/crates/cargo-test-support/src/containers.rs b/src/tools/cargo/crates/cargo-test-support/src/containers.rs
new file mode 100644
index 000000000..17040d82a
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/src/containers.rs
@@ -0,0 +1,285 @@
+//! Support for testing using Docker containers.
+//!
+//! The [`Container`] type is a builder for configuring a container to run.
+//! After you call `launch`, you can use the [`ContainerHandle`] to interact
+//! with the running container.
+//!
+//! Tests using containers must use `#[cargo_test(container_test)]` so that
+//! they are disabled unless the CARGO_CONTAINER_TESTS environment variable is set.
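+//!
+//! A rough sketch of typical usage (the `sshd` build context ships alongside
+//! this crate; the file contents and exec arguments are illustrative):
+//!
+//! ```ignore
+//! let container = Container::new("sshd")
+//!     .file(MkFile::path("repos/hello.txt").contents("hello").mode(0o644))
+//!     .launch();
+//! let ssh_port = container.port_mappings[&22];
+//! let listing = container.exec(&["ls", "/repos"]);
+//! ```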
+
+use cargo_util::ProcessBuilder;
+use std::collections::HashMap;
+use std::io::Read;
+use std::path::PathBuf;
+use std::process::Command;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Mutex;
+use tar::Header;
+
+/// A builder for configuring a container to run.
+pub struct Container {
+ /// The host directory that forms the basis of the Docker image.
+ build_context: PathBuf,
+ /// Files to copy over to the image.
+ files: Vec<MkFile>,
+}
+
+/// A handle to a running container.
+///
+/// You can use this to interact with the container.
+pub struct ContainerHandle {
+ /// The name of the container.
+ name: String,
+ /// The IP address of the container.
+ ///
+ /// NOTE: This is currently unused, but may be useful, so I left it in.
+ /// This can only be used on Linux; Docker on macOS and Windows doesn't
+ /// allow direct connections to the container.
+ pub ip_address: String,
+ /// Port mappings of container_port to host_port for ports exposed via EXPOSE.
+ pub port_mappings: HashMap<u16, u16>,
+}
+
+impl Container {
+ pub fn new(context_dir: &str) -> Container {
+ assert!(std::env::var_os("CARGO_CONTAINER_TESTS").is_some());
+ let mut build_context = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+ build_context.push("containers");
+ build_context.push(context_dir);
+ Container {
+ build_context,
+ files: Vec::new(),
+ }
+ }
+
+ /// Adds a file to be copied into the container.
+ pub fn file(mut self, file: MkFile) -> Self {
+ self.files.push(file);
+ self
+ }
+
+ /// Starts the container.
+ pub fn launch(mut self) -> ContainerHandle {
+ static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
+
+ let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
+ let name = format!("cargo_test_{id}");
+ remove_if_exists(&name);
+ self.create_container(&name);
+ self.copy_files(&name);
+ self.start_container(&name);
+ let info = self.container_inspect(&name);
+ let ip_address = if cfg!(target_os = "linux") {
+ info[0]["NetworkSettings"]["IPAddress"]
+ .as_str()
+ .unwrap()
+ .to_string()
+ } else {
+ // macOS and Windows can't make direct connections to the
+ // container. It only works through exposed ports or mapped ports.
+ "127.0.0.1".to_string()
+ };
+ let port_mappings = self.port_mappings(&info);
+ self.wait_till_ready(&port_mappings);
+
+ ContainerHandle {
+ name,
+ ip_address,
+ port_mappings,
+ }
+ }
+
+ fn create_container(&self, name: &str) {
+ static BUILD_LOCK: Mutex<()> = Mutex::new(());
+
+ let image_base = self.build_context.file_name().unwrap();
+ let image_name = format!("cargo-test-{}", image_base.to_str().unwrap());
+ let _lock = BUILD_LOCK.lock().unwrap();
+ ProcessBuilder::new("docker")
+ .args(&["build", "--tag", image_name.as_str()])
+ .arg(&self.build_context)
+ .exec_with_output()
+ .unwrap();
+
+ ProcessBuilder::new("docker")
+ .args(&[
+ "container",
+ "create",
+ "--publish-all",
+ "--rm",
+ "--name",
+ name,
+ ])
+ .arg(image_name)
+ .exec_with_output()
+ .unwrap();
+ }
+
+ fn copy_files(&mut self, name: &str) {
+ if self.files.is_empty() {
+ return;
+ }
+ let mut ar = tar::Builder::new(Vec::new());
+ let files = std::mem::replace(&mut self.files, Vec::new());
+ for mut file in files {
+ ar.append_data(&mut file.header, &file.path, file.contents.as_slice())
+ .unwrap();
+ }
+ let ar = ar.into_inner().unwrap();
+ ProcessBuilder::new("docker")
+ .args(&["cp", "-"])
+ .arg(format!("{name}:/"))
+ .stdin(ar)
+ .exec_with_output()
+ .unwrap();
+ }
+
+ fn start_container(&self, name: &str) {
+ ProcessBuilder::new("docker")
+ .args(&["container", "start"])
+ .arg(name)
+ .exec_with_output()
+ .unwrap();
+ }
+
+ fn container_inspect(&self, name: &str) -> serde_json::Value {
+ let output = ProcessBuilder::new("docker")
+ .args(&["inspect", name])
+ .exec_with_output()
+ .unwrap();
+ serde_json::from_slice(&output.stdout).unwrap()
+ }
+
+ /// Returns the mapping of container_port->host_port for ports that were
+ /// exposed with EXPOSE.
+ fn port_mappings(&self, info: &serde_json::Value) -> HashMap<u16, u16> {
+ info[0]["NetworkSettings"]["Ports"]
+ .as_object()
+ .unwrap()
+ .iter()
+ .map(|(key, value)| {
+ let key = key
+ .strip_suffix("/tcp")
+ .expect("expected TCP only ports")
+ .parse()
+ .unwrap();
+ let values = value.as_array().unwrap();
+ let value = values
+ .iter()
+ .find(|value| value["HostIp"].as_str().unwrap() == "0.0.0.0")
+ .expect("expected localhost IP");
+ let host_port = value["HostPort"].as_str().unwrap().parse().unwrap();
+ (key, host_port)
+ })
+ .collect()
+ }
+
+ fn wait_till_ready(&self, port_mappings: &HashMap<u16, u16>) {
+ for port in port_mappings.values() {
+ let mut ok = false;
+ for _ in 0..30 {
+ match std::net::TcpStream::connect(format!("127.0.0.1:{port}")) {
+ Ok(_) => {
+ ok = true;
+ break;
+ }
+ Err(e) => {
+ if e.kind() != std::io::ErrorKind::ConnectionRefused {
+ panic!("unexpected localhost connection error: {e:?}");
+ }
+ std::thread::sleep(std::time::Duration::new(1, 0));
+ }
+ }
+ }
+ if !ok {
+ panic!("no listener on localhost port {port}");
+ }
+ }
+ }
+}
+
+impl ContainerHandle {
+ /// Executes a program inside a running container.
+ pub fn exec(&self, args: &[&str]) -> std::process::Output {
+ ProcessBuilder::new("docker")
+ .args(&["container", "exec", &self.name])
+ .args(args)
+ .exec_with_output()
+ .unwrap()
+ }
+
+ /// Returns the contents of a file inside the container.
+ pub fn read_file(&self, path: &str) -> String {
+ let output = ProcessBuilder::new("docker")
+ .args(&["cp", &format!("{}:{}", self.name, path), "-"])
+ .exec_with_output()
+ .unwrap();
+ let mut ar = tar::Archive::new(output.stdout.as_slice());
+ let mut entry = ar.entries().unwrap().next().unwrap().unwrap();
+ let mut contents = String::new();
+ entry.read_to_string(&mut contents).unwrap();
+ contents
+ }
+}
+
+impl Drop for ContainerHandle {
+ fn drop(&mut self) {
+ // To help with debugging, this keeps the container alive when the variable is set.
+ if std::env::var_os("CARGO_CONTAINER_TEST_KEEP").is_some() {
+ return;
+ }
+ remove_if_exists(&self.name);
+ }
+}
+
+fn remove_if_exists(name: &str) {
+ if let Err(e) = Command::new("docker")
+ .args(&["container", "rm", "--force", name])
+ .output()
+ {
+ panic!("failed to run docker: {e}");
+ }
+}
+
+/// Builder for configuring a file to copy into a container.
+pub struct MkFile {
+ path: String,
+ contents: Vec<u8>,
+ header: Header,
+}
+
+impl MkFile {
+ /// Defines a file to add to the container.
+ ///
+ /// This should be passed to `Container::file`.
+ ///
+ /// The path is the location inside the container where the file will be created.
+ pub fn path(path: &str) -> MkFile {
+ MkFile {
+ path: path.to_string(),
+ contents: Vec::new(),
+ header: Header::new_gnu(),
+ }
+ }
+
+ pub fn contents(mut self, contents: impl Into<Vec<u8>>) -> Self {
+ self.contents = contents.into();
+ self.header.set_size(self.contents.len() as u64);
+ self
+ }
+
+ pub fn mode(mut self, mode: u32) -> Self {
+ self.header.set_mode(mode);
+ self
+ }
+
+ pub fn uid(mut self, uid: u64) -> Self {
+ self.header.set_uid(uid);
+ self
+ }
+
+ pub fn gid(mut self, gid: u64) -> Self {
+ self.header.set_gid(gid);
+ self
+ }
+}
diff --git a/src/tools/cargo/crates/cargo-test-support/src/cross_compile.rs b/src/tools/cargo/crates/cargo-test-support/src/cross_compile.rs
new file mode 100644
index 000000000..a2daf882d
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/src/cross_compile.rs
@@ -0,0 +1,264 @@
+//! Support for cross-compile tests with the `--target` flag.
+//!
+//! Note that cross-testing is very limited. You need to install the
+//! "alternate" target for the host (32-bit for 64-bit hosts, or vice-versa).
+//!
+//! Set the CFG_DISABLE_CROSS_TESTS=1 environment variable to disable these
+//! tests if you are unable to use the alternate target. Unfortunately, 32-bit
+//! support on macOS is going away, so macOS users are out of luck.
+//!
+//! These tests are all disabled on rust-lang/rust's CI, but run in Cargo's CI.
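+//!
+//! A typical test is gated roughly like this (sketch; `p` is a hypothetical
+//! test project):
+//!
+//! ```ignore
+//! if cross_compile::disabled() {
+//!     return;
+//! }
+//! let target = cross_compile::alternate();
+//! p.cargo("build --target").arg(target).run();
+//! ```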
+
+use crate::{basic_manifest, main_file, project};
+use cargo_util::ProcessError;
+use std::env;
+use std::fmt::Write;
+use std::process::{Command, Output};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::Once;
+
+/// Whether or not the resulting cross binaries can run on the host.
+static CAN_RUN_ON_HOST: AtomicBool = AtomicBool::new(false);
+
+pub fn disabled() -> bool {
+ // First, disable if requested.
+ match env::var("CFG_DISABLE_CROSS_TESTS") {
+ Ok(ref s) if *s == "1" => return true,
+ _ => {}
+ }
+
+ // Cross tests are only tested to work on macos, linux, and MSVC windows.
+ if !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) {
+ return true;
+ }
+
+ // It's not particularly common to have a cross-compilation setup, so
+ // try to detect that before we fail a bunch of tests through no fault
+ // of the user.
+ static CAN_BUILD_CROSS_TESTS: AtomicBool = AtomicBool::new(false);
+ static CHECK: Once = Once::new();
+
+ let cross_target = alternate();
+
+ let run_cross_test = || -> anyhow::Result<Output> {
+ let p = project()
+ .at("cross_test")
+ .file("Cargo.toml", &basic_manifest("cross_test", "1.0.0"))
+ .file("src/main.rs", &main_file(r#""testing!""#, &[]))
+ .build();
+
+ let build_result = p
+ .cargo("build --target")
+ .arg(&cross_target)
+ .exec_with_output();
+
+ if build_result.is_ok() {
+ CAN_BUILD_CROSS_TESTS.store(true, Ordering::SeqCst);
+ }
+
+ let result = p
+ .cargo("run --target")
+ .arg(&cross_target)
+ .exec_with_output();
+
+ if result.is_ok() {
+ CAN_RUN_ON_HOST.store(true, Ordering::SeqCst);
+ }
+ build_result
+ };
+
+ CHECK.call_once(|| {
+ drop(run_cross_test());
+ });
+
+ if CAN_BUILD_CROSS_TESTS.load(Ordering::SeqCst) {
+ // We were able to compile a simple project, so the user has the
+ // necessary `std::` bits installed. Therefore, tests should not
+ // be disabled.
+ return false;
+ }
+
+ // We can't compile a simple cross project. We want to warn the user
+ // by failing a single test and having the remainder of the cross tests
+ // pass. We don't use `std::sync::Once` here because panicking inside its
+ // `call_once` method would poison the `Once` instance, which is not what
+ // we want.
+ static HAVE_WARNED: AtomicBool = AtomicBool::new(false);
+
+ if HAVE_WARNED.swap(true, Ordering::SeqCst) {
+ // We are some other test and somebody else is handling the warning.
+ // Just disable the current test.
+ return true;
+ }
+
+ // We are responsible for warning the user, which we do by panicking.
+ let mut message = format!(
+ "
+Cannot cross compile to {}.
+
+This failure can be safely ignored. If you would prefer to not see this
+failure, you can set the environment variable CFG_DISABLE_CROSS_TESTS to \"1\".
+
+Alternatively, you can install the necessary libraries to enable cross
+compilation tests. Cross compilation tests depend on your host platform.
+",
+ cross_target
+ );
+
+ if cfg!(target_os = "linux") {
+ message.push_str(
+ "
+Linux cross tests target i686-unknown-linux-gnu, which requires the ability to
+build and run 32-bit targets. This requires the 32-bit libraries to be
+installed. For example, on Ubuntu, run `sudo apt install gcc-multilib` to
+install the necessary libraries.
+",
+ );
+ } else if cfg!(all(target_os = "macos", target_arch = "aarch64")) {
+ message.push_str(
+ "
+macOS on aarch64 cross tests to target x86_64-apple-darwin.
+This should be natively supported via Xcode, nothing additional besides the
+rustup target should be needed.
+",
+ );
+ } else if cfg!(target_os = "macos") {
+ message.push_str(
+ "
+macOS on x86_64 cross tests to target x86_64-apple-ios, which requires the iOS
+SDK to be installed. This should be included with Xcode automatically. If you
+are using the Xcode command line tools, you'll need to install the full Xcode
+app (from the Apple App Store), and switch to it with this command:
+
+ sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer
+
+Some cross-tests want to *run* the executables on the host. These tests will
+be ignored if this is not possible. On macOS, this means you need an iOS
+simulator installed to run these tests. To install a simulator, open Xcode, go
+to preferences > Components, and download the latest iOS simulator.
+",
+ );
+ } else if cfg!(target_os = "windows") {
+ message.push_str(
+ "
+Windows cross tests target i686-pc-windows-msvc, which requires the ability
+to build and run 32-bit targets. This should work automatically if you have
+properly installed Visual Studio build tools.
+",
+ );
+ } else {
+ // The check at the top should prevent this.
+ panic!("platform should have been skipped");
+ }
+
+ let rustup_available = Command::new("rustup").output().is_ok();
+ if rustup_available {
+ write!(
+ message,
+ "
+Make sure that the appropriate `rustc` target is installed with rustup:
+
+ rustup target add {}
+",
+ cross_target
+ )
+ .unwrap();
+ } else {
+ write!(
+ message,
+ "
+rustup does not appear to be installed. Make sure that the appropriate
+`rustc` target is installed for the target `{}`.
+",
+ cross_target
+ )
+ .unwrap();
+ }
+
+ // Show the actual error message.
+ match run_cross_test() {
+ Ok(_) => message.push_str("\nUh oh, second run succeeded?\n"),
+ Err(err) => match err.downcast_ref::<ProcessError>() {
+ Some(proc_err) => write!(message, "\nTest error: {}\n", proc_err).unwrap(),
+ None => write!(message, "\nUnexpected non-process error: {}\n", err).unwrap(),
+ },
+ }
+
+ panic!("{}", message);
+}
+
+/// The arch triple of the test-running host.
+pub fn native() -> &'static str {
+ env!("NATIVE_ARCH")
+}
+
+pub fn native_arch() -> &'static str {
+ match native()
+ .split("-")
+ .next()
+ .expect("Target triple has unexpected format")
+ {
+ "x86_64" => "x86_64",
+ "aarch64" => "aarch64",
+ "i686" => "x86",
+ _ => panic!("This test should be gated on cross_compile::disabled."),
+ }
+}
+
+/// The alternate target-triple to build with.
+///
+/// Only use this function on tests that check `cross_compile::disabled`.
+pub fn alternate() -> &'static str {
+ if cfg!(all(target_os = "macos", target_arch = "aarch64")) {
+ "x86_64-apple-darwin"
+ } else if cfg!(target_os = "macos") {
+ "x86_64-apple-ios"
+ } else if cfg!(target_os = "linux") {
+ "i686-unknown-linux-gnu"
+ } else if cfg!(all(target_os = "windows", target_env = "msvc")) {
+ "i686-pc-windows-msvc"
+ } else if cfg!(all(target_os = "windows", target_env = "gnu")) {
+ "i686-pc-windows-gnu"
+ } else {
+ panic!("This test should be gated on cross_compile::disabled.");
+ }
+}
+
+pub fn alternate_arch() -> &'static str {
+ if cfg!(target_os = "macos") {
+ "x86_64"
+ } else {
+ "x86"
+ }
+}
+
+/// A target-triple that is neither the host nor the target.
+///
+/// Rustc may not actually work with it, and that's alright: apart from being
+/// a valid target triple, it is only meant to serve as a placeholder for
+/// targets that should not be considered.
+pub fn unused() -> &'static str {
+ "wasm32-unknown-unknown"
+}
+
+/// Whether or not the host can run cross-compiled executables.
+pub fn can_run_on_host() -> bool {
+ if disabled() {
+ return false;
+ }
+ // macOS is currently configured to cross-compile to x86_64-apple-ios,
+ // which requires a simulator to run. Azure's CI image appears to have the
+ // SDK installed, but is not configured to launch iOS images with a
+ // simulator.
+ if cfg!(target_os = "macos") {
+ if CAN_RUN_ON_HOST.load(Ordering::SeqCst) {
+ return true;
+ } else {
+ println!("Note: Cannot run on host, skipping.");
+ return false;
+ }
+ } else {
+ assert!(CAN_RUN_ON_HOST.load(Ordering::SeqCst));
+ return true;
+ }
+}
diff --git a/src/tools/cargo/crates/cargo-test-support/src/diff.rs b/src/tools/cargo/crates/cargo-test-support/src/diff.rs
new file mode 100644
index 000000000..f3b283b10
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/src/diff.rs
@@ -0,0 +1,174 @@
+//! A simple Myers diff implementation.
+//!
+//! This focuses on being short and simple, and the expense of being
+//! inefficient. A key characteristic here is that this supports cargotest's
+//! `[..]` wildcard matching. That means things like hashing can't be used.
+//! Since Cargo's output tends to be small, this should be sufficient.
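+//!
+//! For example (illustrative), diffing two slices of lines yields a sequence
+//! of [`Change`] values describing how to turn the first into the second:
+//!
+//! ```ignore
+//! let a = ["a", "b", "c"];
+//! let b = ["a", "c", "d"];
+//! // Each entry is an `Add`, `Remove`, or `Keep`.
+//! let changes = diff(&a, &b);
+//! ```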
+
+use std::fmt;
+use std::io::Write;
+use termcolor::{Ansi, Color, ColorSpec, NoColor, WriteColor};
+
+/// A single line change to be applied to the original.
+#[derive(Debug, Eq, PartialEq)]
+pub enum Change<T> {
+ Add(usize, T),
+ Remove(usize, T),
+ Keep(usize, usize, T),
+}
+
+pub fn diff<'a, T>(a: &'a [T], b: &'a [T]) -> Vec<Change<&'a T>>
+where
+ T: PartialEq,
+{
+ if a.is_empty() && b.is_empty() {
+ return vec![];
+ }
+ let mut diff = vec![];
+ for (prev_x, prev_y, x, y) in backtrack(&a, &b) {
+ if x == prev_x {
+ diff.push(Change::Add(prev_y + 1, &b[prev_y]));
+ } else if y == prev_y {
+ diff.push(Change::Remove(prev_x + 1, &a[prev_x]));
+ } else {
+ diff.push(Change::Keep(prev_x + 1, prev_y + 1, &a[prev_x]));
+ }
+ }
+ diff.reverse();
+ diff
+}
+
+fn shortest_edit<T>(a: &[T], b: &[T]) -> Vec<Vec<usize>>
+where
+ T: PartialEq,
+{
+ let max = a.len() + b.len();
+ let mut v = vec![0; 2 * max + 1];
+ let mut trace = vec![];
+ for d in 0..=max {
+ trace.push(v.clone());
+ for k in (0..=(2 * d)).step_by(2) {
+ let mut x = if k == 0 || (k != 2 * d && v[max - d + k - 1] < v[max - d + k + 1]) {
+ // Move down
+ v[max - d + k + 1]
+ } else {
+ // Move right
+ v[max - d + k - 1] + 1
+ };
+ let mut y = x + d - k;
+ // Step diagonally as far as possible.
+ while x < a.len() && y < b.len() && a[x] == b[y] {
+ x += 1;
+ y += 1;
+ }
+ v[max - d + k] = x;
+ // Return if reached the bottom-right position.
+ if x >= a.len() && y >= b.len() {
+ return trace;
+ }
+ }
+ }
+ panic!("finished without hitting end?");
+}
+
+fn backtrack<T>(a: &[T], b: &[T]) -> Vec<(usize, usize, usize, usize)>
+where
+ T: PartialEq,
+{
+ let mut result = vec![];
+ let mut x = a.len();
+ let mut y = b.len();
+ let max = x + y;
+ for (d, v) in shortest_edit(a, b).iter().enumerate().rev() {
+ let k = x + d - y;
+ let prev_k = if k == 0 || (k != 2 * d && v[max - d + k - 1] < v[max - d + k + 1]) {
+ k + 1
+ } else {
+ k - 1
+ };
+ let prev_x = v[max - d + prev_k];
+ let prev_y = (prev_x + d).saturating_sub(prev_k);
+ while x > prev_x && y > prev_y {
+ result.push((x - 1, y - 1, x, y));
+ x -= 1;
+ y -= 1;
+ }
+ if d > 0 {
+ result.push((prev_x, prev_y, x, y));
+ }
+ x = prev_x;
+ y = prev_y;
+ }
+ return result;
+}
+
+pub fn colored_diff<'a, T>(a: &'a [T], b: &'a [T]) -> String
+where
+ T: PartialEq + fmt::Display,
+{
+ let changes = diff(a, b);
+ render_colored_changes(&changes)
+}
+
+pub fn render_colored_changes<T: fmt::Display>(changes: &[Change<T>]) -> String {
+ // termcolor is not very ergonomic, but I don't want to bring in another dependency.
+ let mut red = ColorSpec::new();
+ red.set_fg(Some(Color::Red));
+ let mut green = ColorSpec::new();
+ green.set_fg(Some(Color::Green));
+ let mut dim = ColorSpec::new();
+ dim.set_dimmed(true);
+ let mut v = Vec::new();
+ let mut result: Box<dyn WriteColor> = if crate::is_ci() {
+ // Don't use color on CI. Even though GitHub can display colors, it
+ // makes reading the raw logs more difficult.
+ Box::new(NoColor::new(&mut v))
+ } else {
+ Box::new(Ansi::new(&mut v))
+ };
+
+ for change in changes {
+ let (nums, sign, color, text) = match change {
+ Change::Add(i, s) => (format!(" {:<4} ", i), '+', &green, s),
+ Change::Remove(i, s) => (format!("{:<4} ", i), '-', &red, s),
+ Change::Keep(x, y, s) => (format!("{:<4}{:<4} ", x, y), ' ', &dim, s),
+ };
+ result.set_color(&dim).unwrap();
+ write!(result, "{}", nums).unwrap();
+ let mut bold = color.clone();
+ bold.set_bold(true);
+ result.set_color(&bold).unwrap();
+ write!(result, "{}", sign).unwrap();
+ result.reset().unwrap();
+ result.set_color(&color).unwrap();
+ write!(result, "{}", text).unwrap();
+ result.reset().unwrap();
+ writeln!(result).unwrap();
+ }
+ drop(result);
+ String::from_utf8(v).unwrap()
+}
+
+#[cfg(test)]
+pub fn compare(a: &str, b: &str) {
+ let a: Vec<_> = a.chars().collect();
+ let b: Vec<_> = b.chars().collect();
+ let changes = diff(&a, &b);
+ let mut result = vec![];
+ for change in changes {
+ match change {
+ Change::Add(_, s) => result.push(*s),
+ Change::Remove(_, _s) => {}
+ Change::Keep(_, _, s) => result.push(*s),
+ }
+ }
+ assert_eq!(b, result);
+}
+
+#[test]
+fn basic_tests() {
+ compare("", "");
+ compare("A", "");
+ compare("", "B");
+ compare("ABCABBA", "CBABAC");
+}
diff --git a/src/tools/cargo/crates/cargo-test-support/src/git.rs b/src/tools/cargo/crates/cargo-test-support/src/git.rs
new file mode 100644
index 000000000..6fde96467
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/src/git.rs
@@ -0,0 +1,256 @@
+/*
+# Git Testing Support
+
+## Creating a git dependency
+`git::new()` is an easy way to create a new git repository containing a
+project that you can then use as a dependency. It will automatically add all
+the files you specify in the project and commit them to the repository.
+Example:
+
+```
+let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep1"))
+ .file("src/lib.rs", r#"pub fn f() { println!("hi!"); } "#)
+});
+
+// Use the `url()` method to get the file url to the new repository.
+let p = project()
+ .file("Cargo.toml", &format!(r#"
+ [package]
+ name = "a"
+ version = "1.0.0"
+
+ [dependencies]
+ dep1 = {{ git = '{}' }}
+ "#, git_project.url()))
+ .file("src/lib.rs", "extern crate dep1;")
+ .build();
+```
+
+## Manually creating repositories
+`git::repo()` can be used to create a `RepoBuilder` which provides a way of
+adding files to a blank repository and committing them.
+
+If you want to then manipulate the repository (such as adding new files or
+tags), you can use `git2::Repository::open()` to open the repository and then
+use some of the helper functions in this file to interact with the repository.
+
+*/
+
+use crate::{path2url, project, Project, ProjectBuilder};
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::sync::Once;
+use url::Url;
+
+#[must_use]
+pub struct RepoBuilder {
+ repo: git2::Repository,
+ files: Vec<PathBuf>,
+}
+
+pub struct Repository(git2::Repository);
+
+/// Create a `RepoBuilder` to build a new git repository.
+///
+/// Call `build()` to finalize and create the repository.
+pub fn repo(p: &Path) -> RepoBuilder {
+ RepoBuilder::init(p)
+}
+
+impl RepoBuilder {
+ pub fn init(p: &Path) -> RepoBuilder {
+ t!(fs::create_dir_all(p.parent().unwrap()));
+ let repo = init(p);
+ RepoBuilder {
+ repo,
+ files: Vec::new(),
+ }
+ }
+
+ /// Add a file to the repository.
+ pub fn file(self, path: &str, contents: &str) -> RepoBuilder {
+ let mut me = self.nocommit_file(path, contents);
+ me.files.push(PathBuf::from(path));
+ me
+ }
+
+ /// Add a file that will be left in the working directory, but not added
+ /// to the repository.
+ pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder {
+ let dst = self.repo.workdir().unwrap().join(path);
+ t!(fs::create_dir_all(dst.parent().unwrap()));
+ t!(fs::write(&dst, contents));
+ self
+ }
+
+ /// Create the repository and commit the new files.
+ pub fn build(self) -> Repository {
+ {
+ let mut index = t!(self.repo.index());
+ for file in self.files.iter() {
+ t!(index.add_path(file));
+ }
+ t!(index.write());
+ let id = t!(index.write_tree());
+ let tree = t!(self.repo.find_tree(id));
+ let sig = t!(self.repo.signature());
+ t!(self
+ .repo
+ .commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[]));
+ }
+ let RepoBuilder { repo, .. } = self;
+ Repository(repo)
+ }
+}
+
+impl Repository {
+ pub fn root(&self) -> &Path {
+ self.0.workdir().unwrap()
+ }
+
+ pub fn url(&self) -> Url {
+ path2url(self.0.workdir().unwrap().to_path_buf())
+ }
+
+ pub fn revparse_head(&self) -> String {
+ self.0
+ .revparse_single("HEAD")
+ .expect("revparse HEAD")
+ .id()
+ .to_string()
+ }
+}
+
+/// Initialize a new repository at the given path.
+pub fn init(path: &Path) -> git2::Repository {
+ default_search_path();
+ let repo = t!(git2::Repository::init(path));
+ default_repo_cfg(&repo);
+ repo
+}
+
+fn default_search_path() {
+ use crate::paths::global_root;
+ use git2::{opts::set_search_path, ConfigLevel};
+
+ static INIT: Once = Once::new();
+ INIT.call_once(|| unsafe {
+ let path = global_root().join("blank_git_search_path");
+ t!(set_search_path(ConfigLevel::System, &path));
+ t!(set_search_path(ConfigLevel::Global, &path));
+ t!(set_search_path(ConfigLevel::XDG, &path));
+ t!(set_search_path(ConfigLevel::ProgramData, &path));
+ })
+}
+
+fn default_repo_cfg(repo: &git2::Repository) {
+ let mut cfg = t!(repo.config());
+ t!(cfg.set_str("user.email", "foo@bar.com"));
+ t!(cfg.set_str("user.name", "Foo Bar"));
+}
+
+/// Create a new git repository with a project.
+pub fn new<F>(name: &str, callback: F) -> Project
+where
+ F: FnOnce(ProjectBuilder) -> ProjectBuilder,
+{
+ new_repo(name, callback).0
+}
+
+/// Create a new git repository with a project.
+/// Returns both the Project and the git Repository.
+pub fn new_repo<F>(name: &str, callback: F) -> (Project, git2::Repository)
+where
+ F: FnOnce(ProjectBuilder) -> ProjectBuilder,
+{
+ let mut git_project = project().at(name);
+ git_project = callback(git_project);
+ let git_project = git_project.build();
+
+ let repo = init(&git_project.root());
+ add(&repo);
+ commit(&repo);
+ (git_project, repo)
+}
+
+/// Add all files in the working directory to the git index.
+pub fn add(repo: &git2::Repository) {
+ // FIXME(libgit2/libgit2#2514): apparently, `add_all` will add all submodules
+ // as well, and then fail because they're directories. As a stop-gap, we just
+ // ignore all submodules.
+ let mut s = t!(repo.submodules());
+ for submodule in s.iter_mut() {
+ t!(submodule.add_to_index(false));
+ }
+ let mut index = t!(repo.index());
+ t!(index.add_all(
+ ["*"].iter(),
+ git2::IndexAddOption::DEFAULT,
+ Some(
+ &mut (|a, _b| if s.iter().any(|s| a.starts_with(s.path())) {
+ 1
+ } else {
+ 0
+ })
+ )
+ ));
+ t!(index.write());
+}
+
+/// Add a git submodule to the repository.
+pub fn add_submodule<'a>(
+ repo: &'a git2::Repository,
+ url: &str,
+ path: &Path,
+) -> git2::Submodule<'a> {
+ let path = path.to_str().unwrap().replace(r"\", "/");
+ let mut s = t!(repo.submodule(url, Path::new(&path), false));
+ let subrepo = t!(s.open());
+ default_repo_cfg(&subrepo);
+ t!(subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*"));
+ let mut origin = t!(subrepo.find_remote("origin"));
+ t!(origin.fetch(&Vec::<String>::new(), None, None));
+ t!(subrepo.checkout_head(None));
+ t!(s.add_finalize());
+ s
+}
+
+/// Commit changes to the git repository.
+pub fn commit(repo: &git2::Repository) -> git2::Oid {
+ let tree_id = t!(t!(repo.index()).write_tree());
+ let sig = t!(repo.signature());
+ let mut parents = Vec::new();
+ if let Some(parent) = repo.head().ok().map(|h| h.target().unwrap()) {
+ parents.push(t!(repo.find_commit(parent)))
+ }
+ let parents = parents.iter().collect::<Vec<_>>();
+ t!(repo.commit(
+ Some("HEAD"),
+ &sig,
+ &sig,
+ "test",
+ &t!(repo.find_tree(tree_id)),
+ &parents
+ ))
+}
+
+/// Create a new tag in the git repository.
+pub fn tag(repo: &git2::Repository, name: &str) {
+ let head = repo.head().unwrap().target().unwrap();
+ t!(repo.tag(
+ name,
+ &t!(repo.find_object(head, None)),
+ &t!(repo.signature()),
+ "make a new tag",
+ false
+ ));
+}
+
+/// Returns true if gitoxide is globally activated.
+///
+/// This lets tests that normally use `git2` transparently run against `gitoxide` instead.
+pub fn cargo_uses_gitoxide() -> bool {
+ std::env::var_os("__CARGO_USE_GITOXIDE_INSTEAD_OF_GIT2").map_or(false, |value| value == "1")
+}
diff --git a/src/tools/cargo/crates/cargo-test-support/src/install.rs b/src/tools/cargo/crates/cargo-test-support/src/install.rs
new file mode 100644
index 000000000..478b482d2
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/src/install.rs
@@ -0,0 +1,29 @@
+use crate::paths;
+use std::env::consts::EXE_SUFFIX;
+use std::path::{Path, PathBuf};
+
+/// Used by `cargo install` tests to assert an executable binary
+/// has been installed. Example usage:
+///
+///     assert_has_installed_exe(cargo_home(), "foo");
+#[track_caller]
+pub fn assert_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
+ assert!(check_has_installed_exe(path, name));
+}
+
+#[track_caller]
+pub fn assert_has_not_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
+ assert!(!check_has_installed_exe(path, name));
+}
+
+fn check_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) -> bool {
+ path.as_ref().join("bin").join(exe(name)).is_file()
+}
+
+pub fn cargo_home() -> PathBuf {
+ paths::home().join(".cargo")
+}
+
+pub fn exe(name: &str) -> String {
+ format!("{}{}", name, EXE_SUFFIX)
+}
diff --git a/src/tools/cargo/crates/cargo-test-support/src/lib.rs b/src/tools/cargo/crates/cargo-test-support/src/lib.rs
new file mode 100644
index 000000000..04d6ce9f8
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/src/lib.rs
@@ -0,0 +1,1424 @@
+//! # Cargo test support.
+//!
+//! See <https://rust-lang.github.io/cargo/contrib/> for a guide on writing tests.
+
+#![allow(clippy::all)]
+
+use std::env;
+use std::ffi::OsStr;
+use std::fmt::Write;
+use std::fs;
+use std::os;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output};
+use std::str;
+use std::time::{self, Duration};
+
+use anyhow::{bail, Result};
+use cargo_util::{is_ci, ProcessBuilder, ProcessError};
+use serde_json;
+use url::Url;
+
+use self::paths::CargoPathExt;
+
+#[macro_export]
+macro_rules! t {
+ ($e:expr) => {
+ match $e {
+ Ok(e) => e,
+ Err(e) => $crate::panic_error(&format!("failed running {}", stringify!($e)), e),
+ }
+ };
+}
+
+#[macro_export]
+macro_rules! curr_dir {
+ () => {
+ $crate::_curr_dir(std::path::Path::new(file!()));
+ };
+}
+
+#[doc(hidden)]
+pub fn _curr_dir(mut file_path: &'static Path) -> &'static Path {
+ if !file_path.exists() {
+ // HACK: Must be running in the rust-lang/rust workspace, adjust the paths accordingly.
+ let prefix = PathBuf::from("src").join("tools").join("cargo");
+ if let Ok(crate_relative) = file_path.strip_prefix(prefix) {
+ file_path = crate_relative
+ }
+ }
+ assert!(file_path.exists(), "{} does not exist", file_path.display());
+ file_path.parent().unwrap()
+}
+
+#[track_caller]
+pub fn panic_error(what: &str, err: impl Into<anyhow::Error>) -> ! {
+ let err = err.into();
+ pe(what, err);
+ #[track_caller]
+ fn pe(what: &str, err: anyhow::Error) -> ! {
+ let mut result = format!("{}\nerror: {}", what, err);
+ for cause in err.chain().skip(1) {
+ drop(writeln!(result, "\nCaused by:"));
+ drop(write!(result, "{}", cause));
+ }
+ panic!("\n{}", result);
+ }
+}
+
+pub use cargo_test_macro::cargo_test;
+
+pub mod compare;
+pub mod containers;
+pub mod cross_compile;
+mod diff;
+pub mod git;
+pub mod install;
+pub mod paths;
+pub mod publish;
+pub mod registry;
+pub mod tools;
+
+pub mod prelude {
+ pub use crate::ArgLine;
+ pub use crate::CargoCommand;
+ pub use crate::ChannelChanger;
+ pub use crate::TestEnv;
+}
+
+/*
+ *
+ * ===== Builders =====
+ *
+ */
+
+#[derive(PartialEq, Clone)]
+struct FileBuilder {
+ path: PathBuf,
+ body: String,
+ executable: bool,
+}
+
+impl FileBuilder {
+ pub fn new(path: PathBuf, body: &str, executable: bool) -> FileBuilder {
+ FileBuilder {
+ path,
+ body: body.to_string(),
+ executable: executable,
+ }
+ }
+
+ fn mk(&mut self) {
+ if self.executable {
+ self.path.set_extension(env::consts::EXE_EXTENSION);
+ }
+
+ self.dirname().mkdir_p();
+ fs::write(&self.path, &self.body)
+ .unwrap_or_else(|e| panic!("could not create file {}: {}", self.path.display(), e));
+
+ #[cfg(unix)]
+ if self.executable {
+ use std::os::unix::fs::PermissionsExt;
+
+ let mut perms = fs::metadata(&self.path).unwrap().permissions();
+ let mode = perms.mode();
+ perms.set_mode(mode | 0o111);
+ fs::set_permissions(&self.path, perms).unwrap();
+ }
+ }
+
+ fn dirname(&self) -> &Path {
+ self.path.parent().unwrap()
+ }
+}
+
+#[derive(PartialEq, Clone)]
+struct SymlinkBuilder {
+ dst: PathBuf,
+ src: PathBuf,
+ src_is_dir: bool,
+}
+
+impl SymlinkBuilder {
+ pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder {
+ SymlinkBuilder {
+ dst,
+ src,
+ src_is_dir: false,
+ }
+ }
+
+ pub fn new_dir(dst: PathBuf, src: PathBuf) -> SymlinkBuilder {
+ SymlinkBuilder {
+ dst,
+ src,
+ src_is_dir: true,
+ }
+ }
+
+ #[cfg(unix)]
+ fn mk(&self) {
+ self.dirname().mkdir_p();
+ t!(os::unix::fs::symlink(&self.dst, &self.src));
+ }
+
+ #[cfg(windows)]
+ fn mk(&mut self) {
+ self.dirname().mkdir_p();
+ if self.src_is_dir {
+ t!(os::windows::fs::symlink_dir(&self.dst, &self.src));
+ } else {
+ if let Some(ext) = self.dst.extension() {
+ if ext == env::consts::EXE_EXTENSION {
+ self.src.set_extension(ext);
+ }
+ }
+ t!(os::windows::fs::symlink_file(&self.dst, &self.src));
+ }
+ }
+
+ fn dirname(&self) -> &Path {
+ self.src.parent().unwrap()
+ }
+}
+
+/// A cargo project to run tests against.
+///
+/// See [`ProjectBuilder`] or [`Project::from_template`] to get started.
+pub struct Project {
+ root: PathBuf,
+}
+
+/// Create a project to run tests against.
+///
+/// The project can be constructed programmatically or from the filesystem with [`Project::from_template`].
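+///
+/// A small sketch (the manifest and source contents are illustrative):
+///
+/// ```ignore
+/// let p = project()
+///     .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+///     .file("src/lib.rs", "pub fn foo() {}")
+///     .build();
+/// p.cargo("check").run();
+/// ```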
+#[must_use]
+pub struct ProjectBuilder {
+ root: Project,
+ files: Vec<FileBuilder>,
+ symlinks: Vec<SymlinkBuilder>,
+ no_manifest: bool,
+}
+
+impl ProjectBuilder {
+ /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo`
+ pub fn root(&self) -> PathBuf {
+ self.root.root()
+ }
+
+ /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug`
+ pub fn target_debug_dir(&self) -> PathBuf {
+ self.root.target_debug_dir()
+ }
+
+ pub fn new(root: PathBuf) -> ProjectBuilder {
+ ProjectBuilder {
+ root: Project { root },
+ files: vec![],
+ symlinks: vec![],
+ no_manifest: false,
+ }
+ }
+
+ pub fn at<P: AsRef<Path>>(mut self, path: P) -> Self {
+ self.root = Project {
+ root: paths::root().join(path),
+ };
+ self
+ }
+
+ /// Adds a file to the project.
+ pub fn file<B: AsRef<Path>>(mut self, path: B, body: &str) -> Self {
+ self._file(path.as_ref(), body, false);
+ self
+ }
+
+ /// Adds an executable file to the project.
+ pub fn executable<B: AsRef<Path>>(mut self, path: B, body: &str) -> Self {
+ self._file(path.as_ref(), body, true);
+ self
+ }
+
+ fn _file(&mut self, path: &Path, body: &str, executable: bool) {
+ self.files.push(FileBuilder::new(
+ self.root.root().join(path),
+ body,
+ executable,
+ ));
+ }
+
+ /// Adds a symlink to a file to the project.
+ pub fn symlink<T: AsRef<Path>>(mut self, dst: T, src: T) -> Self {
+ self.symlinks.push(SymlinkBuilder::new(
+ self.root.root().join(dst),
+ self.root.root().join(src),
+ ));
+ self
+ }
+
+ /// Create a symlink to a directory
+ pub fn symlink_dir<T: AsRef<Path>>(mut self, dst: T, src: T) -> Self {
+ self.symlinks.push(SymlinkBuilder::new_dir(
+ self.root.root().join(dst),
+ self.root.root().join(src),
+ ));
+ self
+ }
+
+ pub fn no_manifest(mut self) -> Self {
+ self.no_manifest = true;
+ self
+ }
+
+ /// Creates the project.
+ pub fn build(mut self) -> Project {
+ // First, clean the directory if it already exists
+ self.rm_root();
+
+ // Create the empty directory
+ self.root.root().mkdir_p();
+
+ let manifest_path = self.root.root().join("Cargo.toml");
+ if !self.no_manifest && self.files.iter().all(|fb| fb.path != manifest_path) {
+ self._file(
+ Path::new("Cargo.toml"),
+ &basic_manifest("foo", "0.0.1"),
+ false,
+ )
+ }
+
+ let past = time::SystemTime::now() - Duration::new(1, 0);
+ let ftime = filetime::FileTime::from_system_time(past);
+
+ for file in self.files.iter_mut() {
+ file.mk();
+ if is_coarse_mtime() {
+ // Place the entire project 1 second in the past to ensure
+ // that if cargo is called multiple times, the 2nd call will
+ // see targets as "fresh". Without this, if cargo finishes in
+ // under 1 second, the second call will see the mtime of
+ // source == mtime of output and consider it dirty.
+ filetime::set_file_times(&file.path, ftime, ftime).unwrap();
+ }
+ }
+
+ for symlink in self.symlinks.iter_mut() {
+ symlink.mk();
+ }
+
+ let ProjectBuilder { root, .. } = self;
+ root
+ }
+
+ fn rm_root(&self) {
+ self.root.root().rm_rf()
+ }
+}
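
A minimal sketch of how these builders are typically combined in a test (assuming the `cargo_test_support` crate name and that the `#[cargo_test]` attribute from `cargo-test-macro` is in scope; the file contents are illustrative):

```rust
use cargo_test_support::project;

#[cargo_test]
fn builds_a_simple_project() {
    // Lay the project out under this test's sandbox root and create it on
    // disk. A default `Cargo.toml` for package `foo` is generated because
    // none was provided.
    let p = project()
        .file("src/main.rs", r#"fn main() { println!("hi"); }"#)
        .build();

    // `cargo()` wraps a ProcessBuilder in `Execs` so the run can be asserted on.
    p.cargo("build").run();
    assert!(p.bin("foo").is_file());
}
```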
+
+impl Project {
+ /// Copy the test project from a fixed state
+ pub fn from_template(template_path: impl AsRef<std::path::Path>) -> Self {
+ let root = paths::root();
+ let project_root = root.join("case");
+ snapbox::path::copy_template(template_path.as_ref(), &project_root).unwrap();
+ Self { root: project_root }
+ }
+
+ /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo`
+ pub fn root(&self) -> PathBuf {
+ self.root.clone()
+ }
+
+ /// Project's target dir, ex: `/path/to/cargo/target/cit/t0/foo/target`
+ pub fn build_dir(&self) -> PathBuf {
+ self.root().join("target")
+ }
+
+ /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug`
+ pub fn target_debug_dir(&self) -> PathBuf {
+ self.build_dir().join("debug")
+ }
+
+ /// File url for root, ex: `file:///path/to/cargo/target/cit/t0/foo`
+ pub fn url(&self) -> Url {
+ path2url(self.root())
+ }
+
+ /// Path to an example built as a library.
+ /// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro"
+ /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/examples/libex.rlib`
+ pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf {
+ self.target_debug_dir()
+ .join("examples")
+ .join(paths::get_lib_filename(name, kind))
+ }
+
+ /// Path to a debug binary.
+ /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/foo`
+ pub fn bin(&self, b: &str) -> PathBuf {
+ self.build_dir()
+ .join("debug")
+ .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
+ }
+
+ /// Path to a release binary.
+ /// ex: `/path/to/cargo/target/cit/t0/foo/target/release/foo`
+ pub fn release_bin(&self, b: &str) -> PathBuf {
+ self.build_dir()
+ .join("release")
+ .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
+ }
+
+ /// Path to a debug binary for a specific target triple.
+ /// ex: `/path/to/cargo/target/cit/t0/foo/target/i686-apple-darwin/debug/foo`
+ pub fn target_bin(&self, target: &str, b: &str) -> PathBuf {
+ self.build_dir().join(target).join("debug").join(&format!(
+ "{}{}",
+ b,
+ env::consts::EXE_SUFFIX
+ ))
+ }
+
+ /// Returns an iterator of paths matching the glob pattern, which is
+ /// relative to the project root.
+ pub fn glob<P: AsRef<Path>>(&self, pattern: P) -> glob::Paths {
+ let pattern = self.root().join(pattern);
+ glob::glob(pattern.to_str().expect("failed to convert pattern to str"))
+ .expect("failed to glob")
+ }
+
+ /// Changes the contents of an existing file.
+ pub fn change_file(&self, path: &str, body: &str) {
+ FileBuilder::new(self.root().join(path), body, false).mk()
+ }
+
+ /// Creates a `ProcessBuilder` to run a program in the project
+ /// and wrap it in an Execs to assert on the execution.
+ /// Example:
+ /// p.process(&p.bin("foo"))
+ /// .with_stdout("bar\n")
+ /// .run();
+ pub fn process<T: AsRef<OsStr>>(&self, program: T) -> Execs {
+ let mut p = process(program);
+ p.cwd(self.root());
+ execs().with_process_builder(p)
+ }
+
+ /// Creates a `ProcessBuilder` to run cargo.
+ /// Arguments can be separated by spaces.
+ /// Example:
+ /// p.cargo("build --bin foo").run();
+ pub fn cargo(&self, cmd: &str) -> Execs {
+ let cargo = cargo_exe();
+ let mut execs = self.process(&cargo);
+ if let Some(ref mut p) = execs.process_builder {
+ p.env("CARGO", cargo);
+ p.arg_line(cmd);
+ }
+ execs
+ }
+
+ /// Safely run a process after `cargo build`.
+ ///
+ /// Windows has a problem where a process cannot reliably be
+ /// replaced, removed, or renamed immediately after executing it.
+ /// The action may fail (with errors like Access is denied), or
+ /// it may succeed, but future attempts to use the same filename
+ /// will fail with "Already Exists".
+ ///
+ /// If you have a test that needs to do `cargo run` multiple
+ /// times, you should instead use `cargo build` and use this
+ /// method to run the executable. Each time you call this,
+ /// use a new name for `dst`.
+ /// See rust-lang/cargo#5481.
+ pub fn rename_run(&self, src: &str, dst: &str) -> Execs {
+ let src = self.bin(src);
+ let dst = self.bin(dst);
+ fs::rename(&src, &dst)
+ .unwrap_or_else(|e| panic!("Failed to rename `{:?}` to `{:?}`: {}", src, dst, e));
+ self.process(dst)
+ }
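
A sketch of the pattern this enables (the binary name and expected output are illustrative):

```rust
// Build once, then run the binary under a fresh name to avoid the Windows
// file-locking issues described above.
p.cargo("build").run();
p.rename_run("foo", "foo-run1").with_stdout("hello\n").run();

// Rebuilding recreates `foo`; rename to a *new* destination before running again.
p.cargo("build").run();
p.rename_run("foo", "foo-run2").with_stdout("hello\n").run();
```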
+
+ /// Returns the contents of `Cargo.lock`.
+ pub fn read_lockfile(&self) -> String {
+ self.read_file("Cargo.lock")
+ }
+
+ /// Returns the contents of a path in the project root
+ pub fn read_file(&self, path: &str) -> String {
+ let full = self.root().join(path);
+ fs::read_to_string(&full)
+ .unwrap_or_else(|e| panic!("could not read file {}: {}", full.display(), e))
+ }
+
+ /// Modifies `Cargo.toml` to remove all commented lines.
+ pub fn uncomment_root_manifest(&self) {
+ let contents = self.read_file("Cargo.toml").replace("#", "");
+ fs::write(self.root().join("Cargo.toml"), contents).unwrap();
+ }
+
+ pub fn symlink(&self, src: impl AsRef<Path>, dst: impl AsRef<Path>) {
+ let src = self.root().join(src.as_ref());
+ let dst = self.root().join(dst.as_ref());
+ #[cfg(unix)]
+ {
+ if let Err(e) = os::unix::fs::symlink(&src, &dst) {
+ panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
+ }
+ }
+ #[cfg(windows)]
+ {
+ if src.is_dir() {
+ if let Err(e) = os::windows::fs::symlink_dir(&src, &dst) {
+ panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
+ }
+ } else {
+ if let Err(e) = os::windows::fs::symlink_file(&src, &dst) {
+ panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
+ }
+ }
+ }
+ }
+}
+
+// Generates a project layout
+pub fn project() -> ProjectBuilder {
+ ProjectBuilder::new(paths::root().join("foo"))
+}
+
+// Generates a project layout in given directory
+pub fn project_in(dir: &str) -> ProjectBuilder {
+ ProjectBuilder::new(paths::root().join(dir).join("foo"))
+}
+
+// Generates a project layout inside our fake home dir
+pub fn project_in_home(name: &str) -> ProjectBuilder {
+ ProjectBuilder::new(paths::home().join(name))
+}
+
+// === Helpers ===
+
+pub fn main_file(println: &str, deps: &[&str]) -> String {
+ let mut buf = String::new();
+
+ for dep in deps.iter() {
+ buf.push_str(&format!("extern crate {};\n", dep));
+ }
+
+ buf.push_str("fn main() { println!(");
+ buf.push_str(println);
+ buf.push_str("); }\n");
+
+ buf
+}
+
+pub fn cargo_exe() -> PathBuf {
+ snapbox::cmd::cargo_bin("cargo")
+}
+
+/// This is the raw output from the process.
+///
+/// This is similar to `std::process::Output`, however the `status` is
+/// translated to the raw `code`. This is necessary because `ProcessError`
+/// does not have access to the raw `ExitStatus` because `ProcessError` needs
+/// to be serializable (for the Rustc cache), and `ExitStatus` does not
+/// provide a constructor.
+pub struct RawOutput {
+ pub code: Option<i32>,
+ pub stdout: Vec<u8>,
+ pub stderr: Vec<u8>,
+}
+
+#[must_use]
+#[derive(Clone)]
+pub struct Execs {
+ ran: bool,
+ process_builder: Option<ProcessBuilder>,
+ expect_stdout: Option<String>,
+ expect_stdin: Option<String>,
+ expect_stderr: Option<String>,
+ expect_exit_code: Option<i32>,
+ expect_stdout_contains: Vec<String>,
+ expect_stderr_contains: Vec<String>,
+ expect_stdout_contains_n: Vec<(String, usize)>,
+ expect_stdout_not_contains: Vec<String>,
+ expect_stderr_not_contains: Vec<String>,
+ expect_stderr_unordered: Vec<String>,
+ expect_stderr_with_without: Vec<(Vec<String>, Vec<String>)>,
+ expect_json: Option<String>,
+ expect_json_contains_unordered: Option<String>,
+ stream_output: bool,
+}
+
+impl Execs {
+ pub fn with_process_builder(mut self, p: ProcessBuilder) -> Execs {
+ self.process_builder = Some(p);
+ self
+ }
+
+ /// Verifies that stdout is equal to the given lines.
+ /// See [`compare`] for supported patterns.
+ pub fn with_stdout<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdout = Some(expected.to_string());
+ self
+ }
+
+ /// Verifies that stderr is equal to the given lines.
+ /// See [`compare`] for supported patterns.
+ pub fn with_stderr<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr = Some(expected.to_string());
+ self
+ }
+
+ /// Writes the given lines to stdin.
+ pub fn with_stdin<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdin = Some(expected.to_string());
+ self
+ }
+
+ /// Verifies the exit code from the process.
+ ///
+ /// This is not necessary if the expected exit code is `0`.
+ pub fn with_status(&mut self, expected: i32) -> &mut Self {
+ self.expect_exit_code = Some(expected);
+ self
+ }
+
+ /// Removes exit code check for the process.
+ ///
+ /// By default, the expected exit code is `0`.
+ pub fn without_status(&mut self) -> &mut Self {
+ self.expect_exit_code = None;
+ self
+ }
+
+ /// Verifies that stdout contains the given contiguous lines somewhere in
+ /// its output.
+ ///
+ /// See [`compare`] for supported patterns.
+ pub fn with_stdout_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdout_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that stderr contains the given contiguous lines somewhere in
+ /// its output.
+ ///
+ /// See [`compare`] for supported patterns.
+ pub fn with_stderr_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that stdout contains the given contiguous lines somewhere in
+ /// its output, and should be repeated `number` times.
+ ///
+ /// See [`compare`] for supported patterns.
+ pub fn with_stdout_contains_n<S: ToString>(&mut self, expected: S, number: usize) -> &mut Self {
+ self.expect_stdout_contains_n
+ .push((expected.to_string(), number));
+ self
+ }
+
+ /// Verifies that stdout does not contain the given contiguous lines.
+ ///
+ /// See [`compare`] for supported patterns.
+ ///
+ /// See note on [`Self::with_stderr_does_not_contain`].
+ pub fn with_stdout_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdout_not_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that stderr does not contain the given contiguous lines.
+ ///
+ /// See [`compare`] for supported patterns.
+ ///
+ /// Care should be taken when using this method because there is a
+ /// limitless number of possible things that *won't* appear. A typo means
+ /// your test will pass without verifying the correct behavior. If
+ /// possible, write the test first so that it fails, and then implement
+ /// your fix/feature to make it pass.
+ pub fn with_stderr_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr_not_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that all of the stderr output is equal to the given lines,
+ /// ignoring the order of the lines.
+ ///
+ /// See [`compare`] for supported patterns.
+ ///
+ /// This is useful when checking the output of `cargo build -v` since
+ /// the order of the output is not always deterministic.
+ /// It is recommended to use `with_stderr_contains` instead unless you really want to
+ /// check *every* line of output.
+ ///
+ /// Be careful when using patterns such as `[..]`, because you may end up
+ /// with multiple lines that might match, and this is not smart enough to
+ /// do anything like longest-match. For example, avoid something like:
+ ///
+ /// ```text
+ /// [RUNNING] `rustc [..]
+ /// [RUNNING] `rustc --crate-name foo [..]
+ /// ```
+ ///
+ /// This will randomly fail if the other crate name is `bar`, and the
+ /// order changes.
+ pub fn with_stderr_unordered<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr_unordered.push(expected.to_string());
+ self
+ }
+
+ /// Verify that a particular line appears in stderr with and without the
+ /// given substrings. Exactly one line must match.
+ ///
+ /// The substrings are matched as `contains`. Example:
+ ///
+ /// ```no_run
+ /// execs.with_stderr_line_without(
+ /// &[
+ /// "[RUNNING] `rustc --crate-name build_script_build",
+ /// "-C opt-level=3",
+ /// ],
+ /// &["-C debuginfo", "-C incremental"],
+ /// )
+ /// ```
+ ///
+ /// This will check that a build line includes `-C opt-level=3` but does
+ /// not contain `-C debuginfo` or `-C incremental`.
+ ///
+ /// Be careful writing the `without` fragments, see note in
+ /// `with_stderr_does_not_contain`.
+ pub fn with_stderr_line_without<S: ToString>(
+ &mut self,
+ with: &[S],
+ without: &[S],
+ ) -> &mut Self {
+ let with = with.iter().map(|s| s.to_string()).collect();
+ let without = without.iter().map(|s| s.to_string()).collect();
+ self.expect_stderr_with_without.push((with, without));
+ self
+ }
+
+ /// Verifies the JSON output matches the given JSON.
+ ///
+ /// This is typically used when testing cargo commands that emit JSON.
+ /// Each separate JSON object should be separated by a blank line.
+ /// Example:
+ ///
+ /// ```rust,ignore
+ /// assert_that(
+ /// p.cargo("metadata"),
+ /// execs().with_json(r#"
+ /// {"example": "abc"}
+ ///
+ /// {"example": "def"}
+ /// "#)
+ /// );
+ /// ```
+ ///
+ /// - Objects should match in the order given.
+ /// - The order of arrays is ignored.
+ /// - Strings support patterns described in [`compare`].
+ /// - Use `"{...}"` to match any object.
+ pub fn with_json(&mut self, expected: &str) -> &mut Self {
+ self.expect_json = Some(expected.to_string());
+ self
+ }
+
+ /// Verifies JSON output contains the given objects (in any order) somewhere
+ /// in its output.
+ ///
+ /// CAUTION: Be very careful when using this. Make sure every object is
+ /// unique (not a subset of one another). Also avoid using objects that
+ /// could possibly match multiple output lines unless you're very sure of
+ /// what you are doing.
+ ///
+ /// See `with_json` for more detail.
+ pub fn with_json_contains_unordered(&mut self, expected: &str) -> &mut Self {
+ match &mut self.expect_json_contains_unordered {
+ None => self.expect_json_contains_unordered = Some(expected.to_string()),
+ Some(e) => {
+ e.push_str("\n\n");
+ e.push_str(expected);
+ }
+ }
+ self
+ }
+
+ /// Forward subordinate process stdout/stderr to the terminal.
+ /// Useful for printf debugging of the tests.
+ /// CAUTION: CI will fail if you leave this in your test!
+ #[allow(unused)]
+ pub fn stream(&mut self) -> &mut Self {
+ self.stream_output = true;
+ self
+ }
+
+ pub fn arg<T: AsRef<OsStr>>(&mut self, arg: T) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.arg(arg);
+ }
+ self
+ }
+
+ pub fn cwd<T: AsRef<OsStr>>(&mut self, path: T) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ if let Some(cwd) = p.get_cwd() {
+ let new_path = cwd.join(path.as_ref());
+ p.cwd(new_path);
+ } else {
+ p.cwd(path);
+ }
+ }
+ self
+ }
+
+ fn get_cwd(&self) -> Option<&Path> {
+ self.process_builder.as_ref().and_then(|p| p.get_cwd())
+ }
+
+ pub fn env<T: AsRef<OsStr>>(&mut self, key: &str, val: T) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.env(key, val);
+ }
+ self
+ }
+
+ pub fn env_remove(&mut self, key: &str) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.env_remove(key);
+ }
+ self
+ }
+
+ pub fn exec_with_output(&mut self) -> Result<Output> {
+ self.ran = true;
+ // TODO avoid unwrap
+ let p = (&self.process_builder).clone().unwrap();
+ p.exec_with_output()
+ }
+
+ pub fn build_command(&mut self) -> Command {
+ self.ran = true;
+ // TODO avoid unwrap
+ let p = (&self.process_builder).clone().unwrap();
+ p.build_command()
+ }
+
+ /// Enables nightly features for testing
+ ///
+ /// The list of reasons should be why nightly cargo is needed. If it is
+ /// because of an unstable feature, put the name of the feature as the reason,
+ /// e.g. `&["print-im-a-teapot"]`
+ pub fn masquerade_as_nightly_cargo(&mut self, reasons: &[&str]) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.masquerade_as_nightly_cargo(reasons);
+ }
+ self
+ }
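
For example, a test exercising nightly-only behavior might look like this sketch (the `-Z` flag shown is illustrative; the reason list names whatever feature requires nightly):

```rust
p.cargo("build -Zunstable-options")
    .masquerade_as_nightly_cargo(&["unstable-options"])
    .run();
```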
+
+ /// Overrides the crates.io URL for testing.
+ ///
+ /// Can be used for testing crates-io functionality where alt registries
+ /// cannot be used.
+ pub fn replace_crates_io(&mut self, url: &Url) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.env("__CARGO_TEST_CRATES_IO_URL_DO_NOT_USE_THIS", url.as_str());
+ }
+ self
+ }
+
+ pub fn enable_split_debuginfo_packed(&mut self) -> &mut Self {
+ self.env("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO", "packed")
+ .env("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO", "packed")
+ .env("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO", "packed")
+ .env("CARGO_PROFILE_BENCH_SPLIT_DEBUGINFO", "packed");
+ self
+ }
+
+ pub fn enable_mac_dsym(&mut self) -> &mut Self {
+ if cfg!(target_os = "macos") {
+ return self.enable_split_debuginfo_packed();
+ }
+ self
+ }
+
+ #[track_caller]
+ pub fn run(&mut self) {
+ self.ran = true;
+ let mut p = (&self.process_builder).clone().unwrap();
+ if let Some(stdin) = self.expect_stdin.take() {
+ p.stdin(stdin);
+ }
+ if let Err(e) = self.match_process(&p) {
+ panic_error(&format!("test failed running {}", p), e);
+ }
+ }
+
+ #[track_caller]
+ pub fn run_expect_error(&mut self) {
+ self.ran = true;
+ let p = (&self.process_builder).clone().unwrap();
+ if self.match_process(&p).is_ok() {
+ panic!("test was expected to fail, but succeeded running {}", p);
+ }
+ }
+
+ /// Runs the process, checks the expected output, and returns the first
+ /// JSON object on stdout.
+ #[track_caller]
+ pub fn run_json(&mut self) -> serde_json::Value {
+ self.ran = true;
+ let p = (&self.process_builder).clone().unwrap();
+ match self.match_process(&p) {
+ Err(e) => panic_error(&format!("test failed running {}", p), e),
+ Ok(output) => serde_json::from_slice(&output.stdout).unwrap_or_else(|e| {
+ panic!(
+ "\nfailed to parse JSON: {}\n\
+ output was:\n{}\n",
+ e,
+ String::from_utf8_lossy(&output.stdout)
+ );
+ }),
+ }
+ }
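
A brief usage sketch (the command and the asserted field are illustrative):

```rust
// `cargo read-manifest` prints a single JSON object describing the package.
let json = p.cargo("read-manifest").run_json();
assert_eq!(json["name"], "foo");
```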
+
+ #[track_caller]
+ pub fn run_output(&mut self, output: &Output) {
+ self.ran = true;
+ if let Err(e) = self.match_output(output.status.code(), &output.stdout, &output.stderr) {
+ panic_error("process did not return the expected result", e)
+ }
+ }
+
+ fn verify_checks_output(&self, stdout: &[u8], stderr: &[u8]) {
+ if self.expect_exit_code.unwrap_or(0) != 0
+ && self.expect_stdout.is_none()
+ && self.expect_stdin.is_none()
+ && self.expect_stderr.is_none()
+ && self.expect_stdout_contains.is_empty()
+ && self.expect_stderr_contains.is_empty()
+ && self.expect_stdout_contains_n.is_empty()
+ && self.expect_stdout_not_contains.is_empty()
+ && self.expect_stderr_not_contains.is_empty()
+ && self.expect_stderr_unordered.is_empty()
+ && self.expect_stderr_with_without.is_empty()
+ && self.expect_json.is_none()
+ && self.expect_json_contains_unordered.is_none()
+ {
+ panic!(
+ "`with_status()` is used, but no output is checked.\n\
+ The test must check the output to ensure the correct error is triggered.\n\
+ --- stdout\n{}\n--- stderr\n{}",
+ String::from_utf8_lossy(stdout),
+ String::from_utf8_lossy(stderr),
+ );
+ }
+ }
+
+ fn match_process(&self, process: &ProcessBuilder) -> Result<RawOutput> {
+ println!("running {}", process);
+ let res = if self.stream_output {
+ if is_ci() {
+ panic!("`.stream()` is for local debugging")
+ }
+ process.exec_with_streaming(
+ &mut |out| {
+ println!("{}", out);
+ Ok(())
+ },
+ &mut |err| {
+ eprintln!("{}", err);
+ Ok(())
+ },
+ true,
+ )
+ } else {
+ process.exec_with_output()
+ };
+
+ match res {
+ Ok(out) => {
+ self.match_output(out.status.code(), &out.stdout, &out.stderr)?;
+ return Ok(RawOutput {
+ stdout: out.stdout,
+ stderr: out.stderr,
+ code: out.status.code(),
+ });
+ }
+ Err(e) => {
+ if let Some(ProcessError {
+ stdout: Some(stdout),
+ stderr: Some(stderr),
+ code,
+ ..
+ }) = e.downcast_ref::<ProcessError>()
+ {
+ self.match_output(*code, stdout, stderr)?;
+ return Ok(RawOutput {
+ stdout: stdout.to_vec(),
+ stderr: stderr.to_vec(),
+ code: *code,
+ });
+ }
+ bail!("could not exec process {}: {:?}", process, e)
+ }
+ }
+ }
+
+ fn match_output(&self, code: Option<i32>, stdout: &[u8], stderr: &[u8]) -> Result<()> {
+ self.verify_checks_output(stdout, stderr);
+ let stdout = str::from_utf8(stdout).expect("stdout is not utf8");
+ let stderr = str::from_utf8(stderr).expect("stderr is not utf8");
+ let cwd = self.get_cwd();
+
+ match self.expect_exit_code {
+ None => {}
+ Some(expected) if code == Some(expected) => {}
+ Some(expected) => bail!(
+ "process exited with code {} (expected {})\n--- stdout\n{}\n--- stderr\n{}",
+ code.unwrap_or(-1),
+ expected,
+ stdout,
+ stderr
+ ),
+ }
+
+ if let Some(expect_stdout) = &self.expect_stdout {
+ compare::match_exact(expect_stdout, stdout, "stdout", stderr, cwd)?;
+ }
+ if let Some(expect_stderr) = &self.expect_stderr {
+ compare::match_exact(expect_stderr, stderr, "stderr", stdout, cwd)?;
+ }
+ for expect in self.expect_stdout_contains.iter() {
+ compare::match_contains(expect, stdout, cwd)?;
+ }
+ for expect in self.expect_stderr_contains.iter() {
+ compare::match_contains(expect, stderr, cwd)?;
+ }
+ for &(ref expect, number) in self.expect_stdout_contains_n.iter() {
+ compare::match_contains_n(expect, number, stdout, cwd)?;
+ }
+ for expect in self.expect_stdout_not_contains.iter() {
+ compare::match_does_not_contain(expect, stdout, cwd)?;
+ }
+ for expect in self.expect_stderr_not_contains.iter() {
+ compare::match_does_not_contain(expect, stderr, cwd)?;
+ }
+ for expect in self.expect_stderr_unordered.iter() {
+ compare::match_unordered(expect, stderr, cwd)?;
+ }
+ for (with, without) in self.expect_stderr_with_without.iter() {
+ compare::match_with_without(stderr, with, without, cwd)?;
+ }
+
+ if let Some(ref expect_json) = self.expect_json {
+ compare::match_json(expect_json, stdout, cwd)?;
+ }
+
+ if let Some(ref expected) = self.expect_json_contains_unordered {
+ compare::match_json_contains_unordered(expected, stdout, cwd)?;
+ }
+ Ok(())
+ }
+}
+
+impl Drop for Execs {
+ fn drop(&mut self) {
+ if !self.ran && !std::thread::panicking() {
+ panic!("forgot to run this command");
+ }
+ }
+}
+
+pub fn execs() -> Execs {
+ Execs {
+ ran: false,
+ process_builder: None,
+ expect_stdout: None,
+ expect_stderr: None,
+ expect_stdin: None,
+ expect_exit_code: Some(0),
+ expect_stdout_contains: Vec::new(),
+ expect_stderr_contains: Vec::new(),
+ expect_stdout_contains_n: Vec::new(),
+ expect_stdout_not_contains: Vec::new(),
+ expect_stderr_not_contains: Vec::new(),
+ expect_stderr_unordered: Vec::new(),
+ expect_stderr_with_without: Vec::new(),
+ expect_json: None,
+ expect_json_contains_unordered: None,
+ stream_output: false,
+ }
+}
+
+pub fn basic_manifest(name: &str, version: &str) -> String {
+ format!(
+ r#"
+ [package]
+ name = "{}"
+ version = "{}"
+ authors = []
+ "#,
+ name, version
+ )
+}
+
+pub fn basic_bin_manifest(name: &str) -> String {
+ format!(
+ r#"
+ [package]
+
+ name = "{}"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [[bin]]
+
+ name = "{}"
+ "#,
+ name, name
+ )
+}
+
+pub fn basic_lib_manifest(name: &str) -> String {
+ format!(
+ r#"
+ [package]
+
+ name = "{}"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [lib]
+
+ name = "{}"
+ "#,
+ name, name
+ )
+}
+
+pub fn path2url<P: AsRef<Path>>(p: P) -> Url {
+ Url::from_file_path(p).ok().unwrap()
+}
+
+struct RustcInfo {
+ verbose_version: String,
+ host: String,
+}
+
+impl RustcInfo {
+ fn new() -> RustcInfo {
+ let output = ProcessBuilder::new("rustc")
+ .arg("-vV")
+ .exec_with_output()
+ .expect("rustc should exec");
+ let verbose_version = String::from_utf8(output.stdout).expect("utf8 output");
+ let host = verbose_version
+ .lines()
+ .filter_map(|line| line.strip_prefix("host: "))
+ .next()
+ .expect("verbose version has host: field")
+ .to_string();
+ RustcInfo {
+ verbose_version,
+ host,
+ }
+ }
+}
+
+lazy_static::lazy_static! {
+ static ref RUSTC_INFO: RustcInfo = RustcInfo::new();
+}
+
+/// The rustc host such as `x86_64-unknown-linux-gnu`.
+pub fn rustc_host() -> &'static str {
+ &RUSTC_INFO.host
+}
+
+/// The host triple suitable for use in a cargo environment variable (uppercased).
+pub fn rustc_host_env() -> String {
+ rustc_host().to_uppercase().replace('-', "_")
+}
+
+pub fn is_nightly() -> bool {
+ let vv = &RUSTC_INFO.verbose_version;
+ // CARGO_TEST_DISABLE_NIGHTLY is set in rust-lang/rust's CI so that all
+ // nightly-only tests are disabled there. Otherwise, it could make it
+ // difficult to land changes which would need to be made simultaneously in
+ // rust-lang/cargo and rust-lang/rust, which isn't possible.
+ env::var("CARGO_TEST_DISABLE_NIGHTLY").is_err()
+ && (vv.contains("-nightly") || vv.contains("-dev"))
+}
+
+pub fn process<T: AsRef<OsStr>>(t: T) -> ProcessBuilder {
+ _process(t.as_ref())
+}
+
+fn _process(t: &OsStr) -> ProcessBuilder {
+ let mut p = ProcessBuilder::new(t);
+ p.cwd(&paths::root()).test_env();
+ p
+}
+
+/// Enable nightly features for testing
+pub trait ChannelChanger {
+ /// The list of reasons should be why nightly cargo is needed. If it is
+ /// because of an unstable feature, put the name of the feature as the reason,
+ /// e.g. `&["print-im-a-teapot"]`.
+ fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self;
+}
+
+impl ChannelChanger for &mut ProcessBuilder {
+ fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self {
+ self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly")
+ }
+}
+
+impl ChannelChanger for snapbox::cmd::Command {
+ fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self {
+ self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly")
+ }
+}
+
+/// Establish a process's test environment
+pub trait TestEnv: Sized {
+ fn test_env(mut self) -> Self {
+ // In general just clear out all cargo-specific configuration already in the
+ // environment. Our tests all assume a "default configuration" unless
+ // specified otherwise.
+ for (k, _v) in env::vars() {
+ if k.starts_with("CARGO_") {
+ self = self.env_remove(&k);
+ }
+ }
+ if env::var_os("RUSTUP_TOOLCHAIN").is_some() {
+ // Override the PATH to avoid executing the rustup wrapper thousands
+ // of times. This makes the testsuite run substantially faster.
+ lazy_static::lazy_static! {
+ static ref RUSTC_DIR: PathBuf = {
+ match ProcessBuilder::new("rustup")
+ .args(&["which", "rustc"])
+ .exec_with_output()
+ {
+ Ok(output) => {
+ let s = str::from_utf8(&output.stdout).expect("utf8").trim();
+ let mut p = PathBuf::from(s);
+ p.pop();
+ p
+ }
+ Err(e) => {
+ panic!("RUSTUP_TOOLCHAIN was set, but could not run rustup: {}", e);
+ }
+ }
+ };
+ }
+ let path = env::var_os("PATH").unwrap_or_default();
+ let paths = env::split_paths(&path);
+ let new_path =
+ env::join_paths(std::iter::once(RUSTC_DIR.clone()).chain(paths)).unwrap();
+ self = self.env("PATH", new_path);
+ }
+
+ self = self
+ .current_dir(&paths::root())
+ .env("HOME", paths::home())
+ .env("CARGO_HOME", paths::home().join(".cargo"))
+ .env("__CARGO_TEST_ROOT", paths::global_root())
+ // Force Cargo to think it's on the stable channel for all tests, this
+ // should hopefully not surprise us as we add cargo features over time and
+ // cargo rides the trains.
+ .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "stable")
+ // Keeps cargo within its sandbox.
+ .env("__CARGO_TEST_DISABLE_GLOBAL_KNOWN_HOST", "1")
+ // Incremental generates a huge amount of data per test, which we
+ // don't particularly need. Tests that specifically need to check
+ // the incremental behavior should turn this back on.
+ .env("CARGO_INCREMENTAL", "0")
+ // Don't read the system git config which is out of our control.
+ .env("GIT_CONFIG_NOSYSTEM", "1")
+ .env_remove("__CARGO_DEFAULT_LIB_METADATA")
+ .env_remove("ALL_PROXY")
+ .env_remove("EMAIL")
+ .env_remove("GIT_AUTHOR_EMAIL")
+ .env_remove("GIT_AUTHOR_NAME")
+ .env_remove("GIT_COMMITTER_EMAIL")
+ .env_remove("GIT_COMMITTER_NAME")
+ .env_remove("http_proxy")
+ .env_remove("HTTPS_PROXY")
+ .env_remove("https_proxy")
+ .env_remove("MAKEFLAGS")
+ .env_remove("MFLAGS")
+ .env_remove("MSYSTEM") // assume cmd.exe everywhere on windows
+ .env_remove("RUSTC")
+ .env_remove("RUSTC_WORKSPACE_WRAPPER")
+ .env_remove("RUSTC_WRAPPER")
+ .env_remove("RUSTDOC")
+ .env_remove("RUSTDOCFLAGS")
+ .env_remove("RUSTFLAGS")
+ .env_remove("SSH_AUTH_SOCK") // ensure an outer agent is never contacted
+ .env_remove("USER") // not set on some rust-lang docker images
+ .env_remove("XDG_CONFIG_HOME"); // see #2345
+ if cfg!(target_os = "macos") {
+ // Work-around a bug in macOS 10.15, see `link_or_copy` for details.
+ self = self.env("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS", "1");
+ }
+ if cfg!(windows) {
+ self = self.env("USERPROFILE", paths::home());
+ }
+ self
+ }
+
+ fn current_dir<S: AsRef<std::path::Path>>(self, path: S) -> Self;
+ fn env<S: AsRef<std::ffi::OsStr>>(self, key: &str, value: S) -> Self;
+ fn env_remove(self, key: &str) -> Self;
+}
+
+impl TestEnv for &mut ProcessBuilder {
+ fn current_dir<S: AsRef<std::path::Path>>(self, path: S) -> Self {
+ let path = path.as_ref();
+ self.cwd(path)
+ }
+ fn env<S: AsRef<std::ffi::OsStr>>(self, key: &str, value: S) -> Self {
+ self.env(key, value)
+ }
+ fn env_remove(self, key: &str) -> Self {
+ self.env_remove(key)
+ }
+}
+
+impl TestEnv for snapbox::cmd::Command {
+ fn current_dir<S: AsRef<std::path::Path>>(self, path: S) -> Self {
+ self.current_dir(path)
+ }
+ fn env<S: AsRef<std::ffi::OsStr>>(self, key: &str, value: S) -> Self {
+ self.env(key, value)
+ }
+ fn env_remove(self, key: &str) -> Self {
+ self.env_remove(key)
+ }
+}
+
+/// Test the cargo command
+pub trait CargoCommand {
+ fn cargo_ui() -> Self;
+}
+
+impl CargoCommand for snapbox::cmd::Command {
+ fn cargo_ui() -> Self {
+ Self::new(cargo_exe())
+ .with_assert(compare::assert_ui())
+ .test_env()
+ }
+}
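
A sketch of driving the cargo under test through snapbox (assuming `p` is a previously built `Project`):

```rust
use cargo_test_support::CargoCommand;
use snapbox::cmd::Command;

Command::cargo_ui()
    .arg("check")
    .current_dir(p.root())
    .assert()
    .success();
```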
+
+/// Add a list of arguments as a line
+pub trait ArgLine: Sized {
+ fn arg_line(mut self, s: &str) -> Self {
+ for mut arg in s.split_whitespace() {
+ if (arg.starts_with('"') && arg.ends_with('"'))
+ || (arg.starts_with('\'') && arg.ends_with('\''))
+ {
+ arg = &arg[1..(arg.len() - 1).max(1)];
+ } else if arg.contains(&['"', '\''][..]) {
+ panic!("shell-style argument parsing is not supported")
+ }
+ self = self.arg(arg);
+ }
+ self
+ }
+
+ fn arg<S: AsRef<std::ffi::OsStr>>(self, s: S) -> Self;
+}
+
+impl ArgLine for &mut ProcessBuilder {
+ fn arg<S: AsRef<std::ffi::OsStr>>(self, s: S) -> Self {
+ self.arg(s)
+ }
+}
+
+impl ArgLine for snapbox::cmd::Command {
+ fn arg<S: AsRef<std::ffi::OsStr>>(self, s: S) -> Self {
+ self.arg(s)
+ }
+}
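
The quoting rules above are deliberately simple; a sketch of what they accept:

```rust
use cargo_test_support::{process, ArgLine};

let mut p = process("git");
// Splits on whitespace and strips quotes around single tokens,
// yielding `git commit -m msg`.
p.arg_line("commit -m 'msg'");
// Quoted arguments containing spaces are *not* supported and would panic:
// p.arg_line("commit -m 'two words'");
```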
+
+pub fn cargo_process(s: &str) -> Execs {
+ let cargo = cargo_exe();
+ let mut p = process(&cargo);
+ p.env("CARGO", cargo);
+ p.arg_line(s);
+ execs().with_process_builder(p)
+}
+
+pub fn git_process(s: &str) -> ProcessBuilder {
+ let mut p = process("git");
+ p.arg_line(s);
+ p
+}
+
+pub fn sleep_ms(ms: u64) {
+ ::std::thread::sleep(Duration::from_millis(ms));
+}
+
+/// Returns `true` if the local filesystem has low-resolution mtimes.
+pub fn is_coarse_mtime() -> bool {
+ // If the filetime crate is being used to emulate HFS then
+ // return `true`, without looking at the actual hardware.
+ cfg!(emulate_second_only_system) ||
+ // This should actually be a test that `$CARGO_TARGET_DIR` is on an HFS
+ // filesystem, (or any filesystem with low-resolution mtimes). However,
+ // that's tricky to detect, so for now just deal with CI.
+ cfg!(target_os = "macos") && is_ci()
+}
+
+/// Some CI setups are much slower than the equipment used by Cargo itself,
+/// such as architectures without a modern processor, or ones using hardware emulation.
+/// This provides a way for those setups to increase the cut-off for all time-based tests.
+pub fn slow_cpu_multiplier(main: u64) -> Duration {
+ lazy_static::lazy_static! {
+ static ref SLOW_CPU_MULTIPLIER: u64 =
+ env::var("CARGO_TEST_SLOW_CPU_MULTIPLIER").ok().and_then(|m| m.parse().ok()).unwrap_or(1);
+ }
+ Duration::from_secs(*SLOW_CPU_MULTIPLIER * main)
+}
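
A sketch of how a time-based test might use this scaling (the polled condition is hypothetical):

```rust
// Poll for up to 10 "base" seconds, scaled by CARGO_TEST_SLOW_CPU_MULTIPLIER
// so slower CI machines get a proportionally longer deadline.
let deadline = std::time::Instant::now() + slow_cpu_multiplier(10);
while !output_file_exists() {
    assert!(
        std::time::Instant::now() < deadline,
        "timed out waiting for output"
    );
    sleep_ms(100);
}
```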
+
+#[cfg(windows)]
+pub fn symlink_supported() -> bool {
+ if is_ci() {
+ // We want to be absolutely sure this runs on CI.
+ return true;
+ }
+ let src = paths::root().join("symlink_src");
+ fs::write(&src, "").unwrap();
+ let dst = paths::root().join("symlink_dst");
+ let result = match os::windows::fs::symlink_file(&src, &dst) {
+ Ok(_) => {
+ fs::remove_file(&dst).unwrap();
+ true
+ }
+ Err(e) => {
+ eprintln!(
+ "symlinks not supported: {:?}\n\
+ Windows 10 users should enable developer mode.",
+ e
+ );
+ false
+ }
+ };
+ fs::remove_file(&src).unwrap();
+ return result;
+}
+
+#[cfg(not(windows))]
+pub fn symlink_supported() -> bool {
+ true
+}
+
+/// The error message for ENOENT.
+pub fn no_such_file_err_msg() -> String {
+ std::io::Error::from_raw_os_error(2).to_string()
+}
diff --git a/src/tools/cargo/crates/cargo-test-support/src/paths.rs b/src/tools/cargo/crates/cargo-test-support/src/paths.rs
new file mode 100644
index 000000000..ef1fddb70
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/src/paths.rs
@@ -0,0 +1,347 @@
+use filetime::{self, FileTime};
+use lazy_static::lazy_static;
+use std::cell::RefCell;
+use std::collections::HashMap;
+use std::env;
+use std::fs;
+use std::io::{self, ErrorKind};
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Mutex;
+
+static CARGO_INTEGRATION_TEST_DIR: &str = "cit";
+
+lazy_static! {
+ // TODO: Use `SyncOnceCell` when stable
+ static ref GLOBAL_ROOT: Mutex<Option<PathBuf>> = Mutex::new(None);
+
+ static ref TEST_ROOTS: Mutex<HashMap<String, PathBuf>> = Default::default();
+}
+
+/// This is used when the cargo being run predates `CARGO_TARGET_TMPDIR`.
+/// TODO: Remove once `CARGO_TARGET_TMPDIR` is old enough to be assumed.
+fn global_root_legacy() -> PathBuf {
+ let mut path = t!(env::current_exe());
+ path.pop(); // chop off exe name
+ path.pop(); // chop off "deps"
+ path.push("tmp");
+ path.mkdir_p();
+ path
+}
+
+fn set_global_root(tmp_dir: Option<&'static str>) {
+ let mut lock = GLOBAL_ROOT.lock().unwrap();
+ if lock.is_none() {
+ let mut root = match tmp_dir {
+ Some(tmp_dir) => PathBuf::from(tmp_dir),
+ None => global_root_legacy(),
+ };
+
+ root.push(CARGO_INTEGRATION_TEST_DIR);
+ *lock = Some(root);
+ }
+}
+
+pub fn global_root() -> PathBuf {
+ let lock = GLOBAL_ROOT.lock().unwrap();
+ match lock.as_ref() {
+ Some(p) => p.clone(),
+ None => unreachable!("GLOBAL_ROOT not set yet"),
+ }
+}
+
+// We need to give each test a unique id. The test name could serve this
+// purpose, but the `test` crate doesn't have a way to obtain the current test
+// name.[*] Instead, we use the `cargo-test-macro` crate to automatically
+// insert an init function for each test that sets the test name in a thread
+// local variable.
+//
+// [*] It does set the thread name, but only when running concurrently. If not
+// running concurrently, all tests are run on the main thread.
+thread_local! {
+ static TEST_ID: RefCell<Option<usize>> = RefCell::new(None);
+}
+
+pub struct TestIdGuard {
+ _private: (),
+}
+
+pub fn init_root(tmp_dir: Option<&'static str>) -> TestIdGuard {
+ static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
+
+ let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
+ TEST_ID.with(|n| *n.borrow_mut() = Some(id));
+
+ let guard = TestIdGuard { _private: () };
+
+ set_global_root(tmp_dir);
+ let r = root();
+ r.rm_rf();
+ r.mkdir_p();
+
+ guard
+}
+
+impl Drop for TestIdGuard {
+ fn drop(&mut self) {
+ TEST_ID.with(|n| *n.borrow_mut() = None);
+ }
+}
+
+pub fn root() -> PathBuf {
+ let id = TEST_ID.with(|n| {
+ n.borrow().expect(
+ "Tests must use the `#[cargo_test]` attribute in \
+ order to be able to use the crate root.",
+ )
+ });
+
+ let mut root = global_root();
+ root.push(&format!("t{}", id));
+ root
+}
+
+pub fn home() -> PathBuf {
+ let mut path = root();
+ path.push("home");
+ path.mkdir_p();
+ path
+}
+
+pub trait CargoPathExt {
+ fn rm_rf(&self);
+ fn mkdir_p(&self);
+
+ fn move_into_the_past(&self) {
+ self.move_in_time(|sec, nsec| (sec - 3600, nsec))
+ }
+
+ fn move_into_the_future(&self) {
+ self.move_in_time(|sec, nsec| (sec + 3600, nsec))
+ }
+
+ fn move_in_time<F>(&self, travel_amount: F)
+ where
+ F: Fn(i64, u32) -> (i64, u32);
+}
+
+impl CargoPathExt for Path {
+ fn rm_rf(&self) {
+ let meta = match self.symlink_metadata() {
+ Ok(meta) => meta,
+ Err(e) => {
+ if e.kind() == ErrorKind::NotFound {
+ return;
+ }
+ panic!("failed to remove {:?}, could not read: {:?}", self, e);
+ }
+ };
+ // There is a race condition between fetching the metadata and
+ // actually performing the removal, but we don't care all that much
+ // for our tests.
+ if meta.is_dir() {
+ if let Err(e) = fs::remove_dir_all(self) {
+ panic!("failed to remove {:?}: {:?}", self, e)
+ }
+ } else if let Err(e) = fs::remove_file(self) {
+ panic!("failed to remove {:?}: {:?}", self, e)
+ }
+ }
+
+ fn mkdir_p(&self) {
+ fs::create_dir_all(self)
+ .unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e))
+ }
+
+ fn move_in_time<F>(&self, travel_amount: F)
+ where
+ F: Fn(i64, u32) -> (i64, u32),
+ {
+ if self.is_file() {
+ time_travel(self, &travel_amount);
+ } else {
+ recurse(self, &self.join("target"), &travel_amount);
+ }
+
+ fn recurse<F>(p: &Path, bad: &Path, travel_amount: &F)
+ where
+ F: Fn(i64, u32) -> (i64, u32),
+ {
+ if p.is_file() {
+ time_travel(p, travel_amount)
+ } else if !p.starts_with(bad) {
+ for f in t!(fs::read_dir(p)) {
+ let f = t!(f).path();
+ recurse(&f, bad, travel_amount);
+ }
+ }
+ }
+
+ fn time_travel<F>(path: &Path, travel_amount: &F)
+ where
+ F: Fn(i64, u32) -> (i64, u32),
+ {
+ let stat = t!(path.symlink_metadata());
+
+ let mtime = FileTime::from_last_modification_time(&stat);
+
+ let (sec, nsec) = travel_amount(mtime.unix_seconds(), mtime.nanoseconds());
+ let newtime = FileTime::from_unix_time(sec, nsec);
+
+ // Sadly change_file_times has a failure mode where a readonly file
+ // cannot have its times changed on windows.
+ do_op(path, "set file times", |path| {
+ filetime::set_file_times(path, newtime, newtime)
+ });
+ }
+ }
+}
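
A short sketch of how these helpers are typically used in a test (assuming `p` is a built `Project`):

```rust
use cargo_test_support::paths::CargoPathExt;

// Pretend the sources were written an hour ago, so a following build sees
// them as strictly older than any freshly produced artifacts.
p.root().move_into_the_past();
```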
+
+fn do_op<F>(path: &Path, desc: &str, mut f: F)
+where
+ F: FnMut(&Path) -> io::Result<()>,
+{
+ match f(path) {
+ Ok(()) => {}
+ Err(ref e) if e.kind() == ErrorKind::PermissionDenied => {
+ let mut p = t!(path.metadata()).permissions();
+ p.set_readonly(false);
+ t!(fs::set_permissions(path, p));
+
+ // Unix also requires the parent not to be readonly, for example when
+ // removing files.
+ let parent = path.parent().unwrap();
+ let mut p = t!(parent.metadata()).permissions();
+ p.set_readonly(false);
+ t!(fs::set_permissions(parent, p));
+
+ f(path).unwrap_or_else(|e| {
+ panic!("failed to {} {}: {}", desc, path.display(), e);
+ })
+ }
+ Err(e) => {
+ panic!("failed to {} {}: {}", desc, path.display(), e);
+ }
+ }
+}
+
+/// Get the filename for a library.
+///
+/// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro"
+///
+/// For example, dynamic library named "foo" would return:
+/// - macOS: "libfoo.dylib"
+/// - Windows: "foo.dll"
+/// - Unix: "libfoo.so"
+pub fn get_lib_filename(name: &str, kind: &str) -> String {
+ let prefix = get_lib_prefix(kind);
+ let extension = get_lib_extension(kind);
+ format!("{}{}.{}", prefix, name, extension)
+}
+
+pub fn get_lib_prefix(kind: &str) -> &str {
+ match kind {
+ "lib" | "rlib" => "lib",
+ "staticlib" | "dylib" | "proc-macro" => {
+ if cfg!(windows) {
+ ""
+ } else {
+ "lib"
+ }
+ }
+ _ => unreachable!(),
+ }
+}
+
+pub fn get_lib_extension(kind: &str) -> &str {
+ match kind {
+ "lib" | "rlib" => "rlib",
+ "staticlib" => {
+ if cfg!(windows) {
+ "lib"
+ } else {
+ "a"
+ }
+ }
+ "dylib" | "proc-macro" => {
+ if cfg!(windows) {
+ "dll"
+ } else if cfg!(target_os = "macos") {
+ "dylib"
+ } else {
+ "so"
+ }
+ }
+ _ => unreachable!(),
+ }
+}
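
For example, on a Linux host these helpers would be expected to produce:

```rust
assert_eq!(get_lib_filename("ex", "rlib"), "libex.rlib");
assert_eq!(get_lib_filename("ex", "staticlib"), "libex.a");
assert_eq!(get_lib_filename("ex", "dylib"), "libex.so");
```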
+
+/// Returns the sysroot as queried from rustc.
+pub fn sysroot() -> String {
+ let output = Command::new("rustc")
+ .arg("--print=sysroot")
+ .output()
+ .expect("rustc to run");
+ assert!(output.status.success());
+ let sysroot = String::from_utf8(output.stdout).unwrap();
+ sysroot.trim().to_string()
+}
+
+/// Returns true if names such as aux.* are allowed.
+///
+/// Traditionally, Windows did not allow a set of file names (see `is_windows_reserved`
+/// for a list). More recent versions of Windows have relaxed this restriction. This test
+/// determines whether we are running in a mode that allows Windows reserved names.
+#[cfg(windows)]
+pub fn windows_reserved_names_are_allowed() -> bool {
+ use cargo_util::is_ci;
+
+ // Ensure tests still run in CI until we need to migrate.
+ if is_ci() {
+ return false;
+ }
+
+ use std::ffi::OsStr;
+ use std::os::windows::ffi::OsStrExt;
+ use std::ptr;
+ use windows_sys::Win32::Storage::FileSystem::GetFullPathNameW;
+
+ let test_file_name: Vec<_> = OsStr::new("aux.rs").encode_wide().collect();
+
+ let buffer_length =
+ unsafe { GetFullPathNameW(test_file_name.as_ptr(), 0, ptr::null_mut(), ptr::null_mut()) };
+
+ if buffer_length == 0 {
+ // This means the call failed, so we'll conservatively assume reserved names are not allowed.
+ return false;
+ }
+
+ let mut buffer = vec![0u16; buffer_length as usize];
+
+ let result = unsafe {
+ GetFullPathNameW(
+ test_file_name.as_ptr(),
+ buffer_length,
+ buffer.as_mut_ptr(),
+ ptr::null_mut(),
+ )
+ };
+
+ if result == 0 {
+ // Once again, conservatively assume reserved names are not allowed if the
+ // GetFullPathNameW call failed.
+ return false;
+ }
+
+ // Under the old rules, a file name like aux.rs would get converted into \\.\aux, so
+ // we detect this case by checking if the string starts with \\.\
+ //
+ // Otherwise, the filename will be something like C:\Users\Foo\Documents\aux.rs
+ let prefix: Vec<_> = OsStr::new("\\\\.\\").encode_wide().collect();
+ if buffer.starts_with(&prefix) {
+ false
+ } else {
+ true
+ }
+}
diff --git a/src/tools/cargo/crates/cargo-test-support/src/publish.rs b/src/tools/cargo/crates/cargo-test-support/src/publish.rs
new file mode 100644
index 000000000..64774bc43
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/src/publish.rs
@@ -0,0 +1,245 @@
+use crate::compare::{assert_match_exact, find_json_mismatch};
+use crate::registry::{self, alt_api_path, FeatureMap};
+use flate2::read::GzDecoder;
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::fs::File;
+use std::io::{self, prelude::*, SeekFrom};
+use std::path::{Path, PathBuf};
+use tar::Archive;
+
+fn read_le_u32<R>(mut reader: R) -> io::Result<u32>
+where
+ R: Read,
+{
+ let mut buf = [0; 4];
+ reader.read_exact(&mut buf)?;
+ Ok(u32::from_le_bytes(buf))
+}
+
+/// Checks the result of a crate publish.
+pub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_files: &[&str]) {
+ let new_path = registry::api_path().join("api/v1/crates/new");
+ _validate_upload(
+ &new_path,
+ expected_json,
+ expected_crate_name,
+ expected_files,
+ &[],
+ );
+}
+
+/// Checks the result of a crate publish, along with the contents of the files.
+pub fn validate_upload_with_contents(
+ expected_json: &str,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+ expected_contents: &[(&str, &str)],
+) {
+ let new_path = registry::api_path().join("api/v1/crates/new");
+ _validate_upload(
+ &new_path,
+ expected_json,
+ expected_crate_name,
+ expected_files,
+ expected_contents,
+ );
+}
+
+/// Checks the result of a crate publish to an alternative registry.
+pub fn validate_alt_upload(
+ expected_json: &str,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+) {
+ let new_path = alt_api_path().join("api/v1/crates/new");
+ _validate_upload(
+ &new_path,
+ expected_json,
+ expected_crate_name,
+ expected_files,
+ &[],
+ );
+}
+
+fn _validate_upload(
+ new_path: &Path,
+ expected_json: &str,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+ expected_contents: &[(&str, &str)],
+) {
+ let mut f = File::open(new_path).unwrap();
+ // 32-bit little-endian integer of length of JSON data.
+ let json_sz = read_le_u32(&mut f).expect("read json length");
+ let mut json_bytes = vec![0; json_sz as usize];
+ f.read_exact(&mut json_bytes).expect("read JSON data");
+ let actual_json = serde_json::from_slice(&json_bytes).expect("uploaded JSON should be valid");
+ let expected_json = serde_json::from_str(expected_json).expect("expected JSON does not parse");
+
+ if let Err(e) = find_json_mismatch(&expected_json, &actual_json, None) {
+ panic!("{}", e);
+ }
+
+ // 32-bit little-endian integer of length of crate file.
+ let crate_sz = read_le_u32(&mut f).expect("read crate length");
+ let mut krate_bytes = vec![0; crate_sz as usize];
+ f.read_exact(&mut krate_bytes).expect("read crate data");
+ // Check at end.
+ let current = f.seek(SeekFrom::Current(0)).unwrap();
+ assert_eq!(f.seek(SeekFrom::End(0)).unwrap(), current);
+
+ // Verify the tarball.
+ validate_crate_contents(
+ &krate_bytes[..],
+ expected_crate_name,
+ expected_files,
+ expected_contents,
+ );
+}
+
+/// Checks the contents of a `.crate` file.
+///
+/// - `expected_crate_name` should be something like `foo-0.0.1.crate`.
+/// - `expected_files` should be a complete list of files in the crate
+/// (relative to expected_crate_name).
+/// - `expected_contents` should be a list of `(file_name, contents)` tuples
+/// to validate the contents of the given file. Only the listed files will
+/// be checked (others will be ignored).
+pub fn validate_crate_contents(
+ reader: impl Read,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+ expected_contents: &[(&str, &str)],
+) {
+ let mut rdr = GzDecoder::new(reader);
+ assert_eq!(
+ rdr.header().unwrap().filename().unwrap(),
+ expected_crate_name.as_bytes()
+ );
+ let mut contents = Vec::new();
+ rdr.read_to_end(&mut contents).unwrap();
+ let mut ar = Archive::new(&contents[..]);
+ let files: HashMap<PathBuf, String> = ar
+ .entries()
+ .unwrap()
+ .map(|entry| {
+ let mut entry = entry.unwrap();
+ let name = entry.path().unwrap().into_owned();
+ let mut contents = String::new();
+ entry.read_to_string(&mut contents).unwrap();
+ (name, contents)
+ })
+ .collect();
+ assert!(expected_crate_name.ends_with(".crate"));
+ let base_crate_name = Path::new(&expected_crate_name[..expected_crate_name.len() - 6]);
+ let actual_files: HashSet<PathBuf> = files.keys().cloned().collect();
+ let expected_files: HashSet<PathBuf> = expected_files
+ .iter()
+ .map(|name| base_crate_name.join(name))
+ .collect();
+ let missing: Vec<&PathBuf> = expected_files.difference(&actual_files).collect();
+ let extra: Vec<&PathBuf> = actual_files.difference(&expected_files).collect();
+ if !missing.is_empty() || !extra.is_empty() {
+ panic!(
+ "uploaded archive does not match.\nMissing: {:?}\nExtra: {:?}\n",
+ missing, extra
+ );
+ }
+ if !expected_contents.is_empty() {
+ for (e_file_name, e_file_contents) in expected_contents {
+ let full_e_name = base_crate_name.join(e_file_name);
+ let actual_contents = files
+ .get(&full_e_name)
+ .unwrap_or_else(|| panic!("file `{}` missing in archive", e_file_name));
+ assert_match_exact(e_file_contents, actual_contents);
+ }
+ }
+}
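
A usage sketch, checking an archive produced by `cargo package` (the path and file list are illustrative):

```rust
use cargo_test_support::publish::validate_crate_contents;
use std::fs::File;

let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
validate_crate_contents(
    f,
    "foo-0.0.1.crate",
    &["Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
    &[],
);
```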
+
+pub(crate) fn create_index_line(
+ name: serde_json::Value,
+ vers: &str,
+ deps: Vec<serde_json::Value>,
+ cksum: &str,
+ features: crate::registry::FeatureMap,
+ yanked: bool,
+ links: Option<String>,
+ v: Option<u32>,
+) -> String {
+ // This emulates what crates.io does to retain backwards compatibility.
+ let (features, features2) = split_index_features(features.clone());
+ let mut json = serde_json::json!({
+ "name": name,
+ "vers": vers,
+ "deps": deps,
+ "cksum": cksum,
+ "features": features,
+ "yanked": yanked,
+ "links": links,
+ });
+ if let Some(f2) = &features2 {
+ json["features2"] = serde_json::json!(f2);
+ json["v"] = serde_json::json!(2);
+ }
+ if let Some(v) = v {
+ json["v"] = serde_json::json!(v);
+ }
+
+ json.to_string()
+}
+
+pub(crate) fn write_to_index(registry_path: &Path, name: &str, line: String, local: bool) {
+ let file = cargo_util::registry::make_dep_path(name, false);
+
+ // Write file/line in the index.
+ let dst = if local {
+ registry_path.join("index").join(&file)
+ } else {
+ registry_path.join(&file)
+ };
+ let prev = fs::read_to_string(&dst).unwrap_or_default();
+ t!(fs::create_dir_all(dst.parent().unwrap()));
+ t!(fs::write(&dst, prev + &line[..] + "\n"));
+
+ // Add the new file to the index.
+ if !local {
+ let repo = t!(git2::Repository::open(&registry_path));
+ let mut index = t!(repo.index());
+ t!(index.add_path(Path::new(&file)));
+ t!(index.write());
+ let id = t!(index.write_tree());
+
+ // Commit this change.
+ let tree = t!(repo.find_tree(id));
+ let sig = t!(repo.signature());
+ let parent = t!(repo.refname_to_id("refs/heads/master"));
+ let parent = t!(repo.find_commit(parent));
+ t!(repo.commit(
+ Some("HEAD"),
+ &sig,
+ &sig,
+ "Another commit",
+ &tree,
+ &[&parent]
+ ));
+ }
+}
+
+fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option<FeatureMap>) {
+ let mut features2 = FeatureMap::new();
+ for (feat, values) in features.iter_mut() {
+ if values
+ .iter()
+ .any(|value| value.starts_with("dep:") || value.contains("?/"))
+ {
+ let new_values = values.drain(..).collect();
+ features2.insert(feat.clone(), new_values);
+ }
+ }
+ if features2.is_empty() {
+ (features, None)
+ } else {
+ (features, Some(features2))
+ }
+}
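
A sketch of the split this performs (purely illustrative; the helper is private to this module, and `FeatureMap` is assumed to map a feature name to its list of values):

```rust
let mut features = FeatureMap::new();
features.insert("std".to_string(), vec![]);
features.insert("serde".to_string(), vec!["dep:serde".to_string()]);

let (features, features2) = split_index_features(features);
// "serde" uses the new `dep:` syntax, so its values are drained into
// `features2`; the key stays in `features` with an empty list so older
// clients still see it.
assert_eq!(features.get("serde").map(|v| v.len()), Some(0));
assert!(features2.is_some());
```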
diff --git a/src/tools/cargo/crates/cargo-test-support/src/registry.rs b/src/tools/cargo/crates/cargo-test-support/src/registry.rs
new file mode 100644
index 000000000..5faf23540
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/src/registry.rs
@@ -0,0 +1,1635 @@
+use crate::git::repo;
+use crate::paths;
+use crate::publish::{create_index_line, write_to_index};
+use cargo_util::paths::append;
+use cargo_util::Sha256;
+use flate2::write::GzEncoder;
+use flate2::Compression;
+use pasetors::keys::{AsymmetricPublicKey, AsymmetricSecretKey};
+use pasetors::paserk::FormatAsPaserk;
+use pasetors::token::UntrustedToken;
+use std::collections::{BTreeMap, HashMap};
+use std::fmt;
+use std::fs::{self, File};
+use std::io::{BufRead, BufReader, Read, Write};
+use std::net::{SocketAddr, TcpListener, TcpStream};
+use std::path::{Path, PathBuf};
+use std::thread::{self, JoinHandle};
+use tar::{Builder, Header};
+use time::format_description::well_known::Rfc3339;
+use time::{Duration, OffsetDateTime};
+use url::Url;
+
+/// Gets the path to the local index pretending to be crates.io. This is a Git repo
+/// initialized with a `config.json` file pointing to `dl_path` for downloads
+/// and `api_path` for uploads.
+pub fn registry_path() -> PathBuf {
+ generate_path("registry")
+}
+/// Gets the path for local web API uploads. Cargo will place the contents of a web API
+/// request here. For example, `api/v1/crates/new` is the result of publishing a crate.
+pub fn api_path() -> PathBuf {
+ generate_path("api")
+}
+/// Gets the path where crates can be downloaded using the web API endpoint. Crates
+/// should be organized as `{name}/{version}/download` to match the web API
+/// endpoint. This is rarely used and must be manually set up.
+fn dl_path() -> PathBuf {
+ generate_path("dl")
+}
+/// Gets the alternative-registry version of `registry_path`.
+fn alt_registry_path() -> PathBuf {
+ generate_path("alternative-registry")
+}
+/// Gets the alternative-registry version of `registry_url`.
+fn alt_registry_url() -> Url {
+ generate_url("alternative-registry")
+}
+/// Gets the alternative-registry version of `dl_path`.
+pub fn alt_dl_path() -> PathBuf {
+ generate_path("alternative-dl")
+}
+/// Gets the alternative-registry version of `api_path`.
+pub fn alt_api_path() -> PathBuf {
+ generate_path("alternative-api")
+}
+fn generate_path(name: &str) -> PathBuf {
+ paths::root().join(name)
+}
+fn generate_url(name: &str) -> Url {
+ Url::from_file_path(generate_path(name)).ok().unwrap()
+}
+
+#[derive(Clone)]
+pub enum Token {
+ Plaintext(String),
+ Keys(String, Option<String>),
+}
+
+impl Token {
+ /// This is a valid PASETO secret key.
+ /// This one is already publicly available as part of the text of the RFC, so it is safe to use for tests.
+ pub fn rfc_key() -> Token {
+ Token::Keys(
+ "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36"
+ .to_string(),
+ Some("sub".to_string()),
+ )
+ }
+}
+
+type RequestCallback = Box<dyn Send + Fn(&Request, &HttpServer) -> Response>;
+
+/// A builder for initializing registries.
+pub struct RegistryBuilder {
+ /// If set, configures an alternate registry with the given name.
+ alternative: Option<String>,
+ /// The authorization token for the registry.
+ token: Option<Token>,
+ /// If set, the registry requires authorization for all operations.
+ auth_required: bool,
+ /// If set, serves the index over http.
+ http_index: bool,
+ /// If set, serves the API over http.
+ http_api: bool,
+ /// If set, config.json includes 'api'
+ api: bool,
+ /// Write the token in the configuration.
+ configure_token: bool,
+ /// Write the registry in configuration.
+ configure_registry: bool,
+ /// API responders.
+ custom_responders: HashMap<String, RequestCallback>,
+ /// Handler for 404 responses.
+ not_found_handler: RequestCallback,
+ /// If nonzero, the git index update is delayed by the given number of seconds.
+ delayed_index_update: usize,
+}
+
+pub struct TestRegistry {
+ server: Option<HttpServerHandle>,
+ index_url: Url,
+ path: PathBuf,
+ api_url: Url,
+ dl_url: Url,
+ token: Token,
+}
+
+impl TestRegistry {
+ pub fn index_url(&self) -> &Url {
+ &self.index_url
+ }
+
+ pub fn api_url(&self) -> &Url {
+ &self.api_url
+ }
+
+ pub fn token(&self) -> &str {
+ match &self.token {
+ Token::Plaintext(s) => s,
+ Token::Keys(_, _) => panic!("registry was not configured with a plaintext token"),
+ }
+ }
+
+ pub fn key(&self) -> &str {
+ match &self.token {
+ Token::Plaintext(_) => panic!("registry was not configured with a secret key"),
+ Token::Keys(s, _) => s,
+ }
+ }
+
+ /// Shuts down the server thread and waits for it to stop.
+ /// `Drop` automatically stops the server, but this additionally
+ /// waits for the thread to stop.
+ pub fn join(self) {
+ if let Some(mut server) = self.server {
+ server.stop();
+ let handle = server.handle.take().unwrap();
+ handle.join().unwrap();
+ }
+ }
+}
+
+impl RegistryBuilder {
+ #[must_use]
+ pub fn new() -> RegistryBuilder {
+ let not_found = |_req: &Request, _server: &HttpServer| -> Response {
+ Response {
+ code: 404,
+ headers: vec![],
+ body: b"not found".to_vec(),
+ }
+ };
+ RegistryBuilder {
+ alternative: None,
+ token: None,
+ auth_required: false,
+ http_api: false,
+ http_index: false,
+ api: true,
+ configure_registry: true,
+ configure_token: true,
+ custom_responders: HashMap::new(),
+ not_found_handler: Box::new(not_found),
+ delayed_index_update: 0,
+ }
+ }
+
+ /// Adds a custom HTTP response for a specific url
+ #[must_use]
+ pub fn add_responder<R: 'static + Send + Fn(&Request, &HttpServer) -> Response>(
+ mut self,
+ url: impl Into<String>,
+ responder: R,
+ ) -> Self {
+ self.custom_responders
+ .insert(url.into(), Box::new(responder));
+ self
+ }
+
+ #[must_use]
+ pub fn not_found_handler<R: 'static + Send + Fn(&Request, &HttpServer) -> Response>(
+ mut self,
+ responder: R,
+ ) -> Self {
+ self.not_found_handler = Box::new(responder);
+ self
+ }
+
+ /// Configures the git index update to be delayed by the given number of seconds.
+ #[must_use]
+ pub fn delayed_index_update(mut self, delay: usize) -> Self {
+ self.delayed_index_update = delay;
+ self
+ }
+
+ /// Initializes this as an alternative registry with the given name.
+ #[must_use]
+ pub fn alternative_named(mut self, alt: &str) -> Self {
+ self.alternative = Some(alt.to_string());
+ self
+ }
+
+ /// Sets whether or not to initialize as an alternative registry.
+ #[must_use]
+ pub fn alternative(self) -> Self {
+ self.alternative_named("alternative")
+ }
+
+ /// Prevents placing a token in the configuration
+ #[must_use]
+ pub fn no_configure_token(mut self) -> Self {
+ self.configure_token = false;
+ self
+ }
+
+ /// Prevents adding the registry to the configuration.
+ #[must_use]
+ pub fn no_configure_registry(mut self) -> Self {
+ self.configure_registry = false;
+ self
+ }
+
+ /// Sets the token value
+ #[must_use]
+ pub fn token(mut self, token: Token) -> Self {
+ self.token = Some(token);
+ self
+ }
+
+ /// Sets this registry to require the authentication token for
+ /// all operations.
+ #[must_use]
+ pub fn auth_required(mut self) -> Self {
+ self.auth_required = true;
+ self
+ }
+
+ /// Operate the index over http
+ #[must_use]
+ pub fn http_index(mut self) -> Self {
+ self.http_index = true;
+ self
+ }
+
+    /// Operates the API over HTTP.
+ #[must_use]
+ pub fn http_api(mut self) -> Self {
+ self.http_api = true;
+ self
+ }
+
+    /// The registry has no API.
+ #[must_use]
+ pub fn no_api(mut self) -> Self {
+ self.api = false;
+ self
+ }
+
+ /// Initializes the registry.
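+    ///
+    /// For example, to stand up an alternative registry:
+    /// ```
+    /// let registry = RegistryBuilder::new().alternative().build();
+    /// ```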
+ #[must_use]
+ pub fn build(self) -> TestRegistry {
+ let config_path = paths::home().join(".cargo/config");
+ t!(fs::create_dir_all(config_path.parent().unwrap()));
+ let prefix = if let Some(alternative) = &self.alternative {
+ format!("{alternative}-")
+ } else {
+ String::new()
+ };
+ let registry_path = generate_path(&format!("{prefix}registry"));
+ let index_url = generate_url(&format!("{prefix}registry"));
+ let api_url = generate_url(&format!("{prefix}api"));
+ let dl_url = generate_url(&format!("{prefix}dl"));
+ let dl_path = generate_path(&format!("{prefix}dl"));
+ let api_path = generate_path(&format!("{prefix}api"));
+ let token = self
+ .token
+ .unwrap_or_else(|| Token::Plaintext(format!("{prefix}sekrit")));
+
+ let (server, index_url, api_url, dl_url) = if !self.http_index && !self.http_api {
+ // No need to start the HTTP server.
+ (None, index_url, api_url, dl_url)
+ } else {
+ let server = HttpServer::new(
+ registry_path.clone(),
+ dl_path,
+ api_path.clone(),
+ token.clone(),
+ self.auth_required,
+ self.custom_responders,
+ self.not_found_handler,
+ self.delayed_index_update,
+ );
+ let index_url = if self.http_index {
+ server.index_url()
+ } else {
+ index_url
+ };
+ let api_url = if self.http_api {
+ server.api_url()
+ } else {
+ api_url
+ };
+ let dl_url = server.dl_url();
+ (Some(server), index_url, api_url, dl_url)
+ };
+
+ let registry = TestRegistry {
+ api_url,
+ index_url,
+ server,
+ dl_url,
+ path: registry_path,
+ token,
+ };
+
+ if self.configure_registry {
+ if let Some(alternative) = &self.alternative {
+ append(
+ &config_path,
+ format!(
+ "
+ [registries.{alternative}]
+ index = '{}'",
+ registry.index_url
+ )
+ .as_bytes(),
+ )
+ .unwrap();
+ } else {
+ append(
+ &config_path,
+ format!(
+ "
+ [source.crates-io]
+ replace-with = 'dummy-registry'
+
+ [registries.dummy-registry]
+ index = '{}'",
+ registry.index_url
+ )
+ .as_bytes(),
+ )
+ .unwrap();
+ }
+ }
+
+ if self.configure_token {
+ let credentials = paths::home().join(".cargo/credentials.toml");
+ match &registry.token {
+ Token::Plaintext(token) => {
+ if let Some(alternative) = &self.alternative {
+ append(
+ &credentials,
+ format!(
+ r#"
+ [registries.{alternative}]
+ token = "{token}"
+ "#
+ )
+ .as_bytes(),
+ )
+ .unwrap();
+ } else {
+ append(
+ &credentials,
+ format!(
+ r#"
+ [registry]
+ token = "{token}"
+ "#
+ )
+ .as_bytes(),
+ )
+ .unwrap();
+ }
+ }
+ Token::Keys(key, subject) => {
+ let mut out = if let Some(alternative) = &self.alternative {
+ format!("\n[registries.{alternative}]\n")
+ } else {
+ format!("\n[registry]\n")
+ };
+ out += &format!("secret-key = \"{key}\"\n");
+ if let Some(subject) = subject {
+ out += &format!("secret-key-subject = \"{subject}\"\n");
+ }
+
+ append(&credentials, out.as_bytes()).unwrap();
+ }
+ }
+ }
+
+ let auth = if self.auth_required {
+ r#","auth-required":true"#
+ } else {
+ ""
+ };
+ let api = if self.api {
+ format!(r#","api":"{}""#, registry.api_url)
+ } else {
+ String::new()
+ };
+ // Initialize a new registry.
+ repo(&registry.path)
+ .file(
+ "config.json",
+ &format!(r#"{{"dl":"{}"{api}{auth}}}"#, registry.dl_url),
+ )
+ .build();
+ fs::create_dir_all(api_path.join("api/v1/crates")).unwrap();
+
+ registry
+ }
+}
+
+/// A builder for creating a new package in a registry.
+///
+/// This uses "source replacement" using an automatically generated
+/// `.cargo/config` file to ensure that dependencies will use these packages
+/// instead of contacting crates.io. See `source-replacement.md` for more
+/// details on how source replacement works.
+///
+/// Call `publish` to finalize and create the package.
+///
+/// If no files are specified, an empty `lib.rs` file is automatically created.
+///
+/// The `Cargo.toml` file is automatically generated based on the methods
+/// called on `Package` (for example, calling `dep()` will add to the
+/// `[dependencies]` automatically). You may also specify a `Cargo.toml` file
+/// to override the generated one.
+///
+/// This supports different registry types:
+/// - Regular source replacement that replaces `crates.io` (the default).
+/// - A "local registry" which is a subset for vendoring (see
+/// `Package::local`).
+/// - An "alternative registry" which requires specifying the registry name
+/// (see `Package::alternative`).
+///
+/// This does not support "directory sources". See `directory.rs` for
+/// `VendorPackage` which implements directory sources.
+///
+/// # Example
+/// ```
+/// // Publish package "a" depending on "b".
+/// Package::new("a", "1.0.0")
+/// .dep("b", "1.0.0")
+/// .file("src/lib.rs", r#"
+/// extern crate b;
+/// pub fn f() -> i32 { b::f() * 2 }
+/// "#)
+/// .publish();
+///
+/// // Publish package "b".
+/// Package::new("b", "1.0.0")
+/// .file("src/lib.rs", r#"
+/// pub fn f() -> i32 { 12 }
+/// "#)
+/// .publish();
+///
+/// // Create a project that uses package "a".
+/// let p = project()
+/// .file("Cargo.toml", r#"
+/// [package]
+/// name = "foo"
+/// version = "0.0.1"
+///
+/// [dependencies]
+/// a = "1.0"
+/// "#)
+/// .file("src/main.rs", r#"
+/// extern crate a;
+/// fn main() { println!("{}", a::f()); }
+/// "#)
+/// .build();
+///
+/// p.cargo("run").with_stdout("24").run();
+/// ```
+#[must_use]
+pub struct Package {
+ name: String,
+ vers: String,
+ deps: Vec<Dependency>,
+ files: Vec<PackageFile>,
+ yanked: bool,
+ features: FeatureMap,
+ local: bool,
+ alternative: bool,
+ invalid_json: bool,
+ proc_macro: bool,
+ links: Option<String>,
+ rust_version: Option<String>,
+ cargo_features: Vec<String>,
+ v: Option<u32>,
+}
+
+pub(crate) type FeatureMap = BTreeMap<String, Vec<String>>;
+
+#[derive(Clone)]
+pub struct Dependency {
+ name: String,
+ vers: String,
+ kind: String,
+ artifact: Option<(String, Option<String>)>,
+ target: Option<String>,
+ features: Vec<String>,
+ registry: Option<String>,
+ package: Option<String>,
+ optional: bool,
+}
+
+/// Entry with data that corresponds to [`tar::EntryType`].
+#[non_exhaustive]
+enum EntryData {
+ Regular(String),
+ Symlink(PathBuf),
+}
+
+/// A file to be created in a package.
+struct PackageFile {
+ path: String,
+ contents: EntryData,
+ /// The Unix mode for the file. Note that when extracted on Windows, this
+ /// is mostly ignored since it doesn't have the same style of permissions.
+ mode: u32,
+ /// If `true`, the file is created in the root of the tarfile, used for
+ /// testing invalid packages.
+ extra: bool,
+}
+
+const DEFAULT_MODE: u32 = 0o644;
+
+/// Initializes the on-disk registry and sets up the config so that crates.io
+/// is replaced with the one on disk.
+pub fn init() -> TestRegistry {
+ RegistryBuilder::new().build()
+}
+
+/// Variant of `init` that initializes the "alternative" registry and crates.io
+/// replacement.
+pub fn alt_init() -> TestRegistry {
+ init();
+ RegistryBuilder::new().alternative().build()
+}
+
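+/// A handle to a running test HTTP server. Dropping the handle asks the server
+/// thread to shut down.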
+pub struct HttpServerHandle {
+ addr: SocketAddr,
+ handle: Option<JoinHandle<()>>,
+}
+
+impl HttpServerHandle {
+    pub fn index_url(&self) -> Url {
+        Url::parse(&format!("sparse+http://{}/index/", self.addr)).unwrap()
+    }
+
+    pub fn api_url(&self) -> Url {
+        Url::parse(&format!("http://{}/", self.addr)).unwrap()
+    }
+
+    pub fn dl_url(&self) -> Url {
+        Url::parse(&format!("http://{}/dl", self.addr)).unwrap()
+    }
+
+ fn stop(&self) {
+ if let Ok(mut stream) = TcpStream::connect(self.addr) {
+ // shutdown the server
+ let _ = stream.write_all(b"stop");
+ let _ = stream.flush();
+ }
+ }
+}
+
+impl Drop for HttpServerHandle {
+ fn drop(&mut self) {
+ self.stop();
+ }
+}
+
+/// Request to the test http server
+#[derive(Clone)]
+pub struct Request {
+ pub url: Url,
+ pub method: String,
+ pub body: Option<Vec<u8>>,
+ pub authorization: Option<String>,
+ pub if_modified_since: Option<String>,
+ pub if_none_match: Option<String>,
+}
+
+impl fmt::Debug for Request {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // body is not included as it can produce long debug outputs
+ f.debug_struct("Request")
+ .field("url", &self.url)
+ .field("method", &self.method)
+ .field("authorization", &self.authorization)
+ .field("if_modified_since", &self.if_modified_since)
+ .field("if_none_match", &self.if_none_match)
+ .finish()
+ }
+}
+
+/// Response from the test http server
+pub struct Response {
+ pub code: u32,
+ pub headers: Vec<String>,
+ pub body: Vec<u8>,
+}
+
+pub struct HttpServer {
+ listener: TcpListener,
+ registry_path: PathBuf,
+ dl_path: PathBuf,
+ api_path: PathBuf,
+ addr: SocketAddr,
+ token: Token,
+ auth_required: bool,
+ custom_responders: HashMap<String, RequestCallback>,
+ not_found_handler: RequestCallback,
+ delayed_index_update: usize,
+}
+
+/// A helper struct that collects the arguments for [`HttpServer::check_authorized`].
+/// These are the fields, derived from the request, that the authentication token must attest to.
+struct Mutation<'a> {
+ mutation: &'a str,
+ name: Option<&'a str>,
+ vers: Option<&'a str>,
+ cksum: Option<&'a str>,
+}
+
+impl HttpServer {
+ pub fn new(
+ registry_path: PathBuf,
+ dl_path: PathBuf,
+ api_path: PathBuf,
+ token: Token,
+ auth_required: bool,
+ custom_responders: HashMap<String, RequestCallback>,
+ not_found_handler: RequestCallback,
+ delayed_index_update: usize,
+ ) -> HttpServerHandle {
+ let listener = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = listener.local_addr().unwrap();
+ let server = HttpServer {
+ listener,
+ registry_path,
+ dl_path,
+ api_path,
+ addr,
+ token,
+ auth_required,
+ custom_responders,
+ not_found_handler,
+ delayed_index_update,
+ };
+ let handle = Some(thread::spawn(move || server.start()));
+ HttpServerHandle { addr, handle }
+ }
+
+ fn start(&self) {
+ let mut line = String::new();
+ 'server: loop {
+ let (socket, _) = self.listener.accept().unwrap();
+ let mut buf = BufReader::new(socket);
+ line.clear();
+ if buf.read_line(&mut line).unwrap() == 0 {
+ // Connection terminated.
+ continue;
+ }
+ // Read the "GET path HTTP/1.1" line.
+ let mut parts = line.split_ascii_whitespace();
+ let method = parts.next().unwrap().to_ascii_lowercase();
+ if method == "stop" {
+ // Shutdown the server.
+ return;
+ }
+ let addr = self.listener.local_addr().unwrap();
+ let url = format!(
+ "http://{}/{}",
+ addr,
+ parts.next().unwrap().trim_start_matches('/')
+ );
+ let url = Url::parse(&url).unwrap();
+
+ // Grab headers we care about.
+ let mut if_modified_since = None;
+ let mut if_none_match = None;
+ let mut authorization = None;
+ let mut content_len = None;
+ loop {
+ line.clear();
+ if buf.read_line(&mut line).unwrap() == 0 {
+ continue 'server;
+ }
+ if line == "\r\n" {
+ // End of headers.
+ line.clear();
+ break;
+ }
+ let (name, value) = line.split_once(':').unwrap();
+ let name = name.trim().to_ascii_lowercase();
+ let value = value.trim().to_string();
+ match name.as_str() {
+ "if-modified-since" => if_modified_since = Some(value),
+ "if-none-match" => if_none_match = Some(value),
+ "authorization" => authorization = Some(value),
+ "content-length" => content_len = Some(value),
+ _ => {}
+ }
+ }
+
+ let mut body = None;
+ if let Some(con_len) = content_len {
+ let len = con_len.parse::<u64>().unwrap();
+ let mut content = vec![0u8; len as usize];
+ buf.read_exact(&mut content).unwrap();
+ body = Some(content)
+ }
+
+ let req = Request {
+ authorization,
+ if_modified_since,
+ if_none_match,
+ method,
+ url,
+ body,
+ };
+ println!("req: {:#?}", req);
+ let response = self.route(&req);
+ let buf = buf.get_mut();
+ write!(buf, "HTTP/1.1 {}\r\n", response.code).unwrap();
+ write!(buf, "Content-Length: {}\r\n", response.body.len()).unwrap();
+ for header in response.headers {
+ write!(buf, "{}\r\n", header).unwrap();
+ }
+ write!(buf, "\r\n").unwrap();
+ buf.write_all(&response.body).unwrap();
+ buf.flush().unwrap();
+ }
+ }
+
+ fn check_authorized(&self, req: &Request, mutation: Option<Mutation>) -> bool {
+ let (private_key, private_key_subject) = if mutation.is_some() || self.auth_required {
+ match &self.token {
+ Token::Plaintext(token) => return Some(token) == req.authorization.as_ref(),
+ Token::Keys(private_key, private_key_subject) => {
+ (private_key.as_str(), private_key_subject)
+ }
+ }
+ } else {
+ assert!(req.authorization.is_none(), "unexpected token");
+ return true;
+ };
+
+ macro_rules! t {
+ ($e:expr) => {
+ match $e {
+ Some(e) => e,
+ None => return false,
+ }
+ };
+ }
+
+ let secret: AsymmetricSecretKey<pasetors::version3::V3> = private_key.try_into().unwrap();
+ let public: AsymmetricPublicKey<pasetors::version3::V3> = (&secret).try_into().unwrap();
+ let pub_key_id: pasetors::paserk::Id = (&public).into();
+ let mut paserk_pub_key_id = String::new();
+ FormatAsPaserk::fmt(&pub_key_id, &mut paserk_pub_key_id).unwrap();
+ // https://github.com/rust-lang/rfcs/blob/master/text/3231-cargo-asymmetric-tokens.md#how-the-registry-server-will-validate-an-asymmetric-token
+
+ // - The PASETO is in v3.public format.
+ let authorization = t!(&req.authorization);
+ let untrusted_token = t!(
+ UntrustedToken::<pasetors::Public, pasetors::version3::V3>::try_from(authorization)
+ .ok()
+ );
+
+ // - The PASETO validates using the public key it looked up based on the key ID.
+ #[derive(serde::Deserialize, Debug)]
+ struct Footer<'a> {
+ url: &'a str,
+ kip: &'a str,
+ }
+ let footer: Footer = t!(serde_json::from_slice(untrusted_token.untrusted_footer()).ok());
+ if footer.kip != paserk_pub_key_id {
+ return false;
+ }
+ let trusted_token =
+ t!(
+ pasetors::version3::PublicToken::verify(&public, &untrusted_token, None, None,)
+ .ok()
+ );
+
+ // - The URL matches the registry base URL
+ if footer.url != "https://github.com/rust-lang/crates.io-index"
+ && footer.url != &format!("sparse+http://{}/index/", self.addr.to_string())
+ {
+ dbg!(footer.url);
+ return false;
+ }
+
+ // - The PASETO is still within its valid time period.
+ #[derive(serde::Deserialize)]
+ struct Message<'a> {
+ iat: &'a str,
+ sub: Option<&'a str>,
+ mutation: Option<&'a str>,
+ name: Option<&'a str>,
+ vers: Option<&'a str>,
+ cksum: Option<&'a str>,
+ _challenge: Option<&'a str>, // todo: PASETO with challenges
+ v: Option<u8>,
+ }
+ let message: Message = t!(serde_json::from_str(trusted_token.payload()).ok());
+ let token_time = t!(OffsetDateTime::parse(message.iat, &Rfc3339).ok());
+ let now = OffsetDateTime::now_utc();
+ if (now - token_time) > Duration::MINUTE {
+ return false;
+ }
+ if private_key_subject.as_deref() != message.sub {
+ dbg!(message.sub);
+ return false;
+ }
+ // - If the claim v is set, that it has the value of 1.
+ if let Some(v) = message.v {
+ if v != 1 {
+ dbg!(message.v);
+ return false;
+ }
+ }
+ // - If the server issues challenges, that the challenge has not yet been answered.
+ // todo: PASETO with challenges
+ // - If the operation is a mutation:
+ if let Some(mutation) = mutation {
+ // - That the operation matches the mutation field and is one of publish, yank, or unyank.
+ if message.mutation != Some(mutation.mutation) {
+ dbg!(message.mutation);
+ return false;
+ }
+ // - That the package, and version match the request.
+ if message.name != mutation.name {
+ dbg!(message.name);
+ return false;
+ }
+ if message.vers != mutation.vers {
+ dbg!(message.vers);
+ return false;
+ }
+ // - If the mutation is publish, that the version has not already been published, and that the hash matches the request.
+ if mutation.mutation == "publish" {
+ if message.cksum != mutation.cksum {
+ dbg!(message.cksum);
+ return false;
+ }
+ }
+ } else {
+ // - If the operation is a read, that the mutation field is not set.
+ if message.mutation.is_some()
+ || message.name.is_some()
+ || message.vers.is_some()
+ || message.cksum.is_some()
+ {
+ return false;
+ }
+ }
+ true
+ }
+
+    /// Routes the request to the matching handler.
+ fn route(&self, req: &Request) -> Response {
+ // Check for custom responder
+ if let Some(responder) = self.custom_responders.get(req.url.path()) {
+ return responder(&req, self);
+ }
+ let path: Vec<_> = req.url.path()[1..].split('/').collect();
+ match (req.method.as_str(), path.as_slice()) {
+ ("get", ["index", ..]) => {
+ if !self.check_authorized(req, None) {
+ self.unauthorized(req)
+ } else {
+ self.index(&req)
+ }
+ }
+ ("get", ["dl", ..]) => {
+ if !self.check_authorized(req, None) {
+ self.unauthorized(req)
+ } else {
+ self.dl(&req)
+ }
+ }
+ // publish
+ ("put", ["api", "v1", "crates", "new"]) => self.check_authorized_publish(req),
+            // The remaining operations in the test framework do nothing other than respond with 'ok'.
+ //
+ // Note: We don't need to support anything real here because there are no tests that
+ // currently require anything other than publishing via the http api.
+
+ // yank / unyank
+ ("delete" | "put", ["api", "v1", "crates", crate_name, version, mutation]) => {
+ if !self.check_authorized(
+ req,
+ Some(Mutation {
+ mutation,
+ name: Some(crate_name),
+ vers: Some(version),
+ cksum: None,
+ }),
+ ) {
+ self.unauthorized(req)
+ } else {
+ self.ok(&req)
+ }
+ }
+ // owners
+ ("get" | "put" | "delete", ["api", "v1", "crates", crate_name, "owners"]) => {
+ if !self.check_authorized(
+ req,
+ Some(Mutation {
+ mutation: "owners",
+ name: Some(crate_name),
+ vers: None,
+ cksum: None,
+ }),
+ ) {
+ self.unauthorized(req)
+ } else {
+ self.ok(&req)
+ }
+ }
+ _ => self.not_found(&req),
+ }
+ }
+
+ /// Unauthorized response
+ pub fn unauthorized(&self, _req: &Request) -> Response {
+ Response {
+ code: 401,
+ headers: vec![
+ r#"WWW-Authenticate: Cargo login_url="https://test-registry-login/me""#.to_string(),
+ ],
+ body: b"Unauthorized message from server.".to_vec(),
+ }
+ }
+
+ /// Not found response
+ pub fn not_found(&self, req: &Request) -> Response {
+ (self.not_found_handler)(req, self)
+ }
+
+ /// Respond OK without doing anything
+ pub fn ok(&self, _req: &Request) -> Response {
+ Response {
+ code: 200,
+ headers: vec![],
+ body: br#"{"ok": true, "msg": "completed!"}"#.to_vec(),
+ }
+ }
+
+ /// Return an internal server error (HTTP 500)
+ pub fn internal_server_error(&self, _req: &Request) -> Response {
+ Response {
+ code: 500,
+ headers: vec![],
+ body: br#"internal server error"#.to_vec(),
+ }
+ }
+
+ /// Serve the download endpoint
+ pub fn dl(&self, req: &Request) -> Response {
+ let file = self
+ .dl_path
+ .join(req.url.path().strip_prefix("/dl/").unwrap());
+ println!("{}", file.display());
+ if !file.exists() {
+ return self.not_found(req);
+ }
+ return Response {
+ body: fs::read(&file).unwrap(),
+ code: 200,
+ headers: vec![],
+ };
+ }
+
+ /// Serve the registry index
+ pub fn index(&self, req: &Request) -> Response {
+ let file = self
+ .registry_path
+ .join(req.url.path().strip_prefix("/index/").unwrap());
+ if !file.exists() {
+ return self.not_found(req);
+ } else {
+ // Now grab info about the file.
+ let data = fs::read(&file).unwrap();
+ let etag = Sha256::new().update(&data).finish_hex();
+ let last_modified = format!("{:?}", file.metadata().unwrap().modified().unwrap());
+
+ // Start to construct our response:
+ let mut any_match = false;
+ let mut all_match = true;
+ if let Some(expected) = &req.if_none_match {
+ if &etag != expected {
+ all_match = false;
+ } else {
+ any_match = true;
+ }
+ }
+ if let Some(expected) = &req.if_modified_since {
+ // NOTE: Equality comparison is good enough for tests.
+ if &last_modified != expected {
+ all_match = false;
+ } else {
+ any_match = true;
+ }
+ }
+
+ if any_match && all_match {
+ return Response {
+ body: Vec::new(),
+ code: 304,
+ headers: vec![],
+ };
+ } else {
+ return Response {
+ body: data,
+ code: 200,
+ headers: vec![
+ format!("ETag: \"{}\"", etag),
+ format!("Last-Modified: {}", last_modified),
+ ],
+ };
+ }
+ }
+ }
+
+ pub fn check_authorized_publish(&self, req: &Request) -> Response {
+ if let Some(body) = &req.body {
+ // Mimic the publish behavior for local registries by writing out the request
+ // so tests can verify publishes made to either registry type.
+ let path = self.api_path.join("api/v1/crates/new");
+ t!(fs::create_dir_all(path.parent().unwrap()));
+ t!(fs::write(&path, body));
+
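+            // The body is framed as: a 4-byte little-endian JSON length, the JSON
+            // metadata, a 4-byte little-endian file length, then the `.crate`
+            // tarball bytes.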
+ // Get the metadata of the package
+ let (len, remaining) = body.split_at(4);
+ let json_len = u32::from_le_bytes(len.try_into().unwrap());
+ let (json, remaining) = remaining.split_at(json_len as usize);
+ let new_crate = serde_json::from_slice::<crates_io::NewCrate>(json).unwrap();
+ // Get the `.crate` file
+ let (len, remaining) = remaining.split_at(4);
+ let file_len = u32::from_le_bytes(len.try_into().unwrap());
+ let (file, _remaining) = remaining.split_at(file_len as usize);
+ let file_cksum = cksum(&file);
+
+ if !self.check_authorized(
+ req,
+ Some(Mutation {
+ mutation: "publish",
+ name: Some(&new_crate.name),
+ vers: Some(&new_crate.vers),
+ cksum: Some(&file_cksum),
+ }),
+ ) {
+ return self.unauthorized(req);
+ }
+
+ let dst = self
+ .dl_path
+ .join(&new_crate.name)
+ .join(&new_crate.vers)
+ .join("download");
+
+ if self.delayed_index_update == 0 {
+ save_new_crate(dst, new_crate, file, file_cksum, &self.registry_path);
+ } else {
+ let delayed_index_update = self.delayed_index_update;
+ let registry_path = self.registry_path.clone();
+ let file = Vec::from(file);
+ thread::spawn(move || {
+ thread::sleep(std::time::Duration::new(delayed_index_update as u64, 0));
+ save_new_crate(dst, new_crate, &file, file_cksum, &registry_path);
+ });
+ }
+
+ self.ok(&req)
+ } else {
+ Response {
+ code: 400,
+ headers: vec![],
+ body: b"The request was missing a body".to_vec(),
+ }
+ }
+ }
+}
+
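+/// Writes the uploaded `.crate` file to its download location and appends the
+/// corresponding entry to the registry's git index.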
+fn save_new_crate(
+ dst: PathBuf,
+ new_crate: crates_io::NewCrate,
+ file: &[u8],
+ file_cksum: String,
+ registry_path: &Path,
+) {
+ // Write the `.crate`
+ t!(fs::create_dir_all(dst.parent().unwrap()));
+ t!(fs::write(&dst, file));
+
+ let deps = new_crate
+ .deps
+ .iter()
+ .map(|dep| {
+ let (name, package) = match &dep.explicit_name_in_toml {
+ Some(explicit) => (explicit.to_string(), Some(dep.name.to_string())),
+ None => (dep.name.to_string(), None),
+ };
+ serde_json::json!({
+ "name": name,
+ "req": dep.version_req,
+ "features": dep.features,
+ "default_features": true,
+ "target": dep.target,
+ "optional": dep.optional,
+ "kind": dep.kind,
+ "registry": dep.registry,
+ "package": package,
+ })
+ })
+ .collect::<Vec<_>>();
+
+ let line = create_index_line(
+ serde_json::json!(new_crate.name),
+ &new_crate.vers,
+ deps,
+ &file_cksum,
+ new_crate.features,
+ false,
+ new_crate.links,
+ None,
+ );
+
+ write_to_index(registry_path, &new_crate.name, line, false);
+}
+
+impl Package {
+ /// Creates a new package builder.
+ /// Call `publish()` to finalize and build the package.
+ pub fn new(name: &str, vers: &str) -> Package {
+ let config = paths::home().join(".cargo/config");
+ if !config.exists() {
+ init();
+ }
+ Package {
+ name: name.to_string(),
+ vers: vers.to_string(),
+ deps: Vec::new(),
+ files: Vec::new(),
+ yanked: false,
+ features: BTreeMap::new(),
+ local: false,
+ alternative: false,
+ invalid_json: false,
+ proc_macro: false,
+ links: None,
+ rust_version: None,
+ cargo_features: Vec::new(),
+ v: None,
+ }
+ }
+
+ /// Call with `true` to publish in a "local registry".
+ ///
+ /// See `source-replacement.html#local-registry-sources` for more details
+ /// on local registries. See `local_registry.rs` for the tests that use
+ /// this.
+ pub fn local(&mut self, local: bool) -> &mut Package {
+ self.local = local;
+ self
+ }
+
+ /// Call with `true` to publish in an "alternative registry".
+ ///
+    /// The name of the alternative registry is "alternative".
+ ///
+ /// See `src/doc/src/reference/registries.md` for more details on
+ /// alternative registries. See `alt_registry.rs` for the tests that use
+ /// this.
+ pub fn alternative(&mut self, alternative: bool) -> &mut Package {
+ self.alternative = alternative;
+ self
+ }
+
+ /// Adds a file to the package.
+ pub fn file(&mut self, name: &str, contents: &str) -> &mut Package {
+ self.file_with_mode(name, DEFAULT_MODE, contents)
+ }
+
+ /// Adds a file with a specific Unix mode.
+ pub fn file_with_mode(&mut self, path: &str, mode: u32, contents: &str) -> &mut Package {
+ self.files.push(PackageFile {
+ path: path.to_string(),
+ contents: EntryData::Regular(contents.into()),
+ mode,
+ extra: false,
+ });
+ self
+ }
+
+ /// Adds a symlink to a path to the package.
+ pub fn symlink(&mut self, dst: &str, src: &str) -> &mut Package {
+ self.files.push(PackageFile {
+ path: dst.to_string(),
+ contents: EntryData::Symlink(src.into()),
+ mode: DEFAULT_MODE,
+ extra: false,
+ });
+ self
+ }
+
+ /// Adds an "extra" file that is not rooted within the package.
+ ///
+ /// Normal files are automatically placed within a directory named
+ /// `$PACKAGE-$VERSION`. This allows you to override that behavior,
+ /// typically for testing invalid behavior.
+ pub fn extra_file(&mut self, path: &str, contents: &str) -> &mut Package {
+ self.files.push(PackageFile {
+ path: path.to_string(),
+ contents: EntryData::Regular(contents.to_string()),
+ mode: DEFAULT_MODE,
+ extra: true,
+ });
+ self
+ }
+
+ /// Adds a normal dependency. Example:
+    /// ```toml
+ /// [dependencies]
+ /// foo = {version = "1.0"}
+ /// ```
+ pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package {
+ self.add_dep(&Dependency::new(name, vers))
+ }
+
+ /// Adds a dependency with the given feature. Example:
+    /// ```toml
+    /// [dependencies]
+    /// foo = { version = "1.0", features = ["feat1", "feat2"] }
+    /// ```
+ pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package {
+ self.add_dep(Dependency::new(name, vers).enable_features(features))
+ }
+
+ /// Adds a platform-specific dependency. Example:
+ /// ```toml
+ /// [target.'cfg(windows)'.dependencies]
+ /// foo = {version = "1.0"}
+ /// ```
+ pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package {
+ self.add_dep(Dependency::new(name, vers).target(target))
+ }
+
+ /// Adds a dependency to the alternative registry.
+ pub fn registry_dep(&mut self, name: &str, vers: &str) -> &mut Package {
+ self.add_dep(Dependency::new(name, vers).registry("alternative"))
+ }
+
+ /// Adds a dev-dependency. Example:
+    /// ```toml
+ /// [dev-dependencies]
+ /// foo = {version = "1.0"}
+ /// ```
+ pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package {
+ self.add_dep(Dependency::new(name, vers).dev())
+ }
+
+ /// Adds a build-dependency. Example:
+    /// ```toml
+ /// [build-dependencies]
+ /// foo = {version = "1.0"}
+ /// ```
+ pub fn build_dep(&mut self, name: &str, vers: &str) -> &mut Package {
+ self.add_dep(Dependency::new(name, vers).build())
+ }
+
+ pub fn add_dep(&mut self, dep: &Dependency) -> &mut Package {
+ self.deps.push(dep.clone());
+ self
+ }
+
+ /// Specifies whether or not the package is "yanked".
+ pub fn yanked(&mut self, yanked: bool) -> &mut Package {
+ self.yanked = yanked;
+ self
+ }
+
+ /// Specifies whether or not this is a proc macro.
+ pub fn proc_macro(&mut self, proc_macro: bool) -> &mut Package {
+ self.proc_macro = proc_macro;
+ self
+ }
+
+ /// Adds an entry in the `[features]` section.
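+    ///
+    /// For example, `feature("extras", &["rand"])` corresponds to (names are
+    /// illustrative):
+    /// ```toml
+    /// [features]
+    /// extras = ["rand"]
+    /// ```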
+ pub fn feature(&mut self, name: &str, deps: &[&str]) -> &mut Package {
+ let deps = deps.iter().map(|s| s.to_string()).collect();
+ self.features.insert(name.to_string(), deps);
+ self
+ }
+
+    /// Specifies the minimum supported Rust version.
+ pub fn rust_version(&mut self, rust_version: &str) -> &mut Package {
+ self.rust_version = Some(rust_version.into());
+ self
+ }
+
+ /// Causes the JSON line emitted in the index to be invalid, presumably
+ /// causing Cargo to skip over this version.
+ pub fn invalid_json(&mut self, invalid: bool) -> &mut Package {
+ self.invalid_json = invalid;
+ self
+ }
+
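+    /// Sets the `links` field recorded in this package's index entry.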
+ pub fn links(&mut self, links: &str) -> &mut Package {
+ self.links = Some(links.to_string());
+ self
+ }
+
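+    /// Adds a feature to the `cargo-features` list written to the generated `Cargo.toml`.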
+ pub fn cargo_feature(&mut self, feature: &str) -> &mut Package {
+ self.cargo_features.push(feature.to_owned());
+ self
+ }
+
+ /// Sets the index schema version for this package.
+ ///
+ /// See `cargo::sources::registry::RegistryPackage` for more information.
+ pub fn schema_version(&mut self, version: u32) -> &mut Package {
+ self.v = Some(version);
+ self
+ }
+
+    /// Creates the package and places it in the registry.
+ ///
+ /// This does not actually use Cargo's publishing system, but instead
+ /// manually creates the entry in the registry on the filesystem.
+ ///
+ /// Returns the checksum for the package.
+ pub fn publish(&self) -> String {
+ self.make_archive();
+
+ // Figure out what we're going to write into the index.
+ let deps = self
+ .deps
+ .iter()
+ .map(|dep| {
+ // In the index, the `registry` is null if it is from the same registry.
+ // In Cargo.toml, it is None if it is from crates.io.
+ let registry_url = match (self.alternative, dep.registry.as_deref()) {
+ (false, None) => None,
+ (false, Some("alternative")) => Some(alt_registry_url().to_string()),
+ (true, None) => {
+ Some("https://github.com/rust-lang/crates.io-index".to_string())
+ }
+ (true, Some("alternative")) => None,
+ _ => panic!("registry_dep currently only supports `alternative`"),
+ };
+ serde_json::json!({
+ "name": dep.name,
+ "req": dep.vers,
+ "features": dep.features,
+ "default_features": true,
+ "target": dep.target,
+ "artifact": dep.artifact,
+ "optional": dep.optional,
+ "kind": dep.kind,
+ "registry": registry_url,
+ "package": dep.package,
+ })
+ })
+ .collect::<Vec<_>>();
+ let cksum = {
+ let c = t!(fs::read(&self.archive_dst()));
+ cksum(&c)
+ };
+ let name = if self.invalid_json {
+ serde_json::json!(1)
+ } else {
+ serde_json::json!(self.name)
+ };
+ let line = create_index_line(
+ name,
+ &self.vers,
+ deps,
+ &cksum,
+ self.features.clone(),
+ self.yanked,
+ self.links.clone(),
+ self.v,
+ );
+
+ let registry_path = if self.alternative {
+ alt_registry_path()
+ } else {
+ registry_path()
+ };
+
+ write_to_index(&registry_path, &self.name, line, self.local);
+
+ cksum
+ }
+
+ fn make_archive(&self) {
+ let dst = self.archive_dst();
+ t!(fs::create_dir_all(dst.parent().unwrap()));
+ let f = t!(File::create(&dst));
+ let mut a = Builder::new(GzEncoder::new(f, Compression::default()));
+
+ if !self
+ .files
+ .iter()
+ .any(|PackageFile { path, .. }| path == "Cargo.toml")
+ {
+ self.append_manifest(&mut a);
+ }
+ if self.files.is_empty() {
+ self.append(
+ &mut a,
+ "src/lib.rs",
+ DEFAULT_MODE,
+ &EntryData::Regular("".into()),
+ );
+ } else {
+ for PackageFile {
+ path,
+ contents,
+ mode,
+ extra,
+ } in &self.files
+ {
+ if *extra {
+ self.append_raw(&mut a, path, *mode, contents);
+ } else {
+ self.append(&mut a, path, *mode, contents);
+ }
+ }
+ }
+ }
+
+ fn append_manifest<W: Write>(&self, ar: &mut Builder<W>) {
+ let mut manifest = String::new();
+
+ if !self.cargo_features.is_empty() {
+ let mut features = String::new();
+ serde::Serialize::serialize(
+ &self.cargo_features,
+ toml::ser::ValueSerializer::new(&mut features),
+ )
+ .unwrap();
+ manifest.push_str(&format!("cargo-features = {}\n\n", features));
+ }
+
+ manifest.push_str(&format!(
+ r#"
+ [package]
+ name = "{}"
+ version = "{}"
+ authors = []
+ "#,
+ self.name, self.vers
+ ));
+
+ if let Some(version) = &self.rust_version {
+ manifest.push_str(&format!("rust-version = \"{}\"", version));
+ }
+
+ for dep in self.deps.iter() {
+ let target = match dep.target {
+ None => String::new(),
+ Some(ref s) => format!("target.'{}'.", s),
+ };
+ let kind = match &dep.kind[..] {
+ "build" => "build-",
+ "dev" => "dev-",
+ _ => "",
+ };
+ manifest.push_str(&format!(
+ r#"
+ [{}{}dependencies.{}]
+ version = "{}"
+ "#,
+ target, kind, dep.name, dep.vers
+ ));
+ if let Some((artifact, target)) = &dep.artifact {
+ manifest.push_str(&format!("artifact = \"{}\"\n", artifact));
+ if let Some(target) = &target {
+ manifest.push_str(&format!("target = \"{}\"\n", target))
+ }
+ }
+ if let Some(registry) = &dep.registry {
+ assert_eq!(registry, "alternative");
+ manifest.push_str(&format!("registry-index = \"{}\"", alt_registry_url()));
+ }
+ }
+ if self.proc_macro {
+ manifest.push_str("[lib]\nproc-macro = true\n");
+ }
+
+ self.append(
+ ar,
+ "Cargo.toml",
+ DEFAULT_MODE,
+ &EntryData::Regular(manifest.into()),
+ );
+ }
+
+ fn append<W: Write>(&self, ar: &mut Builder<W>, file: &str, mode: u32, contents: &EntryData) {
+ self.append_raw(
+ ar,
+ &format!("{}-{}/{}", self.name, self.vers, file),
+ mode,
+ contents,
+ );
+ }
+
+ fn append_raw<W: Write>(
+ &self,
+ ar: &mut Builder<W>,
+ path: &str,
+ mode: u32,
+ contents: &EntryData,
+ ) {
+ let mut header = Header::new_ustar();
+ let contents = match contents {
+ EntryData::Regular(contents) => contents.as_str(),
+ EntryData::Symlink(src) => {
+ header.set_entry_type(tar::EntryType::Symlink);
+ t!(header.set_link_name(src));
+ "" // Symlink has no contents.
+ }
+ };
+ header.set_size(contents.len() as u64);
+ t!(header.set_path(path));
+ header.set_mode(mode);
+ header.set_cksum();
+ t!(ar.append(&header, contents.as_bytes()));
+ }
+
+ /// Returns the path to the compressed package file.
+ pub fn archive_dst(&self) -> PathBuf {
+ if self.local {
+ registry_path().join(format!("{}-{}.crate", self.name, self.vers))
+ } else if self.alternative {
+ alt_dl_path()
+ .join(&self.name)
+ .join(&self.vers)
+ .join("download")
+ } else {
+ dl_path().join(&self.name).join(&self.vers).join("download")
+ }
+ }
+}
+
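+/// Returns the SHA-256 checksum of the given bytes as a hex string.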
+pub fn cksum(s: &[u8]) -> String {
+ Sha256::new().update(s).finish_hex()
+}
+
+impl Dependency {
+ pub fn new(name: &str, vers: &str) -> Dependency {
+ Dependency {
+ name: name.to_string(),
+ vers: vers.to_string(),
+ kind: "normal".to_string(),
+ artifact: None,
+ target: None,
+ features: Vec::new(),
+ package: None,
+ optional: false,
+ registry: None,
+ }
+ }
+
+ /// Changes this to `[build-dependencies]`.
+ pub fn build(&mut self) -> &mut Self {
+ self.kind = "build".to_string();
+ self
+ }
+
+ /// Changes this to `[dev-dependencies]`.
+ pub fn dev(&mut self) -> &mut Self {
+ self.kind = "dev".to_string();
+ self
+ }
+
+ /// Changes this to `[target.$target.dependencies]`.
+ pub fn target(&mut self, target: &str) -> &mut Self {
+ self.target = Some(target.to_string());
+ self
+ }
+
+    /// Changes the artifact to be of the given kind, like "bin" or "staticlib",
+ /// along with a specific target triple if provided.
+ pub fn artifact(&mut self, kind: &str, target: Option<String>) -> &mut Self {
+ self.artifact = Some((kind.to_string(), target));
+ self
+ }
+
+ /// Adds `registry = $registry` to this dependency.
+ pub fn registry(&mut self, registry: &str) -> &mut Self {
+ self.registry = Some(registry.to_string());
+ self
+ }
+
+ /// Adds `features = [ ... ]` to this dependency.
+ pub fn enable_features(&mut self, features: &[&str]) -> &mut Self {
+ self.features.extend(features.iter().map(|s| s.to_string()));
+ self
+ }
+
+ /// Adds `package = ...` to this dependency.
+ pub fn package(&mut self, pkg: &str) -> &mut Self {
+ self.package = Some(pkg.to_string());
+ self
+ }
+
+ /// Changes this to an optional dependency.
+ pub fn optional(&mut self, optional: bool) -> &mut Self {
+ self.optional = optional;
+ self
+ }
+}
diff --git a/src/tools/cargo/crates/cargo-test-support/src/tools.rs b/src/tools/cargo/crates/cargo-test-support/src/tools.rs
new file mode 100644
index 000000000..7c056b6fa
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-test-support/src/tools.rs
@@ -0,0 +1,108 @@
+//! Common executables that can be reused by various tests.
+
+use crate::{basic_manifest, paths, project, Project};
+use lazy_static::lazy_static;
+use std::path::{Path, PathBuf};
+use std::sync::Mutex;
+
+lazy_static! {
+ static ref ECHO_WRAPPER: Mutex<Option<PathBuf>> = Mutex::new(None);
+ static ref ECHO: Mutex<Option<PathBuf>> = Mutex::new(None);
+}
+
+/// Returns the path to an executable that works as a wrapper around rustc.
+///
+/// The wrapper will echo the command line it was called with to stderr.
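+///
+/// A sketch of typical use (illustrative; assumes a test `Project` named `p`):
+/// ```
+/// let wrapper = echo_wrapper();
+/// p.cargo("build").env("RUSTC_WRAPPER", &wrapper).run();
+/// ```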
+pub fn echo_wrapper() -> PathBuf {
+ let mut lock = ECHO_WRAPPER.lock().unwrap();
+ if let Some(path) = &*lock {
+ return path.clone();
+ }
+ let p = project()
+ .at(paths::global_root().join("rustc-echo-wrapper"))
+ .file("Cargo.toml", &basic_manifest("rustc-echo-wrapper", "1.0.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ use std::fs::read_to_string;
+ use std::path::PathBuf;
+ fn main() {
+ // Handle args from `@path` argfile for rustc
+ let args = std::env::args()
+ .flat_map(|p| if let Some(p) = p.strip_prefix("@") {
+ read_to_string(p).unwrap().lines().map(String::from).collect()
+ } else {
+ vec![p]
+ })
+ .collect::<Vec<_>>();
+ eprintln!("WRAPPER CALLED: {}", args[1..].join(" "));
+ let status = std::process::Command::new(&args[1])
+ .args(&args[2..]).status().unwrap();
+ std::process::exit(status.code().unwrap_or(1));
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build").run();
+ let path = p.bin("rustc-echo-wrapper");
+ *lock = Some(path.clone());
+ path
+}
+
+/// Returns the path to an executable that prints its arguments.
+///
+/// Do not expect this to be anything fancy.
+pub fn echo() -> PathBuf {
+ let mut lock = ECHO.lock().unwrap();
+ if let Some(path) = &*lock {
+ return path.clone();
+ }
+ if let Ok(path) = cargo_util::paths::resolve_executable(Path::new("echo")) {
+ *lock = Some(path.clone());
+ return path;
+ }
+ // Often on Windows, `echo` is not available.
+ let p = project()
+ .at(paths::global_root().join("basic-echo"))
+ .file("Cargo.toml", &basic_manifest("basic-echo", "1.0.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let mut s = String::new();
+ let mut it = std::env::args().skip(1).peekable();
+ while let Some(n) = it.next() {
+ s.push_str(&n);
+ if it.peek().is_some() {
+ s.push(' ');
+ }
+ }
+ println!("{}", s);
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build").run();
+ let path = p.bin("basic-echo");
+ *lock = Some(path.clone());
+ path
+}
+
+/// Returns a project that builds a simple `cargo-echo` subcommand.
+pub fn echo_subcommand() -> Project {
+ let p = project()
+ .at("cargo-echo")
+ .file("Cargo.toml", &basic_manifest("cargo-echo", "0.0.1"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let args: Vec<_> = ::std::env::args().skip(1).collect();
+ println!("{}", args.join(" "));
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build").run();
+ p
+}
diff --git a/src/tools/cargo/crates/cargo-util/Cargo.toml b/src/tools/cargo/crates/cargo-util/Cargo.toml
new file mode 100644
index 000000000..7427ceb1a
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-util/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "cargo-util"
+version = "0.2.4"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+homepage = "https://github.com/rust-lang/cargo"
+repository = "https://github.com/rust-lang/cargo"
+description = "Miscellaneous support code used by Cargo."
+
+[dependencies]
+anyhow = "1.0.34"
+sha2 = "0.10.6"
+filetime = "0.2.9"
+hex = "0.4.2"
+jobserver = "0.1.26"
+libc = "0.2.88"
+log = "0.4.6"
+same-file = "1.0.6"
+shell-escape = "0.1.4"
+tempfile = "3.1.0"
+walkdir = "2.3.1"
+
+[target.'cfg(target_os = "macos")'.dependencies]
+core-foundation = { version = "0.9.0", features = ["mac_os_10_7_support"] }
+
+[target.'cfg(windows)'.dependencies]
+miow = "0.5.0"
+windows-sys = { version = "0.45.0", features = ["Win32_Storage_FileSystem", "Win32_Foundation", "Win32_System_Console"] }
diff --git a/src/tools/cargo/crates/cargo-util/LICENSE-APACHE b/src/tools/cargo/crates/cargo-util/LICENSE-APACHE
new file mode 120000
index 000000000..1cd601d0a
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-util/LICENSE-APACHE
@@ -0,0 +1 @@
+../../LICENSE-APACHE \ No newline at end of file
diff --git a/src/tools/cargo/crates/cargo-util/LICENSE-MIT b/src/tools/cargo/crates/cargo-util/LICENSE-MIT
new file mode 120000
index 000000000..b2cfbdc7b
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-util/LICENSE-MIT
@@ -0,0 +1 @@
+../../LICENSE-MIT \ No newline at end of file
diff --git a/src/tools/cargo/crates/cargo-util/src/lib.rs b/src/tools/cargo/crates/cargo-util/src/lib.rs
new file mode 100644
index 000000000..0cbc920ec
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-util/src/lib.rs
@@ -0,0 +1,18 @@
+//! Miscellaneous support code used by Cargo.
+
+pub use self::read2::read2;
+pub use process_builder::ProcessBuilder;
+pub use process_error::{exit_status_to_string, is_simple_exit_code, ProcessError};
+pub use sha256::Sha256;
+
+pub mod paths;
+mod process_builder;
+mod process_error;
+mod read2;
+pub mod registry;
+mod sha256;
+
+/// Whether or not this is running in a Continuous Integration environment.
+pub fn is_ci() -> bool {
+ std::env::var("CI").is_ok() || std::env::var("TF_BUILD").is_ok()
+}
diff --git a/src/tools/cargo/crates/cargo-util/src/paths.rs b/src/tools/cargo/crates/cargo-util/src/paths.rs
new file mode 100644
index 000000000..69df7a209
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-util/src/paths.rs
@@ -0,0 +1,788 @@
+//! Various utilities for working with files and paths.
+
+use anyhow::{Context, Result};
+use filetime::FileTime;
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fs::{self, File, OpenOptions};
+use std::io;
+use std::io::prelude::*;
+use std::iter;
+use std::path::{Component, Path, PathBuf};
+use tempfile::Builder as TempFileBuilder;
+
+/// Joins paths into a string suitable for the `PATH` environment variable.
+///
+/// This is equivalent to [`std::env::join_paths`], but includes a more
+/// detailed error message. The given `env` argument is the name of the
+/// environment variable this will be used for, which is included in the
+/// error message.
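+///
+/// For example:
+/// ```
+/// use cargo_util::paths::join_paths;
+/// let path = join_paths(&["/usr/bin", "/usr/local/bin"], "PATH").unwrap();
+/// assert!(!path.is_empty());
+/// ```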
+pub fn join_paths<T: AsRef<OsStr>>(paths: &[T], env: &str) -> Result<OsString> {
+ env::join_paths(paths.iter()).with_context(|| {
+ let mut message = format!(
+ "failed to join paths from `${env}` together\n\n\
+             Check if any of the path segments listed below contain an \
+ unterminated quote character or path separator:"
+ );
+ for path in paths {
+ use std::fmt::Write;
+ write!(&mut message, "\n {:?}", Path::new(path)).unwrap();
+ }
+
+ message
+ })
+}
+
+/// Returns the name of the environment variable used for searching for
+/// dynamic libraries.
+pub fn dylib_path_envvar() -> &'static str {
+ if cfg!(windows) {
+ "PATH"
+ } else if cfg!(target_os = "macos") {
+ // When loading and linking a dynamic library or bundle, dlopen
+ // searches in LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, PWD, and
+ // DYLD_FALLBACK_LIBRARY_PATH.
+ // In the Mach-O format, a dynamic library has an "install path."
+ // Clients linking against the library record this path, and the
+ // dynamic linker, dyld, uses it to locate the library.
+ // dyld searches DYLD_LIBRARY_PATH *before* the install path.
+ // dyld searches DYLD_FALLBACK_LIBRARY_PATH only if it cannot
+ // find the library in the install path.
+ // Setting DYLD_LIBRARY_PATH can easily have unintended
+ // consequences.
+ //
+ // Also, DYLD_LIBRARY_PATH appears to have significant performance
+ // penalty starting in 10.13. Cargo's testsuite ran more than twice as
+ // slow with it on CI.
+ "DYLD_FALLBACK_LIBRARY_PATH"
+ } else {
+ "LD_LIBRARY_PATH"
+ }
+}
+
+/// Returns a list of directories that are searched for dynamic libraries.
+///
+/// Note that when this is empty, some operating systems fall back to built-in
+/// default search paths that callers may need to account for.
+pub fn dylib_path() -> Vec<PathBuf> {
+ match env::var_os(dylib_path_envvar()) {
+ Some(var) => env::split_paths(&var).collect(),
+ None => Vec::new(),
+ }
+}
+
+/// Normalize a path, removing things like `.` and `..`.
+///
+/// CAUTION: This does not resolve symlinks (unlike
+/// [`std::fs::canonicalize`]). This may cause incorrect or surprising
+/// behavior at times. This should be used carefully. Unfortunately,
+/// [`std::fs::canonicalize`] can be hard to use correctly, since it can often
+/// fail, or on Windows returns annoying device paths. This is a problem Cargo
+/// needs to improve on.
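+///
+/// For example:
+/// ```
+/// use cargo_util::paths::normalize_path;
+/// use std::path::{Path, PathBuf};
+///
+/// assert_eq!(
+///     normalize_path(Path::new("/a/b/../c/./d")),
+///     PathBuf::from("/a/c/d"),
+/// );
+/// ```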
+pub fn normalize_path(path: &Path) -> PathBuf {
+ let mut components = path.components().peekable();
+ let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
+ components.next();
+ PathBuf::from(c.as_os_str())
+ } else {
+ PathBuf::new()
+ };
+
+ for component in components {
+ match component {
+ Component::Prefix(..) => unreachable!(),
+ Component::RootDir => {
+ ret.push(component.as_os_str());
+ }
+ Component::CurDir => {}
+ Component::ParentDir => {
+ ret.pop();
+ }
+ Component::Normal(c) => {
+ ret.push(c);
+ }
+ }
+ }
+ ret
+}
+
+/// Returns the absolute path to the given executable, located by searching
+/// the `PATH` environment variable.
+///
+/// Returns an error if it cannot be found.
+pub fn resolve_executable(exec: &Path) -> Result<PathBuf> {
+ if exec.components().count() == 1 {
+ let paths = env::var_os("PATH").ok_or_else(|| anyhow::format_err!("no PATH"))?;
+ let candidates = env::split_paths(&paths).flat_map(|path| {
+ let candidate = path.join(&exec);
+ let with_exe = if env::consts::EXE_EXTENSION.is_empty() {
+ None
+ } else {
+ Some(candidate.with_extension(env::consts::EXE_EXTENSION))
+ };
+ iter::once(candidate).chain(with_exe)
+ });
+ for candidate in candidates {
+ if candidate.is_file() {
+ return Ok(candidate);
+ }
+ }
+
+ anyhow::bail!("no executable for `{}` found in PATH", exec.display())
+ } else {
+ Ok(exec.into())
+ }
+}
+
+/// Reads a file to a string.
+///
+/// Equivalent to [`std::fs::read_to_string`] with better error messages.
+pub fn read(path: &Path) -> Result<String> {
+ match String::from_utf8(read_bytes(path)?) {
+ Ok(s) => Ok(s),
+ Err(_) => anyhow::bail!("path at `{}` was not valid utf-8", path.display()),
+ }
+}
+
+/// Reads a file into a bytes vector.
+///
+/// Equivalent to [`std::fs::read`] with better error messages.
+pub fn read_bytes(path: &Path) -> Result<Vec<u8>> {
+ fs::read(path).with_context(|| format!("failed to read `{}`", path.display()))
+}
+
+/// Writes a file to disk.
+///
+/// Equivalent to [`std::fs::write`] with better error messages.
+pub fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {
+ let path = path.as_ref();
+ fs::write(path, contents.as_ref())
+ .with_context(|| format!("failed to write `{}`", path.display()))
+}
+
+/// Equivalent to [`write()`], but does not write anything if the file contents
+/// are identical to the given contents.
+pub fn write_if_changed<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {
+ (|| -> Result<()> {
+ let contents = contents.as_ref();
+ let mut f = OpenOptions::new()
+ .read(true)
+ .write(true)
+ .create(true)
+ .open(&path)?;
+ let mut orig = Vec::new();
+ f.read_to_end(&mut orig)?;
+ if orig != contents {
+ f.set_len(0)?;
+ f.seek(io::SeekFrom::Start(0))?;
+ f.write_all(contents)?;
+ }
+ Ok(())
+ })()
+ .with_context(|| format!("failed to write `{}`", path.as_ref().display()))?;
+ Ok(())
+}
+
+/// Equivalent to [`write()`], but appends to the end instead of replacing the
+/// contents.
+pub fn append(path: &Path, contents: &[u8]) -> Result<()> {
+ (|| -> Result<()> {
+ let mut f = OpenOptions::new()
+ .write(true)
+ .append(true)
+ .create(true)
+ .open(path)?;
+
+ f.write_all(contents)?;
+ Ok(())
+ })()
+ .with_context(|| format!("failed to write `{}`", path.display()))?;
+ Ok(())
+}
+
+/// Creates a new file.
+pub fn create<P: AsRef<Path>>(path: P) -> Result<File> {
+ let path = path.as_ref();
+ File::create(path).with_context(|| format!("failed to create file `{}`", path.display()))
+}
+
+/// Opens an existing file.
+pub fn open<P: AsRef<Path>>(path: P) -> Result<File> {
+ let path = path.as_ref();
+ File::open(path).with_context(|| format!("failed to open file `{}`", path.display()))
+}
+
+/// Returns the last modification time of a file.
+pub fn mtime(path: &Path) -> Result<FileTime> {
+ let meta =
+ fs::metadata(path).with_context(|| format!("failed to stat `{}`", path.display()))?;
+ Ok(FileTime::from_last_modification_time(&meta))
+}
+
+/// Returns the maximum mtime of the given path, recursing into
+/// subdirectories, and following symlinks.
+pub fn mtime_recursive(path: &Path) -> Result<FileTime> {
+ let meta =
+ fs::metadata(path).with_context(|| format!("failed to stat `{}`", path.display()))?;
+ if !meta.is_dir() {
+ return Ok(FileTime::from_last_modification_time(&meta));
+ }
+ let max_meta = walkdir::WalkDir::new(path)
+ .follow_links(true)
+ .into_iter()
+ .filter_map(|e| match e {
+ Ok(e) => Some(e),
+ Err(e) => {
+ // Ignore errors while walking. If Cargo can't access it, the
+ // build script probably can't access it, either.
+ log::debug!("failed to determine mtime while walking directory: {}", e);
+ None
+ }
+ })
+ .filter_map(|e| {
+ if e.path_is_symlink() {
+ // Use the mtime of both the symlink and its target, to
+ // handle the case where the symlink is modified to a
+ // different target.
+ let sym_meta = match std::fs::symlink_metadata(e.path()) {
+ Ok(m) => m,
+ Err(err) => {
+ // I'm not sure when this is really possible (maybe a
+ // race with unlinking?). Regardless, if Cargo can't
+ // read it, the build script probably can't either.
+ log::debug!(
+ "failed to determine mtime while fetching symlink metadata of {}: {}",
+ e.path().display(),
+ err
+ );
+ return None;
+ }
+ };
+ let sym_mtime = FileTime::from_last_modification_time(&sym_meta);
+ // Walkdir follows symlinks.
+ match e.metadata() {
+ Ok(target_meta) => {
+ let target_mtime = FileTime::from_last_modification_time(&target_meta);
+ Some(sym_mtime.max(target_mtime))
+ }
+ Err(err) => {
+ // Can't access the symlink target. If Cargo can't
+ // access it, the build script probably can't access
+ // it either.
+ log::debug!(
+ "failed to determine mtime of symlink target for {}: {}",
+ e.path().display(),
+ err
+ );
+ Some(sym_mtime)
+ }
+ }
+ } else {
+ let meta = match e.metadata() {
+ Ok(m) => m,
+ Err(err) => {
+ // I'm not sure when this is really possible (maybe a
+ // race with unlinking?). Regardless, if Cargo can't
+ // read it, the build script probably can't either.
+ log::debug!(
+ "failed to determine mtime while fetching metadata of {}: {}",
+ e.path().display(),
+ err
+ );
+ return None;
+ }
+ };
+ Some(FileTime::from_last_modification_time(&meta))
+ }
+ })
+ .max()
+        // `unwrap_or_else` handles the case where there are no files in the directory.
+ .unwrap_or_else(|| FileTime::from_last_modification_time(&meta));
+ Ok(max_meta)
+}
+
+/// Records the current time on the filesystem (using the filesystem's clock)
+/// by writing a file in the given directory. Returns that time.
+pub fn set_invocation_time(path: &Path) -> Result<FileTime> {
+ // note that if `FileTime::from_system_time(SystemTime::now());` is determined to be sufficient,
+ // then this can be removed.
+ let timestamp = path.join("invoked.timestamp");
+ write(
+ &timestamp,
+ "This file has an mtime of when this was started.",
+ )?;
+ let ft = mtime(&timestamp)?;
+ log::debug!("invocation time for {:?} is {}", path, ft);
+ Ok(ft)
+}
+
+/// Converts a path to bytes (raw OS bytes on Unix; must be valid UTF-8 on Windows).
+pub fn path2bytes(path: &Path) -> Result<&[u8]> {
+ #[cfg(unix)]
+ {
+ use std::os::unix::prelude::*;
+ Ok(path.as_os_str().as_bytes())
+ }
+ #[cfg(windows)]
+ {
+ match path.as_os_str().to_str() {
+ Some(s) => Ok(s.as_bytes()),
+ None => Err(anyhow::format_err!(
+ "invalid non-unicode path: {}",
+ path.display()
+ )),
+ }
+ }
+}
+
+/// Converts UTF-8 bytes to a path.
+pub fn bytes2path(bytes: &[u8]) -> Result<PathBuf> {
+ #[cfg(unix)]
+ {
+ use std::os::unix::prelude::*;
+ Ok(PathBuf::from(OsStr::from_bytes(bytes)))
+ }
+ #[cfg(windows)]
+ {
+ use std::str;
+ match str::from_utf8(bytes) {
+ Ok(s) => Ok(PathBuf::from(s)),
+ Err(..) => Err(anyhow::format_err!("invalid non-unicode path")),
+ }
+ }
+}
+
+/// Returns an iterator that walks up the directory hierarchy towards the root.
+///
+/// Each item is a [`Path`]. It will start with the given path, finishing at
+/// the root. If the `stop_root_at` parameter is given, it will stop at the
+/// given path (which will be the last item).
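+///
+/// For example (paths are illustrative):
+/// ```
+/// use cargo_util::paths::ancestors;
+/// use std::path::Path;
+///
+/// let mut it = ancestors(Path::new("/a/b/c"), None);
+/// assert_eq!(it.next(), Some(Path::new("/a/b/c")));
+/// assert_eq!(it.next(), Some(Path::new("/a/b")));
+/// ```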
+pub fn ancestors<'a>(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> {
+ PathAncestors::new(path, stop_root_at)
+}
+
+pub struct PathAncestors<'a> {
+ current: Option<&'a Path>,
+ stop_at: Option<PathBuf>,
+}
+
+impl<'a> PathAncestors<'a> {
+ fn new(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> {
+ let stop_at = env::var("__CARGO_TEST_ROOT")
+ .ok()
+ .map(PathBuf::from)
+ .or_else(|| stop_root_at.map(|p| p.to_path_buf()));
+ PathAncestors {
+ current: Some(path),
+ //HACK: avoid reading `~/.cargo/config` when testing Cargo itself.
+ stop_at,
+ }
+ }
+}
+
+impl<'a> Iterator for PathAncestors<'a> {
+ type Item = &'a Path;
+
+ fn next(&mut self) -> Option<&'a Path> {
+ if let Some(path) = self.current {
+ self.current = path.parent();
+
+ if let Some(ref stop_at) = self.stop_at {
+ if path == stop_at {
+ self.current = None;
+ }
+ }
+
+ Some(path)
+ } else {
+ None
+ }
+ }
+}
+
+/// Equivalent to [`std::fs::create_dir_all`] with better error messages.
+pub fn create_dir_all(p: impl AsRef<Path>) -> Result<()> {
+ _create_dir_all(p.as_ref())
+}
+
+fn _create_dir_all(p: &Path) -> Result<()> {
+ fs::create_dir_all(p)
+ .with_context(|| format!("failed to create directory `{}`", p.display()))?;
+ Ok(())
+}
+
+/// Recursively remove all files and directories at the given directory.
+///
+/// This does *not* follow symlinks.
+pub fn remove_dir_all<P: AsRef<Path>>(p: P) -> Result<()> {
+ _remove_dir_all(p.as_ref())
+}
+
+fn _remove_dir_all(p: &Path) -> Result<()> {
+ if p.symlink_metadata()
+ .with_context(|| format!("could not get metadata for `{}` to remove", p.display()))?
+ .is_symlink()
+ {
+ return remove_file(p);
+ }
+ let entries = p
+ .read_dir()
+ .with_context(|| format!("failed to read directory `{}`", p.display()))?;
+ for entry in entries {
+ let entry = entry?;
+ let path = entry.path();
+ if entry.file_type()?.is_dir() {
+ remove_dir_all(&path)?;
+ } else {
+ remove_file(&path)?;
+ }
+ }
+ remove_dir(&p)
+}
+
+/// Equivalent to [`std::fs::remove_dir`] with better error messages.
+pub fn remove_dir<P: AsRef<Path>>(p: P) -> Result<()> {
+ _remove_dir(p.as_ref())
+}
+
+fn _remove_dir(p: &Path) -> Result<()> {
+ fs::remove_dir(p).with_context(|| format!("failed to remove directory `{}`", p.display()))?;
+ Ok(())
+}
+
+/// Equivalent to [`std::fs::remove_file`] with better error messages.
+///
+/// If the file is readonly, this will attempt to change the permissions to
+/// force the file to be deleted.
+pub fn remove_file<P: AsRef<Path>>(p: P) -> Result<()> {
+ _remove_file(p.as_ref())
+}
+
+fn _remove_file(p: &Path) -> Result<()> {
+ let mut err = match fs::remove_file(p) {
+ Ok(()) => return Ok(()),
+ Err(e) => e,
+ };
+
+ if err.kind() == io::ErrorKind::PermissionDenied && set_not_readonly(p).unwrap_or(false) {
+ match fs::remove_file(p) {
+ Ok(()) => return Ok(()),
+ Err(e) => err = e,
+ }
+ }
+
+ Err(err).with_context(|| format!("failed to remove file `{}`", p.display()))?;
+ Ok(())
+}
+
+fn set_not_readonly(p: &Path) -> io::Result<bool> {
+ let mut perms = p.metadata()?.permissions();
+ if !perms.readonly() {
+ return Ok(false);
+ }
+ perms.set_readonly(false);
+ fs::set_permissions(p, perms)?;
+ Ok(true)
+}
+
+/// Hardlink (file) or symlink (dir) src to dst if possible, otherwise copy it.
+///
+/// If the destination already exists, it is removed before linking.
+pub fn link_or_copy(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
+ let src = src.as_ref();
+ let dst = dst.as_ref();
+ _link_or_copy(src, dst)
+}
+
+fn _link_or_copy(src: &Path, dst: &Path) -> Result<()> {
+ log::debug!("linking {} to {}", src.display(), dst.display());
+ if same_file::is_same_file(src, dst).unwrap_or(false) {
+ return Ok(());
+ }
+
+ // NB: we can't use dst.exists(), as if dst is a broken symlink,
+ // dst.exists() will return false. This is problematic, as we still need to
+ // unlink dst in this case. symlink_metadata(dst).is_ok() will tell us
+ // whether dst exists *without* following symlinks, which is what we want.
+ if fs::symlink_metadata(dst).is_ok() {
+ remove_file(&dst)?;
+ }
+
+ let link_result = if src.is_dir() {
+ #[cfg(target_os = "redox")]
+ use std::os::redox::fs::symlink;
+ #[cfg(unix)]
+ use std::os::unix::fs::symlink;
+ #[cfg(windows)]
+ // FIXME: This should probably panic or have a copy fallback. Symlinks
+ // are not supported in all windows environments. Currently symlinking
+ // is only used for .dSYM directories on macos, but this shouldn't be
+ // accidentally relied upon.
+ use std::os::windows::fs::symlink_dir as symlink;
+
+ let dst_dir = dst.parent().unwrap();
+ let src = if src.starts_with(dst_dir) {
+ src.strip_prefix(dst_dir).unwrap()
+ } else {
+ src
+ };
+ symlink(src, dst)
+ } else if env::var_os("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS").is_some() {
+ // This is a work-around for a bug in macOS 10.15. When running on
+ // APFS, there seems to be a strange race condition with
+ // Gatekeeper where it will forcefully kill a process launched via
+ // `cargo run` with SIGKILL. Copying seems to avoid the problem.
+ // This shouldn't affect anyone except Cargo's test suite because
+ // it is very rare, and only seems to happen under heavy load and
+ // rapidly creating lots of executables and running them.
+ // See https://github.com/rust-lang/cargo/issues/7821 for the
+ // gory details.
+ fs::copy(src, dst).map(|_| ())
+ } else {
+ if cfg!(target_os = "macos") {
+ // This is a work-around for a bug on macOS. There seems to be a race condition
+ // with APFS when hard-linking binaries: Gatekeeper does not yet have the signing or
+ // hash information in the kernel when the process is launched, so it kills the process.
+ // The problem does not appear when copying files, as the kernel has time to process them.
+ // Note that fs::copy on macOS uses copy-on-write (the fclonefileat syscall), which should
+ // be about as fast as hard-linking.
+ // See https://github.com/rust-lang/cargo/issues/10060 for the details
+ fs::copy(src, dst).map(|_| ())
+ } else {
+ fs::hard_link(src, dst)
+ }
+ };
+ link_result
+ .or_else(|err| {
+ log::debug!("link failed {}. falling back to fs::copy", err);
+ fs::copy(src, dst).map(|_| ())
+ })
+ .with_context(|| {
+ format!(
+ "failed to link or copy `{}` to `{}`",
+ src.display(),
+ dst.display()
+ )
+ })?;
+ Ok(())
+}
+
+/// Copies a file from one location to another.
+///
+/// Equivalent to [`std::fs::copy`] with better error messages.
+pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> {
+ let from = from.as_ref();
+ let to = to.as_ref();
+ fs::copy(from, to)
+ .with_context(|| format!("failed to copy `{}` to `{}`", from.display(), to.display()))
+}
+
+/// Changes the filesystem mtime (and atime if possible) for the given file.
+///
+/// This intentionally does not return an error, as this is sometimes not
+/// supported on network filesystems. For the current uses in Cargo, this is a
+/// "best effort" approach, and errors shouldn't be propagated.
+pub fn set_file_time_no_err<P: AsRef<Path>>(path: P, time: FileTime) {
+ let path = path.as_ref();
+ match filetime::set_file_times(path, time, time) {
+ Ok(()) => log::debug!("set file mtime {} to {}", path.display(), time),
+ Err(e) => log::warn!(
+ "could not set mtime of {} to {}: {:?}",
+ path.display(),
+ time,
+ e
+ ),
+ }
+}
+
+/// Strips `base` from `path`.
+///
+/// This canonicalizes both paths before stripping. This is useful if the
+/// paths are obtained in different ways, and one or the other may or may not
+/// have been normalized in some way.
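+///
+/// # Examples
+///
+/// A minimal sketch; the paths are illustrative and this assumes the module is
+/// available as `cargo_util::paths`:
+///
+/// ```no_run
+/// use cargo_util::paths;
+/// use std::path::PathBuf;
+///
+/// // Both arguments are canonicalized before stripping, so differently
+/// // normalized forms of the same location still match.
+/// let rel = paths::strip_prefix_canonical("/work/project/src/lib.rs", "/work/project").unwrap();
+/// assert_eq!(rel, PathBuf::from("src/lib.rs"));
+/// ```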
+pub fn strip_prefix_canonical<P: AsRef<Path>>(
+ path: P,
+ base: P,
+) -> Result<PathBuf, std::path::StripPrefixError> {
+ // Not all filesystems support canonicalize. Just ignore if it doesn't work.
+ let safe_canonicalize = |path: &Path| match path.canonicalize() {
+ Ok(p) => p,
+ Err(e) => {
+ log::warn!("cannot canonicalize {:?}: {:?}", path, e);
+ path.to_path_buf()
+ }
+ };
+ let canon_path = safe_canonicalize(path.as_ref());
+ let canon_base = safe_canonicalize(base.as_ref());
+ canon_path.strip_prefix(canon_base).map(|p| p.to_path_buf())
+}
+
+/// Atomically creates a directory that is excluded from backups and indexing, creating any
+/// missing parents as needed.
+///
+/// The atomicity only covers creating the leaf directory and marking it as excluded. Any missing
+/// parent directories will not be created in an atomic manner.
+///
+/// This function is idempotent; in addition, it will not mark `p` as excluded if `p`
+/// already exists.
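+///
+/// # Examples
+///
+/// A minimal sketch; the path is illustrative and this assumes the module is
+/// available as `cargo_util::paths`:
+///
+/// ```no_run
+/// use cargo_util::paths;
+///
+/// // Creates the directory and marks it as excluded from backups and indexing
+/// // before it becomes visible under its final name.
+/// paths::create_dir_all_excluded_from_backups_atomic("/home/user/.cargo/registry/cache").unwrap();
+/// ```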
+pub fn create_dir_all_excluded_from_backups_atomic(p: impl AsRef<Path>) -> Result<()> {
+ let path = p.as_ref();
+ if path.is_dir() {
+ return Ok(());
+ }
+
+ let parent = path.parent().unwrap();
+ let base = path.file_name().unwrap();
+ create_dir_all(parent)?;
+ // We do this in two steps: first create a temporary directory and exclude
+ // it from backups, then rename it to the desired name. If we created the
+ // directory directly where it should be and then excluded it from backups,
+ // we would risk a situation where cargo is interrupted right after the
+ // directory creation but before the exclusion; the directory would then remain
+ // non-excluded from backups, because we only perform the exclusion right after
+ // we create the directory ourselves.
+ //
+ // We need the tempdir created in `parent` instead of `$TMP`, because only then can
+ // we be reasonably sure that rename() will succeed (the new name needs to be on
+ // the same mount point as the old one).
+ let tempdir = TempFileBuilder::new().prefix(base).tempdir_in(parent)?;
+ exclude_from_backups(tempdir.path());
+ exclude_from_content_indexing(tempdir.path());
+ // Previously std::fs::create_dir_all() (through paths::create_dir_all()) was used
+ // here to create the directory directly, and fs::create_dir_all() explicitly treats
+ // a directory being created concurrently by another thread or process as success,
+ // hence the check below to preserve the existing behavior. If rename() fails and the
+ // directory (which didn't exist a moment earlier) suddenly exists, we can infer that
+ // another cargo process is doing the work.
+ if let Err(e) = fs::rename(tempdir.path(), path) {
+ if !path.exists() {
+ return Err(anyhow::Error::from(e));
+ }
+ }
+ Ok(())
+}
+
+/// Mark an existing directory as excluded from backups and indexing.
+///
+/// Errors in marking it are ignored.
+pub fn exclude_from_backups_and_indexing(p: impl AsRef<Path>) {
+ let path = p.as_ref();
+ exclude_from_backups(path);
+ exclude_from_content_indexing(path);
+}
+
+/// Marks the directory as excluded from archives/backups.
+///
+/// This is recommended to prevent derived/temporary files from bloating backups. There are two
+/// mechanisms used to achieve this right now:
+///
+/// * A dedicated resource property excluding from Time Machine backups on macOS
+/// * CACHEDIR.TAG files supported by various tools in a platform-independent way
+fn exclude_from_backups(path: &Path) {
+ exclude_from_time_machine(path);
+ let _ = std::fs::write(
+ path.join("CACHEDIR.TAG"),
+ "Signature: 8a477f597d28d172789f06886806bc55
+# This file is a cache directory tag created by cargo.
+# For information about cache directory tags see https://bford.info/cachedir/
+",
+ );
+ // Similarly to exclude_from_time_machine() we ignore errors here as it's an optional feature.
+}
+
+/// Marks the directory as excluded from content indexing.
+///
+/// This is recommended to prevent the content of derived/temporary files from being indexed.
+/// This is very important for Windows users, as the live content indexing significantly slows
+/// cargo's I/O operations.
+///
+/// This is currently a no-op on non-Windows platforms.
+fn exclude_from_content_indexing(path: &Path) {
+ #[cfg(windows)]
+ {
+ use std::iter::once;
+ use std::os::windows::prelude::OsStrExt;
+ use windows_sys::Win32::Storage::FileSystem::{
+ GetFileAttributesW, SetFileAttributesW, FILE_ATTRIBUTE_NOT_CONTENT_INDEXED,
+ };
+
+ let path: Vec<u16> = path.as_os_str().encode_wide().chain(once(0)).collect();
+ unsafe {
+ SetFileAttributesW(
+ path.as_ptr(),
+ GetFileAttributesW(path.as_ptr()) | FILE_ATTRIBUTE_NOT_CONTENT_INDEXED,
+ );
+ }
+ }
+ #[cfg(not(windows))]
+ {
+ let _ = path;
+ }
+}
+
+#[cfg(not(target_os = "macos"))]
+fn exclude_from_time_machine(_: &Path) {}
+
+#[cfg(target_os = "macos")]
+/// Marks files or directories as excluded from Time Machine on macOS
+fn exclude_from_time_machine(path: &Path) {
+ use core_foundation::base::TCFType;
+ use core_foundation::{number, string, url};
+ use std::ptr;
+
+ // For compatibility with macOS 10.7, a string is used instead of the global kCFURLIsExcludedFromBackupKey
+ let is_excluded_key: Result<string::CFString, _> = "NSURLIsExcludedFromBackupKey".parse();
+ let path = url::CFURL::from_path(path, false);
+ if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) {
+ unsafe {
+ url::CFURLSetResourcePropertyForKey(
+ path.as_concrete_TypeRef(),
+ is_excluded_key.as_concrete_TypeRef(),
+ number::kCFBooleanTrue as *const _,
+ ptr::null_mut(),
+ );
+ }
+ }
+ // Errors are ignored, since it's an optional feature and failure
+ // doesn't prevent Cargo from working
+}
+
+#[cfg(test)]
+mod tests {
+ use super::join_paths;
+
+ #[test]
+ fn join_paths_lists_paths_on_error() {
+ let valid_paths = vec!["/testing/one", "/testing/two"];
+ // does not fail on valid input
+ let _joined = join_paths(&valid_paths, "TESTING1").unwrap();
+
+ #[cfg(unix)]
+ {
+ let invalid_paths = vec!["/testing/one", "/testing/t:wo/three"];
+ let err = join_paths(&invalid_paths, "TESTING2").unwrap_err();
+ assert_eq!(
+ err.to_string(),
+ "failed to join paths from `$TESTING2` together\n\n\
+ Check if any of path segments listed below contain an \
+ unterminated quote character or path separator:\
+ \n \"/testing/one\"\
+ \n \"/testing/t:wo/three\"\
+ "
+ );
+ }
+ #[cfg(windows)]
+ {
+ let invalid_paths = vec!["/testing/one", "/testing/t\"wo/three"];
+ let err = join_paths(&invalid_paths, "TESTING2").unwrap_err();
+ assert_eq!(
+ err.to_string(),
+ "failed to join paths from `$TESTING2` together\n\n\
+ Check if any of path segments listed below contain an \
+ unterminated quote character or path separator:\
+ \n \"/testing/one\"\
+ \n \"/testing/t\\\"wo/three\"\
+ "
+ );
+ }
+ }
+}
diff --git a/src/tools/cargo/crates/cargo-util/src/process_builder.rs b/src/tools/cargo/crates/cargo-util/src/process_builder.rs
new file mode 100644
index 000000000..76392f256
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-util/src/process_builder.rs
@@ -0,0 +1,689 @@
+use crate::process_error::ProcessError;
+use crate::read2;
+
+use anyhow::{bail, Context, Result};
+use jobserver::Client;
+use shell_escape::escape;
+use tempfile::NamedTempFile;
+
+use std::collections::BTreeMap;
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fmt;
+use std::io::{self, Write};
+use std::iter::once;
+use std::path::Path;
+use std::process::{Command, ExitStatus, Output, Stdio};
+
+/// A builder object for an external process, similar to [`std::process::Command`].
+#[derive(Clone, Debug)]
+pub struct ProcessBuilder {
+ /// The program to execute.
+ program: OsString,
+ /// A list of arguments to pass to the program.
+ args: Vec<OsString>,
+ /// Any environment variables that should be set for the program.
+ env: BTreeMap<String, Option<OsString>>,
+ /// The directory to run the program from.
+ cwd: Option<OsString>,
+ /// A list of wrappers that wrap the original program when calling
+ /// [`ProcessBuilder::wrapped`]. The last one is the outermost one.
+ wrappers: Vec<OsString>,
+ /// The `make` jobserver. See the [jobserver crate] for
+ /// more information.
+ ///
+ /// [jobserver crate]: https://docs.rs/jobserver/
+ jobserver: Option<Client>,
+ /// `true` to include environment variables in the display.
+ display_env_vars: bool,
+ /// `true` to retry with an argfile if hitting "command line too big" error.
+ /// See [`ProcessBuilder::retry_with_argfile`] for more information.
+ retry_with_argfile: bool,
+ /// Data to write to stdin.
+ stdin: Option<Vec<u8>>,
+}
+
+impl fmt::Display for ProcessBuilder {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "`")?;
+
+ if self.display_env_vars {
+ for (key, val) in self.env.iter() {
+ if let Some(val) = val {
+ let val = escape(val.to_string_lossy());
+ if cfg!(windows) {
+ write!(f, "set {}={}&& ", key, val)?;
+ } else {
+ write!(f, "{}={} ", key, val)?;
+ }
+ }
+ }
+ }
+
+ write!(f, "{}", self.get_program().to_string_lossy())?;
+
+ for arg in self.get_args() {
+ write!(f, " {}", escape(arg.to_string_lossy()))?;
+ }
+
+ write!(f, "`")
+ }
+}
+
+impl ProcessBuilder {
+ /// Creates a new [`ProcessBuilder`] with the given executable path.
+ pub fn new<T: AsRef<OsStr>>(cmd: T) -> ProcessBuilder {
+ ProcessBuilder {
+ program: cmd.as_ref().to_os_string(),
+ args: Vec::new(),
+ cwd: None,
+ env: BTreeMap::new(),
+ wrappers: Vec::new(),
+ jobserver: None,
+ display_env_vars: false,
+ retry_with_argfile: false,
+ stdin: None,
+ }
+ }
+
+ /// (chainable) Sets the executable for the process.
+ pub fn program<T: AsRef<OsStr>>(&mut self, program: T) -> &mut ProcessBuilder {
+ self.program = program.as_ref().to_os_string();
+ self
+ }
+
+ /// (chainable) Adds `arg` to the args list.
+ pub fn arg<T: AsRef<OsStr>>(&mut self, arg: T) -> &mut ProcessBuilder {
+ self.args.push(arg.as_ref().to_os_string());
+ self
+ }
+
+ /// (chainable) Adds multiple `args` to the args list.
+ pub fn args<T: AsRef<OsStr>>(&mut self, args: &[T]) -> &mut ProcessBuilder {
+ self.args
+ .extend(args.iter().map(|t| t.as_ref().to_os_string()));
+ self
+ }
+
+ /// (chainable) Replaces the args list with the given `args`.
+ pub fn args_replace<T: AsRef<OsStr>>(&mut self, args: &[T]) -> &mut ProcessBuilder {
+ if let Some(program) = self.wrappers.pop() {
+ // The user intends to replace all args, so we
+ // - use the outermost wrapper as the main program, and
+ // - clean up the other inner wrappers.
+ self.program = program;
+ self.wrappers = Vec::new();
+ }
+ self.args = args.iter().map(|t| t.as_ref().to_os_string()).collect();
+ self
+ }
+
+ /// (chainable) Sets the current working directory of the process.
+ pub fn cwd<T: AsRef<OsStr>>(&mut self, path: T) -> &mut ProcessBuilder {
+ self.cwd = Some(path.as_ref().to_os_string());
+ self
+ }
+
+ /// (chainable) Sets an environment variable for the process.
+ pub fn env<T: AsRef<OsStr>>(&mut self, key: &str, val: T) -> &mut ProcessBuilder {
+ self.env
+ .insert(key.to_string(), Some(val.as_ref().to_os_string()));
+ self
+ }
+
+ /// (chainable) Unsets an environment variable for the process.
+ pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder {
+ self.env.insert(key.to_string(), None);
+ self
+ }
+
+ /// Gets the executable name.
+ pub fn get_program(&self) -> &OsString {
+ self.wrappers.last().unwrap_or(&self.program)
+ }
+
+ /// Gets the program arguments.
+ pub fn get_args(&self) -> impl Iterator<Item = &OsString> {
+ self.wrappers
+ .iter()
+ .rev()
+ .chain(once(&self.program))
+ .chain(self.args.iter())
+ .skip(1) // Skip the main `program`.
+ }
+
+ /// Gets the current working directory for the process.
+ pub fn get_cwd(&self) -> Option<&Path> {
+ self.cwd.as_ref().map(Path::new)
+ }
+
+ /// Gets an environment variable as the process will see it (will inherit from the
+ /// environment unless explicitly unset).
+ pub fn get_env(&self, var: &str) -> Option<OsString> {
+ self.env
+ .get(var)
+ .cloned()
+ .or_else(|| Some(env::var_os(var)))
+ .and_then(|s| s)
+ }
+
+ /// Gets all environment variables explicitly set or unset for the process (not inherited
+ /// vars).
+ pub fn get_envs(&self) -> &BTreeMap<String, Option<OsString>> {
+ &self.env
+ }
+
+ /// Sets the `make` jobserver. See the [jobserver crate][jobserver_docs] for
+ /// more information.
+ ///
+ /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/
+ pub fn inherit_jobserver(&mut self, jobserver: &Client) -> &mut Self {
+ self.jobserver = Some(jobserver.clone());
+ self
+ }
+
+ /// Enables environment variable display.
+ pub fn display_env_vars(&mut self) -> &mut Self {
+ self.display_env_vars = true;
+ self
+ }
+
+ /// Enables retrying with an argfile when hitting a "command line too big" error.
+ ///
+ /// This is primarily for the `@path` argument of rustc and rustdoc, which treat
+ /// each line as a command-line argument, so `LF` and `CRLF` bytes are currently
+ /// not valid inside an argfile argument.
+ /// For example, `RUSTDOCFLAGS="--crate-version foo\nbar" cargo doc` is
+ /// valid when invoked from the command line but not via an argfile.
+ ///
+ /// To sum up, the limitations of the argfile are:
+ ///
+ /// - Each argument must be valid UTF-8.
+ /// - No argument may contain a newline.
+ ///
+ /// Ref:
+ ///
+ /// - <https://doc.rust-lang.org/rustdoc/command-line-arguments.html#path-load-command-line-flags-from-a-path>
+ /// - <https://doc.rust-lang.org/rustc/command-line-arguments.html#path-load-command-line-flags-from-a-path>
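+ ///
+ /// # Examples
+ ///
+ /// A minimal sketch; `rustc` here is just a stand-in for a command that may
+ /// receive a very long argument list:
+ ///
+ /// ```no_run
+ /// use cargo_util::ProcessBuilder;
+ ///
+ /// let mut cmd = ProcessBuilder::new("rustc");
+ /// cmd.arg("--edition=2021").retry_with_argfile(true);
+ /// // If spawning fails with "command line too big", the args are retried via `@<path>`.
+ /// cmd.exec().unwrap();
+ /// ```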
+ pub fn retry_with_argfile(&mut self, enabled: bool) -> &mut Self {
+ self.retry_with_argfile = enabled;
+ self
+ }
+
+ /// Sets a value that will be written to stdin of the process on launch.
+ pub fn stdin<T: Into<Vec<u8>>>(&mut self, stdin: T) -> &mut Self {
+ self.stdin = Some(stdin.into());
+ self
+ }
+
+ fn should_retry_with_argfile(&self, err: &io::Error) -> bool {
+ self.retry_with_argfile && imp::command_line_too_big(err)
+ }
+
+ /// Like [`Command::status`] but with a better error message.
+ pub fn status(&self) -> Result<ExitStatus> {
+ self._status()
+ .with_context(|| ProcessError::could_not_execute(self))
+ }
+
+ fn _status(&self) -> io::Result<ExitStatus> {
+ if !debug_force_argfile(self.retry_with_argfile) {
+ let mut cmd = self.build_command();
+ match cmd.spawn() {
+ Err(ref e) if self.should_retry_with_argfile(e) => {}
+ Err(e) => return Err(e),
+ Ok(mut child) => return child.wait(),
+ }
+ }
+ let (mut cmd, argfile) = self.build_command_with_argfile()?;
+ let status = cmd.spawn()?.wait();
+ close_tempfile_and_log_error(argfile);
+ status
+ }
+
+ /// Runs the process, waiting for completion, and mapping non-success exit codes to an error.
+ pub fn exec(&self) -> Result<()> {
+ let exit = self.status()?;
+ if exit.success() {
+ Ok(())
+ } else {
+ Err(ProcessError::new(
+ &format!("process didn't exit successfully: {}", self),
+ Some(exit),
+ None,
+ )
+ .into())
+ }
+ }
+
+ /// Replaces the current process with the target process.
+ ///
+ /// On Unix, this executes the process using the Unix syscall `execvp`, which will block
+ /// this process, and will only return if there is an error.
+ ///
+ /// On Windows this isn't technically possible. Instead we emulate it to the best of our
+ /// ability. One aspect we fix here is that we specify a handler for the Ctrl-C signal.
+ /// In doing so (and by effectively ignoring it) we should emulate proxying Ctrl-C
+ /// handling to the application at hand, which will either terminate or handle it itself.
+ /// According to Microsoft's documentation at
+ /// <https://docs.microsoft.com/en-us/windows/console/ctrl-c-and-ctrl-break-signals>,
+ /// the Ctrl-C signal is sent to all processes attached to a terminal, which should
+ /// include our child process. If the child terminates then we'll reap them in Cargo
+ /// pretty quickly, and if the child handles the signal then we won't terminate
+ /// (and we shouldn't!) until the process itself later exits.
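+ ///
+ /// # Examples
+ ///
+ /// A minimal sketch; `ls` is only an illustrative command:
+ ///
+ /// ```no_run
+ /// use cargo_util::ProcessBuilder;
+ ///
+ /// // On Unix this only returns on error; on Windows it waits for the child to exit.
+ /// ProcessBuilder::new("ls").arg("-l").exec_replace().unwrap();
+ /// ```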
+ pub fn exec_replace(&self) -> Result<()> {
+ imp::exec_replace(self)
+ }
+
+ /// Like [`Command::output`] but with a better error message.
+ pub fn output(&self) -> Result<Output> {
+ self._output()
+ .with_context(|| ProcessError::could_not_execute(self))
+ }
+
+ fn _output(&self) -> io::Result<Output> {
+ if !debug_force_argfile(self.retry_with_argfile) {
+ let mut cmd = self.build_command();
+ match piped(&mut cmd, self.stdin.is_some()).spawn() {
+ Err(ref e) if self.should_retry_with_argfile(e) => {}
+ Err(e) => return Err(e),
+ Ok(mut child) => {
+ if let Some(stdin) = &self.stdin {
+ child.stdin.take().unwrap().write_all(stdin)?;
+ }
+ return child.wait_with_output();
+ }
+ }
+ }
+ let (mut cmd, argfile) = self.build_command_with_argfile()?;
+ let mut child = piped(&mut cmd, self.stdin.is_some()).spawn()?;
+ if let Some(stdin) = &self.stdin {
+ child.stdin.take().unwrap().write_all(stdin)?;
+ }
+ let output = child.wait_with_output();
+ close_tempfile_and_log_error(argfile);
+ output
+ }
+
+ /// Executes the process, returning the stdio output, or an error if non-zero exit status.
+ pub fn exec_with_output(&self) -> Result<Output> {
+ let output = self.output()?;
+ if output.status.success() {
+ Ok(output)
+ } else {
+ Err(ProcessError::new(
+ &format!("process didn't exit successfully: {}", self),
+ Some(output.status),
+ Some(&output),
+ )
+ .into())
+ }
+ }
+
+ /// Executes a command, passing each line of stdout and stderr to the supplied callbacks, which
+ /// can mutate the string data.
+ ///
+ /// If any invocations of these function return an error, it will be propagated.
+ ///
+ /// If `capture_output` is true, then all the output will also be buffered
+ /// and stored in the returned `Output` object. If it is false, no caching
+ /// is done, and the callbacks are solely responsible for handling the
+ /// output.
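+ ///
+ /// # Examples
+ ///
+ /// A minimal sketch that forwards each line of output as it arrives; `ls` is
+ /// only an illustrative command:
+ ///
+ /// ```no_run
+ /// use cargo_util::ProcessBuilder;
+ ///
+ /// let output = ProcessBuilder::new("ls")
+ ///     .exec_with_streaming(
+ ///         &mut |line| { println!("out: {}", line); Ok(()) },
+ ///         &mut |line| { eprintln!("err: {}", line); Ok(()) },
+ ///         false,
+ ///     )
+ ///     .unwrap();
+ /// // With `capture_output` set to false, nothing is buffered in `output`.
+ /// assert!(output.stdout.is_empty());
+ /// ```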
+ pub fn exec_with_streaming(
+ &self,
+ on_stdout_line: &mut dyn FnMut(&str) -> Result<()>,
+ on_stderr_line: &mut dyn FnMut(&str) -> Result<()>,
+ capture_output: bool,
+ ) -> Result<Output> {
+ let mut stdout = Vec::new();
+ let mut stderr = Vec::new();
+
+ let mut callback_error = None;
+ let mut stdout_pos = 0;
+ let mut stderr_pos = 0;
+
+ let spawn = |mut cmd| {
+ if !debug_force_argfile(self.retry_with_argfile) {
+ match piped(&mut cmd, false).spawn() {
+ Err(ref e) if self.should_retry_with_argfile(e) => {}
+ Err(e) => return Err(e),
+ Ok(child) => return Ok((child, None)),
+ }
+ }
+ let (mut cmd, argfile) = self.build_command_with_argfile()?;
+ Ok((piped(&mut cmd, false).spawn()?, Some(argfile)))
+ };
+
+ let status = (|| {
+ let cmd = self.build_command();
+ let (mut child, argfile) = spawn(cmd)?;
+ let out = child.stdout.take().unwrap();
+ let err = child.stderr.take().unwrap();
+ read2(out, err, &mut |is_out, data, eof| {
+ let pos = if is_out {
+ &mut stdout_pos
+ } else {
+ &mut stderr_pos
+ };
+ let idx = if eof {
+ data.len()
+ } else {
+ match data[*pos..].iter().rposition(|b| *b == b'\n') {
+ Some(i) => *pos + i + 1,
+ None => {
+ *pos = data.len();
+ return;
+ }
+ }
+ };
+
+ let new_lines = &data[..idx];
+
+ for line in String::from_utf8_lossy(new_lines).lines() {
+ if callback_error.is_some() {
+ break;
+ }
+ let callback_result = if is_out {
+ on_stdout_line(line)
+ } else {
+ on_stderr_line(line)
+ };
+ if let Err(e) = callback_result {
+ callback_error = Some(e);
+ break;
+ }
+ }
+
+ if capture_output {
+ let dst = if is_out { &mut stdout } else { &mut stderr };
+ dst.extend(new_lines);
+ }
+
+ data.drain(..idx);
+ *pos = 0;
+ })?;
+ let status = child.wait();
+ if let Some(argfile) = argfile {
+ close_tempfile_and_log_error(argfile);
+ }
+ status
+ })()
+ .with_context(|| ProcessError::could_not_execute(self))?;
+ let output = Output {
+ status,
+ stdout,
+ stderr,
+ };
+
+ {
+ let to_print = if capture_output { Some(&output) } else { None };
+ if let Some(e) = callback_error {
+ let cx = ProcessError::new(
+ &format!("failed to parse process output: {}", self),
+ Some(output.status),
+ to_print,
+ );
+ bail!(anyhow::Error::new(cx).context(e));
+ } else if !output.status.success() {
+ bail!(ProcessError::new(
+ &format!("process didn't exit successfully: {}", self),
+ Some(output.status),
+ to_print,
+ ));
+ }
+ }
+
+ Ok(output)
+ }
+
+ /// Builds the command with an `@<path>` argfile that contains all the
+ /// arguments. This is primarily intended for the rustc/rustdoc family of commands.
+ fn build_command_with_argfile(&self) -> io::Result<(Command, NamedTempFile)> {
+ use std::io::Write as _;
+
+ let mut tmp = tempfile::Builder::new()
+ .prefix("cargo-argfile.")
+ .tempfile()?;
+
+ let mut arg = OsString::from("@");
+ arg.push(tmp.path());
+ let mut cmd = self.build_command_without_args();
+ cmd.arg(arg);
+ log::debug!("created argfile at {} for {self}", tmp.path().display());
+
+ let cap = self.get_args().map(|arg| arg.len() + 1).sum::<usize>();
+ let mut buf = Vec::with_capacity(cap);
+ for arg in &self.args {
+ let arg = arg.to_str().ok_or_else(|| {
+ io::Error::new(
+ io::ErrorKind::Other,
+ format!(
+ "argument for argfile contains invalid UTF-8 characters: `{}`",
+ arg.to_string_lossy()
+ ),
+ )
+ })?;
+ if arg.contains('\n') {
+ return Err(io::Error::new(
+ io::ErrorKind::Other,
+ format!("argument for argfile contains newlines: `{arg}`"),
+ ));
+ }
+ writeln!(buf, "{arg}")?;
+ }
+ tmp.write_all(&mut buf)?;
+ Ok((cmd, tmp))
+ }
+
+ /// Builds a command from `ProcessBuilder` with everything except the `args`.
+ fn build_command_without_args(&self) -> Command {
+ let mut command = {
+ let mut iter = self.wrappers.iter().rev().chain(once(&self.program));
+ let mut cmd = Command::new(iter.next().expect("at least one `program` exists"));
+ cmd.args(iter);
+ cmd
+ };
+ if let Some(cwd) = self.get_cwd() {
+ command.current_dir(cwd);
+ }
+ for (k, v) in &self.env {
+ match *v {
+ Some(ref v) => {
+ command.env(k, v);
+ }
+ None => {
+ command.env_remove(k);
+ }
+ }
+ }
+ if let Some(ref c) = self.jobserver {
+ c.configure(&mut command);
+ }
+ command
+ }
+
+ /// Converts `ProcessBuilder` into a `std::process::Command`, and handles
+ /// the jobserver, if present.
+ ///
+ /// Note that this method doesn't take argfile fallback into account. The
+ /// caller should handle it by themselves.
+ pub fn build_command(&self) -> Command {
+ let mut command = self.build_command_without_args();
+ for arg in &self.args {
+ command.arg(arg);
+ }
+ command
+ }
+
+ /// Wraps an existing command with the provided wrapper, if it is present and valid.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// use cargo_util::ProcessBuilder;
+ /// // Running this would execute `rustc`
+ /// let cmd = ProcessBuilder::new("rustc");
+ ///
+ /// // Running this will execute `sccache rustc`
+ /// let cmd = cmd.wrapped(Some("sccache"));
+ /// ```
+ pub fn wrapped(mut self, wrapper: Option<impl AsRef<OsStr>>) -> Self {
+ if let Some(wrapper) = wrapper.as_ref() {
+ let wrapper = wrapper.as_ref();
+ if !wrapper.is_empty() {
+ self.wrappers.push(wrapper.to_os_string());
+ }
+ }
+ self
+ }
+}
+
+/// Forces the command to use `@path` argfile.
+///
+/// You should set `__CARGO_TEST_FORCE_ARGFILE` to enable this.
+fn debug_force_argfile(retry_enabled: bool) -> bool {
+ cfg!(debug_assertions) && env::var("__CARGO_TEST_FORCE_ARGFILE").is_ok() && retry_enabled
+}
+
+/// Creates new pipes for stderr, stdout, and optionally stdin.
+fn piped(cmd: &mut Command, pipe_stdin: bool) -> &mut Command {
+ cmd.stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .stdin(if pipe_stdin {
+ Stdio::piped()
+ } else {
+ Stdio::null()
+ })
+}
+
+fn close_tempfile_and_log_error(file: NamedTempFile) {
+ file.close().unwrap_or_else(|e| {
+ log::warn!("failed to close temporary file: {e}");
+ });
+}
+
+#[cfg(unix)]
+mod imp {
+ use super::{close_tempfile_and_log_error, debug_force_argfile, ProcessBuilder, ProcessError};
+ use anyhow::Result;
+ use std::io;
+ use std::os::unix::process::CommandExt;
+
+ pub fn exec_replace(process_builder: &ProcessBuilder) -> Result<()> {
+ let mut error;
+ let mut file = None;
+ if debug_force_argfile(process_builder.retry_with_argfile) {
+ let (mut command, argfile) = process_builder.build_command_with_argfile()?;
+ file = Some(argfile);
+ error = command.exec()
+ } else {
+ let mut command = process_builder.build_command();
+ error = command.exec();
+ if process_builder.should_retry_with_argfile(&error) {
+ let (mut command, argfile) = process_builder.build_command_with_argfile()?;
+ file = Some(argfile);
+ error = command.exec()
+ }
+ }
+ if let Some(file) = file {
+ close_tempfile_and_log_error(file);
+ }
+
+ Err(anyhow::Error::from(error).context(ProcessError::new(
+ &format!("could not execute process {}", process_builder),
+ None,
+ None,
+ )))
+ }
+
+ pub fn command_line_too_big(err: &io::Error) -> bool {
+ err.raw_os_error() == Some(libc::E2BIG)
+ }
+}
+
+#[cfg(windows)]
+mod imp {
+ use super::{ProcessBuilder, ProcessError};
+ use anyhow::Result;
+ use std::io;
+ use windows_sys::Win32::Foundation::{BOOL, FALSE, TRUE};
+ use windows_sys::Win32::System::Console::SetConsoleCtrlHandler;
+
+ unsafe extern "system" fn ctrlc_handler(_: u32) -> BOOL {
+ // Do nothing; let the child process handle it.
+ TRUE
+ }
+
+ pub fn exec_replace(process_builder: &ProcessBuilder) -> Result<()> {
+ unsafe {
+ if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE {
+ return Err(ProcessError::new("Could not set Ctrl-C handler.", None, None).into());
+ }
+ }
+
+ // Just execute the process as normal.
+ process_builder.exec()
+ }
+
+ pub fn command_line_too_big(err: &io::Error) -> bool {
+ use windows_sys::Win32::Foundation::ERROR_FILENAME_EXCED_RANGE;
+ err.raw_os_error() == Some(ERROR_FILENAME_EXCED_RANGE as i32)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::ProcessBuilder;
+ use std::fs;
+
+ #[test]
+ fn argfile_build_succeeds() {
+ let mut cmd = ProcessBuilder::new("echo");
+ cmd.args(["foo", "bar"].as_slice());
+ let (cmd, argfile) = cmd.build_command_with_argfile().unwrap();
+
+ assert_eq!(cmd.get_program(), "echo");
+ let cmd_args: Vec<_> = cmd.get_args().map(|s| s.to_str().unwrap()).collect();
+ assert_eq!(cmd_args.len(), 1);
+ assert!(cmd_args[0].starts_with("@"));
+ assert!(cmd_args[0].contains("cargo-argfile."));
+
+ let buf = fs::read_to_string(argfile.path()).unwrap();
+ assert_eq!(buf, "foo\nbar\n");
+ }
+
+ #[test]
+ fn argfile_build_fails_if_arg_contains_newline() {
+ let mut cmd = ProcessBuilder::new("echo");
+ cmd.arg("foo\n");
+ let err = cmd.build_command_with_argfile().unwrap_err();
+ assert_eq!(
+ err.to_string(),
+ "argument for argfile contains newlines: `foo\n`"
+ );
+ }
+
+ #[test]
+ fn argfile_build_fails_if_arg_contains_invalid_utf8() {
+ let mut cmd = ProcessBuilder::new("echo");
+
+ #[cfg(windows)]
+ let invalid_arg = {
+ use std::os::windows::prelude::*;
+ std::ffi::OsString::from_wide(&[0x0066, 0x006f, 0xD800, 0x006f])
+ };
+
+ #[cfg(unix)]
+ let invalid_arg = {
+ use std::os::unix::ffi::OsStrExt;
+ std::ffi::OsStr::from_bytes(&[0x66, 0x6f, 0x80, 0x6f]).to_os_string()
+ };
+
+ cmd.arg(invalid_arg);
+ let err = cmd.build_command_with_argfile().unwrap_err();
+ assert_eq!(
+ err.to_string(),
+ "argument for argfile contains invalid UTF-8 characters: `fo�o`"
+ );
+ }
+}
diff --git a/src/tools/cargo/crates/cargo-util/src/process_error.rs b/src/tools/cargo/crates/cargo-util/src/process_error.rs
new file mode 100644
index 000000000..9b4a38cb5
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-util/src/process_error.rs
@@ -0,0 +1,200 @@
+//! Error value for [`crate::ProcessBuilder`] when a process fails.
+
+use std::fmt;
+use std::process::{ExitStatus, Output};
+use std::str;
+
+#[derive(Debug)]
+pub struct ProcessError {
+ /// A detailed description to show to the user why the process failed.
+ pub desc: String,
+
+ /// The exit status of the process.
+ ///
+ /// This can be `None` if the process failed to launch (like process not
+ /// found) or if the exit status wasn't a code but was instead something
+ /// like termination via a signal.
+ pub code: Option<i32>,
+
+ /// The stdout from the process.
+ ///
+ /// This can be `None` if the process failed to launch, or the output was
+ /// not captured.
+ pub stdout: Option<Vec<u8>>,
+
+ /// The stderr from the process.
+ ///
+ /// This can be `None` if the process failed to launch, or the output was
+ /// not captured.
+ pub stderr: Option<Vec<u8>>,
+}
+
+impl fmt::Display for ProcessError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.desc.fmt(f)
+ }
+}
+
+impl std::error::Error for ProcessError {}
+
+impl ProcessError {
+ /// Creates a new [`ProcessError`].
+ ///
+ /// * `status` can be `None` if the process did not launch.
+ /// * `output` can be `None` if the process did not launch, or output was not captured.
+ pub fn new(msg: &str, status: Option<ExitStatus>, output: Option<&Output>) -> ProcessError {
+ let exit = match status {
+ Some(s) => exit_status_to_string(s),
+ None => "never executed".to_string(),
+ };
+
+ Self::new_raw(
+ msg,
+ status.and_then(|s| s.code()),
+ &exit,
+ output.map(|s| s.stdout.as_slice()),
+ output.map(|s| s.stderr.as_slice()),
+ )
+ }
+
+ /// Creates a new [`ProcessError`] with the raw output data.
+ ///
+ /// * `code` can be `None` for situations like being killed by a signal on unix.
+ pub fn new_raw(
+ msg: &str,
+ code: Option<i32>,
+ status: &str,
+ stdout: Option<&[u8]>,
+ stderr: Option<&[u8]>,
+ ) -> ProcessError {
+ let mut desc = format!("{} ({})", msg, status);
+
+ if let Some(out) = stdout {
+ match str::from_utf8(out) {
+ Ok(s) if !s.trim().is_empty() => {
+ desc.push_str("\n--- stdout\n");
+ desc.push_str(s);
+ }
+ Ok(..) | Err(..) => {}
+ }
+ }
+ if let Some(out) = stderr {
+ match str::from_utf8(out) {
+ Ok(s) if !s.trim().is_empty() => {
+ desc.push_str("\n--- stderr\n");
+ desc.push_str(s);
+ }
+ Ok(..) | Err(..) => {}
+ }
+ }
+
+ ProcessError {
+ desc,
+ code,
+ stdout: stdout.map(|s| s.to_vec()),
+ stderr: stderr.map(|s| s.to_vec()),
+ }
+ }
+
+ /// Creates a [`ProcessError`] with "could not execute process {cmd}".
+ ///
+ /// * `cmd` is usually but not limited to [`std::process::Command`].
+ pub fn could_not_execute(cmd: impl fmt::Display) -> ProcessError {
+ ProcessError::new(&format!("could not execute process {cmd}"), None, None)
+ }
+}
+
+/// Converts an [`ExitStatus`] to a human-readable string suitable for
+/// displaying to a user.
+pub fn exit_status_to_string(status: ExitStatus) -> String {
+ return status_to_string(status);
+
+ #[cfg(unix)]
+ fn status_to_string(status: ExitStatus) -> String {
+ use std::os::unix::process::*;
+
+ if let Some(signal) = status.signal() {
+ let name = match signal as libc::c_int {
+ libc::SIGABRT => ", SIGABRT: process abort signal",
+ libc::SIGALRM => ", SIGALRM: alarm clock",
+ libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation",
+ libc::SIGHUP => ", SIGHUP: hangup",
+ libc::SIGILL => ", SIGILL: illegal instruction",
+ libc::SIGINT => ", SIGINT: terminal interrupt signal",
+ libc::SIGKILL => ", SIGKILL: kill",
+ libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read",
+ libc::SIGQUIT => ", SIGQUIT: terminal quit signal",
+ libc::SIGSEGV => ", SIGSEGV: invalid memory reference",
+ libc::SIGTERM => ", SIGTERM: termination signal",
+ libc::SIGBUS => ", SIGBUS: access to undefined memory",
+ #[cfg(not(target_os = "haiku"))]
+ libc::SIGSYS => ", SIGSYS: bad system call",
+ libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap",
+ _ => "",
+ };
+ format!("signal: {}{}", signal, name)
+ } else {
+ status.to_string()
+ }
+ }
+
+ #[cfg(windows)]
+ fn status_to_string(status: ExitStatus) -> String {
+ use windows_sys::Win32::Foundation::*;
+
+ let mut base = status.to_string();
+ let extra = match status.code().unwrap() as i32 {
+ STATUS_ACCESS_VIOLATION => "STATUS_ACCESS_VIOLATION",
+ STATUS_IN_PAGE_ERROR => "STATUS_IN_PAGE_ERROR",
+ STATUS_INVALID_HANDLE => "STATUS_INVALID_HANDLE",
+ STATUS_INVALID_PARAMETER => "STATUS_INVALID_PARAMETER",
+ STATUS_NO_MEMORY => "STATUS_NO_MEMORY",
+ STATUS_ILLEGAL_INSTRUCTION => "STATUS_ILLEGAL_INSTRUCTION",
+ STATUS_NONCONTINUABLE_EXCEPTION => "STATUS_NONCONTINUABLE_EXCEPTION",
+ STATUS_INVALID_DISPOSITION => "STATUS_INVALID_DISPOSITION",
+ STATUS_ARRAY_BOUNDS_EXCEEDED => "STATUS_ARRAY_BOUNDS_EXCEEDED",
+ STATUS_FLOAT_DENORMAL_OPERAND => "STATUS_FLOAT_DENORMAL_OPERAND",
+ STATUS_FLOAT_DIVIDE_BY_ZERO => "STATUS_FLOAT_DIVIDE_BY_ZERO",
+ STATUS_FLOAT_INEXACT_RESULT => "STATUS_FLOAT_INEXACT_RESULT",
+ STATUS_FLOAT_INVALID_OPERATION => "STATUS_FLOAT_INVALID_OPERATION",
+ STATUS_FLOAT_OVERFLOW => "STATUS_FLOAT_OVERFLOW",
+ STATUS_FLOAT_STACK_CHECK => "STATUS_FLOAT_STACK_CHECK",
+ STATUS_FLOAT_UNDERFLOW => "STATUS_FLOAT_UNDERFLOW",
+ STATUS_INTEGER_DIVIDE_BY_ZERO => "STATUS_INTEGER_DIVIDE_BY_ZERO",
+ STATUS_INTEGER_OVERFLOW => "STATUS_INTEGER_OVERFLOW",
+ STATUS_PRIVILEGED_INSTRUCTION => "STATUS_PRIVILEGED_INSTRUCTION",
+ STATUS_STACK_OVERFLOW => "STATUS_STACK_OVERFLOW",
+ STATUS_DLL_NOT_FOUND => "STATUS_DLL_NOT_FOUND",
+ STATUS_ORDINAL_NOT_FOUND => "STATUS_ORDINAL_NOT_FOUND",
+ STATUS_ENTRYPOINT_NOT_FOUND => "STATUS_ENTRYPOINT_NOT_FOUND",
+ STATUS_CONTROL_C_EXIT => "STATUS_CONTROL_C_EXIT",
+ STATUS_DLL_INIT_FAILED => "STATUS_DLL_INIT_FAILED",
+ STATUS_FLOAT_MULTIPLE_FAULTS => "STATUS_FLOAT_MULTIPLE_FAULTS",
+ STATUS_FLOAT_MULTIPLE_TRAPS => "STATUS_FLOAT_MULTIPLE_TRAPS",
+ STATUS_REG_NAT_CONSUMPTION => "STATUS_REG_NAT_CONSUMPTION",
+ STATUS_HEAP_CORRUPTION => "STATUS_HEAP_CORRUPTION",
+ STATUS_STACK_BUFFER_OVERRUN => "STATUS_STACK_BUFFER_OVERRUN",
+ STATUS_ASSERTION_FAILURE => "STATUS_ASSERTION_FAILURE",
+ _ => return base,
+ };
+ base.push_str(", ");
+ base.push_str(extra);
+ base
+ }
+}
+
+/// Returns `true` if the given process exit code is something a normal
+/// process would exit with.
+///
+/// This helps differentiate from abnormal termination codes, such as
+/// segmentation faults or signals.
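+///
+/// # Examples
+///
+/// A quick sanity check; this assumes the function is re-exported at the crate
+/// root as `cargo_util::is_simple_exit_code`:
+///
+/// ```
+/// use cargo_util::is_simple_exit_code;
+///
+/// assert!(is_simple_exit_code(0));
+/// assert!(is_simple_exit_code(101)); // typical Rust panic exit code
+/// assert!(!is_simple_exit_code(139)); // e.g. 128 + SIGSEGV on Unix
+/// ```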
+pub fn is_simple_exit_code(code: i32) -> bool {
+ // Typical unix exit codes are 0 to 127.
+ // Windows doesn't have anything "typical", and is a
+ // 32-bit number (which appears signed here, but is really
+ // unsigned). However, most of the interesting NTSTATUS
+ // codes are very large. This is just a rough
+ // approximation of which codes are "normal" and which
+ // ones are abnormal termination.
+ code >= 0 && code <= 127
+}
diff --git a/src/tools/cargo/crates/cargo-util/src/read2.rs b/src/tools/cargo/crates/cargo-util/src/read2.rs
new file mode 100644
index 000000000..2e6b66a14
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-util/src/read2.rs
@@ -0,0 +1,185 @@
+pub use self::imp::read2;
+
+#[cfg(unix)]
+mod imp {
+ use libc::{c_int, fcntl, F_GETFL, F_SETFL, O_NONBLOCK};
+ use std::io;
+ use std::io::prelude::*;
+ use std::mem;
+ use std::os::unix::prelude::*;
+ use std::process::{ChildStderr, ChildStdout};
+
+ fn set_nonblock(fd: c_int) -> io::Result<()> {
+ let flags = unsafe { fcntl(fd, F_GETFL) };
+ if flags == -1 || unsafe { fcntl(fd, F_SETFL, flags | O_NONBLOCK) } == -1 {
+ return Err(io::Error::last_os_error());
+ }
+ Ok(())
+ }
+
+ pub fn read2(
+ mut out_pipe: ChildStdout,
+ mut err_pipe: ChildStderr,
+ data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
+ set_nonblock(out_pipe.as_raw_fd())?;
+ set_nonblock(err_pipe.as_raw_fd())?;
+
+ let mut out_done = false;
+ let mut err_done = false;
+ let mut out = Vec::new();
+ let mut err = Vec::new();
+
+ let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() };
+ fds[0].fd = out_pipe.as_raw_fd();
+ fds[0].events = libc::POLLIN;
+ fds[1].fd = err_pipe.as_raw_fd();
+ fds[1].events = libc::POLLIN;
+ let mut nfds = 2;
+ let mut errfd = 1;
+
+ while nfds > 0 {
+ // wait for either pipe to become readable using `poll`
+ let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) };
+ if r == -1 {
+ let err = io::Error::last_os_error();
+ if err.kind() == io::ErrorKind::Interrupted {
+ continue;
+ }
+ return Err(err);
+ }
+
+ // Read as much as we can from each pipe, ignoring EWOULDBLOCK or
+ // EAGAIN. If we hit EOF, then this will happen because the underlying
+ // reader will return Ok(0), in which case we'll see `Ok` ourselves. In
+ // this case we flip the other fd back into blocking mode and read
+ // whatever's leftover on that file descriptor.
+ let handle = |res: io::Result<_>| match res {
+ Ok(_) => Ok(true),
+ Err(e) => {
+ if e.kind() == io::ErrorKind::WouldBlock {
+ Ok(false)
+ } else {
+ Err(e)
+ }
+ }
+ };
+ if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? {
+ err_done = true;
+ nfds -= 1;
+ }
+ data(false, &mut err, err_done);
+ if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? {
+ out_done = true;
+ fds[0].fd = err_pipe.as_raw_fd();
+ errfd = 0;
+ nfds -= 1;
+ }
+ data(true, &mut out, out_done);
+ }
+ Ok(())
+ }
+}
+
+#[cfg(windows)]
+mod imp {
+ use std::io;
+ use std::os::windows::prelude::*;
+ use std::process::{ChildStderr, ChildStdout};
+ use std::slice;
+
+ use miow::iocp::{CompletionPort, CompletionStatus};
+ use miow::pipe::NamedPipe;
+ use miow::Overlapped;
+ use windows_sys::Win32::Foundation::ERROR_BROKEN_PIPE;
+
+ struct Pipe<'a> {
+ dst: &'a mut Vec<u8>,
+ overlapped: Overlapped,
+ pipe: NamedPipe,
+ done: bool,
+ }
+
+ pub fn read2(
+ out_pipe: ChildStdout,
+ err_pipe: ChildStderr,
+ data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
+ let mut out = Vec::new();
+ let mut err = Vec::new();
+
+ let port = CompletionPort::new(1)?;
+ port.add_handle(0, &out_pipe)?;
+ port.add_handle(1, &err_pipe)?;
+
+ unsafe {
+ let mut out_pipe = Pipe::new(out_pipe, &mut out);
+ let mut err_pipe = Pipe::new(err_pipe, &mut err);
+
+ out_pipe.read()?;
+ err_pipe.read()?;
+
+ let mut status = [CompletionStatus::zero(), CompletionStatus::zero()];
+
+ while !out_pipe.done || !err_pipe.done {
+ for status in port.get_many(&mut status, None)? {
+ if status.token() == 0 {
+ out_pipe.complete(status);
+ data(true, out_pipe.dst, out_pipe.done);
+ out_pipe.read()?;
+ } else {
+ err_pipe.complete(status);
+ data(false, err_pipe.dst, err_pipe.done);
+ err_pipe.read()?;
+ }
+ }
+ }
+
+ Ok(())
+ }
+ }
+
+ impl<'a> Pipe<'a> {
+ unsafe fn new<P: IntoRawHandle>(p: P, dst: &'a mut Vec<u8>) -> Pipe<'a> {
+ Pipe {
+ dst,
+ pipe: NamedPipe::from_raw_handle(p.into_raw_handle()),
+ overlapped: Overlapped::zero(),
+ done: false,
+ }
+ }
+
+ unsafe fn read(&mut self) -> io::Result<()> {
+ let dst = slice_to_end(self.dst);
+ match self.pipe.read_overlapped(dst, self.overlapped.raw()) {
+ Ok(_) => Ok(()),
+ Err(e) => {
+ if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) {
+ self.done = true;
+ Ok(())
+ } else {
+ Err(e)
+ }
+ }
+ }
+ }
+
+ unsafe fn complete(&mut self, status: &CompletionStatus) {
+ let prev = self.dst.len();
+ self.dst.set_len(prev + status.bytes_transferred() as usize);
+ if status.bytes_transferred() == 0 {
+ self.done = true;
+ }
+ }
+ }
+
+ unsafe fn slice_to_end(v: &mut Vec<u8>) -> &mut [u8] {
+ if v.capacity() == 0 {
+ v.reserve(16);
+ }
+ if v.capacity() == v.len() {
+ v.reserve(1);
+ }
+ slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len())
+ }
+}
diff --git a/src/tools/cargo/crates/cargo-util/src/registry.rs b/src/tools/cargo/crates/cargo-util/src/registry.rs
new file mode 100644
index 000000000..6b1ccd22c
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-util/src/registry.rs
@@ -0,0 +1,45 @@
+/// Makes a path to a dependency, which aligns with
+///
+/// - [the format of Cargo's index on the filesystem][1], and
+/// - [the crates.io index][2].
+///
+/// [1]: https://docs.rs/cargo/latest/cargo/sources/registry/index.html#the-format-of-the-index
+/// [2]: https://github.com/rust-lang/crates.io-index
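+///
+/// # Examples
+///
+/// A couple of illustrative cases; this assumes the module is exposed as
+/// `cargo_util::registry`:
+///
+/// ```
+/// use cargo_util::registry::make_dep_path;
+///
+/// assert_eq!(make_dep_path("serde", false), "se/rd/serde");
+/// assert_eq!(make_dep_path("serde", true), "se/rd");
+/// ```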
+pub fn make_dep_path(dep_name: &str, prefix_only: bool) -> String {
+ let (slash, name) = if prefix_only {
+ ("", "")
+ } else {
+ ("/", dep_name)
+ };
+ match dep_name.len() {
+ 1 => format!("1{}{}", slash, name),
+ 2 => format!("2{}{}", slash, name),
+ 3 => format!("3/{}{}{}", &dep_name[..1], slash, name),
+ _ => format!("{}/{}{}{}", &dep_name[0..2], &dep_name[2..4], slash, name),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::make_dep_path;
+
+ #[test]
+ fn prefix_only() {
+ assert_eq!(make_dep_path("a", true), "1");
+ assert_eq!(make_dep_path("ab", true), "2");
+ assert_eq!(make_dep_path("abc", true), "3/a");
+ assert_eq!(make_dep_path("Abc", true), "3/A");
+ assert_eq!(make_dep_path("AbCd", true), "Ab/Cd");
+ assert_eq!(make_dep_path("aBcDe", true), "aB/cD");
+ }
+
+ #[test]
+ fn full() {
+ assert_eq!(make_dep_path("a", false), "1/a");
+ assert_eq!(make_dep_path("ab", false), "2/ab");
+ assert_eq!(make_dep_path("abc", false), "3/a/abc");
+ assert_eq!(make_dep_path("Abc", false), "3/A/Abc");
+ assert_eq!(make_dep_path("AbCd", false), "Ab/Cd/AbCd");
+ assert_eq!(make_dep_path("aBcDe", false), "aB/cD/aBcDe");
+ }
+}
diff --git a/src/tools/cargo/crates/cargo-util/src/sha256.rs b/src/tools/cargo/crates/cargo-util/src/sha256.rs
new file mode 100644
index 000000000..8906fe93d
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-util/src/sha256.rs
@@ -0,0 +1,53 @@
+use super::paths;
+use anyhow::{Context, Result};
+use sha2::{Digest, Sha256 as Sha2_sha256};
+use std::fs::File;
+use std::io::{self, Read};
+use std::path::Path;
+
+pub struct Sha256(Sha2_sha256);
+
+impl Sha256 {
+ pub fn new() -> Sha256 {
+ let hasher = Sha2_sha256::new();
+ Sha256(hasher)
+ }
+
+ pub fn update(&mut self, bytes: &[u8]) -> &mut Sha256 {
+ let _ = self.0.update(bytes);
+ self
+ }
+
+ pub fn update_file(&mut self, mut file: &File) -> io::Result<&mut Sha256> {
+ let mut buf = [0; 64 * 1024];
+ loop {
+ let n = file.read(&mut buf)?;
+ if n == 0 {
+ break Ok(self);
+ }
+ self.update(&buf[..n]);
+ }
+ }
+
+ pub fn update_path<P: AsRef<Path>>(&mut self, path: P) -> Result<&mut Sha256> {
+ let path = path.as_ref();
+ let file = paths::open(path)?;
+ self.update_file(&file)
+ .with_context(|| format!("failed to read `{}`", path.display()))?;
+ Ok(self)
+ }
+
+ pub fn finish(&mut self) -> [u8; 32] {
+ self.0.finalize_reset().into()
+ }
+
+ pub fn finish_hex(&mut self) -> String {
+ hex::encode(self.finish())
+ }
+}
+
+impl Default for Sha256 {
+ fn default() -> Self {
+ Self::new()
+ }
+}
diff --git a/src/tools/cargo/crates/crates-io/Cargo.toml b/src/tools/cargo/crates/crates-io/Cargo.toml
new file mode 100644
index 000000000..004e2daff
--- /dev/null
+++ b/src/tools/cargo/crates/crates-io/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "crates-io"
+version = "0.36.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = """
+Helpers for interacting with crates.io
+"""
+
+[lib]
+name = "crates_io"
+path = "lib.rs"
+
+[dependencies]
+anyhow = "1.0.34"
+curl = "0.4"
+percent-encoding = "2.0"
+serde = { version = "1.0", features = ['derive'] }
+serde_json = "1.0"
+url = "2.0"
diff --git a/src/tools/cargo/crates/crates-io/LICENSE-APACHE b/src/tools/cargo/crates/crates-io/LICENSE-APACHE
new file mode 120000
index 000000000..1cd601d0a
--- /dev/null
+++ b/src/tools/cargo/crates/crates-io/LICENSE-APACHE
@@ -0,0 +1 @@
+../../LICENSE-APACHE \ No newline at end of file
diff --git a/src/tools/cargo/crates/crates-io/LICENSE-MIT b/src/tools/cargo/crates/crates-io/LICENSE-MIT
new file mode 120000
index 000000000..b2cfbdc7b
--- /dev/null
+++ b/src/tools/cargo/crates/crates-io/LICENSE-MIT
@@ -0,0 +1 @@
+../../LICENSE-MIT \ No newline at end of file
diff --git a/src/tools/cargo/crates/crates-io/lib.rs b/src/tools/cargo/crates/crates-io/lib.rs
new file mode 100644
index 000000000..e0197568a
--- /dev/null
+++ b/src/tools/cargo/crates/crates-io/lib.rs
@@ -0,0 +1,538 @@
+#![allow(clippy::all)]
+
+use std::collections::BTreeMap;
+use std::fmt;
+use std::fs::File;
+use std::io::prelude::*;
+use std::io::{Cursor, SeekFrom};
+use std::time::Instant;
+
+use anyhow::{bail, format_err, Context, Result};
+use curl::easy::{Easy, List};
+use percent_encoding::{percent_encode, NON_ALPHANUMERIC};
+use serde::{Deserialize, Serialize};
+use url::Url;
+
+pub struct Registry {
+ /// The base URL for issuing API requests.
+ host: String,
+ /// Optional authorization token.
+ /// If None, commands requiring authorization will fail.
+ token: Option<String>,
+ /// Curl handle for issuing requests.
+ handle: Easy,
+ /// Whether to include the authorization token with all requests.
+ auth_required: bool,
+}
+
+#[derive(PartialEq, Clone, Copy)]
+pub enum Auth {
+ Authorized,
+ Unauthorized,
+}
+
+#[derive(Deserialize)]
+pub struct Crate {
+ pub name: String,
+ pub description: Option<String>,
+ pub max_version: String,
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct NewCrate {
+ pub name: String,
+ pub vers: String,
+ pub deps: Vec<NewCrateDependency>,
+ pub features: BTreeMap<String, Vec<String>>,
+ pub authors: Vec<String>,
+ pub description: Option<String>,
+ pub documentation: Option<String>,
+ pub homepage: Option<String>,
+ pub readme: Option<String>,
+ pub readme_file: Option<String>,
+ pub keywords: Vec<String>,
+ pub categories: Vec<String>,
+ pub license: Option<String>,
+ pub license_file: Option<String>,
+ pub repository: Option<String>,
+ pub badges: BTreeMap<String, BTreeMap<String, String>>,
+ pub links: Option<String>,
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct NewCrateDependency {
+ pub optional: bool,
+ pub default_features: bool,
+ pub name: String,
+ pub features: Vec<String>,
+ pub version_req: String,
+ pub target: Option<String>,
+ pub kind: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub registry: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub explicit_name_in_toml: Option<String>,
+}
+
+#[derive(Deserialize)]
+pub struct User {
+ pub id: u32,
+ pub login: String,
+ pub avatar: Option<String>,
+ pub email: Option<String>,
+ pub name: Option<String>,
+}
+
+pub struct Warnings {
+ pub invalid_categories: Vec<String>,
+ pub invalid_badges: Vec<String>,
+ pub other: Vec<String>,
+}
+
+#[derive(Deserialize)]
+struct R {
+ ok: bool,
+}
+#[derive(Deserialize)]
+struct OwnerResponse {
+ ok: bool,
+ msg: String,
+}
+#[derive(Deserialize)]
+struct ApiErrorList {
+ errors: Vec<ApiError>,
+}
+#[derive(Deserialize)]
+struct ApiError {
+ detail: String,
+}
+#[derive(Serialize)]
+struct OwnersReq<'a> {
+ users: &'a [&'a str],
+}
+#[derive(Deserialize)]
+struct Users {
+ users: Vec<User>,
+}
+#[derive(Deserialize)]
+struct TotalCrates {
+ total: u32,
+}
+#[derive(Deserialize)]
+struct Crates {
+ crates: Vec<Crate>,
+ meta: TotalCrates,
+}
+
+#[derive(Debug)]
+pub enum ResponseError {
+ Curl(curl::Error),
+ Api {
+ code: u32,
+ errors: Vec<String>,
+ },
+ Code {
+ code: u32,
+ headers: Vec<String>,
+ body: String,
+ },
+ Other(anyhow::Error),
+}
+
+impl std::error::Error for ResponseError {
+ fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+ match self {
+ ResponseError::Curl(..) => None,
+ ResponseError::Api { .. } => None,
+ ResponseError::Code { .. } => None,
+ ResponseError::Other(e) => Some(e.as_ref()),
+ }
+ }
+}
+
+impl fmt::Display for ResponseError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ ResponseError::Curl(e) => write!(f, "{}", e),
+ ResponseError::Api { code, errors } => {
+ f.write_str("the remote server responded with an error")?;
+ if *code != 200 {
+ write!(f, " (status {} {})", code, reason(*code))?;
+ };
+ write!(f, ": {}", errors.join(", "))
+ }
+ ResponseError::Code {
+ code,
+ headers,
+ body,
+ } => write!(
+ f,
+ "failed to get a 200 OK response, got {}\n\
+ headers:\n\
+ \t{}\n\
+ body:\n\
+ {}",
+ code,
+ headers.join("\n\t"),
+ body
+ ),
+ ResponseError::Other(..) => write!(f, "invalid response from server"),
+ }
+ }
+}
+
+impl From<curl::Error> for ResponseError {
+ fn from(error: curl::Error) -> Self {
+ ResponseError::Curl(error)
+ }
+}
+
+impl Registry {
+ /// Creates a new `Registry`.
+ ///
+ /// ## Example
+ ///
+ /// ```rust
+ /// use curl::easy::Easy;
+ /// use crates_io::Registry;
+ ///
+ /// let mut handle = Easy::new();
+ /// // If connecting to crates.io, a user-agent is required.
+ /// handle.useragent("my_crawler (example.com/info)");
+ /// let mut reg = Registry::new_handle(String::from("https://crates.io"), None, handle, false);
+ /// ```
+ pub fn new_handle(
+ host: String,
+ token: Option<String>,
+ handle: Easy,
+ auth_required: bool,
+ ) -> Registry {
+ Registry {
+ host,
+ token,
+ handle,
+ auth_required,
+ }
+ }
+
+ pub fn set_token(&mut self, token: Option<String>) {
+ self.token = token;
+ }
+
+ fn token(&self) -> Result<&str> {
+ let token = match self.token.as_ref() {
+ Some(s) => s,
+ None => bail!("no upload token found, please run `cargo login`"),
+ };
+ check_token(token)?;
+ Ok(token)
+ }
+
+ pub fn host(&self) -> &str {
+ &self.host
+ }
+
+ pub fn host_is_crates_io(&self) -> bool {
+ is_url_crates_io(&self.host)
+ }
+
+ pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<String> {
+ let body = serde_json::to_string(&OwnersReq { users: owners })?;
+ let body = self.put(&format!("/crates/{}/owners", krate), body.as_bytes())?;
+ assert!(serde_json::from_str::<OwnerResponse>(&body)?.ok);
+ Ok(serde_json::from_str::<OwnerResponse>(&body)?.msg)
+ }
+
+ pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
+ let body = serde_json::to_string(&OwnersReq { users: owners })?;
+ let body = self.delete(&format!("/crates/{}/owners", krate), Some(body.as_bytes()))?;
+ assert!(serde_json::from_str::<OwnerResponse>(&body)?.ok);
+ Ok(())
+ }
+
+ pub fn list_owners(&mut self, krate: &str) -> Result<Vec<User>> {
+ let body = self.get(&format!("/crates/{}/owners", krate))?;
+ Ok(serde_json::from_str::<Users>(&body)?.users)
+ }
+
+ pub fn publish(&mut self, krate: &NewCrate, mut tarball: &File) -> Result<Warnings> {
+ let json = serde_json::to_string(krate)?;
+ // Prepare the body. The format of the upload request is:
+ //
+ // <le u32 of json>
+ // <json request> (metadata for the package)
+ // <le u32 of tarball>
+ // <source tarball>
+
+ // NOTE: This can be replaced with `stream_len` if it is ever stabilized.
+ //
+ // This checks the length using seeking instead of metadata, because
+ // on some filesystems, getting the metadata will fail because
+ // the file was renamed in ops::package.
+ let tarball_len = tarball
+ .seek(SeekFrom::End(0))
+ .with_context(|| "failed to seek tarball")?;
+ tarball
+ .seek(SeekFrom::Start(0))
+ .with_context(|| "failed to seek tarball")?;
+ let header = {
+ let mut w = Vec::new();
+ w.extend(&(json.len() as u32).to_le_bytes());
+ w.extend(json.as_bytes().iter().cloned());
+ w.extend(&(tarball_len as u32).to_le_bytes());
+ w
+ };
+ let size = tarball_len as usize + header.len();
+ let mut body = Cursor::new(header).chain(tarball);
+
+ let url = format!("{}/api/v1/crates/new", self.host);
+
+ self.handle.put(true)?;
+ self.handle.url(&url)?;
+ self.handle.in_filesize(size as u64)?;
+ let mut headers = List::new();
+ headers.append("Accept: application/json")?;
+ headers.append(&format!("Authorization: {}", self.token()?))?;
+ self.handle.http_headers(headers)?;
+
+ let started = Instant::now();
+ let body = self
+ .handle(&mut |buf| body.read(buf).unwrap_or(0))
+ .map_err(|e| match e {
+ ResponseError::Code { code, .. }
+ if code == 503
+ && started.elapsed().as_secs() >= 29
+ && self.host_is_crates_io() =>
+ {
+ format_err!(
+ "Request timed out after 30 seconds. If you're trying to \
+ upload a crate it may be too large. If the crate is under \
+ 10MB in size, you can email help@crates.io for assistance.\n\
+ Total size was {}.",
+ tarball_len
+ )
+ }
+ _ => e.into(),
+ })?;
+
+ let response = if body.is_empty() {
+ "{}".parse()?
+ } else {
+ body.parse::<serde_json::Value>()?
+ };
+
+ let invalid_categories: Vec<String> = response
+ .get("warnings")
+ .and_then(|j| j.get("invalid_categories"))
+ .and_then(|j| j.as_array())
+ .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect())
+ .unwrap_or_else(Vec::new);
+
+ let invalid_badges: Vec<String> = response
+ .get("warnings")
+ .and_then(|j| j.get("invalid_badges"))
+ .and_then(|j| j.as_array())
+ .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect())
+ .unwrap_or_else(Vec::new);
+
+ let other: Vec<String> = response
+ .get("warnings")
+ .and_then(|j| j.get("other"))
+ .and_then(|j| j.as_array())
+ .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect())
+ .unwrap_or_else(Vec::new);
+
+ Ok(Warnings {
+ invalid_categories,
+ invalid_badges,
+ other,
+ })
+ }
+
+ pub fn search(&mut self, query: &str, limit: u32) -> Result<(Vec<Crate>, u32)> {
+ let formatted_query = percent_encode(query.as_bytes(), NON_ALPHANUMERIC);
+ let body = self.req(
+ &format!("/crates?q={}&per_page={}", formatted_query, limit),
+ None,
+ Auth::Unauthorized,
+ )?;
+
+ let crates = serde_json::from_str::<Crates>(&body)?;
+ Ok((crates.crates, crates.meta.total))
+ }
+
+ pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> {
+ let body = self.delete(&format!("/crates/{}/{}/yank", krate, version), None)?;
+ assert!(serde_json::from_str::<R>(&body)?.ok);
+ Ok(())
+ }
+
+ pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> {
+ let body = self.put(&format!("/crates/{}/{}/unyank", krate, version), &[])?;
+ assert!(serde_json::from_str::<R>(&body)?.ok);
+ Ok(())
+ }
+
+ fn put(&mut self, path: &str, b: &[u8]) -> Result<String> {
+ self.handle.put(true)?;
+ self.req(path, Some(b), Auth::Authorized)
+ }
+
+ fn get(&mut self, path: &str) -> Result<String> {
+ self.handle.get(true)?;
+ self.req(path, None, Auth::Authorized)
+ }
+
+ fn delete(&mut self, path: &str, b: Option<&[u8]>) -> Result<String> {
+ self.handle.custom_request("DELETE")?;
+ self.req(path, b, Auth::Authorized)
+ }
+
+ fn req(&mut self, path: &str, body: Option<&[u8]>, authorized: Auth) -> Result<String> {
+ self.handle.url(&format!("{}/api/v1{}", self.host, path))?;
+ let mut headers = List::new();
+ headers.append("Accept: application/json")?;
+ headers.append("Content-Type: application/json")?;
+
+ if self.auth_required || authorized == Auth::Authorized {
+ headers.append(&format!("Authorization: {}", self.token()?))?;
+ }
+ self.handle.http_headers(headers)?;
+ match body {
+ Some(mut body) => {
+ self.handle.upload(true)?;
+ self.handle.in_filesize(body.len() as u64)?;
+ self.handle(&mut |buf| body.read(buf).unwrap_or(0))
+ .map_err(|e| e.into())
+ }
+ None => self.handle(&mut |_| 0).map_err(|e| e.into()),
+ }
+ }
+
+ fn handle(
+ &mut self,
+ read: &mut dyn FnMut(&mut [u8]) -> usize,
+ ) -> std::result::Result<String, ResponseError> {
+ let mut headers = Vec::new();
+ let mut body = Vec::new();
+ {
+ let mut handle = self.handle.transfer();
+ handle.read_function(|buf| Ok(read(buf)))?;
+ handle.write_function(|data| {
+ body.extend_from_slice(data);
+ Ok(data.len())
+ })?;
+ handle.header_function(|data| {
+ // Headers contain trailing \r\n, trim them to make it easier
+ // to work with.
+ let s = String::from_utf8_lossy(data).trim().to_string();
+ headers.push(s);
+ true
+ })?;
+ handle.perform()?;
+ }
+
+ let body = match String::from_utf8(body) {
+ Ok(body) => body,
+ Err(..) => {
+ return Err(ResponseError::Other(format_err!(
+ "response body was not valid utf-8"
+ )))
+ }
+ };
+ let errors = serde_json::from_str::<ApiErrorList>(&body)
+ .ok()
+ .map(|s| s.errors.into_iter().map(|s| s.detail).collect::<Vec<_>>());
+
+ match (self.handle.response_code()?, errors) {
+ (0, None) | (200, None) => Ok(body),
+ (code, Some(errors)) => Err(ResponseError::Api { code, errors }),
+ (code, None) => Err(ResponseError::Code {
+ code,
+ headers,
+ body,
+ }),
+ }
+ }
+}
+
+fn reason(code: u32) -> &'static str {
+ // Taken from https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
+ match code {
+ 100 => "Continue",
+ 101 => "Switching Protocol",
+ 103 => "Early Hints",
+ 200 => "OK",
+ 201 => "Created",
+ 202 => "Accepted",
+ 203 => "Non-Authoritative Information",
+ 204 => "No Content",
+ 205 => "Reset Content",
+ 206 => "Partial Content",
+ 300 => "Multiple Choice",
+ 301 => "Moved Permanently",
+ 302 => "Found",
+ 303 => "See Other",
+ 304 => "Not Modified",
+ 307 => "Temporary Redirect",
+ 308 => "Permanent Redirect",
+ 400 => "Bad Request",
+ 401 => "Unauthorized",
+ 402 => "Payment Required",
+ 403 => "Forbidden",
+ 404 => "Not Found",
+ 405 => "Method Not Allowed",
+ 406 => "Not Acceptable",
+ 407 => "Proxy Authentication Required",
+ 408 => "Request Timeout",
+ 409 => "Conflict",
+ 410 => "Gone",
+ 411 => "Length Required",
+ 412 => "Precondition Failed",
+ 413 => "Payload Too Large",
+ 414 => "URI Too Long",
+ 415 => "Unsupported Media Type",
+ 416 => "Request Range Not Satisfiable",
+ 417 => "Expectation Failed",
+ 429 => "Too Many Requests",
+ 431 => "Request Header Fields Too Large",
+ 500 => "Internal Server Error",
+ 501 => "Not Implemented",
+ 502 => "Bad Gateway",
+ 503 => "Service Unavailable",
+ 504 => "Gateway Timeout",
+ _ => "<unknown>",
+ }
+}
+
+/// Returns `true` if the host of the given URL is "crates.io".
+pub fn is_url_crates_io(url: &str) -> bool {
+ Url::parse(url)
+ .map(|u| u.host_str() == Some("crates.io"))
+ .unwrap_or(false)
+}
+
+/// Checks if a token is valid or malformed.
+///
+/// This check is necessary to prevent sending tokens which create an invalid HTTP request.
+/// It would be easier to check just for alphanumeric tokens, but we can't be sure that all
+/// registries only create tokens in that format, so this check is as unrestricted as possible.
+pub fn check_token(token: &str) -> Result<()> {
+ if token.is_empty() {
+ bail!("please provide a non-empty token");
+ }
+ if token.bytes().all(|b| {
+ // This is essentially the US-ASCII limitation of
+ // https://www.rfc-editor.org/rfc/rfc9110#name-field-values. That is,
+ // visible ASCII characters (0x21-0x7e), space, and tab. We want to be
+ // able to pass this in an HTTP header without encoding.
+ b >= 32 && b < 127 || b == b'\t'
+ }) {
+ Ok(())
+ } else {
+ Err(anyhow::anyhow!(
+ "token contains invalid characters.\nOnly printable ISO-8859-1 characters \
+ are allowed as it is sent in a HTTPS header."
+ ))
+ }
+}
diff --git a/src/tools/cargo/crates/credential/README.md b/src/tools/cargo/crates/credential/README.md
new file mode 100644
index 000000000..168cc71c3
--- /dev/null
+++ b/src/tools/cargo/crates/credential/README.md
@@ -0,0 +1,8 @@
+# Cargo Credential Packages
+
+This directory contains Cargo packages for handling storage of tokens in a
+secure manner.
+
+`cargo-credential` is a generic library to assist writing a credential
+process. The other directories contain implementations that integrate with
+specific credential systems.
diff --git a/src/tools/cargo/crates/credential/cargo-credential-1password/Cargo.toml b/src/tools/cargo/crates/credential/cargo-credential-1password/Cargo.toml
new file mode 100644
index 000000000..093fde8e5
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential-1password/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "cargo-credential-1password"
+version = "0.2.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = "A Cargo credential process that stores tokens in a 1password vault."
+
+[dependencies]
+cargo-credential = { version = "0.2.0", path = "../cargo-credential" }
+serde = { version = "1.0.117", features = ["derive"] }
+serde_json = "1.0.59"
diff --git a/src/tools/cargo/crates/credential/cargo-credential-1password/src/main.rs b/src/tools/cargo/crates/credential/cargo-credential-1password/src/main.rs
new file mode 100644
index 000000000..4f512b717
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential-1password/src/main.rs
@@ -0,0 +1,314 @@
+//! Cargo registry 1password credential process.
+
+use cargo_credential::{Credential, Error};
+use serde::Deserialize;
+use std::io::Read;
+use std::process::{Command, Stdio};
+
+const CARGO_TAG: &str = "cargo-registry";
+
+/// Implementation of 1password keychain access for Cargo registries.
+struct OnePasswordKeychain {
+ account: Option<String>,
+ vault: Option<String>,
+}
+
+/// 1password Login item type, used for the JSON output of `op item get`.
+#[derive(Deserialize)]
+struct Login {
+ fields: Vec<Field>,
+}
+
+#[derive(Deserialize)]
+struct Field {
+ id: String,
+ value: Option<String>,
+}
+
+/// 1password item from `op items list`.
+#[derive(Deserialize)]
+struct ListItem {
+ id: String,
+ urls: Vec<Url>,
+}
+
+#[derive(Deserialize)]
+struct Url {
+ href: String,
+}
+
+impl OnePasswordKeychain {
+ fn new() -> Result<OnePasswordKeychain, Error> {
+ let mut args = std::env::args().skip(1);
+ let mut action = false;
+ let mut account = None;
+ let mut vault = None;
+ while let Some(arg) = args.next() {
+ match arg.as_str() {
+ "--account" => {
+ account = Some(args.next().ok_or("--account needs an arg")?);
+ }
+ "--vault" => {
+ vault = Some(args.next().ok_or("--vault needs an arg")?);
+ }
+ s if s.starts_with('-') => {
+ return Err(format!("unknown option {}", s).into());
+ }
+ _ => {
+ if action {
+ return Err("too many arguments".into());
+ } else {
+ action = true;
+ }
+ }
+ }
+ }
+ Ok(OnePasswordKeychain { account, vault })
+ }
+
+ fn signin(&self) -> Result<Option<String>, Error> {
+ // If there are any session env vars, we'll assume that this is the
+ // correct account, and that the user knows what they are doing.
+ if std::env::vars().any(|(name, _)| name.starts_with("OP_SESSION_")) {
+ return Ok(None);
+ }
+ let mut cmd = Command::new("op");
+ cmd.args(&["signin", "--raw"]);
+ cmd.stdout(Stdio::piped());
+ self.with_tty(&mut cmd)?;
+ let mut child = cmd
+ .spawn()
+ .map_err(|e| format!("failed to spawn `op`: {}", e))?;
+ let mut buffer = String::new();
+ child
+ .stdout
+ .as_mut()
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .map_err(|e| format!("failed to get session from `op`: {}", e))?;
+ if let Some(end) = buffer.find('\n') {
+ buffer.truncate(end);
+ }
+ let status = child
+ .wait()
+ .map_err(|e| format!("failed to wait for `op`: {}", e))?;
+ if !status.success() {
+ return Err(format!("failed to run `op signin`: {}", status).into());
+ }
+ if buffer.is_empty() {
+ // When using CLI integration, `op signin` returns no output,
+ // so there is no need to set the session.
+ return Ok(None);
+ }
+ Ok(Some(buffer))
+ }
+
+ fn make_cmd(&self, session: &Option<String>, args: &[&str]) -> Command {
+ let mut cmd = Command::new("op");
+ cmd.args(args);
+ if let Some(account) = &self.account {
+ cmd.arg("--account");
+ cmd.arg(account);
+ }
+ if let Some(vault) = &self.vault {
+ cmd.arg("--vault");
+ cmd.arg(vault);
+ }
+ if let Some(session) = session {
+ cmd.arg("--session");
+ cmd.arg(session);
+ }
+ cmd
+ }
+
+ fn with_tty(&self, cmd: &mut Command) -> Result<(), Error> {
+ #[cfg(unix)]
+ const IN_DEVICE: &str = "/dev/tty";
+ #[cfg(windows)]
+ const IN_DEVICE: &str = "CONIN$";
+ let stdin = std::fs::OpenOptions::new()
+ .read(true)
+ .write(true)
+ .open(IN_DEVICE)?;
+ cmd.stdin(stdin);
+ Ok(())
+ }
+
+ fn run_cmd(&self, mut cmd: Command) -> Result<String, Error> {
+ cmd.stdout(Stdio::piped());
+ let mut child = cmd
+ .spawn()
+ .map_err(|e| format!("failed to spawn `op`: {}", e))?;
+ let mut buffer = String::new();
+ child
+ .stdout
+ .as_mut()
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .map_err(|e| format!("failed to read `op` output: {}", e))?;
+ let status = child
+ .wait()
+ .map_err(|e| format!("failed to wait for `op`: {}", e))?;
+ if !status.success() {
+ return Err(format!("`op` command exit error: {}", status).into());
+ }
+ Ok(buffer)
+ }
+
+ fn search(&self, session: &Option<String>, index_url: &str) -> Result<Option<String>, Error> {
+ let cmd = self.make_cmd(
+ session,
+ &[
+ "items",
+ "list",
+ "--categories",
+ "Login",
+ "--tags",
+ CARGO_TAG,
+ "--format",
+ "json",
+ ],
+ );
+ let buffer = self.run_cmd(cmd)?;
+ let items: Vec<ListItem> = serde_json::from_str(&buffer)
+ .map_err(|e| format!("failed to deserialize JSON from 1password list: {}", e))?;
+ let mut matches = items
+ .into_iter()
+ .filter(|item| item.urls.iter().any(|url| url.href == index_url));
+ match matches.next() {
+ Some(login) => {
+ // Should this maybe just sort on `updatedAt` and return the newest one?
+ if matches.next().is_some() {
+ return Err(format!(
+ "too many 1password logins match registry `{}`, \
+ consider deleting the excess entries",
+ index_url
+ )
+ .into());
+ }
+ Ok(Some(login.id))
+ }
+ None => Ok(None),
+ }
+ }
+
+ fn modify(
+ &self,
+ session: &Option<String>,
+ id: &str,
+ token: &str,
+ _name: Option<&str>,
+ ) -> Result<(), Error> {
+ let cmd = self.make_cmd(
+ session,
+ &["item", "edit", id, &format!("password={}", token)],
+ );
+ self.run_cmd(cmd)?;
+ Ok(())
+ }
+
+ fn create(
+ &self,
+ session: &Option<String>,
+ index_url: &str,
+ token: &str,
+ name: Option<&str>,
+ ) -> Result<(), Error> {
+ let title = match name {
+ Some(name) => format!("Cargo registry token for {}", name),
+ None => "Cargo registry token".to_string(),
+ };
+ let mut cmd = self.make_cmd(
+ session,
+ &[
+ "item",
+ "create",
+ "--category",
+ "Login",
+ &format!("password={}", token),
+ &format!("url={}", index_url),
+ "--title",
+ &title,
+ "--tags",
+ CARGO_TAG,
+ ],
+ );
+ // For unknown reasons, `op item create` seems to not be happy if
+ // stdin is not a tty. Otherwise it returns with a 0 exit code without
+ // doing anything.
+ self.with_tty(&mut cmd)?;
+ self.run_cmd(cmd)?;
+ Ok(())
+ }
+
+ fn get_token(&self, session: &Option<String>, id: &str) -> Result<String, Error> {
+ let cmd = self.make_cmd(session, &["item", "get", "--format=json", id]);
+ let buffer = self.run_cmd(cmd)?;
+ let item: Login = serde_json::from_str(&buffer)
+ .map_err(|e| format!("failed to deserialize JSON from 1password get: {}", e))?;
+ let password = item.fields.into_iter().find(|item| item.id == "password");
+ match password {
+ Some(password) => password
+ .value
+ .ok_or_else(|| format!("missing password value for entry").into()),
+ None => Err("could not find password field".into()),
+ }
+ }
+
+ fn delete(&self, session: &Option<String>, id: &str) -> Result<(), Error> {
+ let cmd = self.make_cmd(session, &["item", "delete", id]);
+ self.run_cmd(cmd)?;
+ Ok(())
+ }
+}
+
+impl Credential for OnePasswordKeychain {
+ fn name(&self) -> &'static str {
+ env!("CARGO_PKG_NAME")
+ }
+
+ fn get(&self, index_url: &str) -> Result<String, Error> {
+ let session = self.signin()?;
+ if let Some(id) = self.search(&session, index_url)? {
+ self.get_token(&session, &id)
+ } else {
+ return Err(format!(
+ "no 1password entry found for registry `{}`, try `cargo login` to add a token",
+ index_url
+ )
+ .into());
+ }
+ }
+
+ fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
+ let session = self.signin()?;
+ // Check if an item already exists.
+ if let Some(id) = self.search(&session, index_url)? {
+ self.modify(&session, &id, token, name)
+ } else {
+ self.create(&session, index_url, token, name)
+ }
+ }
+
+ fn erase(&self, index_url: &str) -> Result<(), Error> {
+ let session = self.signin()?;
+ // Check if an item already exists.
+ if let Some(id) = self.search(&session, index_url)? {
+ self.delete(&session, &id)?;
+ } else {
+ eprintln!("not currently logged in to `{}`", index_url);
+ }
+ Ok(())
+ }
+}
+
+fn main() {
+ let op = match OnePasswordKeychain::new() {
+ Ok(op) => op,
+ Err(e) => {
+ eprintln!("error: {}", e);
+ std::process::exit(1);
+ }
+ };
+ cargo_credential::main(op);
+}
diff --git a/src/tools/cargo/crates/credential/cargo-credential-gnome-secret/Cargo.toml b/src/tools/cargo/crates/credential/cargo-credential-gnome-secret/Cargo.toml
new file mode 100644
index 000000000..12e25cfb6
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential-gnome-secret/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "cargo-credential-gnome-secret"
+version = "0.2.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = "A Cargo credential process that stores tokens with GNOME libsecret."
+
+[dependencies]
+cargo-credential = { version = "0.2.0", path = "../cargo-credential" }
+
+[build-dependencies]
+pkg-config = "0.3.19"
diff --git a/src/tools/cargo/crates/credential/cargo-credential-gnome-secret/build.rs b/src/tools/cargo/crates/credential/cargo-credential-gnome-secret/build.rs
new file mode 100644
index 000000000..9283535af
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential-gnome-secret/build.rs
@@ -0,0 +1,3 @@
+fn main() {
+ pkg_config::probe_library("libsecret-1").unwrap();
+}
diff --git a/src/tools/cargo/crates/credential/cargo-credential-gnome-secret/src/main.rs b/src/tools/cargo/crates/credential/cargo-credential-gnome-secret/src/main.rs
new file mode 100644
index 000000000..40972b05d
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential-gnome-secret/src/main.rs
@@ -0,0 +1,194 @@
+//! Cargo registry gnome libsecret credential process.
+
+use cargo_credential::{Credential, Error};
+use std::ffi::{CStr, CString};
+use std::os::raw::{c_char, c_int};
+use std::ptr::{null, null_mut};
+
+#[allow(non_camel_case_types)]
+type gchar = c_char;
+
+#[allow(non_camel_case_types)]
+type gboolean = c_int;
+
+type GQuark = u32;
+
+#[repr(C)]
+struct GError {
+ domain: GQuark,
+ code: c_int,
+ message: *mut gchar,
+}
+
+#[repr(C)]
+struct GCancellable {
+ _private: [u8; 0],
+}
+
+#[repr(C)]
+struct SecretSchema {
+ name: *const gchar,
+ flags: SecretSchemaFlags,
+ attributes: [SecretSchemaAttribute; 32],
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+struct SecretSchemaAttribute {
+ name: *const gchar,
+ attr_type: SecretSchemaAttributeType,
+}
+
+#[repr(C)]
+enum SecretSchemaFlags {
+ None = 0,
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+enum SecretSchemaAttributeType {
+ String = 0,
+}
+
+extern "C" {
+ fn secret_password_store_sync(
+ schema: *const SecretSchema,
+ collection: *const gchar,
+ label: *const gchar,
+ password: *const gchar,
+ cancellable: *mut GCancellable,
+ error: *mut *mut GError,
+ ...
+ ) -> gboolean;
+ fn secret_password_clear_sync(
+ schema: *const SecretSchema,
+ cancellable: *mut GCancellable,
+ error: *mut *mut GError,
+ ...
+ ) -> gboolean;
+ fn secret_password_lookup_sync(
+ schema: *const SecretSchema,
+ cancellable: *mut GCancellable,
+ error: *mut *mut GError,
+ ...
+ ) -> *mut gchar;
+}
+
+struct GnomeSecret;
+
+fn label(index_url: &str) -> CString {
+ CString::new(format!("cargo-registry:{}", index_url)).unwrap()
+}
+
+fn schema() -> SecretSchema {
+ let mut attributes = [SecretSchemaAttribute {
+ name: null(),
+ attr_type: SecretSchemaAttributeType::String,
+ }; 32];
+ attributes[0] = SecretSchemaAttribute {
+ name: b"url\0".as_ptr() as *const gchar,
+ attr_type: SecretSchemaAttributeType::String,
+ };
+ SecretSchema {
+ name: b"org.rust-lang.cargo.registry\0".as_ptr() as *const gchar,
+ flags: SecretSchemaFlags::None,
+ attributes,
+ }
+}
+
+impl Credential for GnomeSecret {
+ fn name(&self) -> &'static str {
+ env!("CARGO_PKG_NAME")
+ }
+
+ fn get(&self, index_url: &str) -> Result<String, Error> {
+ let mut error: *mut GError = null_mut();
+ let attr_url = CString::new("url").unwrap();
+ let index_url_c = CString::new(index_url).unwrap();
+ let schema = schema();
+ unsafe {
+ let token_c = secret_password_lookup_sync(
+ &schema,
+ null_mut(),
+ &mut error,
+ attr_url.as_ptr(),
+ index_url_c.as_ptr(),
+ null() as *const gchar,
+ );
+ if !error.is_null() {
+ return Err(format!(
+ "failed to get token: {}",
+ CStr::from_ptr((*error).message).to_str()?
+ )
+ .into());
+ }
+ if token_c.is_null() {
+ return Err(format!("cannot find token for {}", index_url).into());
+ }
+ let token = CStr::from_ptr(token_c)
+ .to_str()
+ .map_err(|e| format!("expected utf8 token: {}", e))?
+ .to_string();
+ Ok(token)
+ }
+ }
+
+ fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
+ let label = label(name.unwrap_or(index_url));
+ let token = CString::new(token).unwrap();
+ let mut error: *mut GError = null_mut();
+ let attr_url = CString::new("url").unwrap();
+ let index_url_c = CString::new(index_url).unwrap();
+ let schema = schema();
+ unsafe {
+ secret_password_store_sync(
+ &schema,
+ b"default\0".as_ptr() as *const gchar,
+ label.as_ptr(),
+ token.as_ptr(),
+ null_mut(),
+ &mut error,
+ attr_url.as_ptr(),
+ index_url_c.as_ptr(),
+ null() as *const gchar,
+ );
+ if !error.is_null() {
+ return Err(format!(
+ "failed to store token: {}",
+ CStr::from_ptr((*error).message).to_str()?
+ )
+ .into());
+ }
+ }
+ Ok(())
+ }
+
+ fn erase(&self, index_url: &str) -> Result<(), Error> {
+ let schema = schema();
+ let mut error: *mut GError = null_mut();
+ let attr_url = CString::new("url").unwrap();
+ let index_url_c = CString::new(index_url).unwrap();
+ unsafe {
+ secret_password_clear_sync(
+ &schema,
+ null_mut(),
+ &mut error,
+ attr_url.as_ptr(),
+ index_url_c.as_ptr(),
+ null() as *const gchar,
+ );
+ if !error.is_null() {
+ return Err(format!(
+ "failed to erase token: {}",
+ CStr::from_ptr((*error).message).to_str()?
+ )
+ .into());
+ }
+ }
+ Ok(())
+ }
+}
+
+fn main() {
+ cargo_credential::main(GnomeSecret);
+}
diff --git a/src/tools/cargo/crates/credential/cargo-credential-macos-keychain/Cargo.toml b/src/tools/cargo/crates/credential/cargo-credential-macos-keychain/Cargo.toml
new file mode 100644
index 000000000..c2c22a425
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential-macos-keychain/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "cargo-credential-macos-keychain"
+version = "0.2.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = "A Cargo credential process that stores tokens in a macOS keychain."
+
+[dependencies]
+cargo-credential = { version = "0.2.0", path = "../cargo-credential" }
+security-framework = "2.0.0"
diff --git a/src/tools/cargo/crates/credential/cargo-credential-macos-keychain/src/main.rs b/src/tools/cargo/crates/credential/cargo-credential-macos-keychain/src/main.rs
new file mode 100644
index 000000000..3fef3f92a
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential-macos-keychain/src/main.rs
@@ -0,0 +1,50 @@
+//! Cargo registry macos keychain credential process.
+
+use cargo_credential::{Credential, Error};
+use security_framework::os::macos::keychain::SecKeychain;
+
+struct MacKeychain;
+
+/// The account name is not used.
+const ACCOUNT: &'static str = "";
+
+fn registry(registry_name: &str) -> String {
+ format!("cargo-registry:{}", registry_name)
+}
+
+impl Credential for MacKeychain {
+ fn name(&self) -> &'static str {
+ env!("CARGO_PKG_NAME")
+ }
+
+ fn get(&self, index_url: &str) -> Result<String, Error> {
+ let keychain = SecKeychain::default().unwrap();
+ let service_name = registry(index_url);
+ let (pass, _item) = keychain.find_generic_password(&service_name, ACCOUNT)?;
+ String::from_utf8(pass.as_ref().to_vec())
+ .map_err(|_| "failed to convert token to UTF8".into())
+ }
+
+ fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
+ let keychain = SecKeychain::default().unwrap();
+ let service_name = registry(name.unwrap_or(index_url));
+ if let Ok((_pass, mut item)) = keychain.find_generic_password(&service_name, ACCOUNT) {
+ item.set_password(token.as_bytes())?;
+ } else {
+ keychain.add_generic_password(&service_name, ACCOUNT, token.as_bytes())?;
+ }
+ Ok(())
+ }
+
+ fn erase(&self, index_url: &str) -> Result<(), Error> {
+ let keychain = SecKeychain::default().unwrap();
+ let service_name = registry(index_url);
+ let (_pass, item) = keychain.find_generic_password(&service_name, ACCOUNT)?;
+ item.delete();
+ Ok(())
+ }
+}
+
+fn main() {
+ cargo_credential::main(MacKeychain);
+}
diff --git a/src/tools/cargo/crates/credential/cargo-credential-wincred/Cargo.toml b/src/tools/cargo/crates/credential/cargo-credential-wincred/Cargo.toml
new file mode 100644
index 000000000..83c38e80d
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential-wincred/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "cargo-credential-wincred"
+version = "0.2.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = "A Cargo credential process that stores tokens with Windows Credential Manager."
+
+[dependencies]
+cargo-credential = { version = "0.2.0", path = "../cargo-credential" }
+windows-sys = { version = "0.45", features = ["Win32_Foundation", "Win32_Security_Credentials"] }
diff --git a/src/tools/cargo/crates/credential/cargo-credential-wincred/src/main.rs b/src/tools/cargo/crates/credential/cargo-credential-wincred/src/main.rs
new file mode 100644
index 000000000..8ae48f348
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential-wincred/src/main.rs
@@ -0,0 +1,111 @@
+//! Cargo registry windows credential process.
+
+use cargo_credential::{Credential, Error};
+use std::ffi::OsStr;
+use std::os::windows::ffi::OsStrExt;
+
+use windows_sys::core::PWSTR;
+use windows_sys::Win32::Foundation::ERROR_NOT_FOUND;
+use windows_sys::Win32::Foundation::FILETIME;
+use windows_sys::Win32::Foundation::TRUE;
+use windows_sys::Win32::Security::Credentials::CredDeleteW;
+use windows_sys::Win32::Security::Credentials::CredReadW;
+use windows_sys::Win32::Security::Credentials::CredWriteW;
+use windows_sys::Win32::Security::Credentials::CREDENTIALW;
+use windows_sys::Win32::Security::Credentials::CRED_PERSIST_LOCAL_MACHINE;
+use windows_sys::Win32::Security::Credentials::CRED_TYPE_GENERIC;
+
+struct WindowsCredential;
+
+/// Converts a string to a nul-terminated wide UTF-16 byte sequence.
+fn wstr(s: &str) -> Vec<u16> {
+ let mut wide: Vec<u16> = OsStr::new(s).encode_wide().collect();
+ if wide.iter().any(|b| *b == 0) {
+ panic!("nul byte in wide string");
+ }
+ wide.push(0);
+ wide
+}
+
+fn target_name(registry_name: &str) -> Vec<u16> {
+ wstr(&format!("cargo-registry:{}", registry_name))
+}
+
+impl Credential for WindowsCredential {
+ fn name(&self) -> &'static str {
+ env!("CARGO_PKG_NAME")
+ }
+
+ fn get(&self, index_url: &str) -> Result<String, Error> {
+ let target_name = target_name(index_url);
+ let p_credential: *mut CREDENTIALW = std::ptr::null_mut() as *mut _;
+ unsafe {
+ if CredReadW(
+ target_name.as_ptr(),
+ CRED_TYPE_GENERIC,
+ 0,
+ p_credential as *mut _ as *mut _,
+ ) != TRUE
+ {
+ return Err(
+ format!("failed to fetch token: {}", std::io::Error::last_os_error()).into(),
+ );
+ }
+ let bytes = std::slice::from_raw_parts(
+ (*p_credential).CredentialBlob,
+ (*p_credential).CredentialBlobSize as usize,
+ );
+ String::from_utf8(bytes.to_vec()).map_err(|_| "failed to convert token to UTF8".into())
+ }
+ }
+
+ fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
+ let token = token.as_bytes();
+ let target_name = target_name(index_url);
+ let comment = match name {
+ Some(name) => wstr(&format!("Cargo registry token for {}", name)),
+ None => wstr("Cargo registry token"),
+ };
+ let mut credential = CREDENTIALW {
+ Flags: 0,
+ Type: CRED_TYPE_GENERIC,
+ TargetName: target_name.as_ptr() as PWSTR,
+ Comment: comment.as_ptr() as PWSTR,
+ LastWritten: FILETIME {
+ dwLowDateTime: 0,
+ dwHighDateTime: 0,
+ },
+ CredentialBlobSize: token.len() as u32,
+ CredentialBlob: token.as_ptr() as *mut u8,
+ Persist: CRED_PERSIST_LOCAL_MACHINE,
+ AttributeCount: 0,
+ Attributes: std::ptr::null_mut(),
+ TargetAlias: std::ptr::null_mut(),
+ UserName: std::ptr::null_mut(),
+ };
+ let result = unsafe { CredWriteW(&mut credential, 0) };
+ if result != TRUE {
+ let err = std::io::Error::last_os_error();
+ return Err(format!("failed to store token: {}", err).into());
+ }
+ Ok(())
+ }
+
+ fn erase(&self, index_url: &str) -> Result<(), Error> {
+ let target_name = target_name(index_url);
+ let result = unsafe { CredDeleteW(target_name.as_ptr(), CRED_TYPE_GENERIC, 0) };
+ if result != TRUE {
+ let err = std::io::Error::last_os_error();
+ if err.raw_os_error() == Some(ERROR_NOT_FOUND as i32) {
+ eprintln!("not currently logged in to `{}`", index_url);
+ return Ok(());
+ }
+ return Err(format!("failed to remove token: {}", err).into());
+ }
+ Ok(())
+ }
+}
+
+fn main() {
+ cargo_credential::main(WindowsCredential);
+}
diff --git a/src/tools/cargo/crates/credential/cargo-credential/Cargo.toml b/src/tools/cargo/crates/credential/cargo-credential/Cargo.toml
new file mode 100644
index 000000000..2addaf5af
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "cargo-credential"
+version = "0.2.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = "A library to assist writing Cargo credential helpers."
+
+[dependencies]
diff --git a/src/tools/cargo/crates/credential/cargo-credential/README.md b/src/tools/cargo/crates/credential/cargo-credential/README.md
new file mode 100644
index 000000000..1f75e598a
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential/README.md
@@ -0,0 +1,41 @@
+# cargo-credential
+
+This package is a library to assist writing a Cargo credential helper, which
+provides an interface to store tokens for authorizing access to a registry
+such as https://crates.io/.
+
+Documentation about credential processes may be found at
+https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#credential-process
+
+Example implementations may be found at
+https://github.com/rust-lang/cargo/tree/master/crates/credential
+
+## Usage
+
+Create a Cargo project with this as a dependency:
+
+```toml
+# Add this to your Cargo.toml:
+
+[dependencies]
+cargo-credential = "0.2"
+```
+
+Then add a `main.rs` binary that implements the `Credential` trait and calls
+the `main` function, which dispatches to the appropriate trait method:
+
+```rust
+// src/main.rs
+
+use cargo_credential::{Credential, Error};
+
+struct MyCredential;
+
+impl Credential for MyCredential {
+    // implement the trait methods here...
+}
+
+fn main() {
+ cargo_credential::main(MyCredential);
+}
+```
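A minimal sketch of the skeleton above, assuming the `cargo-credential` crate from this patch as a dependency. `MemoryCredential` is a hypothetical name, and because every invocation is a separate process an in-memory map does not actually persist anything; it only illustrates the shape of the trait, while a real helper would use a secure backing store.

```rust
use std::collections::HashMap;
use std::sync::Mutex;

use cargo_credential::{Credential, Error};

/// Hypothetical helper that keeps tokens in memory; illustrative only.
struct MemoryCredential {
    tokens: Mutex<HashMap<String, String>>,
}

impl Credential for MemoryCredential {
    fn name(&self) -> &'static str {
        env!("CARGO_PKG_NAME")
    }

    fn get(&self, index_url: &str) -> Result<String, Error> {
        self.tokens
            .lock()
            .unwrap()
            .get(index_url)
            .cloned()
            .ok_or_else(|| format!("no token stored for `{}`", index_url).into())
    }

    fn store(&self, index_url: &str, token: &str, _name: Option<&str>) -> Result<(), Error> {
        self.tokens
            .lock()
            .unwrap()
            .insert(index_url.to_string(), token.to_string());
        Ok(())
    }

    fn erase(&self, index_url: &str) -> Result<(), Error> {
        if self.tokens.lock().unwrap().remove(index_url).is_none() {
            eprintln!("not currently logged in to `{}`", index_url);
        }
        Ok(())
    }
}

fn main() {
    cargo_credential::main(MemoryCredential {
        tokens: Mutex::new(HashMap::new()),
    });
}
```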
diff --git a/src/tools/cargo/crates/credential/cargo-credential/src/lib.rs b/src/tools/cargo/crates/credential/cargo-credential/src/lib.rs
new file mode 100644
index 000000000..3baf42d77
--- /dev/null
+++ b/src/tools/cargo/crates/credential/cargo-credential/src/lib.rs
@@ -0,0 +1,86 @@
+//! Helper library for writing Cargo credential processes.
+//!
+//! A credential process should have a `struct` that implements the `Credential` trait.
+//! The `main` function should be called with an instance of that struct, such as:
+//!
+//! ```rust,ignore
+//! fn main() {
+//! cargo_credential::main(MyCredential);
+//! }
+//! ```
+//!
+//! This will determine the action to perform (get/store/erase) by looking at
+//! the CLI arguments for the first argument that does not start with `-`. It
+//! will then call the corresponding method of the trait to perform the
+//! requested action.
+
+pub type Error = Box<dyn std::error::Error>;
+
+pub trait Credential {
+ /// Returns the name of this credential process.
+ fn name(&self) -> &'static str;
+
+ /// Retrieves a token for the given registry.
+ fn get(&self, index_url: &str) -> Result<String, Error>;
+
+ /// Stores the given token for the given registry.
+ fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error>;
+
+ /// Removes the token for the given registry.
+ ///
+ /// If the user is not logged in, this should print a message to stderr if
+ /// possible indicating that the user is not currently logged in, and
+ /// return `Ok`.
+ fn erase(&self, index_url: &str) -> Result<(), Error>;
+}
+
+/// Runs the credential interaction by processing the command-line and
+/// environment variables.
+pub fn main(credential: impl Credential) {
+ let name = credential.name();
+ if let Err(e) = doit(credential) {
+ eprintln!("{} error: {}", name, e);
+ std::process::exit(1);
+ }
+}
+
+fn env(name: &str) -> Result<String, Error> {
+ std::env::var(name).map_err(|_| format!("environment variable `{}` is not set", name).into())
+}
+
+fn doit(credential: impl Credential) -> Result<(), Error> {
+ let which = std::env::args()
+ .skip(1)
+ .skip_while(|arg| arg.starts_with('-'))
+ .next()
+ .ok_or_else(|| "first argument must be the {action}")?;
+ let index_url = env("CARGO_REGISTRY_INDEX_URL")?;
+ let name = std::env::var("CARGO_REGISTRY_NAME_OPT").ok();
+ let result = match which.as_ref() {
+ "get" => credential.get(&index_url).and_then(|token| {
+ println!("{}", token);
+ Ok(())
+ }),
+ "store" => {
+ read_token().and_then(|token| credential.store(&index_url, &token, name.as_deref()))
+ }
+ "erase" => credential.erase(&index_url),
+ _ => {
+ return Err(format!(
+ "unexpected command-line argument `{}`, expected get/store/erase",
+ which
+ )
+ .into())
+ }
+ };
+ result.map_err(|e| format!("failed to `{}` token: {}", which, e).into())
+}
+
+fn read_token() -> Result<String, Error> {
+ let mut buffer = String::new();
+ std::io::stdin().read_line(&mut buffer)?;
+ if buffer.ends_with('\n') {
+ buffer.pop();
+ }
+ Ok(buffer)
+}
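As a rough sketch of the protocol `doit` implements above, this is how a caller might drive such a credential process: the action is the first non-flag argument, the registry index URL is passed in `CARGO_REGISTRY_INDEX_URL`, and `get` prints the token on stdout. The binary name `my-credential` is hypothetical.

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // Hypothetical binary name; assumes it is on PATH.
    let output = Command::new("my-credential")
        .arg("get")
        .env(
            "CARGO_REGISTRY_INDEX_URL",
            "https://github.com/rust-lang/crates.io-index",
        )
        .output()?;
    // `get` prints the token on stdout (see `doit` above).
    let token = String::from_utf8_lossy(&output.stdout);
    println!("token: {}", token.trim());
    Ok(())
}
```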
diff --git a/src/tools/cargo/crates/home/CHANGELOG.md b/src/tools/cargo/crates/home/CHANGELOG.md
new file mode 100644
index 000000000..7674667c9
--- /dev/null
+++ b/src/tools/cargo/crates/home/CHANGELOG.md
@@ -0,0 +1,46 @@
+# Changelog
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+<!-- ## [Unreleased] -->
+
+## [0.5.4] - 2022-10-10
+- Add `_with_env` variants of functions to support in-process threaded tests for
+ rustup.
+
+## [0.5.3] - 2020-01-07
+
+Use Rust 1.36.0 as minimum Rust version.
+
+## [0.5.2] - 2020-01-05
+
+*YANKED since it cannot be built on Rust 1.36.0*
+
+### Changed
+- Check for emptiness of `CARGO_HOME` and `RUSTUP_HOME` environment variables.
+- Windows: Use `SHGetFolderPath` to replace `GetUserProfileDirectory` syscall.
+ * Remove `scopeguard` dependency.
+
+## [0.5.1] - 2019-10-12
+### Changed
+- Disable unnecessary features for `scopeguard`. Thanks @mati865.
+
+## [0.5.0] - 2019-08-21
+### Added
+- Add `home_dir` implementation for Windows UWP platforms.
+
+### Fixed
+- Fix `rustup_home` implementation when `RUSTUP_HOME` is an absolute directory.
+- Fix `cargo_home` implementation when `CARGO_HOME` is an absolute directory.
+
+### Removed
+- Remove support for `multirust` folder used in old version of `rustup`.
+
+[Unreleased]: https://github.com/brson/home/compare/v0.5.4...HEAD
+[0.5.4]: https://github.com/brson/home/compare/v0.5.3...v0.5.4
+[0.5.3]: https://github.com/brson/home/compare/v0.5.2...v0.5.3
+[0.5.2]: https://github.com/brson/home/compare/v0.5.1...v0.5.2
+[0.5.1]: https://github.com/brson/home/compare/v0.5.0...v0.5.1
+[0.5.0]: https://github.com/brson/home/compare/0.4.2...v0.5.0
diff --git a/src/tools/cargo/crates/home/Cargo.toml b/src/tools/cargo/crates/home/Cargo.toml
new file mode 100644
index 000000000..2c5b92bcb
--- /dev/null
+++ b/src/tools/cargo/crates/home/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "home"
+version = "0.5.4" # also update `html_root_url` in `src/lib.rs`
+authors = ["Brian Anderson <andersrb@gmail.com>"]
+documentation = "https://docs.rs/home"
+edition = "2018"
+include = [
+ "/src",
+ "/Cargo.toml",
+ "/CHANGELOG",
+ "/LICENSE-*",
+ "/README.md",
+]
+license = "MIT OR Apache-2.0"
+readme = "README.md"
+repository = "https://github.com/rust-lang/cargo"
+description = "Shared definitions of home directories."
+
+[target.'cfg(windows)'.dependencies]
+windows-sys = { version = "0.45.0", features = ["Win32_Foundation", "Win32_UI_Shell"] }
diff --git a/src/tools/cargo/crates/home/LICENSE-APACHE b/src/tools/cargo/crates/home/LICENSE-APACHE
new file mode 120000
index 000000000..1cd601d0a
--- /dev/null
+++ b/src/tools/cargo/crates/home/LICENSE-APACHE
@@ -0,0 +1 @@
+../../LICENSE-APACHE \ No newline at end of file
diff --git a/src/tools/cargo/crates/home/LICENSE-MIT b/src/tools/cargo/crates/home/LICENSE-MIT
new file mode 120000
index 000000000..b2cfbdc7b
--- /dev/null
+++ b/src/tools/cargo/crates/home/LICENSE-MIT
@@ -0,0 +1 @@
+../../LICENSE-MIT \ No newline at end of file
diff --git a/src/tools/cargo/crates/home/README.md b/src/tools/cargo/crates/home/README.md
new file mode 100644
index 000000000..a80adbd3b
--- /dev/null
+++ b/src/tools/cargo/crates/home/README.md
@@ -0,0 +1,27 @@
+[![Documentation](https://docs.rs/home/badge.svg)](https://docs.rs/home)
+[![crates.io](https://img.shields.io/crates/v/home.svg)](https://crates.io/crates/home)
+
+Canonical definitions of `home_dir`, `cargo_home`, and `rustup_home`.
+
+This provides the definition of `home_dir` used by Cargo and rustup,
+as well as functions to find the correct value of `CARGO_HOME` and
+`RUSTUP_HOME`.
+
+The definition of `home_dir` provided by the standard library is
+incorrect because it considers the `HOME` environment variable on
+Windows. This causes surprising situations where a Rust program will
+behave differently depending on whether it is run under a Unix
+emulation environment like Cygwin or MinGW. Neither Cargo nor rustup
+use the standard library's definition - they use the definition here.
+
+This crate further provides two functions, `cargo_home` and
+`rustup_home`, which are the canonical way to determine the location
+that Cargo and rustup store their data.
+
+See [rust-lang/rust#43321].
+
+[rust-lang/rust#43321]: https://github.com/rust-lang/rust/issues/43321
+
+## License
+
+MIT OR Apache-2.0
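A short usage sketch exercising the three lookups described above, assuming the `home` crate is added as a dependency:

```rust
fn main() {
    // Quick look at the three lookups provided by the `home` crate.
    match home::home_dir() {
        Some(path) => println!("home dir:    {}", path.display()),
        None => eprintln!("could not determine the home directory"),
    }
    match home::cargo_home() {
        Ok(path) => println!("cargo home:  {}", path.display()),
        Err(err) => eprintln!("could not determine CARGO_HOME: {}", err),
    }
    match home::rustup_home() {
        Ok(path) => println!("rustup home: {}", path.display()),
        Err(err) => eprintln!("could not determine RUSTUP_HOME: {}", err),
    }
}
```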
diff --git a/src/tools/cargo/crates/home/src/env.rs b/src/tools/cargo/crates/home/src/env.rs
new file mode 100644
index 000000000..e47273bc8
--- /dev/null
+++ b/src/tools/cargo/crates/home/src/env.rs
@@ -0,0 +1,106 @@
+//! Lower-level utilities for mocking the process environment.
+
+use std::{
+ ffi::OsString,
+ io,
+ path::{Path, PathBuf},
+};
+
+/// Permits parameterizing the home functions via the `_with_env` variants - used for
+/// in-process unit testing by rustup.
+pub trait Env {
+    /// Return the path to the user's home dir, or None if any error occurs:
+    /// see `home_dir_inner`.
+ fn home_dir(&self) -> Option<PathBuf>;
+ /// Return the current working directory.
+ fn current_dir(&self) -> io::Result<PathBuf>;
+ /// Get an environment variable, as per std::env::var_os.
+ fn var_os(&self, key: &str) -> Option<OsString>;
+}
+
+/// Implements Env for the OS context, both Unix style and Windows.
+///
+/// This trait permits in-process testing by providing a control point that
+/// allows in-process divergence on what is normally process-wide state.
+///
+/// Implementations should be provided by whatever testing framework the caller
+/// is using. Code that is not performing in-process threaded testing requiring
+/// isolated rustup/cargo directories does not need this trait or the `_with_env`
+/// functions.
+pub struct OsEnv;
+impl Env for OsEnv {
+ fn home_dir(&self) -> Option<PathBuf> {
+ crate::home_dir_inner()
+ }
+ fn current_dir(&self) -> io::Result<PathBuf> {
+ std::env::current_dir()
+ }
+ fn var_os(&self, key: &str) -> Option<OsString> {
+ std::env::var_os(key)
+ }
+}
+
+pub const OS_ENV: OsEnv = OsEnv {};
+
+/// Returns the path of the current user's home directory from [`Env::home_dir`].
+pub fn home_dir_with_env(env: &dyn Env) -> Option<PathBuf> {
+ env.home_dir()
+}
+
+/// Variant of cargo_home where the environment source is parameterized. This is
+/// specifically to support in-process testing scenarios as environment
+/// variables and user home metadata are normally process global state. See the
+/// [`Env`] trait.
+pub fn cargo_home_with_env(env: &dyn Env) -> io::Result<PathBuf> {
+ let cwd = env.current_dir()?;
+ cargo_home_with_cwd_env(env, &cwd)
+}
+
+/// Variant of cargo_home_with_cwd where the environment source is
+/// parameterized. This is specifically to support in-process testing scenarios
+/// as environment variables and user home metadata are normally process global
+/// state. See the [`Env`] trait.
+pub fn cargo_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf> {
+ match env.var_os("CARGO_HOME").filter(|h| !h.is_empty()) {
+ Some(home) => {
+ let home = PathBuf::from(home);
+ if home.is_absolute() {
+ Ok(home)
+ } else {
+ Ok(cwd.join(&home))
+ }
+ }
+ _ => home_dir_with_env(env)
+ .map(|p| p.join(".cargo"))
+ .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "could not find cargo home dir")),
+ }
+}
+
+/// Variant of rustup_home where the environment source is
+/// parameterized. This is specifically to support in-process testing scenarios
+/// as environment variables and user home metadata are normally process global
+/// state. See the [`Env`] trait.
+pub fn rustup_home_with_env(env: &dyn Env) -> io::Result<PathBuf> {
+ let cwd = env.current_dir()?;
+ rustup_home_with_cwd_env(env, &cwd)
+}
+
+/// Variant of rustup_home_with_cwd where the environment source is
+/// parameterized. This is specifically to support in-process testing scenarios
+/// as environment variables and user home metadata are normally process global
+/// state. See the [`Env`] trait.
+pub fn rustup_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf> {
+ match env.var_os("RUSTUP_HOME").filter(|h| !h.is_empty()) {
+ Some(home) => {
+ let home = PathBuf::from(home);
+ if home.is_absolute() {
+ Ok(home)
+ } else {
+ Ok(cwd.join(&home))
+ }
+ }
+ _ => home_dir_with_env(env)
+ .map(|d| d.join(".rustup"))
+ .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "could not find rustup home dir")),
+ }
+}
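A sketch of how a test might plug in its own environment through the `Env` trait above; `FakeEnv` and its fixed paths are hypothetical, and the example only shows the parameterization point this module provides (assuming the `home` crate as a dependency).

```rust
use std::ffi::OsString;
use std::io;
use std::path::{Path, PathBuf};

use home::env::{cargo_home_with_cwd_env, Env};

/// Hypothetical fixed environment for an in-process test.
struct FakeEnv {
    home: PathBuf,
    cargo_home: Option<OsString>,
}

impl Env for FakeEnv {
    fn home_dir(&self) -> Option<PathBuf> {
        Some(self.home.clone())
    }
    fn current_dir(&self) -> io::Result<PathBuf> {
        Ok(PathBuf::from("/tmp/test-cwd"))
    }
    fn var_os(&self, key: &str) -> Option<OsString> {
        match key {
            "CARGO_HOME" => self.cargo_home.clone(),
            _ => None,
        }
    }
}

fn main() -> io::Result<()> {
    let env = FakeEnv {
        home: PathBuf::from("/home/test"),
        cargo_home: None,
    };
    // CARGO_HOME is unset in the fake env, so this falls back to `$HOME/.cargo`.
    let cargo_home = cargo_home_with_cwd_env(&env, Path::new("/tmp/test-cwd"))?;
    assert_eq!(cargo_home, PathBuf::from("/home/test/.cargo"));
    Ok(())
}
```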
diff --git a/src/tools/cargo/crates/home/src/lib.rs b/src/tools/cargo/crates/home/src/lib.rs
new file mode 100644
index 000000000..306026e40
--- /dev/null
+++ b/src/tools/cargo/crates/home/src/lib.rs
@@ -0,0 +1,149 @@
+//! Canonical definitions of `home_dir`, `cargo_home`, and `rustup_home`.
+//!
+//! This provides the definition of `home_dir` used by Cargo and
+//! rustup, as well as functions to find the correct value of
+//! `CARGO_HOME` and `RUSTUP_HOME`.
+//!
+//! See also the [`dirs`](https://docs.rs/dirs) crate.
+//!
+//! _Note that as of 2019/08/06 it appears that cargo uses this crate. And
+//! rustup has used this crate since 2019/08/21._
+//!
+//! The definition of `home_dir` provided by the standard library is
+//! incorrect because it considers the `HOME` environment variable on
+//! Windows. This causes surprising situations where a Rust program
+//! will behave differently depending on whether it is run under a
+//! Unix emulation environment like Cygwin or MinGW. Neither Cargo nor
+//! rustup use the standard library's definition - they use the
+//! definition here.
+//!
+//! This crate further provides two functions, `cargo_home` and
+//! `rustup_home`, which are the canonical way to determine the
+//! location that Cargo and rustup store their data.
+//!
+//! See also this [discussion].
+//!
+//! [discussion]: https://github.com/rust-lang/rust/pull/46799#issuecomment-361156935
+
+#![doc(html_root_url = "https://docs.rs/home/0.5.4")]
+#![deny(rust_2018_idioms)]
+
+pub mod env;
+
+#[cfg(target_os = "windows")]
+mod windows;
+
+use std::io;
+use std::path::{Path, PathBuf};
+
+/// Returns the path of the current user's home directory if known.
+///
+/// # Unix
+///
+/// Returns the value of the `HOME` environment variable if it is set
+/// and not equal to the empty string. Otherwise, it tries to determine the
+/// home directory by invoking the `getpwuid_r` function on the UID of the
+/// current user.
+///
+/// # Windows
+///
+/// Returns the value of the `USERPROFILE` environment variable if it
+/// is set and not equal to the empty string. Otherwise,
+/// [`SHGetFolderPathW`][msdn] is used to return the appropriate path.
+///
+/// [msdn]: https://docs.microsoft.com/en-us/windows/win32/api/shlobj_core/nf-shlobj_core-shgetfolderpathw
+///
+/// # Examples
+///
+/// ```
+/// match home::home_dir() {
+/// Some(path) => println!("{}", path.display()),
+/// None => println!("Impossible to get your home dir!"),
+/// }
+/// ```
+pub fn home_dir() -> Option<PathBuf> {
+ env::home_dir_with_env(&env::OS_ENV)
+}
+
+#[cfg(windows)]
+use windows::home_dir_inner;
+
+#[cfg(any(unix, target_os = "redox"))]
+fn home_dir_inner() -> Option<PathBuf> {
+ #[allow(deprecated)]
+ std::env::home_dir()
+}
+
+/// Returns the storage directory used by Cargo, often known as
+/// `.cargo` or `CARGO_HOME`.
+///
+/// It returns one of the following values, in this order of
+/// preference:
+///
+/// - The value of the `CARGO_HOME` environment variable, if it is
+/// an absolute path.
+/// - The value of the current working directory joined with the value
+/// of the `CARGO_HOME` environment variable, if `CARGO_HOME` is a
+/// relative directory.
+/// - The `.cargo` directory in the user's home directory, as reported
+/// by the `home_dir` function.
+///
+/// # Errors
+///
+/// This function fails if it fails to retrieve the current directory,
+/// or if the home directory cannot be determined.
+///
+/// # Examples
+///
+/// ```
+/// match home::cargo_home() {
+/// Ok(path) => println!("{}", path.display()),
+/// Err(err) => eprintln!("Cannot get your cargo home dir: {:?}", err),
+/// }
+/// ```
+pub fn cargo_home() -> io::Result<PathBuf> {
+ env::cargo_home_with_env(&env::OS_ENV)
+}
+
+/// Returns the storage directory used by Cargo within `cwd`.
+/// For more details, see [`cargo_home`](fn.cargo_home.html).
+pub fn cargo_home_with_cwd(cwd: &Path) -> io::Result<PathBuf> {
+ env::cargo_home_with_cwd_env(&env::OS_ENV, cwd)
+}
+
+/// Returns the storage directory used by rustup, often known as
+/// `.rustup` or `RUSTUP_HOME`.
+///
+/// It returns one of the following values, in this order of
+/// preference:
+///
+/// - The value of the `RUSTUP_HOME` environment variable, if it is
+/// an absolute path.
+/// - The value of the current working directory joined with the value
+/// of the `RUSTUP_HOME` environment variable, if `RUSTUP_HOME` is a
+/// relative directory.
+/// - The `.rustup` directory in the user's home directory, as reported
+/// by the `home_dir` function.
+///
+/// # Errors
+///
+/// This function fails if it fails to retrieve the current directory,
+/// or if the home directory cannot be determined.
+///
+/// # Examples
+///
+/// ```
+/// match home::rustup_home() {
+/// Ok(path) => println!("{}", path.display()),
+/// Err(err) => eprintln!("Cannot get your rustup home dir: {:?}", err),
+/// }
+/// ```
+pub fn rustup_home() -> io::Result<PathBuf> {
+ env::rustup_home_with_env(&env::OS_ENV)
+}
+
+/// Returns the storage directory used by rustup within `cwd`.
+/// For more details, see [`rustup_home`](fn.rustup_home.html).
+pub fn rustup_home_with_cwd(cwd: &Path) -> io::Result<PathBuf> {
+ env::rustup_home_with_cwd_env(&env::OS_ENV, cwd)
+}
diff --git a/src/tools/cargo/crates/home/src/windows.rs b/src/tools/cargo/crates/home/src/windows.rs
new file mode 100644
index 000000000..a35dc9c57
--- /dev/null
+++ b/src/tools/cargo/crates/home/src/windows.rs
@@ -0,0 +1,66 @@
+use std::env;
+use std::ffi::OsString;
+use std::os::windows::ffi::OsStringExt;
+use std::path::PathBuf;
+
+use windows_sys::Win32::Foundation::{MAX_PATH, S_OK};
+use windows_sys::Win32::UI::Shell::{SHGetFolderPathW, CSIDL_PROFILE};
+
+pub fn home_dir_inner() -> Option<PathBuf> {
+ env::var_os("USERPROFILE")
+ .filter(|s| !s.is_empty())
+ .map(PathBuf::from)
+ .or_else(home_dir_crt)
+}
+
+#[cfg(not(target_vendor = "uwp"))]
+fn home_dir_crt() -> Option<PathBuf> {
+ unsafe {
+ let mut path: Vec<u16> = Vec::with_capacity(MAX_PATH as usize);
+ match SHGetFolderPathW(0, CSIDL_PROFILE as i32, 0, 0, path.as_mut_ptr()) {
+ S_OK => {
+ let len = wcslen(path.as_ptr());
+ path.set_len(len);
+ let s = OsString::from_wide(&path);
+ Some(PathBuf::from(s))
+ }
+ _ => None,
+ }
+ }
+}
+
+#[cfg(target_vendor = "uwp")]
+fn home_dir_crt() -> Option<PathBuf> {
+ None
+}
+
+extern "C" {
+ fn wcslen(buf: *const u16) -> usize;
+}
+
+#[cfg(not(target_vendor = "uwp"))]
+#[cfg(test)]
+mod tests {
+ use super::home_dir_inner;
+ use std::env;
+ use std::ops::Deref;
+ use std::path::{Path, PathBuf};
+
+ #[test]
+ fn test_with_without() {
+ let olduserprofile = env::var_os("USERPROFILE").unwrap();
+
+ env::remove_var("HOME");
+ env::remove_var("USERPROFILE");
+
+ assert_eq!(home_dir_inner(), Some(PathBuf::from(olduserprofile)));
+
+ let home = Path::new(r"C:\Users\foo tar baz");
+
+ env::set_var("HOME", home.as_os_str());
+ assert_ne!(home_dir_inner().as_ref().map(Deref::deref), Some(home));
+
+ env::set_var("USERPROFILE", home.as_os_str());
+ assert_eq!(home_dir_inner().as_ref().map(Deref::deref), Some(home));
+ }
+}
diff --git a/src/tools/cargo/crates/mdman/Cargo.lock b/src/tools/cargo/crates/mdman/Cargo.lock
new file mode 100644
index 000000000..51fe47a9c
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/Cargo.lock
@@ -0,0 +1,459 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "anyhow"
+version = "1.0.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6b602bfe940d21c130f3895acd65221e8a61270debe89d628b9cb4e3ccb8569b"
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "block-buffer"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b"
+dependencies = [
+ "block-padding",
+ "byte-tools",
+ "byteorder",
+ "generic-array",
+]
+
+[[package]]
+name = "block-padding"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5"
+dependencies = [
+ "byte-tools",
+]
+
+[[package]]
+name = "byte-tools"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7"
+
+[[package]]
+name = "byteorder"
+version = "1.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
+
+[[package]]
+name = "cfg-if"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
+
+[[package]]
+name = "ctor"
+version = "0.1.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39858aa5bac06462d4dd4b9164848eb81ffc4aa5c479746393598fd193afa227"
+dependencies = [
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "diff"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
+
+[[package]]
+name = "digest"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "fake-simd"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191"
+dependencies = [
+ "matches",
+ "percent-encoding",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec"
+dependencies = [
+ "typenum",
+]
+
+[[package]]
+name = "handlebars"
+version = "3.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86dbc8a0746b08f363d2e00da48e6c9ceb75c198ac692d2715fcbb5bee74c87d"
+dependencies = [
+ "log",
+ "pest",
+ "pest_derive",
+ "quick-error",
+ "serde",
+ "serde_json",
+ "walkdir",
+]
+
+[[package]]
+name = "idna"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9"
+dependencies = [
+ "matches",
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "itoa"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6"
+
+[[package]]
+name = "log"
+version = "0.4.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "maplit"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
+
+[[package]]
+name = "matches"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
+
+[[package]]
+name = "mdman"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "handlebars",
+ "pretty_assertions",
+ "pulldown-cmark",
+ "same-file",
+ "serde_json",
+ "url",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "opaque-debug"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c"
+
+[[package]]
+name = "output_vt100"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "percent-encoding"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
+
+[[package]]
+name = "pest"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53"
+dependencies = [
+ "ucd-trie",
+]
+
+[[package]]
+name = "pest_derive"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0"
+dependencies = [
+ "pest",
+ "pest_generator",
+]
+
+[[package]]
+name = "pest_generator"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55"
+dependencies = [
+ "pest",
+ "pest_meta",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "pest_meta"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "54be6e404f5317079812fc8f9f5279de376d8856929e21c184ecf6bbd692a11d"
+dependencies = [
+ "maplit",
+ "pest",
+ "sha-1",
+]
+
+[[package]]
+name = "pretty_assertions"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755"
+dependencies = [
+ "ctor",
+ "diff",
+ "output_vt100",
+ "yansi",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04f5f085b5d71e2188cb8271e5da0161ad52c3f227a661a3c135fdf28e258b12"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "pulldown-cmark"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63"
+dependencies = [
+ "bitflags",
+ "memchr",
+ "unicase",
+]
+
+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
+[[package]]
+name = "quote"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.114"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5317f7588f0a5078ee60ef675ef96735a1442132dc645eb1d12c018620ed8cd3"
+
+[[package]]
+name = "serde_json"
+version = "1.0.57"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "164eacbdb13512ec2745fb09d51fd5b22b0d65ed294a1dcf7285a360c80a675c"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha-1"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df"
+dependencies = [
+ "block-buffer",
+ "digest",
+ "fake-simd",
+ "opaque-debug",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4cdb98bcb1f9d81d07b536179c269ea15999b5d14ea958196413869445bb5250"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "tinyvec"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53953d2d3a5ad81d9f844a32f14ebb121f50b650cd59d0ee2a07cf13c617efed"
+
+[[package]]
+name = "typenum"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33"
+
+[[package]]
+name = "ucd-trie"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c"
+
+[[package]]
+name = "unicase"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+dependencies = [
+ "version_check",
+]
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
+dependencies = [
+ "matches",
+]
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6fb19cf769fa8c6a80a162df694621ebeb4dafb606470b2b2fce0be40a98a977"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
+
+[[package]]
+name = "url"
+version = "2.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "matches",
+ "percent-encoding",
+]
+
+[[package]]
+name = "version_check"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
+
+[[package]]
+name = "walkdir"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d"
+dependencies = [
+ "same-file",
+ "winapi",
+ "winapi-util",
+]
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "yansi"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
diff --git a/src/tools/cargo/crates/mdman/Cargo.toml b/src/tools/cargo/crates/mdman/Cargo.toml
new file mode 100644
index 000000000..92cdf2eb6
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "mdman"
+version = "0.1.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+description = "Creates a man page from markdown."
+
+[dependencies]
+anyhow = "1.0.31"
+handlebars = { version = "3.2.1", features = ["dir_source"] }
+pulldown-cmark = { version = "0.9.2", default-features = false }
+same-file = "1.0.6"
+serde_json = "1.0.56"
+url = "2.2.2"
+
+[dev-dependencies]
+pretty_assertions = "1.3.0"
diff --git a/src/tools/cargo/crates/mdman/README.md b/src/tools/cargo/crates/mdman/README.md
new file mode 100644
index 000000000..e28b596ba
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/README.md
@@ -0,0 +1,7 @@
+# mdman
+
+mdman is a small utility for creating man pages from markdown text files.
+
+## Usage
+
+See the [man page](doc/out/mdman.md) generated by this tool.
diff --git a/src/tools/cargo/crates/mdman/build-man.sh b/src/tools/cargo/crates/mdman/build-man.sh
new file mode 100755
index 000000000..9286b17c2
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/build-man.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+set -e
+
+cargo run -- -t md -o doc/out doc/*.md
+cargo run -- -t txt -o doc/out doc/*.md
+cargo run -- -t man -o doc/out doc/*.md
diff --git a/src/tools/cargo/crates/mdman/doc/mdman.md b/src/tools/cargo/crates/mdman/doc/mdman.md
new file mode 100644
index 000000000..2025c13dc
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/doc/mdman.md
@@ -0,0 +1,95 @@
+# mdman(1)
+
+## NAME
+
+mdman - Converts markdown to a man page
+
+## SYNOPSIS
+
+`mdman` [_options_] `-t` _type_ `-o` _outdir_ _sources..._
+
+## DESCRIPTION
+
+Converts a markdown file to a man page.
+
+The source file is first processed as a
+[handlebars](https://handlebarsjs.com/) template. Then, it is processed as
+markdown into the target format. This supports different output formats,
+such as troff or plain text.
+
+Every man page should start with a level-1 header with the man name and
+section, such as `# mdman(1)`.
+
+The handlebars template has several special tags to assist with generating the
+man page:
+
+{{{{raw}}}}
+- Every block of command-line options must be wrapped between `{{#options}}`
+ and `{{/options}}` tags. This tells the processor where the options start
+ and end.
+- Each option must be expressed with a `{{#option}}` block. The parameters to
+  the block are a sequence of strings indicating the option. For example,
+ ```{{#option "`-p` _spec_..." "`--package` _spec_..."}}``` is an option that
+ has two different forms. The text within the string is processed as markdown.
+ It is recommended to use formatting similar to this example.
+
+ The content of the `{{#option}}` block should contain a detailed description
+ of the option.
+
+ Use the `{{/option}}` tag to end the option block.
+- References to other man pages should use the `{{man name section}}`
+ expression. For example, `{{man "mdman" 1}}` will generate a reference to
+ the `mdman(1)` man page. For non-troff output, the `--man` option will tell
+ `mdman` how to create links to the man page. If there is no matching `--man`
+ option, then it links to a file named _name_`.md` in the same directory.
+- Variables can be set with `{{*set name="value"}}`. These variables can
+ then be referenced with `{{name}}` expressions.
+- Partial templates should be placed in a directory named `includes`
+ next to the source file. Templates can be included with an expression like
+ `{{> template-name}}`.
+- Other helpers include:
+ - `{{lower value}}` Converts the given value to lowercase.
+{{{{/raw}}}}
+
+## OPTIONS
+
+{{#options}}
+
+{{#option "`-t` _type_"}}
+Specifies the output type. The following output types are supported:
+- `man` — A troff-style man page. Outputs with a numbered extension (like
+ `.1`) matching the man page section.
+- `md` — A markdown file, after all handlebars processing has been finished.
+ Outputs with the `.md` extension.
+- `txt` — A text file, rendered for situations where a man page viewer isn't
+ available. Outputs with the `.txt` extension.
+{{/option}}
+
+{{#option "`-o` _outdir_"}}
+Specifies the directory in which to save the output.
+{{/option}}
+
+{{#option "`--url` _base_url_"}}
+Specifies a base URL to use for relative URLs within the document. Any
+relative URL will be joined with this URL.
+{{/option}}
+
+{{#option "`--man` _name_`:`_section_`=`_url_"}}
+Specifies a URL to use for the given man page. When the `\{{man name
+section}}` expression is used, the given URL will be inserted as a link. This
+may be specified multiple times. If a man page reference does not have a
+matching `--man` entry, then a relative link to a file named _name_`.md` will
+be used.
+{{/option}}
+
+{{#option "_sources..._"}}
+The source input filename. This may be specified multiple times.
+{{/option}}
+
+{{/options}}
+
+## EXAMPLES
+
+1. Convert the given documents to man pages:
+
+ mdman -t man -o doc doc/mdman.md
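To make the template mechanics above concrete, here is a hedged sketch (not part of this patch) that drives the same pipeline through the library entry point `mdman::convert` added in `src/lib.rs` later in this change. The temp-directory handling, the `mdman` path dependency, and the tiny `demo.md` template are all assumptions for illustration; in the repository the tool is normally invoked via `build-man.sh`.

```rust
use std::io::Write;

fn main() -> anyhow::Result<()> {
    // A tiny template using the tags described above (hypothetical content).
    let template = "\
# demo(1)

## NAME

demo - An example page

## OPTIONS

{{#options}}

{{#option \"`-q`\" \"`--quiet`\"}}
Suppresses output. See also {{man \"mdman\" 1}}.
{{/option}}

{{/options}}
";

    // `hbs::expand` looks for an `includes/` directory next to the source
    // file for partial templates, so create an empty one.
    let dir = std::env::temp_dir().join("mdman-demo");
    std::fs::create_dir_all(dir.join("includes"))?;
    let path = dir.join("demo.md");
    std::fs::File::create(&path)?.write_all(template.as_bytes())?;

    // Render the same way `-t txt` would.
    let text = mdman::convert(
        &path,
        mdman::Format::Text,
        None,                 // no --url base for relative links
        mdman::ManMap::new(), // no --man overrides for man-page links
    )?;
    println!("{}", text);
    Ok(())
}
```

The resulting string is the plain-text rendering, in the same style as `doc/out/mdman.txt` below.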
diff --git a/src/tools/cargo/crates/mdman/doc/out/mdman.1 b/src/tools/cargo/crates/mdman/doc/out/mdman.1
new file mode 100644
index 000000000..0718d6ddb
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/doc/out/mdman.1
@@ -0,0 +1,124 @@
+'\" t
+.TH "MDMAN" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+mdman \- Converts markdown to a man page
+.SH "SYNOPSIS"
+\fBmdman\fR [\fIoptions\fR] \fB\-t\fR \fItype\fR \fB\-o\fR \fIoutdir\fR \fIsources...\fR
+.SH "DESCRIPTION"
+Converts a markdown file to a man page.
+.sp
+The source file is first processed as a
+\fIhandlebars\fR <https://handlebarsjs.com/> template. Then, it is processed as
+markdown into the target format. This supports different output formats,
+such as troff or plain text.
+.sp
+Every man page should start with a level\-1 header with the man name and
+section, such as \fB# mdman(1)\fR\&.
+.sp
+The handlebars template has several special tags to assist with generating the
+man page:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Every block of command\-line options must be wrapped between \fB{{#options}}\fR
+and \fB{{/options}}\fR tags. This tells the processor where the options start
+and end.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Each option must be expressed with a \fB{{#option}}\fR block. The parameters to
+the block are a sequence of strings indicating the option. For example,
+\fB{{#option "`\-p` _spec_..." "`\-\-package` _spec_..."}}\fR is an option that
+has two different forms. The text within the string is processed as markdown.
+It is recommended to use formatting similar to this example.
+.sp
+The content of the \fB{{#option}}\fR block should contain a detailed description
+of the option.
+.sp
+Use the \fB{{/option}}\fR tag to end the option block.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'References to other man pages should use the \fB{{man name section}}\fR
+expression. For example, \fB{{man "mdman" 1}}\fR will generate a reference to
+the \fBmdman(1)\fR man page. For non\-troff output, the \fB\-\-man\fR option will tell
+\fBmdman\fR how to create links to the man page. If there is no matching \fB\-\-man\fR
+option, then it links to a file named \fIname\fR\fB\&.md\fR in the same directory.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Variables can be set with \fB{{*set name="value"}}\fR\&. These variables can
+then be referenced with \fB{{name}}\fR expressions.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Partial templates should be placed in a directory named \fBincludes\fR
+next to the source file. Templates can be included with an expression like
+\fB{{> template\-name}}\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Other helpers include:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB{{lower value}}\fR Converts the given value to lowercase.
+.RE
+.RE
+.SH "OPTIONS"
+.sp
+\fB\-t\fR \fItype\fR
+.RS 4
+Specifies the output type. The following output types are supported:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBman\fR \[em] A troff\-style man page. Outputs with a numbered extension (like
+\fB\&.1\fR) matching the man page section.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBmd\fR \[em] A markdown file, after all handlebars processing has been finished.
+Outputs with the \fB\&.md\fR extension.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBtxt\fR \[em] A text file, rendered for situations where a man page viewer isn't
+available. Outputs with the \fB\&.txt\fR extension.
+.RE
+.RE
+.sp
+\fB\-o\fR \fIoutdir\fR
+.RS 4
+Specifies the directory in which to save the output.
+.RE
+.sp
+\fB\-\-url\fR \fIbase_url\fR
+.RS 4
+Specifies a base URL to use for relative URLs within the document. Any
+relative URL will be joined with this URL.
+.RE
+.sp
+\fB\-\-man\fR \fIname\fR\fB:\fR\fIsection\fR\fB=\fR\fIurl\fR
+.RS 4
+Specifies a URL to use for the given man page. When the \fB{{man name section}}\fR expression is used, the given URL will be inserted as a link. This
+may be specified multiple times. If a man page reference does not have a
+matching \fB\-\-man\fR entry, then a relative link to a file named \fIname\fR\fB\&.md\fR will
+be used.
+.RE
+.sp
+\fIsources...\fR
+.RS 4
+The source input filename. This may be specified multiple times.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Convert the given documents to man pages:
+.sp
+.RS 4
+.nf
+mdman \-t man \-o doc doc/mdman.md
+.fi
+.RE
+.RE
diff --git a/src/tools/cargo/crates/mdman/doc/out/mdman.md b/src/tools/cargo/crates/mdman/doc/out/mdman.md
new file mode 100644
index 000000000..d0dd34511
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/doc/out/mdman.md
@@ -0,0 +1,95 @@
+# mdman(1)
+
+## NAME
+
+mdman - Converts markdown to a man page
+
+## SYNOPSIS
+
+`mdman` [_options_] `-t` _type_ `-o` _outdir_ _sources..._
+
+## DESCRIPTION
+
+Converts a markdown file to a man page.
+
+The source file is first processed as a
+[handlebars](https://handlebarsjs.com/) template. Then, it is processed as
+markdown into the target format. This supports different output formats,
+such as troff or plain text.
+
+Every man page should start with a level-1 header with the man name and
+section, such as `# mdman(1)`.
+
+The handlebars template has several special tags to assist with generating the
+man page:
+
+- Every block of command-line options must be wrapped between `{{#options}}`
+ and `{{/options}}` tags. This tells the processor where the options start
+ and end.
+- Each option must be expressed with a `{{#option}}` block. The parameters to
+  the block are a sequence of strings indicating the option. For example,
+ ```{{#option "`-p` _spec_..." "`--package` _spec_..."}}``` is an option that
+ has two different forms. The text within the string is processed as markdown.
+ It is recommended to use formatting similar to this example.
+
+ The content of the `{{#option}}` block should contain a detailed description
+ of the option.
+
+ Use the `{{/option}}` tag to end the option block.
+- References to other man pages should use the `{{man name section}}`
+ expression. For example, `{{man "mdman" 1}}` will generate a reference to
+ the `mdman(1)` man page. For non-troff output, the `--man` option will tell
+ `mdman` how to create links to the man page. If there is no matching `--man`
+ option, then it links to a file named _name_`.md` in the same directory.
+- Variables can be set with `{{*set name="value"}}`. These variables can
+ then be referenced with `{{name}}` expressions.
+- Partial templates should be placed in a directory named `includes`
+ next to the source file. Templates can be included with an expression like
+ `{{> template-name}}`.
+- Other helpers include:
+ - `{{lower value}}` Converts the given value to lowercase.
+
+
+## OPTIONS
+
+<dl>
+
+<dt class="option-term" id="option-mdman--t"><a class="option-anchor" href="#option-mdman--t"></a><code>-t</code> <em>type</em></dt>
+<dd class="option-desc">Specifies the output type. The following output types are supported:</p>
+<ul>
+<li><code>man</code> — A troff-style man page. Outputs with a numbered extension (like
+<code>.1</code>) matching the man page section.</li>
+<li><code>md</code> — A markdown file, after all handlebars processing has been finished.
+Outputs with the <code>.md</code> extension.</li>
+<li><code>txt</code> — A text file, rendered for situations where a man page viewer isn't
+available. Outputs with the <code>.txt</code> extension.</li>
+</ul></dd>
+
+
+<dt class="option-term" id="option-mdman--o"><a class="option-anchor" href="#option-mdman--o"></a><code>-o</code> <em>outdir</em></dt>
+<dd class="option-desc">Specifies the directory where to save the output.</dd>
+
+
+<dt class="option-term" id="option-mdman---url"><a class="option-anchor" href="#option-mdman---url"></a><code>--url</code> <em>base_url</em></dt>
+<dd class="option-desc">Specifies a base URL to use for relative URLs within the document. Any
+relative URL will be joined with this URL.</dd>
+
+
+<dt class="option-term" id="option-mdman---man"><a class="option-anchor" href="#option-mdman---man"></a><code>--man</code> <em>name</em><code>:</code><em>section</em><code>=</code><em>url</em></dt>
+<dd class="option-desc">Specifies a URL to use for the given man page. When the <code>{{man name section}}</code> expression is used, the given URL will be inserted as a link. This
+may be specified multiple times. If a man page reference does not have a
+matching <code>--man</code> entry, then a relative link to a file named <em>name</em><code>.md</code> will
+be used.</dd>
+
+
+<dt class="option-term" id="option-mdman-sources..."><a class="option-anchor" href="#option-mdman-sources..."></a><em>sources...</em></dt>
+<dd class="option-desc">The source input filename, may be specified multiple times.</dd>
+
+
+</dl>
+
+## EXAMPLES
+
+1. Convert the given documents to man pages:
+
+ mdman -t man -o doc doc/mdman.md
diff --git a/src/tools/cargo/crates/mdman/doc/out/mdman.txt b/src/tools/cargo/crates/mdman/doc/out/mdman.txt
new file mode 100644
index 000000000..83fa7de90
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/doc/out/mdman.txt
@@ -0,0 +1,91 @@
+MDMAN(1)
+
+NAME
+ mdman - Converts markdown to a man page
+
+SYNOPSIS
+ mdman [options] -t type -o outdir sources...
+
+DESCRIPTION
+ Converts a markdown file to a man page.
+
+ The source file is first processed as a handlebars
+ <https://handlebarsjs.com/> template. Then, it is processed as markdown
+ into the target format. This supports different output formats, such as
+ troff or plain text.
+
+ Every man page should start with a level-1 header with the man name and
+ section, such as # mdman(1).
+
+ The handlebars template has several special tags to assist with
+ generating the man page:
+
+ o Every block of command-line options must be wrapped between
+ {{#options}} and {{/options}} tags. This tells the processor where
+ the options start and end.
+
+ o Each option must be expressed with a {{#option}} block. The
+    parameters to the block are a sequence of strings indicating the
+ option. For example, {{#option "`-p` _spec_..." "`--package`
+ _spec_..."}} is an option that has two different forms. The text
+ within the string is processed as markdown. It is recommended to use
+ formatting similar to this example.
+
+ The content of the {{#option}} block should contain a detailed
+ description of the option.
+
+ Use the {{/option}} tag to end the option block.
+
+ o References to other man pages should use the {{man name section}}
+ expression. For example, {{man "mdman" 1}} will generate a reference
+ to the mdman(1) man page. For non-troff output, the --man option will
+ tell mdman how to create links to the man page. If there is no
+ matching --man option, then it links to a file named name.md in the
+ same directory.
+
+ o Variables can be set with {{*set name="value"}}. These variables can
+ then be referenced with {{name}} expressions.
+
+ o Partial templates should be placed in a directory named includes next
+ to the source file. Templates can be included with an expression like
+ {{> template-name}}.
+
+ o Other helpers include:
+
+ o {{lower value}} Converts the given value to lowercase.
+
+OPTIONS
+ -t type
+ Specifies the output type. The following output types are supported:
+
+ o man — A troff-style man page. Outputs with a numbered extension
+ (like .1) matching the man page section.
+
+ o md — A markdown file, after all handlebars processing has been
+ finished. Outputs with the .md extension.
+
+ o txt — A text file, rendered for situations where a man page
+ viewer isn't available. Outputs with the .txt extension.
+
+ -o outdir
+    Specifies the directory in which to save the output.
+
+ --url base_url
+ Specifies a base URL to use for relative URLs within the document.
+ Any relative URL will be joined with this URL.
+
+ --man name:section=url
+ Specifies a URL to use for the given man page. When the {{man name
+ section}} expression is used, the given URL will be inserted as a
+ link. This may be specified multiple times. If a man page reference
+ does not have a matching --man entry, then a relative link to a file
+ named name.md will be used.
+
+ sources...
+    The source input filename. This may be specified multiple times.
+
+EXAMPLES
+ 1. Convert the given documents to man pages:
+
+ mdman -t man -o doc doc/mdman.md
+
diff --git a/src/tools/cargo/crates/mdman/src/format.rs b/src/tools/cargo/crates/mdman/src/format.rs
new file mode 100644
index 000000000..7bc9781b9
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/src/format.rs
@@ -0,0 +1,20 @@
+use anyhow::Error;
+
+pub mod man;
+pub mod md;
+pub mod text;
+
+pub trait Formatter {
+ /// Renders the given markdown to the formatter's output.
+ fn render(&self, input: &str) -> Result<String, Error>;
+ /// Renders the start of a block of options (triggered by `{{#options}}`).
+ fn render_options_start(&self) -> &'static str;
+ /// Renders the end of a block of options (triggered by `{{/options}}`).
+ fn render_options_end(&self) -> &'static str;
+ /// Renders an option (triggered by `{{#option}}`).
+ fn render_option(&self, params: &[&str], block: &str, man_name: &str) -> Result<String, Error>;
+ /// Converts a man page reference into markdown that is appropriate for this format.
+ ///
+ /// Triggered by `{{man name section}}`.
+ fn linkify_man_to_md(&self, name: &str, section: u8) -> Result<String, Error>;
+}
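As a quick orientation to this trait, the following is a minimal, hypothetical implementation sketch. It is not part of the patch (the real implementations follow in `man.rs`, `md.rs`, and `text.rs`), and it assumes it would live inside the `format` module so the trait is in scope.

```rust
use anyhow::Error;

use super::Formatter;

/// A hypothetical pass-through formatter, for illustration only.
pub struct PlainFormatter;

impl Formatter for PlainFormatter {
    fn render(&self, input: &str) -> Result<String, Error> {
        // Leave the markdown untouched.
        Ok(input.to_string())
    }

    fn render_options_start(&self) -> &'static str {
        ""
    }

    fn render_options_end(&self) -> &'static str {
        ""
    }

    fn render_option(
        &self,
        params: &[&str],
        block: &str,
        _man_name: &str,
    ) -> Result<String, Error> {
        // One line listing the option forms, then the indented description.
        Ok(format!("{}\n    {}\n", params.join(", "), block.trim()))
    }

    fn linkify_man_to_md(&self, name: &str, section: u8) -> Result<String, Error> {
        Ok(format!("{}({})", name, section))
    }
}
```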
diff --git a/src/tools/cargo/crates/mdman/src/format/man.rs b/src/tools/cargo/crates/mdman/src/format/man.rs
new file mode 100644
index 000000000..9767fdd51
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/src/format/man.rs
@@ -0,0 +1,436 @@
+//! Man-page formatter.
+
+use crate::util::{header_text, parse_name_and_section};
+use crate::EventIter;
+use anyhow::{bail, Error};
+use pulldown_cmark::{Alignment, Event, HeadingLevel, LinkType, Tag};
+use std::fmt::Write;
+use url::Url;
+
+pub struct ManFormatter {
+ url: Option<Url>,
+}
+
+impl ManFormatter {
+ pub fn new(url: Option<Url>) -> ManFormatter {
+ ManFormatter { url }
+ }
+}
+
+impl super::Formatter for ManFormatter {
+ fn render(&self, input: &str) -> Result<String, Error> {
+ ManRenderer::render(input, self.url.clone())
+ }
+
+ fn render_options_start(&self) -> &'static str {
+ // Tell pulldown_cmark to ignore this.
+ // This will be stripped out later.
+ "<![CDATA["
+ }
+
+ fn render_options_end(&self) -> &'static str {
+ "]]>"
+ }
+
+ fn render_option(
+ &self,
+ params: &[&str],
+ block: &str,
+ _man_name: &str,
+ ) -> Result<String, Error> {
+ let rendered_options = params
+ .iter()
+ .map(|param| {
+ let r = self.render(param)?;
+ Ok(r.trim().trim_start_matches(".sp").to_string())
+ })
+ .collect::<Result<Vec<_>, Error>>()?;
+ let rendered_block = self.render(block)?;
+ let rendered_block = rendered_block.trim().trim_start_matches(".sp").trim();
+ // .RS = move left margin to right 4.
+ // .RE = move margin back one level.
+ Ok(format!(
+ "\n.sp\n{}\n.RS 4\n{}\n.RE\n",
+ rendered_options.join(", "),
+ rendered_block
+ ))
+ }
+
+ fn linkify_man_to_md(&self, name: &str, section: u8) -> Result<String, Error> {
+ Ok(format!("`{}`({})", name, section))
+ }
+}
+
+#[derive(Copy, Clone)]
+enum Font {
+ Bold,
+ Italic,
+}
+
+impl Font {
+ fn str_from_stack(font_stack: &[Font]) -> &'static str {
+ let has_bold = font_stack.iter().any(|font| matches!(font, Font::Bold));
+ let has_italic = font_stack.iter().any(|font| matches!(font, Font::Italic));
+ match (has_bold, has_italic) {
+ (false, false) => "\\fR", // roman (normal)
+ (false, true) => "\\fI", // italic
+ (true, false) => "\\fB", // bold
+ (true, true) => "\\f(BI", // bold italic
+ }
+ }
+}
+
+struct ManRenderer<'e> {
+ output: String,
+ parser: EventIter<'e>,
+ font_stack: Vec<Font>,
+}
+
+impl<'e> ManRenderer<'e> {
+ fn render(input: &str, url: Option<Url>) -> Result<String, Error> {
+ let parser = crate::md_parser(input, url);
+ let output = String::with_capacity(input.len() * 3 / 2);
+ let mut mr = ManRenderer {
+ parser,
+ output,
+ font_stack: Vec::new(),
+ };
+ mr.push_man()?;
+ Ok(mr.output)
+ }
+
+ fn push_man(&mut self) -> Result<(), Error> {
+ // If this is true, this is inside a cdata block used for hiding
+ // content from pulldown_cmark.
+ let mut in_cdata = false;
+ // The current list stack. None if unordered, Some if ordered with the
+ // given number as the current index.
+ let mut list: Vec<Option<u64>> = Vec::new();
+ // Used in some cases where spacing isn't desired.
+ let mut suppress_paragraph = false;
+ let mut table_cell_index = 0;
+
+ while let Some((event, range)) = self.parser.next() {
+ let this_suppress_paragraph = suppress_paragraph;
+ suppress_paragraph = false;
+ match event {
+ Event::Start(tag) => {
+ match tag {
+ Tag::Paragraph => {
+ if !this_suppress_paragraph {
+ self.flush();
+ self.output.push_str(".sp\n");
+ }
+ }
+ Tag::Heading(level, ..) => {
+ if level == HeadingLevel::H1 {
+ self.push_top_header()?;
+ } else if level == HeadingLevel::H2 {
+ // Section header
+ let text = header_text(&mut self.parser)?;
+ self.flush();
+ write!(self.output, ".SH \"{}\"\n", text)?;
+ suppress_paragraph = true;
+ } else {
+ // Subsection header
+ let text = header_text(&mut self.parser)?;
+ self.flush();
+ write!(self.output, ".SS \"{}\"\n", text)?;
+ suppress_paragraph = true;
+ }
+ }
+ Tag::BlockQuote => {
+ self.flush();
+ // .RS = move left margin over 3
+ // .ll = shrink line length
+ self.output.push_str(".RS 3\n.ll -5\n.sp\n");
+ suppress_paragraph = true;
+ }
+ Tag::CodeBlock(_kind) => {
+ // space down, indent 4, no-fill mode
+ self.flush();
+ self.output.push_str(".sp\n.RS 4\n.nf\n");
+ }
+ Tag::List(start) => list.push(start),
+ Tag::Item => {
+ // Note: This uses explicit movement instead of .IP
+ // because the spacing on .IP looks weird to me.
+ // space down, indent 4
+ self.flush();
+ self.output.push_str(".sp\n.RS 4\n");
+ match list.last_mut().expect("item must have list start") {
+ // Ordered list.
+ Some(n) => {
+ // move left 4, output the list index number, move right 1.
+ write!(self.output, "\\h'-04' {}.\\h'+01'", n)?;
+ *n += 1;
+ }
+ // Unordered list.
+ None => self.output.push_str("\\h'-04'\\(bu\\h'+02'"),
+ }
+ suppress_paragraph = true;
+ }
+ Tag::FootnoteDefinition(_label) => unimplemented!(),
+ Tag::Table(alignment) => {
+ // Table start
+ // allbox = draw a box around all the cells
+ // tab(:) = Use `:` to separate cell data (instead of tab)
+ // ; = end of options
+ self.output.push_str(
+ "\n.TS\n\
+ allbox tab(:);\n",
+ );
+ let alignments: Vec<_> = alignment
+ .iter()
+ .map(|a| match a {
+ Alignment::Left | Alignment::None => "lt",
+ Alignment::Center => "ct",
+ Alignment::Right => "rt",
+ })
+ .collect();
+ self.output.push_str(&alignments.join(" "));
+ self.output.push_str(".\n");
+ table_cell_index = 0;
+ }
+ Tag::TableHead => {
+ table_cell_index = 0;
+ }
+ Tag::TableRow => {
+ table_cell_index = 0;
+ self.output.push('\n');
+ }
+ Tag::TableCell => {
+ if table_cell_index != 0 {
+ // Separator between columns.
+ self.output.push(':');
+ }
+ // Start a text block.
+ self.output.push_str("T{\n");
+ table_cell_index += 1
+ }
+ Tag::Emphasis => self.push_font(Font::Italic),
+ Tag::Strong => self.push_font(Font::Bold),
+ // Strikethrough isn't usually supported for TTY.
+ Tag::Strikethrough => self.output.push_str("~~"),
+ Tag::Link(link_type, dest_url, _title) => {
+ if dest_url.starts_with('#') {
+ // In a man page, page-relative anchors don't
+ // have much meaning.
+ continue;
+ }
+ match link_type {
+ LinkType::Autolink | LinkType::Email => {
+ // The text is a copy of the URL, which is not needed.
+ match self.parser.next() {
+ Some((Event::Text(_), _range)) => {}
+ _ => bail!("expected text after autolink"),
+ }
+ }
+ LinkType::Inline
+ | LinkType::Reference
+ | LinkType::Collapsed
+ | LinkType::Shortcut => {
+ self.push_font(Font::Italic);
+ }
+ // This is currently unused. This is only
+ // emitted with a broken link callback, but I
+ // felt it is too annoying to escape `[` in
+ // option descriptions.
+ LinkType::ReferenceUnknown
+ | LinkType::CollapsedUnknown
+ | LinkType::ShortcutUnknown => {
+ bail!(
+ "link with missing reference `{}` located at offset {}",
+ dest_url,
+ range.start
+ );
+ }
+ }
+ }
+ Tag::Image(_link_type, _dest_url, _title) => {
+ bail!("images are not currently supported")
+ }
+ }
+ }
+ Event::End(tag) => {
+ match &tag {
+ Tag::Paragraph => self.flush(),
+ Tag::Heading(..) => {}
+ Tag::BlockQuote => {
+ self.flush();
+ // restore left margin, restore line length
+ self.output.push_str(".br\n.RE\n.ll\n");
+ }
+ Tag::CodeBlock(_kind) => {
+ self.flush();
+ // Restore fill mode, move margin back one level.
+ self.output.push_str(".fi\n.RE\n");
+ }
+ Tag::List(_) => {
+ list.pop();
+ }
+ Tag::Item => {
+ self.flush();
+ // Move margin back one level.
+ self.output.push_str(".RE\n");
+ }
+ Tag::FootnoteDefinition(_label) => {}
+ Tag::Table(_) => {
+ // Table end
+ // I don't know why, but the .sp is needed to provide
+ // space with the following content.
+ self.output.push_str("\n.TE\n.sp\n");
+ }
+ Tag::TableHead => {}
+ Tag::TableRow => {}
+ Tag::TableCell => {
+ // End text block.
+ self.output.push_str("\nT}");
+ }
+ Tag::Emphasis | Tag::Strong => self.pop_font(),
+ Tag::Strikethrough => self.output.push_str("~~"),
+ Tag::Link(link_type, dest_url, _title) => {
+ if dest_url.starts_with('#') {
+ continue;
+ }
+ match link_type {
+ LinkType::Autolink | LinkType::Email => {}
+ LinkType::Inline
+ | LinkType::Reference
+ | LinkType::Collapsed
+ | LinkType::Shortcut => {
+ self.pop_font();
+ self.output.push(' ');
+ }
+ _ => {
+ panic!("unexpected tag {:?}", tag);
+ }
+ }
+ write!(self.output, "<{}>", escape(&dest_url)?)?;
+ }
+ Tag::Image(_link_type, _dest_url, _title) => {}
+ }
+ }
+ Event::Text(t) => {
+ self.output.push_str(&escape(&t)?);
+ }
+ Event::Code(t) => {
+ self.push_font(Font::Bold);
+ self.output.push_str(&escape(&t)?);
+ self.pop_font();
+ }
+ Event::Html(t) => {
+ if t.starts_with("<![CDATA[") {
+ // CDATA is a special marker used for handling options.
+ in_cdata = true;
+ } else if in_cdata {
+ if t.trim().ends_with("]]>") {
+ in_cdata = false;
+ } else if !t.trim().is_empty() {
+ self.output.push_str(&t);
+ }
+ } else {
+ self.output.push_str(&escape(&t)?);
+ }
+ }
+ Event::FootnoteReference(_t) => {}
+ Event::SoftBreak => self.output.push('\n'),
+ Event::HardBreak => {
+ self.flush();
+ self.output.push_str(".br\n");
+ }
+ Event::Rule => {
+ self.flush();
+ // \l' **length** ' Draw horizontal line (default underscore).
+ // \n(.lu Gets value from register "lu" (current line length)
+ self.output.push_str("\\l'\\n(.lu'\n");
+ }
+ Event::TaskListMarker(_b) => unimplemented!(),
+ }
+ }
+ Ok(())
+ }
+
+ fn flush(&mut self) {
+ if !self.output.ends_with('\n') {
+ self.output.push('\n');
+ }
+ }
+
+ /// Switch to the given font.
+ ///
+ /// Because the troff sequence `\fP` for switching to the "previous" font
+ /// doesn't support nesting, this needs to emulate it here. This is needed
+ /// for situations like **hi _there_**.
+ fn push_font(&mut self, font: Font) {
+ self.font_stack.push(font);
+ self.output.push_str(Font::str_from_stack(&self.font_stack));
+ }
+
+ fn pop_font(&mut self) {
+ self.font_stack.pop();
+ self.output.push_str(Font::str_from_stack(&self.font_stack));
+ }
+
+ /// Parse and render the first top-level header of the document.
+ fn push_top_header(&mut self) -> Result<(), Error> {
+ // This enables the tbl preprocessor for tables.
+ // This seems to be enabled by default on every modern system I could
+ // find, but it doesn't seem to hurt to enable this.
+ self.output.push_str("'\\\" t\n");
+ // Extract the name of the man page.
+ let text = header_text(&mut self.parser)?;
+ let (name, section) = parse_name_and_section(&text)?;
+ // .TH = Table header
+ // .nh = disable hyphenation
+ // .ad l = Left-adjust mode (disable justified).
+ // .ss sets sentence_space_size to 0 (prevents double spaces after .
+ // if . is last on the line)
+ write!(
+ self.output,
+ ".TH \"{}\" \"{}\"\n\
+ .nh\n\
+ .ad l\n\
+ .ss \\n[.ss] 0\n",
+ escape(&name.to_uppercase())?,
+ section
+ )?;
+ Ok(())
+ }
+}
+
+fn escape(s: &str) -> Result<String, Error> {
+ // Note: Possible source on output escape sequences: https://man7.org/linux/man-pages/man7/groff_char.7.html.
+ // Otherwise, use generic escaping in the form `\[u1EE7]` or `\[u1F994]`.
+
+ let mut replaced = s
+ .replace('\\', "\\(rs")
+ .replace('-', "\\-")
+ .replace('\u{00A0}', "\\ ") // non-breaking space (non-stretchable)
+ .replace('–', "\\[en]") // \u{2013} en-dash
+ .replace('—', "\\[em]") // \u{2014} em-dash
+ .replace('‘', "\\[oq]") // \u{2018} left single quote
+ .replace('’', "\\[cq]") // \u{2019} right single quote or apostrophe
+ .replace('“', "\\[lq]") // \u{201C} left double quote
+ .replace('”', "\\[rq]") // \u{201D} right double quote
+ .replace('…', "\\[u2026]") // \u{2026} ellipsis
+ .replace('│', "|") // \u{2502} box drawing light vertical (could use \[br])
+ .replace('├', "|") // \u{251C} box drawings light vertical and right
+ .replace('└', "`") // \u{2514} box drawings light up and right
+ .replace('─', "\\-") // \u{2500} box drawing light horizontal
+ ;
+ if replaced.starts_with('.') {
+ replaced = format!("\\&.{}", &replaced[1..]);
+ }
+
+ if let Some(ch) = replaced.chars().find(|ch| {
+ !matches!(ch, '\n' | ' ' | '!'..='/' | '0'..='9'
+ | ':'..='@' | 'A'..='Z' | '['..='`' | 'a'..='z' | '{'..='~')
+ }) {
+ bail!(
+ "character {:?} is not allowed (update the translation table if needed)",
+ ch
+ );
+ }
+ Ok(replaced)
+}
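The `Font::str_from_stack` table above is what makes nested emphasis such as `**bold _italic_**` work, since troff's `\fP` (previous font) escape does not nest. A hypothetical in-module test illustrating the mapping (`Font` is private, so a test like this could only live alongside the code):

```rust
#[test]
fn font_stack_illustration() {
    // Roman when the stack is empty, bold-italic when both fonts are pushed.
    assert_eq!(Font::str_from_stack(&[]), "\\fR");
    assert_eq!(Font::str_from_stack(&[Font::Italic]), "\\fI");
    assert_eq!(Font::str_from_stack(&[Font::Bold]), "\\fB");
    assert_eq!(Font::str_from_stack(&[Font::Bold, Font::Italic]), "\\f(BI");
}
```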
diff --git a/src/tools/cargo/crates/mdman/src/format/md.rs b/src/tools/cargo/crates/mdman/src/format/md.rs
new file mode 100644
index 000000000..0e1c49837
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/src/format/md.rs
@@ -0,0 +1,112 @@
+//! Markdown formatter.
+
+use crate::util::unwrap;
+use crate::ManMap;
+use anyhow::{bail, format_err, Error};
+use std::fmt::Write;
+
+pub struct MdFormatter {
+ man_map: ManMap,
+}
+
+impl MdFormatter {
+ pub fn new(man_map: ManMap) -> MdFormatter {
+ MdFormatter { man_map }
+ }
+}
+
+impl MdFormatter {
+ fn render_html(&self, input: &str) -> Result<String, Error> {
+ let parser = crate::md_parser(input, None);
+ let mut html_output: String = String::with_capacity(input.len() * 3 / 2);
+ pulldown_cmark::html::push_html(&mut html_output, parser.map(|(e, _r)| e));
+ Ok(html_output)
+ }
+}
+
+impl super::Formatter for MdFormatter {
+ fn render(&self, input: &str) -> Result<String, Error> {
+ Ok(input.replace("\r\n", "\n"))
+ }
+
+ fn render_options_start(&self) -> &'static str {
+ "<dl>"
+ }
+
+ fn render_options_end(&self) -> &'static str {
+ "</dl>"
+ }
+
+ fn render_option(&self, params: &[&str], block: &str, man_name: &str) -> Result<String, Error> {
+ let mut result = String::new();
+ fn unwrap_p(t: &str) -> &str {
+ unwrap(t, "<p>", "</p>")
+ }
+
+ for param in params {
+ let rendered = self.render_html(param)?;
+ let no_p = unwrap_p(&rendered);
+ // split out first term to use as the id.
+ let first = no_p
+ .split_whitespace()
+ .next()
+ .ok_or_else(|| format_err!("did not expect option `{}` to be empty", param))?;
+ let no_tags = trim_tags(first);
+ if no_tags.is_empty() {
+ bail!("unexpected empty option with no tags `{}`", param);
+ }
+ let id = format!("option-{}-{}", man_name, no_tags);
+ write!(
+ result,
+ "<dt class=\"option-term\" id=\"{ID}\">\
+ <a class=\"option-anchor\" href=\"#{ID}\"></a>{OPTION}</dt>\n",
+ ID = id,
+ OPTION = no_p
+ )?;
+ }
+ let rendered_block = self.render_html(block)?;
+ write!(
+ result,
+ "<dd class=\"option-desc\">{}</dd>\n",
+ unwrap_p(&rendered_block)
+ )?;
+ Ok(result)
+ }
+
+ fn linkify_man_to_md(&self, name: &str, section: u8) -> Result<String, Error> {
+ let s = match self.man_map.get(&(name.to_string(), section)) {
+ Some(link) => format!("[{}({})]({})", name, section, link),
+ None => format!("[{}({})]({}.html)", name, section, name),
+ };
+ Ok(s)
+ }
+}
+
+fn trim_tags(s: &str) -> String {
+ // This is a hack. It removes all HTML tags.
+ let mut in_tag = false;
+ let mut in_char_ref = false;
+ s.chars()
+ .filter(|&ch| match ch {
+ '<' if in_tag => panic!("unexpected nested tag"),
+ '&' if in_char_ref => panic!("unexpected nested char ref"),
+ '<' => {
+ in_tag = true;
+ false
+ }
+ '&' => {
+ in_char_ref = true;
+ false
+ }
+ '>' if in_tag => {
+ in_tag = false;
+ false
+ }
+ ';' if in_char_ref => {
+ in_char_ref = false;
+ false
+ }
+ _ => !in_tag && !in_char_ref,
+ })
+ .collect()
+}
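The anchor ids seen in the generated `doc/out/mdman.md` (for example `option-mdman--t`) come from `trim_tags` applied to the first token of the rendered option. A hypothetical in-module test sketching that behavior (`trim_tags` is private to this file):

```rust
#[test]
fn trim_tags_illustration() {
    // "`-p`" renders to "<code>-p</code>"; only the text survives, giving
    // anchor ids like `option-mdman--p`.
    assert_eq!(trim_tags("<code>-p</code>"), "-p");
    // HTML character references are dropped as well.
    assert_eq!(trim_tags("a&nbsp;b"), "ab");
}
```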
diff --git a/src/tools/cargo/crates/mdman/src/format/text.rs b/src/tools/cargo/crates/mdman/src/format/text.rs
new file mode 100644
index 000000000..ae07985a6
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/src/format/text.rs
@@ -0,0 +1,605 @@
+//! Text formatter.
+
+use crate::util::{header_text, unwrap};
+use crate::EventIter;
+use anyhow::{bail, Error};
+use pulldown_cmark::{Alignment, Event, HeadingLevel, LinkType, Tag};
+use std::fmt::Write;
+use std::mem;
+use url::Url;
+
+pub struct TextFormatter {
+ url: Option<Url>,
+}
+
+impl TextFormatter {
+ pub fn new(url: Option<Url>) -> TextFormatter {
+ TextFormatter { url }
+ }
+}
+
+impl super::Formatter for TextFormatter {
+ fn render(&self, input: &str) -> Result<String, Error> {
+ TextRenderer::render(input, self.url.clone(), 0)
+ }
+
+ fn render_options_start(&self) -> &'static str {
+ // Tell pulldown_cmark to ignore this.
+ // This will be stripped out later.
+ "<![CDATA["
+ }
+
+ fn render_options_end(&self) -> &'static str {
+ "]]>"
+ }
+
+ fn render_option(
+ &self,
+ params: &[&str],
+ block: &str,
+ _man_name: &str,
+ ) -> Result<String, Error> {
+ let rendered_options = params
+ .iter()
+ .map(|param| TextRenderer::render(param, self.url.clone(), 0))
+ .collect::<Result<Vec<_>, Error>>()?;
+ let trimmed: Vec<_> = rendered_options.iter().map(|o| o.trim()).collect();
+ // Wrap in HTML tags, they will be stripped out during rendering.
+ Ok(format!(
+ "<dt>{}</dt>\n<dd>{}</dd>\n<br>\n",
+ trimmed.join(", "),
+ block
+ ))
+ }
+
+ fn linkify_man_to_md(&self, name: &str, section: u8) -> Result<String, Error> {
+ Ok(format!("`{}`({})", name, section))
+ }
+}
+
+struct TextRenderer<'e> {
+ output: String,
+ indent: usize,
+ /// The current line being written. Once a line break is encountered (such
+ /// as starting a new paragraph), this will be written to `output` via
+ /// `flush`.
+ line: String,
+ /// The current word being written. Once a break is encountered (such as a
+ /// space) this will be written to `line` via `flush_word`.
+ word: String,
+ parser: EventIter<'e>,
+ /// The base URL used for relative URLs.
+ url: Option<Url>,
+ table: Table,
+}
+
+impl<'e> TextRenderer<'e> {
+ fn render(input: &str, url: Option<Url>, indent: usize) -> Result<String, Error> {
+ let parser = crate::md_parser(input, url.clone());
+ let output = String::with_capacity(input.len() * 3 / 2);
+ let mut mr = TextRenderer {
+ output,
+ indent,
+ line: String::new(),
+ word: String::new(),
+ parser,
+ url,
+ table: Table::new(),
+ };
+ mr.push_md()?;
+ Ok(mr.output)
+ }
+
+ fn push_md(&mut self) -> Result<(), Error> {
+ // If this is true, this is inside a cdata block used for hiding
+ // content from pulldown_cmark.
+ let mut in_cdata = false;
+ // The current list stack. None if unordered, Some if ordered with the
+ // given number as the current index.
+ let mut list: Vec<Option<u64>> = Vec::new();
+ // Used in some cases where spacing isn't desired.
+ let mut suppress_paragraph = false;
+ // Whether or not word-wrapping is enabled.
+ let mut wrap_text = true;
+
+ while let Some((event, range)) = self.parser.next() {
+ let this_suppress_paragraph = suppress_paragraph;
+ // Always reset suppression, even if the next event isn't a
+ // paragraph. This is in essence, a 1-token lookahead where the
+ // suppression is only enabled if the next event is a paragraph.
+ suppress_paragraph = false;
+ match event {
+ Event::Start(tag) => {
+ match tag {
+ Tag::Paragraph => {
+ if !this_suppress_paragraph {
+ self.flush();
+ }
+ }
+ Tag::Heading(level, ..) => {
+ self.flush();
+ if level == HeadingLevel::H1 {
+ let text = header_text(&mut self.parser)?;
+ self.push_to_line(&text.to_uppercase());
+ self.hard_break();
+ self.hard_break();
+ } else if level == HeadingLevel::H2 {
+ let text = header_text(&mut self.parser)?;
+ self.push_to_line(&text.to_uppercase());
+ self.flush();
+ self.indent = 7;
+ } else {
+ let text = header_text(&mut self.parser)?;
+ self.push_indent((level as usize - 2) * 3);
+ self.push_to_line(&text);
+ self.flush();
+ self.indent = (level as usize - 1) * 3 + 1;
+ }
+ }
+ Tag::BlockQuote => {
+ self.indent += 3;
+ }
+ Tag::CodeBlock(_kind) => {
+ self.flush();
+ wrap_text = false;
+ self.indent += 4;
+ }
+ Tag::List(start) => list.push(start),
+ Tag::Item => {
+ self.flush();
+ match list.last_mut().expect("item must have list start") {
+ // Ordered list.
+ Some(n) => {
+ self.push_indent(self.indent);
+ write!(self.line, "{}.", n)?;
+ *n += 1;
+ }
+ // Unordered list.
+ None => {
+ self.push_indent(self.indent);
+ self.push_to_line("o ")
+ }
+ }
+ self.indent += 3;
+ suppress_paragraph = true;
+ }
+ Tag::FootnoteDefinition(_label) => unimplemented!(),
+ Tag::Table(alignment) => {
+ assert!(self.table.alignment.is_empty());
+ self.flush();
+ self.table.alignment.extend(alignment);
+ let table = self.table.process(&mut self.parser, self.indent)?;
+ self.output.push_str(&table);
+ self.hard_break();
+ self.table = Table::new();
+ }
+ Tag::TableHead | Tag::TableRow | Tag::TableCell => {
+ bail!("unexpected table element")
+ }
+ Tag::Emphasis => {}
+ Tag::Strong => {}
+ // Strikethrough isn't usually supported for TTY.
+ Tag::Strikethrough => self.word.push_str("~~"),
+ Tag::Link(link_type, dest_url, _title) => {
+ if dest_url.starts_with('#') {
+ // In a man page, page-relative anchors don't
+ // have much meaning.
+ continue;
+ }
+ match link_type {
+ LinkType::Autolink | LinkType::Email => {
+ // The text is a copy of the URL, which is not needed.
+ match self.parser.next() {
+ Some((Event::Text(_), _range)) => {}
+ _ => bail!("expected text after autolink"),
+ }
+ }
+ LinkType::Inline
+ | LinkType::Reference
+ | LinkType::Collapsed
+ | LinkType::Shortcut => {}
+ // This is currently unused. This is only
+ // emitted with a broken link callback, but I
+ // felt it is too annoying to escape `[` in
+ // option descriptions.
+ LinkType::ReferenceUnknown
+ | LinkType::CollapsedUnknown
+ | LinkType::ShortcutUnknown => {
+ bail!(
+ "link with missing reference `{}` located at offset {}",
+ dest_url,
+ range.start
+ );
+ }
+ }
+ }
+ Tag::Image(_link_type, _dest_url, _title) => {
+ bail!("images are not currently supported")
+ }
+ }
+ }
+ Event::End(tag) => match &tag {
+ Tag::Paragraph => {
+ self.flush();
+ self.hard_break();
+ }
+ Tag::Heading(..) => {}
+ Tag::BlockQuote => {
+ self.indent -= 3;
+ }
+ Tag::CodeBlock(_kind) => {
+ self.hard_break();
+ wrap_text = true;
+ self.indent -= 4;
+ }
+ Tag::List(_) => {
+ list.pop();
+ }
+ Tag::Item => {
+ self.flush();
+ self.indent -= 3;
+ self.hard_break();
+ }
+ Tag::FootnoteDefinition(_label) => {}
+ Tag::Table(_) => {}
+ Tag::TableHead => {}
+ Tag::TableRow => {}
+ Tag::TableCell => {}
+ Tag::Emphasis => {}
+ Tag::Strong => {}
+ Tag::Strikethrough => self.word.push_str("~~"),
+ Tag::Link(link_type, dest_url, _title) => {
+ if dest_url.starts_with('#') {
+ continue;
+ }
+ match link_type {
+ LinkType::Autolink | LinkType::Email => {}
+ LinkType::Inline
+ | LinkType::Reference
+ | LinkType::Collapsed
+ | LinkType::Shortcut => self.flush_word(),
+ _ => {
+ panic!("unexpected tag {:?}", tag);
+ }
+ }
+ self.flush_word();
+ write!(self.word, "<{}>", dest_url)?;
+ }
+ Tag::Image(_link_type, _dest_url, _title) => {}
+ },
+ Event::Text(t) | Event::Code(t) => {
+ if wrap_text {
+ let chunks = split_chunks(&t);
+ for chunk in chunks {
+ if chunk == " " {
+ self.flush_word();
+ } else {
+ self.word.push_str(chunk);
+ }
+ }
+ } else {
+ for line in t.lines() {
+ self.push_indent(self.indent);
+ self.push_to_line(line);
+ self.flush();
+ }
+ }
+ }
+ Event::Html(t) => {
+ if t.starts_with("<![CDATA[") {
+ // CDATA is a special marker used for handling options.
+ in_cdata = true;
+ self.flush();
+ } else if in_cdata {
+ if t.trim().ends_with("]]>") {
+ in_cdata = false;
+ } else {
+ let trimmed = t.trim();
+ if trimmed.is_empty() {
+ continue;
+ }
+ if trimmed == "<br>" {
+ self.hard_break();
+ } else if trimmed.starts_with("<dt>") {
+ let opts = unwrap(trimmed, "<dt>", "</dt>");
+ self.push_indent(self.indent);
+ self.push_to_line(opts);
+ self.flush();
+ } else if trimmed.starts_with("<dd>") {
+ let mut def = String::new();
+ while let Some((Event::Html(t), _range)) = self.parser.next() {
+ if t.starts_with("</dd>") {
+ break;
+ }
+ def.push_str(&t);
+ }
+ let rendered =
+ TextRenderer::render(&def, self.url.clone(), self.indent + 4)?;
+ self.push_to_line(rendered.trim_end());
+ self.flush();
+ } else {
+ self.push_to_line(&t);
+ self.flush();
+ }
+ }
+ } else {
+ self.push_to_line(&t);
+ self.flush();
+ }
+ }
+ Event::FootnoteReference(_t) => {}
+ Event::SoftBreak => self.flush_word(),
+ Event::HardBreak => self.flush(),
+ Event::Rule => {
+ self.flush();
+ self.push_indent(self.indent);
+ self.push_to_line(&"_".repeat(79 - self.indent * 2));
+ self.flush();
+ }
+ Event::TaskListMarker(_b) => unimplemented!(),
+ }
+ }
+ Ok(())
+ }
+
+ fn flush(&mut self) {
+ self.flush_word();
+ if !self.line.is_empty() {
+ self.output.push_str(&self.line);
+ self.output.push('\n');
+ self.line.clear();
+ }
+ }
+
+ fn hard_break(&mut self) {
+ self.flush();
+ if !self.output.ends_with("\n\n") {
+ self.output.push('\n');
+ }
+ }
+
+ fn flush_word(&mut self) {
+ if self.word.is_empty() {
+ return;
+ }
+ if self.line.len() + self.word.len() >= 79 {
+ self.output.push_str(&self.line);
+ self.output.push('\n');
+ self.line.clear();
+ }
+ if self.line.is_empty() {
+ self.push_indent(self.indent);
+ self.line.push_str(&self.word);
+ } else {
+ self.line.push(' ');
+ self.line.push_str(&self.word);
+ }
+ self.word.clear();
+ }
+
+ fn push_indent(&mut self, indent: usize) {
+ for _ in 0..indent {
+ self.line.push(' ');
+ }
+ }
+
+ fn push_to_line(&mut self, text: &str) {
+ self.flush_word();
+ self.line.push_str(text);
+ }
+}
+
+/// Splits the text on whitespace.
+///
+/// Consecutive whitespace is collapsed to a single ' ', and is included as a
+/// separate element in the result.
+fn split_chunks(text: &str) -> Vec<&str> {
+ let mut result = Vec::new();
+ let mut start = 0;
+ while start < text.len() {
+ match text[start..].find(' ') {
+ Some(i) => {
+ if i != 0 {
+ result.push(&text[start..start + i]);
+ }
+ result.push(" ");
+ // Skip past whitespace.
+ match text[start + i..].find(|c| c != ' ') {
+ Some(n) => {
+ start = start + i + n;
+ }
+ None => {
+ break;
+ }
+ }
+ }
+ None => {
+ result.push(&text[start..]);
+ break;
+ }
+ }
+ }
+ result
+}
+
+struct Table {
+ alignment: Vec<Alignment>,
+ rows: Vec<Vec<String>>,
+ row: Vec<String>,
+ cell: String,
+}
+
+impl Table {
+ fn new() -> Table {
+ Table {
+ alignment: Vec::new(),
+ rows: Vec::new(),
+ row: Vec::new(),
+ cell: String::new(),
+ }
+ }
+
+ /// Processes table events and generates a text table.
+ fn process(&mut self, parser: &mut EventIter<'_>, indent: usize) -> Result<String, Error> {
+ while let Some((event, _range)) = parser.next() {
+ match event {
+ Event::Start(tag) => match tag {
+ Tag::TableHead
+ | Tag::TableRow
+ | Tag::TableCell
+ | Tag::Emphasis
+ | Tag::Strong => {}
+ Tag::Strikethrough => self.cell.push_str("~~"),
+ // Links not yet supported, they usually won't fit.
+ Tag::Link(_, _, _) => {}
+ _ => bail!("unexpected tag in table: {:?}", tag),
+ },
+ Event::End(tag) => match tag {
+ Tag::Table(_) => return self.render(indent),
+ Tag::TableCell => {
+ let cell = mem::replace(&mut self.cell, String::new());
+ self.row.push(cell);
+ }
+ Tag::TableHead | Tag::TableRow => {
+ let row = mem::replace(&mut self.row, Vec::new());
+ self.rows.push(row);
+ }
+ Tag::Strikethrough => self.cell.push_str("~~"),
+ _ => {}
+ },
+ Event::Text(t) | Event::Code(t) => {
+ self.cell.push_str(&t);
+ }
+ Event::Html(t) => bail!("html unsupported in tables: {:?}", t),
+ _ => bail!("unexpected event in table: {:?}", event),
+ }
+ }
+ bail!("table end not reached");
+ }
+
+ fn render(&self, indent: usize) -> Result<String, Error> {
+ // This is an extremely primitive layout routine.
+ // First compute the potential maximum width of each cell.
+ // 2 for 1 space margin on left and right.
+ let width_acc = vec![2; self.alignment.len()];
+ let mut col_widths = self
+ .rows
+ .iter()
+ .map(|row| row.iter().map(|cell| cell.len()))
+ .fold(width_acc, |mut acc, row| {
+ acc.iter_mut()
+ .zip(row)
+ // +3 for left/right margin and | symbol
+ .for_each(|(a, b)| *a = (*a).max(b + 3));
+ acc
+ });
+ // Shrink each column until it fits the total width, proportional to
+ // the columns total percent width.
+ let max_width = 78 - indent;
+ // Include total len for | characters, and +1 for final |.
+ let total_width = col_widths.iter().sum::<usize>() + col_widths.len() + 1;
+ if total_width > max_width {
+ let to_shrink = total_width - max_width;
+ // Compute percentage widths, and shrink each column based on its
+ // total percentage.
+ for width in &mut col_widths {
+ let percent = *width as f64 / total_width as f64;
+ *width -= (to_shrink as f64 * percent).ceil() as usize;
+ }
+ }
+ // Start rendering.
+ let mut result = String::new();
+
+ // Draw the horizontal line separating each row.
+ let mut row_line = String::new();
+ row_line.push_str(&" ".repeat(indent));
+ row_line.push('+');
+ let lines = col_widths
+ .iter()
+ .map(|width| "-".repeat(*width))
+ .collect::<Vec<_>>();
+ row_line.push_str(&lines.join("+"));
+ row_line.push('+');
+ row_line.push('\n');
+
+ // Draw top of the table.
+ result.push_str(&row_line);
+ // Draw each row.
+ for row in &self.rows {
+ // Word-wrap and fill each column as needed.
+ let filled = fill_row(row, &col_widths, &self.alignment);
+ // Need to transpose the cells across rows for cells that span
+ // multiple rows.
+ let height = filled.iter().map(|c| c.len()).max().unwrap();
+ for row_i in 0..height {
+ result.push_str(&" ".repeat(indent));
+ result.push('|');
+ for filled_row in &filled {
+ let cell = &filled_row[row_i];
+ result.push_str(cell);
+ result.push('|');
+ }
+ result.push('\n');
+ }
+ result.push_str(&row_line);
+ }
+ Ok(result)
+ }
+}
+
+/// Formats a row, filling cells with spaces and word-wrapping text.
+///
+/// Returns a vec of cells, where each cell is split into multiple lines.
+fn fill_row(row: &[String], col_widths: &[usize], alignment: &[Alignment]) -> Vec<Vec<String>> {
+ let mut cell_lines = row
+ .iter()
+ .zip(col_widths)
+ .zip(alignment)
+ .map(|((cell, width), alignment)| fill_cell(cell, *width - 2, *alignment))
+ .collect::<Vec<_>>();
+ // Fill each cell to match the maximum vertical height of the tallest cell.
+ let max_lines = cell_lines.iter().map(|cell| cell.len()).max().unwrap();
+ for (cell, width) in cell_lines.iter_mut().zip(col_widths) {
+ if cell.len() < max_lines {
+ cell.extend(std::iter::repeat(" ".repeat(*width)).take(max_lines - cell.len()));
+ }
+ }
+ cell_lines
+}
+
+/// Formats a cell. Word-wraps based on width, and adjusts based on alignment.
+///
+/// Returns a vec of lines for the cell.
+fn fill_cell(text: &str, width: usize, alignment: Alignment) -> Vec<String> {
+ let fill_width = |text: &str| match alignment {
+ Alignment::None | Alignment::Left => format!(" {:<width$} ", text, width = width),
+ Alignment::Center => format!(" {:^width$} ", text, width = width),
+ Alignment::Right => format!(" {:>width$} ", text, width = width),
+ };
+ if text.len() < width {
+ // No wrapping necessary, just format.
+ vec![fill_width(text)]
+ } else {
+ // Word-wrap the cell.
+ let mut result = Vec::new();
+ let mut line = String::new();
+ for word in text.split_whitespace() {
+ if line.len() + word.len() >= width {
+ // todo: word.len() > width
+ result.push(fill_width(&line));
+ line.clear();
+ }
+ if line.is_empty() {
+ line.push_str(word);
+ } else {
+ line.push(' ');
+ line.push_str(&word);
+ }
+ }
+ if !line.is_empty() {
+ result.push(fill_width(&line));
+ }
+
+ result
+ }
+}
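Word wrapping in this formatter hinges on `split_chunks`, which collapses runs of spaces into single separator elements that `flush_word` later turns into line breaks near column 79. A hypothetical in-module test sketching the splitting (the function is private):

```rust
#[test]
fn split_chunks_illustration() {
    // Runs of spaces collapse to a single " " element between words.
    assert_eq!(split_chunks("foo bar"), vec!["foo", " ", "bar"]);
    assert_eq!(split_chunks("foo   bar baz"), vec!["foo", " ", "bar", " ", "baz"]);
}
```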
diff --git a/src/tools/cargo/crates/mdman/src/hbs.rs b/src/tools/cargo/crates/mdman/src/hbs.rs
new file mode 100644
index 000000000..81ad7ee45
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/src/hbs.rs
@@ -0,0 +1,215 @@
+//! Handlebars template processing.
+
+use crate::format::Formatter;
+use anyhow::Error;
+use handlebars::{
+ handlebars_helper, Context, Decorator, Handlebars, Helper, HelperDef, HelperResult, Output,
+ RenderContext, RenderError, Renderable,
+};
+use std::collections::HashMap;
+use std::path::Path;
+
+type FormatterRef<'a> = &'a (dyn Formatter + Send + Sync);
+
+/// Processes the handlebars template at the given file.
+pub fn expand(file: &Path, formatter: FormatterRef) -> Result<String, Error> {
+ let mut handlebars = Handlebars::new();
+ handlebars.set_strict_mode(true);
+ handlebars.register_helper("lower", Box::new(lower));
+ handlebars.register_helper("options", Box::new(OptionsHelper { formatter }));
+ handlebars.register_helper("option", Box::new(OptionHelper { formatter }));
+ handlebars.register_helper("man", Box::new(ManLinkHelper { formatter }));
+ handlebars.register_decorator("set", Box::new(set_decorator));
+ handlebars.register_template_file("template", file)?;
+ let includes = file.parent().unwrap().join("includes");
+ handlebars.register_templates_directory(".md", includes)?;
+ let man_name = file
+ .file_stem()
+ .expect("expected filename")
+ .to_str()
+ .expect("utf8 filename")
+ .to_string();
+ let data = HashMap::from([("man_name", man_name)]);
+ let expanded = handlebars.render("template", &data)?;
+ Ok(expanded)
+}
+
+/// Helper for `{{#options}}` block.
+struct OptionsHelper<'a> {
+ formatter: FormatterRef<'a>,
+}
+
+impl HelperDef for OptionsHelper<'_> {
+ fn call<'reg: 'rc, 'rc>(
+ &self,
+ h: &Helper<'reg, 'rc>,
+ r: &'reg Handlebars<'reg>,
+ ctx: &'rc Context,
+ rc: &mut RenderContext<'reg, 'rc>,
+ out: &mut dyn Output,
+ ) -> HelperResult {
+ if in_options(rc) {
+ return Err(RenderError::new("options blocks cannot be nested"));
+ }
+ // Prevent nested {{#options}}.
+ set_in_context(rc, "__MDMAN_IN_OPTIONS", serde_json::Value::Bool(true));
+ let s = self.formatter.render_options_start();
+ out.write(&s)?;
+ let t = match h.template() {
+ Some(t) => t,
+ None => return Err(RenderError::new("options block must not be empty")),
+ };
+ let block = t.renders(r, ctx, rc)?;
+ out.write(&block)?;
+
+ let s = self.formatter.render_options_end();
+ out.write(&s)?;
+ remove_from_context(rc, "__MDMAN_IN_OPTIONS");
+ Ok(())
+ }
+}
+
+/// Whether or not the context is currently inside a `{{#options}}` block.
+fn in_options(rc: &RenderContext<'_, '_>) -> bool {
+ rc.context()
+ .map_or(false, |ctx| ctx.data().get("__MDMAN_IN_OPTIONS").is_some())
+}
+
+/// Helper for `{{#option}}` block.
+struct OptionHelper<'a> {
+ formatter: FormatterRef<'a>,
+}
+
+impl HelperDef for OptionHelper<'_> {
+ fn call<'reg: 'rc, 'rc>(
+ &self,
+ h: &Helper<'reg, 'rc>,
+ r: &'reg Handlebars<'reg>,
+ ctx: &'rc Context,
+ rc: &mut RenderContext<'reg, 'rc>,
+ out: &mut dyn Output,
+ ) -> HelperResult {
+ if !in_options(rc) {
+ return Err(RenderError::new("option must be in options block"));
+ }
+ let params = h.params();
+ if params.is_empty() {
+ return Err(RenderError::new(
+ "option block must have at least one param",
+ ));
+ }
+ // Convert params to strings.
+ let params = params
+ .iter()
+ .map(|param| {
+ param
+ .value()
+ .as_str()
+ .ok_or_else(|| RenderError::new("option params must be strings"))
+ })
+ .collect::<Result<Vec<&str>, RenderError>>()?;
+ let t = match h.template() {
+ Some(t) => t,
+ None => return Err(RenderError::new("option block must not be empty")),
+ };
+ // Render the block.
+ let block = t.renders(r, ctx, rc)?;
+
+ // Get the name of this page.
+ let man_name = ctx
+ .data()
+ .get("man_name")
+ .expect("expected man_name in context")
+ .as_str()
+ .expect("expect man_name str");
+
+ // Ask the formatter to convert this option to its format.
+ let option = self
+ .formatter
+ .render_option(&params, &block, man_name)
+ .map_err(|e| RenderError::new(format!("option render failed: {}", e)))?;
+ out.write(&option)?;
+ Ok(())
+ }
+}
+
+/// Helper for `{{man name section}}` expression.
+struct ManLinkHelper<'a> {
+ formatter: FormatterRef<'a>,
+}
+
+impl HelperDef for ManLinkHelper<'_> {
+ fn call<'reg: 'rc, 'rc>(
+ &self,
+ h: &Helper<'reg, 'rc>,
+ _r: &'reg Handlebars<'reg>,
+ _ctx: &'rc Context,
+ _rc: &mut RenderContext<'reg, 'rc>,
+ out: &mut dyn Output,
+ ) -> HelperResult {
+ let params = h.params();
+ if params.len() != 2 {
+ return Err(RenderError::new("{{man}} must have two arguments"));
+ }
+ let name = params[0]
+ .value()
+ .as_str()
+ .ok_or_else(|| RenderError::new("man link name must be a string"))?;
+ let section = params[1]
+ .value()
+ .as_u64()
+ .ok_or_else(|| RenderError::new("man link section must be an integer"))?;
+ let section =
+ u8::try_from(section).map_err(|_e| RenderError::new("section number too large"))?;
+ let link = self
+ .formatter
+ .linkify_man_to_md(name, section)
+ .map_err(|e| RenderError::new(format!("failed to linkify man: {}", e)))?;
+ out.write(&link)?;
+ Ok(())
+ }
+}
+
+/// `{{*set var=value}}` decorator.
+///
+/// This sets a variable to a value within the template context.
+fn set_decorator(
+ d: &Decorator,
+ _: &Handlebars,
+ _ctx: &Context,
+ rc: &mut RenderContext,
+) -> Result<(), RenderError> {
+ let data_to_set = d.hash();
+ for (k, v) in data_to_set {
+ set_in_context(rc, k, v.value().clone());
+ }
+ Ok(())
+}
+
+/// Sets a variable to a value within the context.
+fn set_in_context(rc: &mut RenderContext, key: &str, value: serde_json::Value) {
+ let mut ctx = match rc.context() {
+ Some(c) => (*c).clone(),
+ None => Context::wraps(serde_json::Value::Object(serde_json::Map::new())).unwrap(),
+ };
+ if let serde_json::Value::Object(m) = ctx.data_mut() {
+ m.insert(key.to_string(), value);
+ rc.set_context(ctx);
+ } else {
+ panic!("expected object in context");
+ }
+}
+
+/// Removes a variable from the context.
+fn remove_from_context(rc: &mut RenderContext, key: &str) {
+ let ctx = rc.context().expect("cannot remove from null context");
+ let mut ctx = (*ctx).clone();
+ if let serde_json::Value::Object(m) = ctx.data_mut() {
+ m.remove(key);
+ rc.set_context(ctx);
+ } else {
+ panic!("expected object in context");
+ }
+}
+
+handlebars_helper!(lower: |s: str| s.to_lowercase());
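As an aside, the `lower` helper defined just above can be exercised on its own. This is a minimal sketch against the handlebars crate's documented `register_helper`/`render_template` API; the template string and the `title` data field are made up for illustration:

```rust
use handlebars::{handlebars_helper, Handlebars};
use serde_json::json;

// Same shape as the helper in hbs.rs: lowercases its string argument.
handlebars_helper!(lower: |s: str| s.to_lowercase());

fn main() {
    let mut hbs = Handlebars::new();
    hbs.register_helper("lower", Box::new(lower));
    // `{{lower title}}` renders the lowercased value of `title`.
    let out = hbs
        .render_template("{{lower title}}", &json!({ "title": "MDMAN" }))
        .unwrap();
    assert_eq!(out, "mdman");
}
```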
diff --git a/src/tools/cargo/crates/mdman/src/lib.rs b/src/tools/cargo/crates/mdman/src/lib.rs
new file mode 100644
index 000000000..01c3c8d31
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/src/lib.rs
@@ -0,0 +1,122 @@
+//! mdman markdown to man converter.
+
+use anyhow::{bail, Context, Error};
+use pulldown_cmark::{CowStr, Event, LinkType, Options, Parser, Tag};
+use std::collections::HashMap;
+use std::fs;
+use std::io::{self, BufRead};
+use std::ops::Range;
+use std::path::Path;
+use url::Url;
+
+mod format;
+mod hbs;
+mod util;
+
+use format::Formatter;
+
+/// Mapping of `(name, section)` of a man page to a URL.
+pub type ManMap = HashMap<(String, u8), String>;
+
+/// A man section.
+pub type Section = u8;
+
+/// The output formats supported by mdman.
+#[derive(Copy, Clone)]
+pub enum Format {
+ Man,
+ Md,
+ Text,
+}
+
+impl Format {
+ /// The filename extension for the format.
+ pub fn extension(&self, section: Section) -> String {
+ match self {
+ Format::Man => section.to_string(),
+ Format::Md => "md".to_string(),
+ Format::Text => "txt".to_string(),
+ }
+ }
+}
+
+/// Converts the handlebars markdown file at the given path into the given
+/// format, returning the translated result.
+pub fn convert(
+ file: &Path,
+ format: Format,
+ url: Option<Url>,
+ man_map: ManMap,
+) -> Result<String, Error> {
+ let formatter: Box<dyn Formatter + Send + Sync> = match format {
+ Format::Man => Box::new(format::man::ManFormatter::new(url)),
+ Format::Md => Box::new(format::md::MdFormatter::new(man_map)),
+ Format::Text => Box::new(format::text::TextFormatter::new(url)),
+ };
+ let expanded = hbs::expand(file, &*formatter)?;
+    // pulldown-cmark can behave a little differently with Windows newlines,
+    // so normalize them before rendering.
+ let expanded = expanded.replace("\r\n", "\n");
+ formatter.render(&expanded)
+}
+
+/// Pulldown-cmark iterator yielding an `(event, range)` tuple.
+type EventIter<'a> = Box<dyn Iterator<Item = (Event<'a>, Range<usize>)> + 'a>;
+
+/// Creates a new markdown parser with the given input.
+pub(crate) fn md_parser(input: &str, url: Option<Url>) -> EventIter {
+ let mut options = Options::empty();
+ options.insert(Options::ENABLE_TABLES);
+ options.insert(Options::ENABLE_FOOTNOTES);
+ options.insert(Options::ENABLE_STRIKETHROUGH);
+ options.insert(Options::ENABLE_SMART_PUNCTUATION);
+ let parser = Parser::new_ext(input, options);
+ let parser = parser.into_offset_iter();
+ // Translate all links to include the base url.
+ let parser = parser.map(move |(event, range)| match event {
+ Event::Start(Tag::Link(lt, dest_url, title)) if !matches!(lt, LinkType::Email) => (
+ Event::Start(Tag::Link(lt, join_url(url.as_ref(), dest_url), title)),
+ range,
+ ),
+ Event::End(Tag::Link(lt, dest_url, title)) if !matches!(lt, LinkType::Email) => (
+ Event::End(Tag::Link(lt, join_url(url.as_ref(), dest_url), title)),
+ range,
+ ),
+ _ => (event, range),
+ });
+ Box::new(parser)
+}
+
+fn join_url<'a>(base: Option<&Url>, dest: CowStr<'a>) -> CowStr<'a> {
+ match base {
+ Some(base_url) => {
+            // Absolute URLs and page-relative anchors don't need to be translated.
+ if dest.contains(':') || dest.starts_with('#') {
+ dest
+ } else {
+ let joined = base_url.join(&dest).unwrap_or_else(|e| {
+ panic!("failed to join URL `{}` to `{}`: {}", dest, base_url, e)
+ });
+ String::from(joined).into()
+ }
+ }
+ None => dest,
+ }
+}
+
+pub fn extract_section(file: &Path) -> Result<Section, Error> {
+ let f = fs::File::open(file).with_context(|| format!("could not open `{}`", file.display()))?;
+ let mut f = io::BufReader::new(f);
+ let mut line = String::new();
+ f.read_line(&mut line)?;
+ if !line.starts_with("# ") {
+ bail!("expected input file to start with # header");
+ }
+ let (_name, section) = util::parse_name_and_section(&line[2..].trim()).with_context(|| {
+ format!(
+ "expected input file to have header with the format `# command-name(1)`, found: `{}`",
+ line
+ )
+ })?;
+ Ok(section)
+}
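As an aside, the link rewriting done by `join_url` above reduces to `Url::join` from the url crate. This small sketch, with an arbitrary example base URL, shows the behaviour it relies on:

```rust
use url::Url;

fn main() {
    let base = Url::parse("https://example.org/commands/").unwrap();
    // Relative destinations such as `foo/bar.html` are joined onto the base;
    // absolute URLs and `#anchor` links are left untouched by join_url.
    let joined = base.join("foo/bar.html").unwrap();
    assert_eq!(joined.as_str(), "https://example.org/commands/foo/bar.html");
}
```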
diff --git a/src/tools/cargo/crates/mdman/src/main.rs b/src/tools/cargo/crates/mdman/src/main.rs
new file mode 100644
index 000000000..2bdf96d72
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/src/main.rs
@@ -0,0 +1,133 @@
+use anyhow::{bail, format_err, Context, Error};
+use mdman::{Format, ManMap};
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+use url::Url;
+
+/// Command-line options.
+struct Options {
+ format: Format,
+ output_dir: PathBuf,
+ sources: Vec<PathBuf>,
+ url: Option<Url>,
+ man_map: ManMap,
+}
+
+fn main() {
+ if let Err(e) = run() {
+ eprintln!("error: {}", e);
+ for cause in e.chain().skip(1) {
+ eprintln!("\nCaused by:");
+ for line in cause.to_string().lines() {
+ if line.is_empty() {
+ eprintln!();
+ } else {
+ eprintln!(" {}", line);
+ }
+ }
+ }
+ std::process::exit(1);
+ }
+}
+
+fn run() -> Result<(), Error> {
+ let opts = process_args()?;
+ if !opts.output_dir.exists() {
+ std::fs::create_dir_all(&opts.output_dir).with_context(|| {
+ format!(
+ "failed to create output directory {}",
+ opts.output_dir.display()
+ )
+ })?;
+ }
+ for source in &opts.sources {
+ let section = mdman::extract_section(source)?;
+ let filename =
+ Path::new(source.file_name().unwrap()).with_extension(opts.format.extension(section));
+ let out_path = opts.output_dir.join(filename);
+ if same_file::is_same_file(source, &out_path).unwrap_or(false) {
+ bail!("cannot output to the same file as the source");
+ }
+ println!("Converting {} -> {}", source.display(), out_path.display());
+ let result = mdman::convert(&source, opts.format, opts.url.clone(), opts.man_map.clone())
+ .with_context(|| format!("failed to translate {}", source.display()))?;
+
+ std::fs::write(out_path, result)?;
+ }
+ Ok(())
+}
+
+fn process_args() -> Result<Options, Error> {
+ let mut format = None;
+ let mut output = None;
+ let mut url = None;
+ let mut man_map: ManMap = HashMap::new();
+ let mut sources = Vec::new();
+ let mut args = std::env::args().skip(1);
+ while let Some(arg) = args.next() {
+ match arg.as_str() {
+ "-t" => {
+ format = match args.next().as_deref() {
+ Some("man") => Some(Format::Man),
+ Some("md") => Some(Format::Md),
+ Some("txt") => Some(Format::Text),
+ Some(s) => bail!("unknown output format: {}", s),
+ None => bail!("-t requires a value (man, md, txt)"),
+ };
+ }
+ "-o" => {
+ output = match args.next() {
+ Some(s) => Some(PathBuf::from(s)),
+ None => bail!("-o requires a value"),
+ };
+ }
+ "--url" => {
+ url = match args.next() {
+ Some(s) => {
+ let url = Url::parse(&s)
+ .with_context(|| format!("could not convert `{}` to a url", s))?;
+ if !url.path().ends_with('/') {
+ bail!("url `{}` should end with a /", url);
+ }
+ Some(url)
+ }
+ None => bail!("--url requires a value"),
+ }
+ }
+ "--man" => {
+ let man = args
+ .next()
+ .ok_or_else(|| format_err!("--man requires a value"))?;
+ let parts: Vec<_> = man.splitn(2, '=').collect();
+ let key_parts: Vec<_> = parts[0].splitn(2, ':').collect();
+ if parts.len() != 2 || key_parts.len() != 2 {
+ bail!("--man expected value with form name:1=link");
+ }
+ let section: u8 = key_parts[1].parse().with_context(|| {
+                    format!("expected unsigned integer for section, got `{}`", key_parts[1])
+ })?;
+ man_map.insert((key_parts[0].to_string(), section), parts[1].to_string());
+ }
+ s => {
+ sources.push(PathBuf::from(s));
+ }
+ }
+ }
+ if format.is_none() {
+ bail!("-t must be specified (man, md, txt)");
+ }
+ if output.is_none() {
+ bail!("-o must be specified (output directory)");
+ }
+ if sources.is_empty() {
+ bail!("at least one source must be specified");
+ }
+ let opts = Options {
+ format: format.unwrap(),
+ output_dir: output.unwrap(),
+ sources,
+ url,
+ man_map,
+ };
+ Ok(opts)
+}
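As an aside, the `--man` flag parsed above expects values of the form `name:section=link`. This standalone sketch mirrors the `splitn` logic; the URL is only an example:

```rust
fn main() {
    // e.g. `--man other-cmd:1=https://example.org/commands/other-cmd.html`
    let man = "other-cmd:1=https://example.org/commands/other-cmd.html";
    let parts: Vec<_> = man.splitn(2, '=').collect();
    let key_parts: Vec<_> = parts[0].splitn(2, ':').collect();
    assert_eq!(key_parts, ["other-cmd", "1"]);
    assert_eq!(parts[1], "https://example.org/commands/other-cmd.html");
    let section: u8 = key_parts[1].parse().unwrap();
    assert_eq!(section, 1);
}
```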
diff --git a/src/tools/cargo/crates/mdman/src/util.rs b/src/tools/cargo/crates/mdman/src/util.rs
new file mode 100644
index 000000000..a4c71ad38
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/src/util.rs
@@ -0,0 +1,44 @@
+//! General utilities.
+use crate::EventIter;
+use anyhow::{bail, format_err, Context, Error};
+use pulldown_cmark::{CowStr, Event, Tag};
+
+/// Splits the text `foo(1)` into `foo` and `1`.
+pub fn parse_name_and_section(text: &str) -> Result<(&str, u8), Error> {
+ let mut i = text.split_terminator(&['(', ')'][..]);
+ let name = i
+ .next()
+ .ok_or_else(|| format_err!("man reference must have a name"))?;
+ let section = i
+ .next()
+ .ok_or_else(|| format_err!("man reference must have a section such as mycommand(1)"))?;
+ if let Some(s) = i.next() {
+ bail!(
+ "man reference must have the form mycommand(1), got extra part `{}`",
+ s
+ );
+ }
+ let section: u8 = section
+ .parse()
+ .with_context(|| format!("section must be a number, got {}", section))?;
+ Ok((name, section))
+}
+
+/// Extracts the text from a header after Tag::Heading has been received.
+pub fn header_text<'e>(parser: &mut EventIter<'e>) -> Result<CowStr<'e>, Error> {
+ let text = match parser.next() {
+ Some((Event::Text(t), _range)) => t,
+ e => bail!("expected plain text in man header, got {:?}", e),
+ };
+ match parser.next() {
+ Some((Event::End(Tag::Heading(..)), _range)) => {
+ return Ok(text);
+ }
+        e => bail!("expected end of man page header, got {:?}", e),
+ }
+}
+
+/// Removes tags from the front and back of a string.
+pub fn unwrap<'t>(text: &'t str, front: &str, back: &str) -> &'t str {
+ text.trim().trim_start_matches(front).trim_end_matches(back)
+}
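As an aside, `parse_name_and_section` leans on `str::split_terminator`; a quick standalone check of that behaviour, using a made-up command name:

```rust
fn main() {
    // "my-command(1)" splits on '(' and ')' into the name and the section.
    let mut parts = "my-command(1)".split_terminator(&['(', ')'][..]);
    assert_eq!(parts.next(), Some("my-command"));
    assert_eq!(parts.next(), Some("1"));
    assert_eq!(parts.next(), None);
}
```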
diff --git a/src/tools/cargo/crates/mdman/tests/compare.rs b/src/tools/cargo/crates/mdman/tests/compare.rs
new file mode 100644
index 000000000..3e679d127
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare.rs
@@ -0,0 +1,48 @@
+//! Compares input to expected output.
+//!
+//! Use the MDMAN_BLESS environment variable to automatically update the
+//! expected output.
+
+use mdman::{Format, ManMap};
+use pretty_assertions::assert_eq;
+use std::path::PathBuf;
+use url::Url;
+
+fn run(name: &str) {
+ let input = PathBuf::from(format!("tests/compare/{}.md", name));
+ let url = Some(Url::parse("https://example.org/").unwrap());
+ let mut map = ManMap::new();
+ map.insert(
+ ("other-cmd".to_string(), 1),
+ "https://example.org/commands/other-cmd.html".to_string(),
+ );
+
+ for &format in &[Format::Man, Format::Md, Format::Text] {
+ let section = mdman::extract_section(&input).unwrap();
+ let result = mdman::convert(&input, format, url.clone(), map.clone()).unwrap();
+ let expected_path = format!(
+ "tests/compare/expected/{}.{}",
+ name,
+ format.extension(section)
+ );
+ if std::env::var("MDMAN_BLESS").is_ok() {
+ std::fs::write(&expected_path, result).unwrap();
+ } else {
+ let expected = std::fs::read_to_string(&expected_path).unwrap();
+ // Fix if Windows checked out with autocrlf.
+ let expected = expected.replace("\r\n", "\n");
+ assert_eq!(expected, result);
+ }
+ }
+}
+
+macro_rules! test( ($name:ident) => (
+ #[test]
+ fn $name() { run(stringify!($name)); }
+) );
+
+test!(formatting);
+test!(links);
+test!(options);
+test!(tables);
+test!(vars);
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/formatting.1 b/src/tools/cargo/crates/mdman/tests/compare/expected/formatting.1
new file mode 100644
index 000000000..840734cd0
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/formatting.1
@@ -0,0 +1,118 @@
+'\" t
+.TH "FORMATTING" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.sp
+This is \fBnested \f(BIformatting\fB \fBtext\fB\fR\&.
+.SH "SECOND HEADING"
+Some text at second level.
+.SS "Third heading"
+Some text at third level.
+.SS "Fourth heading"
+Some text at fourth level.
+.SH "Quotes and blocks."
+Here are some quotes and blocks.
+.RS 3
+.ll -5
+.sp
+This is a block quote. Ambidextrously koala apart that prudent blindly alas
+far amid dear goodness turgid so exact inside oh and alas much fanciful that
+dark on spoon\-fed adequately insolent walking crud.
+.br
+.RE
+.ll
+.sp
+.RS 4
+.nf
+This is a code block. Groundhog watchfully sudden firefly some self\-consciously hotly jeepers satanic after that this parrot this at virtuous
+some mocking the leaned jeez nightingale as much mallard so because jeez
+turned dear crud grizzly strenuously.
+
+ Indented and should be unmodified.
+.fi
+.RE
+.sp
+.RS 4
+.nf
+This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this
+goodness regarding reindeer so astride before.
+
+ Doubly indented
+.fi
+.RE
+.SH "Lists"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Ordered list
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Unordered list
+.sp
+With a second paragraph inside it
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Inner ordered list
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Another
+.RE
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Eggs
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Milk
+.sp
+.RS 4
+\h'-04' 5.\h'+01'Don\[cq]t start at one.
+.RE
+.sp
+.RS 4
+\h'-04' 6.\h'+01'tamarind
+.RE
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Second element
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Third element
+.RE
+.SH "Breaks"
+This has a
+.br
+hard break in it
+and a soft one.
+.SH "Horizontal rule"
+This should contain a line:
+\l'\n(.lu'
+.sp
+Nice!
+.SH "Strange characters"
+Handles escaping for characters
+.sp
+\&.dot at the start of a line.
+.sp
+\(rsfBnot really troff
+.sp
+Various characters \(rs \- \[en] \[em] \- | | `
+.sp
+.RS 4
+.nf
+tree
+`\-\- example
+ |\-\- salamander
+ | |\-\- honey
+ | `\-\- some
+ |\-\- fancifully
+ `\-\- trout
+.fi
+.RE
+.sp
+\ \ \ \ non\-breaking space.
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/formatting.md b/src/tools/cargo/crates/mdman/tests/compare/expected/formatting.md
new file mode 100644
index 000000000..3b9f5b888
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/formatting.md
@@ -0,0 +1,95 @@
+# formatting(1)
+
+This is **nested _formatting_ `text`**.
+
+## SECOND HEADING
+
+Some text at second level.
+
+### Third heading
+
+Some text at third level.
+
+#### Fourth heading
+
+Some text at fourth level.
+
+## Quotes and blocks.
+
+Here are some quotes and blocks.
+
+> This is a block quote. Ambidextrously koala apart that prudent blindly alas
+> far amid dear goodness turgid so exact inside oh and alas much fanciful that
+> dark on spoon-fed adequately insolent walking crud.
+
+```
+This is a code block. Groundhog watchfully sudden firefly some self-consciously hotly jeepers satanic after that this parrot this at virtuous
+some mocking the leaned jeez nightingale as much mallard so because jeez
+turned dear crud grizzly strenuously.
+
+ Indented and should be unmodified.
+```
+
+ This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this
+ goodness regarding reindeer so astride before.
+
+ Doubly indented
+
+## Lists
+
+1. Ordered list
+
+ * Unordered list
+
+ With a second paragraph inside it
+
+ 1. Inner ordered list
+
+ 1. Another
+
+ * Eggs
+
+ * Milk
+
+ 5. Don't start at one.
+ 6. tamarind
+
+1. Second element
+
+1. Third element
+
+## Breaks
+
+This has a\
+hard break in it
+and a soft one.
+
+## Horizontal rule
+
+This should contain a line:
+
+---
+
+Nice!
+
+## Strange characters
+
+Handles escaping for characters
+
+.dot at the start of a line.
+
+\fBnot really troff
+
+Various characters \ - – — ─ │ ├ └
+
+```
+tree
+└── example
+ ├── salamander
+ │ ├── honey
+ │ └── some
+ ├── fancifully
+ └── trout
+```
+
+&nbsp;&nbsp;&nbsp;&nbsp;non-breaking space.
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/formatting.txt b/src/tools/cargo/crates/mdman/tests/compare/expected/formatting.txt
new file mode 100644
index 000000000..b5258c4f5
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/formatting.txt
@@ -0,0 +1,84 @@
+FORMATTING(1)
+
+This is nested formatting text.
+
+SECOND HEADING
+ Some text at second level.
+
+ Third heading
+ Some text at third level.
+
+ Fourth heading
+ Some text at fourth level.
+
+QUOTES AND BLOCKS.
+ Here are some quotes and blocks.
+
+ This is a block quote. Ambidextrously koala apart that prudent
+ blindly alas far amid dear goodness turgid so exact inside oh and
+ alas much fanciful that dark on spoon-fed adequately insolent walking
+ crud.
+
+ This is a code block. Groundhog watchfully sudden firefly some self-consciously hotly jeepers satanic after that this parrot this at virtuous
+ some mocking the leaned jeez nightingale as much mallard so because jeez
+ turned dear crud grizzly strenuously.
+
+ Indented and should be unmodified.
+
+ This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this
+ goodness regarding reindeer so astride before.
+
+ Doubly indented
+
+LISTS
+ 1. Ordered list
+
+ o Unordered list
+
+ With a second paragraph inside it
+
+ 1. Inner ordered list
+
+ 2. Another
+
+ o Eggs
+
+ o Milk
+
+ 5. Don’t start at one.
+
+ 6. tamarind
+
+ 2. Second element
+
+ 3. Third element
+
+BREAKS
+ This has a
+ hard break in it and a soft one.
+
+HORIZONTAL RULE
+ This should contain a line:
+
+ _________________________________________________________________
+ Nice!
+
+STRANGE CHARACTERS
+ Handles escaping for characters
+
+ .dot at the start of a line.
+
+ \fBnot really troff
+
+ Various characters \ - – — ─ │ ├ └
+
+ tree
+ └── example
+ ├── salamander
+ │ ├── honey
+ │ └── some
+ ├── fancifully
+ └── trout
+
+     non-breaking space.
+
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/links.1 b/src/tools/cargo/crates/mdman/tests/compare/expected/links.1
new file mode 100644
index 000000000..e56cef74c
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/links.1
@@ -0,0 +1,45 @@
+'\" t
+.TH "LINKS" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+links \- Test of different link kinds
+.SH "DESCRIPTION"
+Inline link: \fIinline link\fR <https://example.com/inline>
+.sp
+Reference link: \fIthis is a link\fR <https://example.com/bar>
+.sp
+Collapsed: \fIcollapsed\fR <https://example.com/collapsed>
+.sp
+Shortcut: \fIshortcut\fR <https://example.com/shortcut>
+.sp
+Autolink: <https://example.com/auto>
+.sp
+Email: <foo@example.com>
+.sp
+Relative link: \fIrelative link\fR <https://example.org/foo/bar.html>
+.sp
+Collapsed unknown: [collapsed unknown][]
+.sp
+Reference unknown: [foo][unknown]
+.sp
+Shortcut unknown: [shortcut unknown]
+.sp
+\fBother\-cmd\fR(1)
+.sp
+\fBlocal\-cmd\fR(1)
+.sp
+\fISome link\fR <https://example.org/foo.html>
+.sp
+\fB\-\-include\fR
+.RS 4
+Testing an \fIincluded link\fR <https://example.org/included_link.html>\&.
+.RE
+.SH "OPTIONS"
+.sp
+\fB\-\-foo\-bar\fR
+.RS 4
+Example \fIlink\fR <https://example.org/bar.html>\&.
+See \fBother\-cmd\fR(1), \fBlocal\-cmd\fR(1)
+.RE
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/links.md b/src/tools/cargo/crates/mdman/tests/compare/expected/links.md
new file mode 100644
index 000000000..11afcf3bd
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/links.md
@@ -0,0 +1,56 @@
+# links(1)
+
+## NAME
+
+links - Test of different link kinds
+
+## DESCRIPTION
+
+Inline link: [inline link](https://example.com/inline)
+
+Reference link: [this is a link][bar]
+
+Collapsed: [collapsed][]
+
+Shortcut: [shortcut]
+
+Autolink: <https://example.com/auto>
+
+Email: <foo@example.com>
+
+Relative link: [relative link](foo/bar.html)
+
+Collapsed unknown: [collapsed unknown][]
+
+Reference unknown: [foo][unknown]
+
+Shortcut unknown: [shortcut unknown]
+
+[other-cmd(1)](https://example.org/commands/other-cmd.html)
+
+[local-cmd(1)](local-cmd.html)
+
+[Some link](foo.html)
+
+<dl>
+<dt class="option-term" id="option-links---include"><a class="option-anchor" href="#option-links---include"></a><code>--include</code></dt>
+<dd class="option-desc">Testing an <a href="included_link.html">included link</a>.</dd>
+
+</dl>
+
+
+## OPTIONS
+
+<dl>
+
+<dt class="option-term" id="option-links---foo-bar"><a class="option-anchor" href="#option-links---foo-bar"></a><code>--foo-bar</code></dt>
+<dd class="option-desc">Example <a href="bar.html">link</a>.
+See <a href="https://example.org/commands/other-cmd.html">other-cmd(1)</a>, <a href="local-cmd.html">local-cmd(1)</a></dd>
+
+
+</dl>
+
+
+[bar]: https://example.com/bar
+[collapsed]: https://example.com/collapsed
+[shortcut]: https://example.com/shortcut
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/links.txt b/src/tools/cargo/crates/mdman/tests/compare/expected/links.txt
new file mode 100644
index 000000000..7748c3d10
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/links.txt
@@ -0,0 +1,40 @@
+LINKS(1)
+
+NAME
+ links - Test of different link kinds
+
+DESCRIPTION
+ Inline link: inline link <https://example.com/inline>
+
+ Reference link: this is a link <https://example.com/bar>
+
+ Collapsed: collapsed <https://example.com/collapsed>
+
+ Shortcut: shortcut <https://example.com/shortcut>
+
+ Autolink: <https://example.com/auto>
+
+ Email: <foo@example.com>
+
+ Relative link: relative link <https://example.org/foo/bar.html>
+
+ Collapsed unknown: [collapsed unknown][]
+
+ Reference unknown: [foo][unknown]
+
+ Shortcut unknown: [shortcut unknown]
+
+ other-cmd(1)
+
+ local-cmd(1)
+
+ Some link <https://example.org/foo.html>
+
+ --include
+ Testing an included link <https://example.org/included_link.html>.
+
+OPTIONS
+ --foo-bar
+ Example link <https://example.org/bar.html>. See other-cmd(1),
+ local-cmd(1)
+
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/options.1 b/src/tools/cargo/crates/mdman/tests/compare/expected/options.1
new file mode 100644
index 000000000..d362421e9
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/options.1
@@ -0,0 +1,94 @@
+'\" t
+.TH "MY\-COMMAND" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+my\-command \- A brief description
+.SH "SYNOPSIS"
+\fBmy\-command\fR [\fB\-\-abc\fR | \fB\-\-xyz\fR] \fIname\fR
+.br
+\fBmy\-command\fR [\fB\-f\fR \fIfile\fR]
+.br
+\fBmy\-command\fR (\fB\-m\fR | \fB\-M\fR) [\fIoldbranch\fR] \fInewbranch\fR
+.br
+\fBmy\-command\fR (\fB\-d\fR | \fB\-D\fR) [\fB\-r\fR] \fIbranchname\fR\[u2026]
+.SH "DESCRIPTION"
+A description of the command.
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'One
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Sub one
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Sub two
+.RE
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Two
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Three
+.RE
+.SH "OPTIONS"
+.SS "Command options"
+.sp
+\fB\-\-foo\-bar\fR
+.RS 4
+Demo \fIemphasis\fR, \fBstrong\fR, ~~strike~~
+.RE
+.sp
+\fB\-p\fR \fIspec\fR,
+\fB\-\-package\fR \fIspec\fR
+.RS 4
+This has multiple flags.
+.RE
+.sp
+\fInamed\-arg\[u2026]\fR
+.RS 4
+A named argument.
+.RE
+.SS "Common Options"
+.sp
+\fB@\fR\fIfilename\fR
+.RS 4
+Load from filename.
+.RE
+.sp
+\fB\-\-foo\fR [\fIbar\fR]
+.RS 4
+Flag with optional value.
+.RE
+.sp
+\fB\-\-foo\fR[\fB=\fR\fIbar\fR]
+.RS 4
+Alternate syntax for optional value (with required = for disambiguation).
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'An example
+.sp
+.RS 4
+.nf
+my\-command \-\-abc
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Another example
+.sp
+.RS 4
+.nf
+my\-command \-\-xyz
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBother\-command\fR(1) \fBabc\fR(7)
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/options.md b/src/tools/cargo/crates/mdman/tests/compare/expected/options.md
new file mode 100644
index 000000000..19b0b443b
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/options.md
@@ -0,0 +1,77 @@
+# my-command(1)
+
+## NAME
+
+my-command - A brief description
+
+## SYNOPSIS
+
+`my-command` [`--abc` | `--xyz`] _name_\
+`my-command` [`-f` _file_]\
+`my-command` (`-m` | `-M`) [_oldbranch_] _newbranch_\
+`my-command` (`-d` | `-D`) [`-r`] _branchname_...
+
+## DESCRIPTION
+
+A description of the command.
+
+* One
+ * Sub one
+ * Sub two
+* Two
+* Three
+
+
+## OPTIONS
+
+### Command options
+
+<dl>
+
+<dt class="option-term" id="option-options---foo-bar"><a class="option-anchor" href="#option-options---foo-bar"></a><code>--foo-bar</code></dt>
+<dd class="option-desc">Demo <em>emphasis</em>, <strong>strong</strong>, <del>strike</del></dd>
+
+
+<dt class="option-term" id="option-options--p"><a class="option-anchor" href="#option-options--p"></a><code>-p</code> <em>spec</em></dt>
+<dt class="option-term" id="option-options---package"><a class="option-anchor" href="#option-options---package"></a><code>--package</code> <em>spec</em></dt>
+<dd class="option-desc">This has multiple flags.</dd>
+
+
+<dt class="option-term" id="option-options-named-arg…"><a class="option-anchor" href="#option-options-named-arg…"></a><em>named-arg…</em></dt>
+<dd class="option-desc">A named argument.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+<dt class="option-term" id="option-options-@filename"><a class="option-anchor" href="#option-options-@filename"></a><code>@</code><em>filename</em></dt>
+<dd class="option-desc">Load from filename.</dd>
+
+
+<dt class="option-term" id="option-options---foo"><a class="option-anchor" href="#option-options---foo"></a><code>--foo</code> [<em>bar</em>]</dt>
+<dd class="option-desc">Flag with optional value.</dd>
+
+
+<dt class="option-term" id="option-options---foo[=bar]"><a class="option-anchor" href="#option-options---foo[=bar]"></a><code>--foo</code>[<code>=</code><em>bar</em>]</dt>
+<dd class="option-desc">Alternate syntax for optional value (with required = for disambiguation).</dd>
+
+
+</dl>
+
+
+## EXAMPLES
+
+1. An example
+
+ ```
+ my-command --abc
+ ```
+
+1. Another example
+
+ my-command --xyz
+
+## SEE ALSO
+[other-command(1)](other-command.html) [abc(7)](abc.html)
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/options.txt b/src/tools/cargo/crates/mdman/tests/compare/expected/options.txt
new file mode 100644
index 000000000..9bfdec67c
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/options.txt
@@ -0,0 +1,57 @@
+MY-COMMAND(1)
+
+NAME
+ my-command - A brief description
+
+SYNOPSIS
+ my-command [--abc | --xyz] name
+ my-command [-f file]
+ my-command (-m | -M) [oldbranch] newbranch
+ my-command (-d | -D) [-r] branchname…
+
+DESCRIPTION
+ A description of the command.
+
+ o One
+ o Sub one
+
+ o Sub two
+
+ o Two
+
+ o Three
+
+OPTIONS
+ Command options
+ --foo-bar
+ Demo emphasis, strong, ~~strike~~
+
+ -p spec, --package spec
+ This has multiple flags.
+
+ named-arg…
+ A named argument.
+
+ Common Options
+ @filename
+ Load from filename.
+
+ --foo [bar]
+ Flag with optional value.
+
+ --foo[=bar]
+ Alternate syntax for optional value (with required = for
+ disambiguation).
+
+EXAMPLES
+ 1. An example
+
+ my-command --abc
+
+ 2. Another example
+
+ my-command --xyz
+
+SEE ALSO
+ other-command(1) abc(7)
+
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/tables.1 b/src/tools/cargo/crates/mdman/tests/compare/expected/tables.1
new file mode 100644
index 000000000..7175a3e85
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/tables.1
@@ -0,0 +1,108 @@
+'\" t
+.TH "TABLES" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "DESCRIPTION"
+Testing tables.
+
+.TS
+allbox tab(:);
+lt.
+T{
+Single col
+T}
+T{
+Hi! :)
+T}
+.TE
+.sp
+
+.TS
+allbox tab(:);
+lt lt lt.
+T{
+Header content
+T}:T{
+With \fBformat\fR \fItext\fR
+T}:T{
+Another column
+T}
+T{
+Some data
+T}:T{
+More data
+T}:T{
+
+T}
+T{
+Extra long amount of text within a column
+T}:T{
+hi
+T}:T{
+there
+T}
+.TE
+.sp
+
+.TS
+allbox tab(:);
+lt ct rt.
+T{
+Left aligned
+T}:T{
+Center aligned
+T}:T{
+Right aligned
+T}
+T{
+abc
+T}:T{
+def
+T}:T{
+ghi
+T}
+.TE
+.sp
+
+.TS
+allbox tab(:);
+lt ct rt.
+T{
+Left aligned
+T}:T{
+Center aligned
+T}:T{
+Right aligned
+T}
+T{
+X
+T}:T{
+X
+T}:T{
+X
+T}
+T{
+Extra long text 123456789012 with mixed widths.
+T}:T{
+Extra long text 123456789012 with mixed widths.
+T}:T{
+Extra long text 123456789012 with mixed widths.
+T}
+.TE
+.sp
+
+.TS
+allbox tab(:);
+lt.
+T{
+Link check
+T}
+T{
+\fIfoo\fR <https://example.com/>
+T}
+T{
+<https://example.com/>
+T}
+.TE
+.sp
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/tables.md b/src/tools/cargo/crates/mdman/tests/compare/expected/tables.md
new file mode 100644
index 000000000..831132c44
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/tables.md
@@ -0,0 +1,35 @@
+# tables(1)
+
+## DESCRIPTION
+
+Testing tables.
+
+| Single col |
+--------------
+| Hi! :) |
+
+
+Header content | With `format` *text* | Another column
+---------------|----------------------|----------------
+Some data | More data |
+Extra long amount of text within a column | hi | there
+
+
+Left aligned | Center aligned | Right aligned
+-------------|:--------------:|--------------:
+abc | def | ghi
+
+
+Left aligned | Center aligned | Right aligned
+-------------|:--------------:|--------------:
+X | X | X
+Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths.
+
+
+| Link check |
+--------------
+| [foo] |
+| <https://example.com/> |
+
+
+[foo]: https://example.com/
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/tables.txt b/src/tools/cargo/crates/mdman/tests/compare/expected/tables.txt
new file mode 100644
index 000000000..fed53f9a4
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/tables.txt
@@ -0,0 +1,45 @@
+TABLES(1)
+
+DESCRIPTION
+ Testing tables.
+
+ +-------------+
+ | Single col |
+ +-------------+
+ | Hi! :) |
+ +-------------+
+
+ +-------------------------------------+----------------+--------------+
+ | Header content | With format | Another |
+ | | text | column |
+ +-------------------------------------+----------------+--------------+
+ | Some data | More data | |
+ +-------------------------------------+----------------+--------------+
+ | Extra long amount of text within a | hi | there |
+ | column | | |
+ +-------------------------------------+----------------+--------------+
+
+ +---------------+-----------------+----------------+
+ | Left aligned | Center aligned | Right aligned |
+ +---------------+-----------------+----------------+
+ | abc | def | ghi |
+ +---------------+-----------------+----------------+
+
+ +-----------------------+-----------------------+-----------------------+
+ | Left aligned | Center aligned | Right aligned |
+ +-----------------------+-----------------------+-----------------------+
+ | X | X | X |
+ +-----------------------+-----------------------+-----------------------+
+ | Extra long text | Extra long text | Extra long text |
+ | 123456789012 with | 123456789012 with | 123456789012 with |
+ | mixed widths. | mixed widths. | mixed widths. |
+ +-----------------------+-----------------------+-----------------------+
+
+ +-----------------------+
+ | Link check |
+ +-----------------------+
+ | foo |
+ +-----------------------+
+ | https://example.com/ |
+ +-----------------------+
+
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/vars.7 b/src/tools/cargo/crates/mdman/tests/compare/expected/vars.7
new file mode 100644
index 000000000..0ee33ad36
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/vars.7
@@ -0,0 +1,9 @@
+'\" t
+.TH "VARS" "7"
+.nh
+.ad l
+.ss \n[.ss] 0
+.sp
+Bar
+.sp
+bar
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/vars.md b/src/tools/cargo/crates/mdman/tests/compare/expected/vars.md
new file mode 100644
index 000000000..2493aca36
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/vars.md
@@ -0,0 +1,7 @@
+# vars(7)
+
+
+
+Bar
+
+bar
diff --git a/src/tools/cargo/crates/mdman/tests/compare/expected/vars.txt b/src/tools/cargo/crates/mdman/tests/compare/expected/vars.txt
new file mode 100644
index 000000000..11d34ca12
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/expected/vars.txt
@@ -0,0 +1,6 @@
+VARS(7)
+
+Bar
+
+bar
+
diff --git a/src/tools/cargo/crates/mdman/tests/compare/formatting.md b/src/tools/cargo/crates/mdman/tests/compare/formatting.md
new file mode 100644
index 000000000..3b9f5b888
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/formatting.md
@@ -0,0 +1,95 @@
+# formatting(1)
+
+This is **nested _formatting_ `text`**.
+
+## SECOND HEADING
+
+Some text at second level.
+
+### Third heading
+
+Some text at third level.
+
+#### Fourth heading
+
+Some text at fourth level.
+
+## Quotes and blocks.
+
+Here are some quotes and blocks.
+
+> This is a block quote. Ambidextrously koala apart that prudent blindly alas
+> far amid dear goodness turgid so exact inside oh and alas much fanciful that
+> dark on spoon-fed adequately insolent walking crud.
+
+```
+This is a code block. Groundhog watchfully sudden firefly some self-consciously hotly jeepers satanic after that this parrot this at virtuous
+some mocking the leaned jeez nightingale as much mallard so because jeez
+turned dear crud grizzly strenuously.
+
+ Indented and should be unmodified.
+```
+
+ This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this
+ goodness regarding reindeer so astride before.
+
+ Doubly indented
+
+## Lists
+
+1. Ordered list
+
+ * Unordered list
+
+ With a second paragraph inside it
+
+ 1. Inner ordered list
+
+ 1. Another
+
+ * Eggs
+
+ * Milk
+
+ 5. Don't start at one.
+ 6. tamarind
+
+1. Second element
+
+1. Third element
+
+## Breaks
+
+This has a\
+hard break in it
+and a soft one.
+
+## Horizontal rule
+
+This should contain a line:
+
+---
+
+Nice!
+
+## Strange characters
+
+Handles escaping for characters
+
+.dot at the start of a line.
+
+\fBnot really troff
+
+Various characters \ - – — ─ │ ├ └
+
+```
+tree
+└── example
+ ├── salamander
+ │ ├── honey
+ │ └── some
+ ├── fancifully
+ └── trout
+```
+
+&nbsp;&nbsp;&nbsp;&nbsp;non-breaking space.
diff --git a/src/tools/cargo/crates/mdman/tests/compare/includes/links-include.md b/src/tools/cargo/crates/mdman/tests/compare/includes/links-include.md
new file mode 100644
index 000000000..737336070
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/includes/links-include.md
@@ -0,0 +1,7 @@
+[Some link](foo.html)
+
+{{#options}}
+{{#option "`--include`"}}
+Testing an [included link](included_link.html).
+{{/option}}
+{{/options}}
diff --git a/src/tools/cargo/crates/mdman/tests/compare/includes/options-common.md b/src/tools/cargo/crates/mdman/tests/compare/includes/options-common.md
new file mode 100644
index 000000000..07404e3f7
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/includes/options-common.md
@@ -0,0 +1,14 @@
+{{#options}}
+{{#option "`@`_filename_"}}
+Load from filename.
+{{/option}}
+
+{{#option "`--foo` [_bar_]"}}
+Flag with optional value.
+{{/option}}
+
+{{#option "`--foo`[`=`_bar_]"}}
+Alternate syntax for optional value (with required = for disambiguation).
+{{/option}}
+
+{{/options}}
diff --git a/src/tools/cargo/crates/mdman/tests/compare/links.md b/src/tools/cargo/crates/mdman/tests/compare/links.md
new file mode 100644
index 000000000..949f3749a
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/links.md
@@ -0,0 +1,49 @@
+# links(1)
+
+## NAME
+
+links - Test of different link kinds
+
+## DESCRIPTION
+
+Inline link: [inline link](https://example.com/inline)
+
+Reference link: [this is a link][bar]
+
+Collapsed: [collapsed][]
+
+Shortcut: [shortcut]
+
+Autolink: <https://example.com/auto>
+
+Email: <foo@example.com>
+
+Relative link: [relative link](foo/bar.html)
+
+Collapsed unknown: [collapsed unknown][]
+
+Reference unknown: [foo][unknown]
+
+Shortcut unknown: [shortcut unknown]
+
+{{man "other-cmd" 1}}
+
+{{man "local-cmd" 1}}
+
+{{> links-include}}
+
+## OPTIONS
+
+{{#options}}
+
+{{#option "`--foo-bar`"}}
+Example [link](bar.html).
+See {{man "other-cmd" 1}}, {{man "local-cmd" 1}}
+{{/option}}
+
+{{/options}}
+
+
+[bar]: https://example.com/bar
+[collapsed]: https://example.com/collapsed
+[shortcut]: https://example.com/shortcut
diff --git a/src/tools/cargo/crates/mdman/tests/compare/options.md b/src/tools/cargo/crates/mdman/tests/compare/options.md
new file mode 100644
index 000000000..51415b09e
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/options.md
@@ -0,0 +1,62 @@
+# my-command(1)
+
+## NAME
+
+my-command - A brief description
+
+## SYNOPSIS
+
+`my-command` [`--abc` | `--xyz`] _name_\
+`my-command` [`-f` _file_]\
+`my-command` (`-m` | `-M`) [_oldbranch_] _newbranch_\
+`my-command` (`-d` | `-D`) [`-r`] _branchname_...
+
+## DESCRIPTION
+
+A description of the command.
+
+* One
+ * Sub one
+ * Sub two
+* Two
+* Three
+
+
+## OPTIONS
+
+### Command options
+
+{{#options}}
+
+{{#option "`--foo-bar`"}}
+Demo *emphasis*, **strong**, ~~strike~~
+{{/option}}
+
+{{#option "`-p` _spec_" "`--package` _spec_"}}
+This has multiple flags.
+{{/option}}
+
+{{#option "_named-arg..._"}}
+A named argument.
+{{/option}}
+
+{{/options}}
+
+### Common Options
+
+{{> options-common}}
+
+## EXAMPLES
+
+1. An example
+
+ ```
+ my-command --abc
+ ```
+
+1. Another example
+
+ my-command --xyz
+
+## SEE ALSO
+{{man "other-command" 1}} {{man "abc" 7}}
diff --git a/src/tools/cargo/crates/mdman/tests/compare/tables.md b/src/tools/cargo/crates/mdman/tests/compare/tables.md
new file mode 100644
index 000000000..831132c44
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/tables.md
@@ -0,0 +1,35 @@
+# tables(1)
+
+## DESCRIPTION
+
+Testing tables.
+
+| Single col |
+--------------
+| Hi! :) |
+
+
+Header content | With `format` *text* | Another column
+---------------|----------------------|----------------
+Some data | More data |
+Extra long amount of text within a column | hi | there
+
+
+Left aligned | Center aligned | Right aligned
+-------------|:--------------:|--------------:
+abc | def | ghi
+
+
+Left aligned | Center aligned | Right aligned
+-------------|:--------------:|--------------:
+X | X | X
+Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths.
+
+
+| Link check |
+--------------
+| [foo] |
+| <https://example.com/> |
+
+
+[foo]: https://example.com/
diff --git a/src/tools/cargo/crates/mdman/tests/compare/vars.md b/src/tools/cargo/crates/mdman/tests/compare/vars.md
new file mode 100644
index 000000000..d41b76583
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/compare/vars.md
@@ -0,0 +1,7 @@
+# vars(7)
+
+{{*set foo="Bar"}}
+
+{{foo}}
+
+{{lower foo}}
diff --git a/src/tools/cargo/crates/mdman/tests/invalid.rs b/src/tools/cargo/crates/mdman/tests/invalid.rs
new file mode 100644
index 000000000..cc81d06c4
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/invalid.rs
@@ -0,0 +1,34 @@
+//! Tests for errors and invalid input.
+
+use mdman::{Format, ManMap};
+use pretty_assertions::assert_eq;
+use std::path::PathBuf;
+
+fn run(name: &str, expected_error: &str) {
+ let input = PathBuf::from(format!("tests/invalid/{}", name));
+ match mdman::convert(&input, Format::Man, None, ManMap::new()) {
+ Ok(_) => {
+ panic!("expected {} to fail", name);
+ }
+ Err(e) => {
+ assert_eq!(expected_error, e.to_string());
+ }
+ }
+}
+
+macro_rules! test( ($name:ident, $file_name:expr, $error:expr) => (
+ #[test]
+ fn $name() { run($file_name, $error); }
+) );
+
+test!(
+ nested,
+ "nested.md",
+ "Error rendering \"template\" line 4, col 1: options blocks cannot be nested"
+);
+
+test!(
+ not_inside_options,
+ "not-inside-options.md",
+ "Error rendering \"template\" line 3, col 1: option must be in options block"
+);
diff --git a/src/tools/cargo/crates/mdman/tests/invalid/nested.md b/src/tools/cargo/crates/mdman/tests/invalid/nested.md
new file mode 100644
index 000000000..6a33e6df6
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/invalid/nested.md
@@ -0,0 +1,6 @@
+# nested(1)
+
+{{#options}}
+{{#options}}
+{{/options}}
+{{/options}}
diff --git a/src/tools/cargo/crates/mdman/tests/invalid/not-inside-options.md b/src/tools/cargo/crates/mdman/tests/invalid/not-inside-options.md
new file mode 100644
index 000000000..b6c816f09
--- /dev/null
+++ b/src/tools/cargo/crates/mdman/tests/invalid/not-inside-options.md
@@ -0,0 +1,5 @@
+# not-inside-options(1)
+
+{{#option "`-o`"}}
+Testing without options block.
+{{/option}}
diff --git a/src/tools/cargo/crates/resolver-tests/Cargo.toml b/src/tools/cargo/crates/resolver-tests/Cargo.toml
new file mode 100644
index 000000000..e4aab4325
--- /dev/null
+++ b/src/tools/cargo/crates/resolver-tests/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "resolver-tests"
+version = "0.1.0"
+edition = "2018"
+
+[dependencies]
+cargo = { path = "../.." }
+cargo-util = { path = "../cargo-util" }
+is-terminal = "0.4.0"
+lazy_static = "1.3.0"
+proptest = "1.1.0"
+varisat = "0.2.1"
diff --git a/src/tools/cargo/crates/resolver-tests/src/lib.rs b/src/tools/cargo/crates/resolver-tests/src/lib.rs
new file mode 100644
index 000000000..3ffb6c5d2
--- /dev/null
+++ b/src/tools/cargo/crates/resolver-tests/src/lib.rs
@@ -0,0 +1,991 @@
+#![allow(clippy::all)]
+
+use std::cell::RefCell;
+use std::cmp::PartialEq;
+use std::cmp::{max, min};
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::fmt;
+use std::fmt::Write;
+use std::rc::Rc;
+use std::task::Poll;
+use std::time::Instant;
+
+use cargo::core::dependency::DepKind;
+use cargo::core::resolver::{self, ResolveOpts, VersionPreferences};
+use cargo::core::source::{GitReference, QueryKind, SourceId};
+use cargo::core::Resolve;
+use cargo::core::{Dependency, PackageId, Registry, Summary};
+use cargo::util::{CargoResult, Config, Graph, IntoUrl};
+
+use proptest::collection::{btree_map, vec};
+use proptest::prelude::*;
+use proptest::sample::Index;
+use proptest::string::string_regex;
+use varisat::{self, ExtendFormula};
+
+pub fn resolve(deps: Vec<Dependency>, registry: &[Summary]) -> CargoResult<Vec<PackageId>> {
+ resolve_with_config(deps, registry, &Config::default().unwrap())
+}
+
+pub fn resolve_and_validated(
+ deps: Vec<Dependency>,
+ registry: &[Summary],
+ sat_resolve: Option<SatResolve>,
+) -> CargoResult<Vec<PackageId>> {
+ let resolve = resolve_with_config_raw(deps.clone(), registry, &Config::default().unwrap());
+
+ match resolve {
+ Err(e) => {
+ let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry));
+ if sat_resolve.sat_resolve(&deps) {
+                panic!(
+                    "the resolver errored but the sat_resolve thinks this will work:\n{}",
+                    sat_resolve.use_packages().unwrap()
+                );
+ }
+ Err(e)
+ }
+ Ok(resolve) => {
+ let mut stack = vec![pkg_id("root")];
+ let mut used = HashSet::new();
+ let mut links = HashSet::new();
+ while let Some(p) = stack.pop() {
+ assert!(resolve.contains(&p));
+ if used.insert(p) {
+ // in the tests all `links` crates end in `-sys`
+ if p.name().ends_with("-sys") {
+ assert!(links.insert(p.name()));
+ }
+ stack.extend(resolve.deps(p).map(|(dp, deps)| {
+ for d in deps {
+ assert!(d.matches_id(dp));
+ }
+ dp
+ }));
+ }
+ }
+ let out = resolve.sort();
+ assert_eq!(out.len(), used.len());
+
+ let mut pub_deps: HashMap<PackageId, HashSet<_>> = HashMap::new();
+ for &p in out.iter() {
+                // build the list of `p`'s public dependencies
+ let mut self_pub_dep = HashSet::new();
+ self_pub_dep.insert(p);
+ for (dp, deps) in resolve.deps(p) {
+ if deps.iter().any(|d| d.is_public()) {
+ self_pub_dep.extend(pub_deps[&dp].iter().cloned())
+ }
+ }
+ pub_deps.insert(p, self_pub_dep);
+
+                // check if `p` has any public dependency conflicts
+ let seen_dep: BTreeSet<_> = resolve
+ .deps(p)
+ .flat_map(|(dp, _)| pub_deps[&dp].iter().cloned())
+ .collect();
+ let seen_dep: Vec<_> = seen_dep.iter().collect();
+ for a in seen_dep.windows(2) {
+ if a[0].name() == a[1].name() {
+ panic!(
+ "the package {:?} can publicly see {:?} and {:?}",
+ p, a[0], a[1]
+ )
+ }
+ }
+ }
+ let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry));
+ if !sat_resolve.sat_is_valid_solution(&out) {
+                panic!(
+                    "the sat_resolve rejected the solution but the resolver thinks this will work:\n{:?}",
+                    resolve
+                );
+ }
+ Ok(out)
+ }
+ }
+}
+
+pub fn resolve_with_config(
+ deps: Vec<Dependency>,
+ registry: &[Summary],
+ config: &Config,
+) -> CargoResult<Vec<PackageId>> {
+ let resolve = resolve_with_config_raw(deps, registry, config)?;
+ Ok(resolve.sort())
+}
+
+pub fn resolve_with_config_raw(
+ deps: Vec<Dependency>,
+ registry: &[Summary],
+ config: &Config,
+) -> CargoResult<Resolve> {
+ struct MyRegistry<'a> {
+ list: &'a [Summary],
+ used: HashSet<PackageId>,
+ }
+ impl<'a> Registry for MyRegistry<'a> {
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ for summary in self.list.iter() {
+ let matched = match kind {
+ QueryKind::Exact => dep.matches(summary),
+ QueryKind::Fuzzy => true,
+ };
+ if matched {
+ self.used.insert(summary.package_id());
+ f(summary.clone());
+ }
+ }
+ Poll::Ready(Ok(()))
+ }
+
+ fn describe_source(&self, _src: SourceId) -> String {
+ String::new()
+ }
+
+ fn is_replaced(&self, _src: SourceId) -> bool {
+ false
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ Ok(())
+ }
+ }
+ impl<'a> Drop for MyRegistry<'a> {
+ fn drop(&mut self) {
+ if std::thread::panicking() && self.list.len() != self.used.len() {
+ // we found a case that causes a panic and did not use all of the input.
+                // let's print the part of the input that was used, for minimization.
+ println!(
+ "{:?}",
+ PrettyPrintRegistry(
+ self.list
+ .iter()
+ .filter(|s| { self.used.contains(&s.package_id()) })
+ .cloned()
+ .collect()
+ )
+ );
+ }
+ }
+ }
+ let mut registry = MyRegistry {
+ list: registry,
+ used: HashSet::new(),
+ };
+ let summary = Summary::new(
+ config,
+ pkg_id("root"),
+ deps,
+ &BTreeMap::new(),
+ None::<&String>,
+ )
+ .unwrap();
+ let opts = ResolveOpts::everything();
+ let start = Instant::now();
+ let resolve = resolver::resolve(
+ &[(summary, opts)],
+ &[],
+ &mut registry,
+ &VersionPreferences::default(),
+ Some(config),
+ true,
+ );
+
+    // The largest test in our suite takes less than 30 sec.
+    // So let's fail the test if we have been running for too long.
+ assert!(start.elapsed().as_secs() < 60);
+ resolve
+}
+
+const fn num_bits<T>() -> usize {
+ std::mem::size_of::<T>() * 8
+}
+
+fn log_bits(x: usize) -> usize {
+ if x == 0 {
+ return 0;
+ }
+ assert!(x > 0);
+ (num_bits::<usize>() as u32 - x.leading_zeros()) as usize
+}
+
+fn sat_at_most_one(solver: &mut impl varisat::ExtendFormula, vars: &[varisat::Var]) {
+ if vars.len() <= 1 {
+ return;
+ } else if vars.len() == 2 {
+ solver.add_clause(&[vars[0].negative(), vars[1].negative()]);
+ return;
+ } else if vars.len() == 3 {
+ solver.add_clause(&[vars[0].negative(), vars[1].negative()]);
+ solver.add_clause(&[vars[0].negative(), vars[2].negative()]);
+ solver.add_clause(&[vars[1].negative(), vars[2].negative()]);
+ return;
+ }
+ // use the "Binary Encoding" from
+ // https://www.it.uu.se/research/group/astra/ModRef10/papers/Alan%20M.%20Frisch%20and%20Paul%20A.%20Giannoros.%20SAT%20Encodings%20of%20the%20At-Most-k%20Constraint%20-%20ModRef%202010.pdf
+ let bits: Vec<varisat::Var> = solver.new_var_iter(log_bits(vars.len())).collect();
+ for (i, p) in vars.iter().enumerate() {
+ for b in 0..bits.len() {
+ solver.add_clause(&[p.negative(), bits[b].lit(((1 << b) & i) > 0)]);
+ }
+ }
+}
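As an aside, the binary at-most-one encoding above needs `log_bits(n)` auxiliary variables, i.e. enough bits to spell out every candidate index. A quick standalone check, re-deriving the bit count with `usize::BITS`:

```rust
fn main() {
    // Mirrors log_bits(): the number of bits needed to represent n (0 -> 0).
    fn log_bits(x: usize) -> usize {
        (usize::BITS - x.leading_zeros()) as usize
    }
    // Five candidates (indices 0..5) need three auxiliary bits; each candidate
    // forces the bits to spell its own index, so no two can be true at once.
    assert_eq!(log_bits(5), 3);
    assert_eq!(log_bits(8), 4);
}
```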
+
+fn sat_at_most_one_by_key<K: std::hash::Hash + Eq>(
+ cnf: &mut impl varisat::ExtendFormula,
+ data: impl Iterator<Item = (K, varisat::Var)>,
+) -> HashMap<K, Vec<varisat::Var>> {
+ // no two packages with the same links set
+ let mut by_keys: HashMap<K, Vec<varisat::Var>> = HashMap::new();
+ for (p, v) in data {
+ by_keys.entry(p).or_default().push(v)
+ }
+ for key in by_keys.values() {
+ sat_at_most_one(cnf, key);
+ }
+ by_keys
+}
+
+/// Resolution can be reduced to the SAT problem, so this is an alternative implementation
+/// of the resolver that uses a SAT library for the hard work. It is intended to be easy to read,
+/// as compared to the real resolver.
+///
+/// For the subset of functionality currently exercised by `registry_strategy`, this will
+/// find a valid resolution if one exists. The big thing the real resolver does that this
+/// one does not is work with features and optional dependencies.
+///
+/// The SAT library does not optimize for newer versions,
+/// so the selected packages may not match the real resolver.
+#[derive(Clone)]
+pub struct SatResolve(Rc<RefCell<SatResolveInner>>);
+struct SatResolveInner {
+ solver: varisat::Solver<'static>,
+ var_for_is_packages_used: HashMap<PackageId, varisat::Var>,
+ by_name: HashMap<&'static str, Vec<PackageId>>,
+}
+
+impl SatResolve {
+ pub fn new(registry: &[Summary]) -> Self {
+ let mut cnf = varisat::CnfFormula::new();
+ let var_for_is_packages_used: HashMap<PackageId, varisat::Var> = registry
+ .iter()
+ .map(|s| (s.package_id(), cnf.new_var()))
+ .collect();
+
+ // no two packages with the same links set
+ sat_at_most_one_by_key(
+ &mut cnf,
+ registry
+ .iter()
+ .map(|s| (s.links(), var_for_is_packages_used[&s.package_id()]))
+ .filter(|(l, _)| l.is_some()),
+ );
+
+ // no two semver compatible versions of the same package
+ let by_activations_keys = sat_at_most_one_by_key(
+ &mut cnf,
+ var_for_is_packages_used
+ .iter()
+ .map(|(p, &v)| (p.as_activations_key(), v)),
+ );
+
+ let mut by_name: HashMap<&'static str, Vec<PackageId>> = HashMap::new();
+
+ for p in registry.iter() {
+ by_name
+ .entry(p.name().as_str())
+ .or_default()
+ .push(p.package_id())
+ }
+
+ let empty_vec = vec![];
+
+ let mut graph: Graph<PackageId, ()> = Graph::new();
+
+ let mut version_selected_for: HashMap<
+ PackageId,
+ HashMap<Dependency, HashMap<_, varisat::Var>>,
+ > = HashMap::new();
+        // active packages need each of their `deps` to be satisfied
+ for p in registry.iter() {
+ graph.add(p.package_id());
+ for dep in p.dependencies() {
+ // This can more easily be written as:
+ // !is_active(p) or one of the things that match dep is_active
+ // All the complexity, from here to the end, is to support public and private dependencies!
+ let mut by_key: HashMap<_, Vec<varisat::Lit>> = HashMap::new();
+ for &m in by_name
+ .get(dep.package_name().as_str())
+ .unwrap_or(&empty_vec)
+ .iter()
+ .filter(|&p| dep.matches_id(*p))
+ {
+ graph.link(p.package_id(), m);
+ by_key
+ .entry(m.as_activations_key())
+ .or_default()
+ .push(var_for_is_packages_used[&m].positive());
+ }
+ let keys: HashMap<_, _> = by_key.keys().map(|&k| (k, cnf.new_var())).collect();
+
+ // if `p` is active then we need to select one of the keys
+ let matches: Vec<_> = keys
+ .values()
+ .map(|v| v.positive())
+ .chain(Some(var_for_is_packages_used[&p.package_id()].negative()))
+ .collect();
+ cnf.add_clause(&matches);
+
+ // if a key is active then we need to select one of the versions
+ for (key, vars) in by_key.iter() {
+ let mut matches = vars.clone();
+ matches.push(keys[key].negative());
+ cnf.add_clause(&matches);
+ }
+
+ version_selected_for
+ .entry(p.package_id())
+ .or_default()
+ .insert(dep.clone(), keys);
+ }
+ }
+
+ let topological_order = graph.sort();
+
+ // we already ensure there is only one version for each `activations_key` so we can think of
+ // `publicly_exports` as being in terms of a set of `activations_key`s
+ let mut publicly_exports: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();
+
+ for &key in by_activations_keys.keys() {
+ // everything publicly depends on itself
+ let var = publicly_exports
+ .entry(key)
+ .or_default()
+ .entry(key)
+ .or_insert_with(|| cnf.new_var());
+ cnf.add_clause(&[var.positive()]);
+ }
+
+ // if a `dep` is public then `p` `publicly_exports` all the things that the selected version `publicly_exports`
+ for &p in topological_order.iter() {
+ if let Some(deps) = version_selected_for.get(&p) {
+ let mut p_exports = publicly_exports.remove(&p.as_activations_key()).unwrap();
+ for (_, versions) in deps.iter().filter(|(d, _)| d.is_public()) {
+ for (ver, sel) in versions {
+ for (&export_pid, &export_var) in publicly_exports[ver].iter() {
+ let our_var =
+ p_exports.entry(export_pid).or_insert_with(|| cnf.new_var());
+ cnf.add_clause(&[
+ sel.negative(),
+ export_var.negative(),
+ our_var.positive(),
+ ]);
+ }
+ }
+ }
+ publicly_exports.insert(p.as_activations_key(), p_exports);
+ }
+ }
+
+ // we already ensure there is only one version for each `activations_key` so we can think of
+ // `can_see` as being in terms of a set of `activations_key`s
+ // and if `p` `publicly_exports` `export` then it `can_see` `export`
+ let mut can_see: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();
+
+ // if `p` has a `dep` that selected `ver` then it `can_see` all the things that the selected version `publicly_exports`
+ for (&p, deps) in version_selected_for.iter() {
+ let p_can_see = can_see.entry(p).or_default();
+ for (_, versions) in deps.iter() {
+ for (&ver, sel) in versions {
+ for (&export_pid, &export_var) in publicly_exports[&ver].iter() {
+ let our_var = p_can_see.entry(export_pid).or_insert_with(|| cnf.new_var());
+ cnf.add_clause(&[
+ sel.negative(),
+ export_var.negative(),
+ our_var.positive(),
+ ]);
+ }
+ }
+ }
+ }
+
+        // a package `can_see` only one version of each name
+ for (_, see) in can_see.iter() {
+ sat_at_most_one_by_key(&mut cnf, see.iter().map(|((name, _, _), &v)| (name, v)));
+ }
+ let mut solver = varisat::Solver::new();
+ solver.add_formula(&cnf);
+
+        // We don't need to `solve` now. We know that "use nothing" will satisfy all the clauses so far.
+ // But things run faster if we let it spend some time figuring out how the constraints interact before we add assumptions.
+ solver
+ .solve()
+ .expect("docs say it can't error in default config");
+ SatResolve(Rc::new(RefCell::new(SatResolveInner {
+ solver,
+ var_for_is_packages_used,
+ by_name,
+ })))
+ }
+ pub fn sat_resolve(&self, deps: &[Dependency]) -> bool {
+ let mut s = self.0.borrow_mut();
+ let mut assumption = vec![];
+ let mut this_call = None;
+
+ // the starting `deps` need to be satisfied
+ for dep in deps.iter() {
+ let empty_vec = vec![];
+ let matches: Vec<varisat::Lit> = s
+ .by_name
+ .get(dep.package_name().as_str())
+ .unwrap_or(&empty_vec)
+ .iter()
+ .filter(|&p| dep.matches_id(*p))
+ .map(|p| s.var_for_is_packages_used[p].positive())
+ .collect();
+ if matches.is_empty() {
+ return false;
+ } else if matches.len() == 1 {
+ assumption.extend_from_slice(&matches)
+ } else {
+ if this_call.is_none() {
+ let new_var = s.solver.new_var();
+ this_call = Some(new_var);
+ assumption.push(new_var.positive());
+ }
+ let mut matches = matches;
+ matches.push(this_call.unwrap().negative());
+ s.solver.add_clause(&matches);
+ }
+ }
+
+ s.solver.assume(&assumption);
+
+ s.solver
+ .solve()
+ .expect("docs say it can't error in default config")
+ }
+ pub fn sat_is_valid_solution(&self, pids: &[PackageId]) -> bool {
+ let mut s = self.0.borrow_mut();
+ for p in pids {
+ if p.name().as_str() != "root" && !s.var_for_is_packages_used.contains_key(p) {
+ return false;
+ }
+ }
+ let assumption: Vec<_> = s
+ .var_for_is_packages_used
+ .iter()
+ .map(|(p, v)| v.lit(pids.contains(p)))
+ .collect();
+
+ s.solver.assume(&assumption);
+
+ s.solver
+ .solve()
+ .expect("docs say it can't error in default config")
+ }
+ fn use_packages(&self) -> Option<String> {
+ self.0.borrow().solver.model().map(|lits| {
+ let lits: HashSet<_> = lits
+ .iter()
+ .filter(|l| l.is_positive())
+ .map(|l| l.var())
+ .collect();
+ let mut out = String::new();
+ out.push_str("used:\n");
+ for (p, v) in self.0.borrow().var_for_is_packages_used.iter() {
+ if lits.contains(v) {
+ writeln!(&mut out, " {}", p).unwrap();
+ }
+ }
+ out
+ })
+ }
+}
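As an aside, a usage sketch of the SAT-based reference resolver: it only answers whether some selection of packages can satisfy the given dependencies. `foo` and `missing` are made-up package names:

```rust
use resolver_tests::{dep, pkg, registry, SatResolve};

fn main() {
    let reg = registry(vec![pkg("foo")]);
    let sat = SatResolve::new(&reg);
    // A dependency on a package present in the registry is satisfiable...
    assert!(sat.sat_resolve(&[dep("foo")]));
    // ...while one on a package that is not in the registry is not.
    assert!(!sat.sat_resolve(&[dep("missing")]));
}
```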
+
+pub trait ToDep {
+ fn to_dep(self) -> Dependency;
+}
+
+impl ToDep for &'static str {
+ fn to_dep(self) -> Dependency {
+ Dependency::parse(self, Some("1.0.0"), registry_loc()).unwrap()
+ }
+}
+
+impl ToDep for Dependency {
+ fn to_dep(self) -> Dependency {
+ self
+ }
+}
+
+pub trait ToPkgId {
+ fn to_pkgid(&self) -> PackageId;
+}
+
+impl ToPkgId for PackageId {
+ fn to_pkgid(&self) -> PackageId {
+ *self
+ }
+}
+
+impl<'a> ToPkgId for &'a str {
+ fn to_pkgid(&self) -> PackageId {
+ PackageId::new(*self, "1.0.0", registry_loc()).unwrap()
+ }
+}
+
+impl<T: AsRef<str>, U: AsRef<str>> ToPkgId for (T, U) {
+ fn to_pkgid(&self) -> PackageId {
+ let (name, vers) = self;
+ PackageId::new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap()
+ }
+}
+
+#[macro_export]
+macro_rules! pkg {
+ ($pkgid:expr => [$($deps:expr),+ $(,)* ]) => ({
+ let d: Vec<Dependency> = vec![$($deps.to_dep()),+];
+ $crate::pkg_dep($pkgid, d)
+ });
+
+ ($pkgid:expr) => ({
+ $crate::pkg($pkgid)
+ })
+}
+
+fn registry_loc() -> SourceId {
+ lazy_static::lazy_static! {
+ static ref EXAMPLE_DOT_COM: SourceId =
+ SourceId::for_registry(&"https://example.com".into_url().unwrap()).unwrap();
+ }
+ *EXAMPLE_DOT_COM
+}
+
+pub fn pkg<T: ToPkgId>(name: T) -> Summary {
+ pkg_dep(name, Vec::new())
+}
+
+pub fn pkg_dep<T: ToPkgId>(name: T, dep: Vec<Dependency>) -> Summary {
+ let pkgid = name.to_pkgid();
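+    // Mirror the crates.io convention: `*-sys` packages carry a `links` value,
+    // so the resolver can activate at most one version of each of them.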
+ let link = if pkgid.name().ends_with("-sys") {
+ Some(pkgid.name().as_str())
+ } else {
+ None
+ };
+ Summary::new(
+ &Config::default().unwrap(),
+ name.to_pkgid(),
+ dep,
+ &BTreeMap::new(),
+ link,
+ )
+ .unwrap()
+}
+
+pub fn pkg_id(name: &str) -> PackageId {
+ PackageId::new(name, "1.0.0", registry_loc()).unwrap()
+}
+
+fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
+ let remote = loc.into_url();
+ let master = GitReference::Branch("master".to_string());
+ let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap();
+
+ PackageId::new(name, "1.0.0", source_id).unwrap()
+}
+
+pub fn pkg_loc(name: &str, loc: &str) -> Summary {
+ let link = if name.ends_with("-sys") {
+ Some(name)
+ } else {
+ None
+ };
+ Summary::new(
+ &Config::default().unwrap(),
+ pkg_id_loc(name, loc),
+ Vec::new(),
+ &BTreeMap::new(),
+ link,
+ )
+ .unwrap()
+}
+
+pub fn remove_dep(sum: &Summary, ind: usize) -> Summary {
+ let mut deps = sum.dependencies().to_vec();
+ deps.remove(ind);
+ // note: more things will need to be copied over in the future, but it works for now.
+ Summary::new(
+ &Config::default().unwrap(),
+ sum.package_id(),
+ deps,
+ &BTreeMap::new(),
+ sum.links().map(|a| a.as_str()),
+ )
+ .unwrap()
+}
+
+pub fn dep(name: &str) -> Dependency {
+ dep_req(name, "*")
+}
+pub fn dep_req(name: &str, req: &str) -> Dependency {
+ Dependency::parse(name, Some(req), registry_loc()).unwrap()
+}
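+/// Like `dep_req`, but also sets the dependency kind and whether the dependency is public.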
+pub fn dep_req_kind(name: &str, req: &str, kind: DepKind, public: bool) -> Dependency {
+ let mut dep = dep_req(name, req);
+ dep.set_kind(kind);
+ dep.set_public(public);
+ dep
+}
+
+pub fn dep_loc(name: &str, location: &str) -> Dependency {
+ let url = location.into_url().unwrap();
+ let master = GitReference::Branch("master".to_string());
+ let source_id = SourceId::for_git(&url, master).unwrap();
+ Dependency::parse(name, Some("1.0.0"), source_id).unwrap()
+}
+pub fn dep_kind(name: &str, kind: DepKind) -> Dependency {
+ dep(name).set_kind(kind).clone()
+}
+
+pub fn registry(pkgs: Vec<Summary>) -> Vec<Summary> {
+ pkgs
+}
+
+pub fn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> {
+ names.iter().map(|name| name.to_pkgid()).collect()
+}
+
+pub fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {
+ names
+ .iter()
+ .map(|&(name, loc)| pkg_id_loc(name, loc))
+ .collect()
+}
+
+/// By default `Summary` and `Dependency` have a very verbose `Debug` representation.
+/// This replaces it with a representation that uses the constructors from this file.
+///
+/// If `registry_strategy` is improved to modify more fields
+/// then this needs to be updated to display the corresponding constructor.
+pub struct PrettyPrintRegistry(pub Vec<Summary>);
+
+impl fmt::Debug for PrettyPrintRegistry {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "vec![")?;
+ for s in &self.0 {
+ if s.dependencies().is_empty() {
+ write!(f, "pkg!((\"{}\", \"{}\")),", s.name(), s.version())?;
+ } else {
+ write!(f, "pkg!((\"{}\", \"{}\") => [", s.name(), s.version())?;
+ for d in s.dependencies() {
+ if d.kind() == DepKind::Normal
+ && &d.version_req().to_string() == "*"
+ && !d.is_public()
+ {
+ write!(f, "dep(\"{}\"),", d.name_in_toml())?;
+ } else if d.kind() == DepKind::Normal && !d.is_public() {
+ write!(
+ f,
+ "dep_req(\"{}\", \"{}\"),",
+ d.name_in_toml(),
+ d.version_req()
+ )?;
+ } else {
+ write!(
+ f,
+ "dep_req_kind(\"{}\", \"{}\", {}, {}),",
+ d.name_in_toml(),
+ d.version_req(),
+ match d.kind() {
+ DepKind::Development => "DepKind::Development",
+ DepKind::Build => "DepKind::Build",
+ DepKind::Normal => "DepKind::Normal",
+ },
+ d.is_public()
+ )?;
+ }
+ }
+ write!(f, "]),")?;
+ }
+ }
+ write!(f, "]")
+ }
+}
+
+#[test]
+fn meta_test_deep_pretty_print_registry() {
+ assert_eq!(
+ &format!(
+ "{:?}",
+ PrettyPrintRegistry(vec![
+ pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
+ pkg!(("foo", "2.0.0") => [dep_req("bar", "*")]),
+ pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
+ dep_req("other", "1")]),
+ pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
+ pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
+ pkg!(("baz", "1.0.1")),
+ pkg!(("cat", "1.0.2") => [dep_req_kind("other", "2", DepKind::Build, false)]),
+ pkg!(("cat", "1.0.3") => [dep_req_kind("other", "2", DepKind::Development, false)]),
+ pkg!(("dep_req", "1.0.0")),
+ pkg!(("dep_req", "2.0.0")),
+ ])
+ ),
+ "vec![pkg!((\"foo\", \"1.0.1\") => [dep_req(\"bar\", \"^1\"),]),\
+ pkg!((\"foo\", \"1.0.0\") => [dep_req(\"bar\", \"^2\"),]),\
+ pkg!((\"foo\", \"2.0.0\") => [dep(\"bar\"),]),\
+ pkg!((\"bar\", \"1.0.0\") => [dep_req(\"baz\", \"=1.0.2\"),dep_req(\"other\", \"^1\"),]),\
+ pkg!((\"bar\", \"2.0.0\") => [dep_req(\"baz\", \"=1.0.1\"),]),\
+ pkg!((\"baz\", \"1.0.2\") => [dep_req(\"other\", \"^2\"),]),\
+ pkg!((\"baz\", \"1.0.1\")),\
+ pkg!((\"cat\", \"1.0.2\") => [dep_req_kind(\"other\", \"^2\", DepKind::Build, false),]),\
+ pkg!((\"cat\", \"1.0.3\") => [dep_req_kind(\"other\", \"^2\", DepKind::Development, false),]),\
+ pkg!((\"dep_req\", \"1.0.0\")),\
+ pkg!((\"dep_req\", \"2.0.0\")),]"
+ )
+}
+
+/// This generates a random registry index.
+/// Unlike `vec((Name, Ver, vec((Name, VerRq), ..), ..))`,
+/// this strategy has a high probability of generating valid dependencies.
+pub fn registry_strategy(
+ max_crates: usize,
+ max_versions: usize,
+ shrinkage: usize,
+) -> impl Strategy<Value = PrettyPrintRegistry> {
+ let name = string_regex("[A-Za-z][A-Za-z0-9_-]*(-sys)?").unwrap();
+
+ let raw_version = ..max_versions.pow(3);
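+    // A raw version is one integer in [0, max_versions^3); it is decoded as three
+    // base-`max_versions` digits to recover the major.minor.patch components.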
+ let version_from_raw = move |r: usize| {
+ let major = ((r / max_versions) / max_versions) % max_versions;
+ let minor = (r / max_versions) % max_versions;
+ let patch = r % max_versions;
+ format!("{}.{}.{}", major, minor, patch)
+ };
+
+ // If this is false then the crate will depend on the nonexistent "bad"
+ // instead of the complex set we generated for it.
+ let allow_deps = prop::bool::weighted(0.99);
+
+ let list_of_versions =
+ btree_map(raw_version, allow_deps, 1..=max_versions).prop_map(move |ver| {
+ ver.into_iter()
+ .map(|a| (version_from_raw(a.0), a.1))
+ .collect::<Vec<_>>()
+ });
+
+ let list_of_crates_with_versions =
+ btree_map(name, list_of_versions, 1..=max_crates).prop_map(|mut vers| {
+ // root is the name of the thing being compiled
+ // so it would be confusing to have it in the index
+ vers.remove("root");
+ // bad is a name reserved for a dep that won't work
+ vers.remove("bad");
+ vers
+ });
+
+    // each version of each crate can depend on each crate smaller than it.
+ // In theory shrinkage should be 2, but in practice we get better trees with a larger value.
+ let max_deps = max_versions * (max_crates * (max_crates - 1)) / shrinkage;
+
+ let raw_version_range = (any::<Index>(), any::<Index>());
+ let raw_dependency = (
+ any::<Index>(),
+ any::<Index>(),
+ raw_version_range,
+ 0..=1,
+ Just(false),
+ // TODO: ^ this needs to be set back to `any::<bool>()` and work before public & private dependencies can stabilize
+ );
+
+ fn order_index(a: Index, b: Index, size: usize) -> (usize, usize) {
+ let (a, b) = (a.index(size), b.index(size));
+ (min(a, b), max(a, b))
+ }
+
+ let list_of_raw_dependency = vec(raw_dependency, ..=max_deps);
+
+    // By default a package depends only on other packages that have a smaller name;
+    // this helps make sure that all things in the resulting index are DAGs.
+    // If this is true then the DAG is maintained with greater instead.
+ let reverse_alphabetical = any::<bool>().no_shrink();
+
+ (
+ list_of_crates_with_versions,
+ list_of_raw_dependency,
+ reverse_alphabetical,
+ )
+ .prop_map(
+ |(crate_vers_by_name, raw_dependencies, reverse_alphabetical)| {
+ let list_of_pkgid: Vec<_> = crate_vers_by_name
+ .iter()
+ .flat_map(|(name, vers)| vers.iter().map(move |x| ((name.as_str(), &x.0), x.1)))
+ .collect();
+ let len_all_pkgid = list_of_pkgid.len();
+ let mut dependency_by_pkgid = vec![vec![]; len_all_pkgid];
+ for (a, b, (c, d), k, p) in raw_dependencies {
+ let (a, b) = order_index(a, b, len_all_pkgid);
+ let (a, b) = if reverse_alphabetical { (b, a) } else { (a, b) };
+ let ((dep_name, _), _) = list_of_pkgid[a];
+ if (list_of_pkgid[b].0).0 == dep_name {
+ continue;
+ }
+ let s = &crate_vers_by_name[dep_name];
+ let s_last_index = s.len() - 1;
+ let (c, d) = order_index(c, d, s.len());
+
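+                // Turn the index range s[c]..=s[d] into a version requirement string,
+                // collapsing to "*", "<=", ">=", or "=" when the range covers an endpoint
+                // or a single version.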
+ dependency_by_pkgid[b].push(dep_req_kind(
+ dep_name,
+ &if c == 0 && d == s_last_index {
+ "*".to_string()
+ } else if c == 0 {
+ format!("<={}", s[d].0)
+ } else if d == s_last_index {
+ format!(">={}", s[c].0)
+ } else if c == d {
+ format!("={}", s[c].0)
+ } else {
+ format!(">={}, <={}", s[c].0, s[d].0)
+ },
+ match k {
+ 0 => DepKind::Normal,
+ 1 => DepKind::Build,
+ // => DepKind::Development, // Development has no impact so don't gen
+ _ => panic!("bad index for DepKind"),
+ },
+ p && k == 0,
+ ))
+ }
+
+ let mut out: Vec<Summary> = list_of_pkgid
+ .into_iter()
+ .zip(dependency_by_pkgid.into_iter())
+ .map(|(((name, ver), allow_deps), deps)| {
+ pkg_dep(
+ (name, ver).to_pkgid(),
+ if !allow_deps {
+ vec![dep_req("bad", "*")]
+ } else {
+ let mut deps = deps;
+ deps.sort_by_key(|d| d.name_in_toml());
+ deps.dedup_by_key(|d| d.name_in_toml());
+ deps
+ },
+ )
+ })
+ .collect();
+
+ if reverse_alphabetical {
+ // make sure the complicated cases are at the end
+ out.reverse();
+ }
+
+ PrettyPrintRegistry(out)
+ },
+ )
+}
+
+/// This test exercises the generator to ensure
+/// that it makes registries with large dependency trees.
+#[test]
+fn meta_test_deep_trees_from_strategy() {
+ use proptest::strategy::ValueTree;
+ use proptest::test_runner::TestRunner;
+
+ let mut dis = [0; 21];
+
+ let strategy = registry_strategy(50, 20, 60);
+ let mut test_runner = TestRunner::deterministic();
+ for _ in 0..128 {
+ let PrettyPrintRegistry(input) = strategy
+ .new_tree(&mut TestRunner::new_with_rng(
+ Default::default(),
+ test_runner.new_rng(),
+ ))
+ .unwrap()
+ .current();
+ let reg = registry(input.clone());
+ for this in input.iter().rev().take(10) {
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+ dis[res
+ .as_ref()
+ .map(|x| min(x.len(), dis.len()) - 1)
+ .unwrap_or(0)] += 1;
+ if dis.iter().all(|&x| x > 0) {
+ return;
+ }
+ }
+ }
+
+ panic!(
+ "In 1280 tries we did not see a wide enough distribution of dependency trees! dis: {:?}",
+ dis
+ );
+}
+
+/// This test exercises the generator to ensure
+/// that it makes registries that include multiple versions of the same library.
+#[test]
+fn meta_test_multiple_versions_strategy() {
+ use proptest::strategy::ValueTree;
+ use proptest::test_runner::TestRunner;
+
+ let mut dis = [0; 10];
+
+ let strategy = registry_strategy(50, 20, 60);
+ let mut test_runner = TestRunner::deterministic();
+ for _ in 0..128 {
+ let PrettyPrintRegistry(input) = strategy
+ .new_tree(&mut TestRunner::new_with_rng(
+ Default::default(),
+ test_runner.new_rng(),
+ ))
+ .unwrap()
+ .current();
+ let reg = registry(input.clone());
+ for this in input.iter().rev().take(10) {
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+ if let Ok(mut res) = res {
+ let res_len = res.len();
+ res.sort_by_key(|s| s.name());
+ res.dedup_by_key(|s| s.name());
+ dis[min(res_len - res.len(), dis.len() - 1)] += 1;
+ }
+ if dis.iter().all(|&x| x > 0) {
+ return;
+ }
+ }
+ }
+ panic!(
+ "In 1280 tries we did not see a wide enough distribution of multiple versions of the same library! dis: {:?}",
+ dis
+ );
+}
+
+/// Assert `xs` contains `elems`
+#[track_caller]
+pub fn assert_contains<A: PartialEq>(xs: &[A], elems: &[A]) {
+ for elem in elems {
+ assert!(xs.contains(elem));
+ }
+}
+
+#[track_caller]
+pub fn assert_same<A: PartialEq>(a: &[A], b: &[A]) {
+ assert_eq!(a.len(), b.len());
+ assert_contains(b, a);
+}
diff --git a/src/tools/cargo/crates/resolver-tests/tests/resolve.rs b/src/tools/cargo/crates/resolver-tests/tests/resolve.rs
new file mode 100644
index 000000000..df74826f0
--- /dev/null
+++ b/src/tools/cargo/crates/resolver-tests/tests/resolve.rs
@@ -0,0 +1,1562 @@
+use cargo::core::dependency::DepKind;
+use cargo::core::Dependency;
+use cargo::util::Config;
+use cargo_util::is_ci;
+
+use resolver_tests::{
+ assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, dep_req_kind, loc_names, names,
+ pkg, pkg_id, pkg_loc, registry, registry_strategy, remove_dep, resolve, resolve_and_validated,
+ resolve_with_config, PrettyPrintRegistry, SatResolve, ToDep, ToPkgId,
+};
+
+use proptest::prelude::*;
+
+// NOTE: proptest is a form of fuzz testing. It generates random input and makes sure that
+// certain universal truths are upheld. Therefore, it can pass when there is a problem,
+// but if it fails then there really is something wrong. When testing something as
+// complicated as the resolver, the problems can be very subtle and hard to generate.
+// We have had a history of these tests only failing on PRs long after a bug is introduced.
+// If you have one of these tests fail, please report it on #6258,
+// and if you did not change the resolver then feel free to retry without concern.
+proptest! {
+ #![proptest_config(ProptestConfig {
+ max_shrink_iters:
+        if is_ci() || !is_terminal::IsTerminal::is_terminal(&std::io::stderr()) {
+ // This attempts to make sure that CI will fail fast,
+ 0
+ } else {
+ // but that local builds will give a small clear test case.
+ u32::MAX
+ },
+ result_cache: prop::test_runner::basic_result_cache,
+ .. ProptestConfig::default()
+ })]
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_passes_validation(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60)
+ ) {
+ let reg = registry(input.clone());
+ let sat_resolve = SatResolve::new(&reg);
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(20) {
+ let _ = resolve_and_validated(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ Some(sat_resolve.clone()),
+ );
+ }
+ }
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_minimum_version_errors_the_same(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60)
+ ) {
+ let mut config = Config::default().unwrap();
+ config.nightly_features_allowed = true;
+ config
+ .configure(
+ 1,
+ false,
+ None,
+ false,
+ false,
+ false,
+ &None,
+ &["minimal-versions".to_string()],
+ &[],
+ )
+ .unwrap();
+
+ let reg = registry(input.clone());
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(10) {
+            // minimal-versions changes the order in which candidates
+            // are tried, but not the existence of a solution
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+
+ let mres = resolve_with_config(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ &config,
+ );
+
+ prop_assert_eq!(
+ res.is_ok(),
+ mres.is_ok(),
+ "minimal-versions and regular resolver disagree about whether `{} = \"={}\"` can resolve",
+ this.name(),
+ this.version()
+ )
+ }
+ }
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_direct_minimum_version_error_implications(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60)
+ ) {
+ let mut config = Config::default().unwrap();
+ config.nightly_features_allowed = true;
+ config
+ .configure(
+ 1,
+ false,
+ None,
+ false,
+ false,
+ false,
+ &None,
+ &["direct-minimal-versions".to_string()],
+ &[],
+ )
+ .unwrap();
+
+ let reg = registry(input.clone());
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(10) {
+ // direct-minimal-versions reduces the number of available solutions, so we verify that
+            // we do not come up with solutions that the maximal-versions resolver does not
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+
+ let mres = resolve_with_config(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ &config,
+ );
+
+ if res.is_err() {
+ prop_assert!(
+ mres.is_err(),
+ "direct-minimal-versions should not have more solutions than the regular, maximal resolver but found one when resolving `{} = \"={}\"`",
+ this.name(),
+ this.version()
+ )
+ }
+ if mres.is_ok() {
+ prop_assert!(
+ res.is_ok(),
+ "direct-minimal-versions should not have more solutions than the regular, maximal resolver but found one when resolving `{} = \"={}\"`",
+ this.name(),
+ this.version()
+ )
+ }
+ }
+ }
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_removing_a_dep_cant_break(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60),
+ indexes_to_remove in prop::collection::vec((any::<prop::sample::Index>(), any::<prop::sample::Index>()), ..10)
+ ) {
+ let reg = registry(input.clone());
+ let mut removed_input = input.clone();
+ for (summary_idx, dep_idx) in indexes_to_remove {
+ if !removed_input.is_empty() {
+ let summary_idx = summary_idx.index(removed_input.len());
+ let deps = removed_input[summary_idx].dependencies();
+ if !deps.is_empty() {
+ let new = remove_dep(&removed_input[summary_idx], dep_idx.index(deps.len()));
+ removed_input[summary_idx] = new;
+ }
+ }
+ }
+ let removed_reg = registry(removed_input);
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(10) {
+ if resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ ).is_ok() {
+ prop_assert!(
+ resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &removed_reg,
+ ).is_ok(),
+ "full index worked for `{} = \"={}\"` but removing some deps broke it!",
+ this.name(),
+ this.version(),
+ )
+ }
+ }
+ }
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_limited_independence_of_irrelevant_alternatives(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60),
+ indexes_to_unpublish in prop::collection::vec(any::<prop::sample::Index>(), ..10)
+ ) {
+ let reg = registry(input.clone());
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(10) {
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+
+ match res {
+ Ok(r) => {
+ // If resolution was successful, then unpublishing a version of a crate
+ // that was not selected should not change that.
+ let not_selected: Vec<_> = input
+ .iter()
+ .cloned()
+ .filter(|x| !r.contains(&x.package_id()))
+ .collect();
+ if !not_selected.is_empty() {
+ let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(&not_selected)).collect();
+
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| !indexes_to_unpublish.contains(&x))
+ .collect(),
+ );
+
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &new_reg,
+ );
+
+                    // Note that we cannot assert that the two `res` are identical
+ // as the resolver does depend on irrelevant alternatives.
+ // It uses how constrained a dependency requirement is
+ // to determine what order to evaluate requirements.
+
+ prop_assert!(
+ res.is_ok(),
+ "unpublishing {:?} stopped `{} = \"={}\"` from working",
+ indexes_to_unpublish.iter().map(|x| x.package_id()).collect::<Vec<_>>(),
+ this.name(),
+ this.version()
+ )
+ }
+ }
+
+ Err(_) => {
+ // If resolution was unsuccessful, then it should stay unsuccessful
+ // even if any version of a crate is unpublished.
+ let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(&input)).collect();
+
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| !indexes_to_unpublish.contains(&x))
+ .collect(),
+ );
+
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &new_reg,
+ );
+
+ prop_assert!(
+ res.is_err(),
+ "full index did not work for `{} = \"={}\"` but unpublishing {:?} fixed it!",
+ this.name(),
+ this.version(),
+ indexes_to_unpublish.iter().map(|x| x.package_id()).collect::<Vec<_>>(),
+ )
+ }
+ }
+ }
+ }
+}
+
+#[test]
+#[should_panic(expected = "pub dep")] // The error handling is not yet implemented.
+fn pub_fail() {
+ let input = vec![
+ pkg!(("a", "0.0.4")),
+ pkg!(("a", "0.0.5")),
+ pkg!(("e", "0.0.6") => [dep_req_kind("a", "<= 0.0.4", DepKind::Normal, true),]),
+ pkg!(("kB", "0.0.3") => [dep_req("a", ">= 0.0.5"),dep("e"),]),
+ ];
+ let reg = registry(input);
+ assert!(resolve_and_validated(vec![dep("kB")], &reg, None).is_err());
+}
+
+#[test]
+fn basic_public_dependency() {
+ let reg = registry(vec![
+ pkg!(("A", "0.1.0")),
+ pkg!(("A", "0.2.0")),
+ pkg!("B" => [dep_req_kind("A", "0.1", DepKind::Normal, true)]),
+ pkg!("C" => [dep("A"), dep("B")]),
+ ]);
+
+ let res = resolve_and_validated(vec![dep("C")], &reg, None).unwrap();
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("C", "1.0.0"),
+ ("B", "1.0.0"),
+ ("A", "0.1.0"),
+ ]),
+ );
+}
+
+#[test]
+fn public_dependency_filling_in() {
+ // The resolver has an optimization where if a candidate to resolve a dependency
+    // has already been activated then we skip looking at the candidate's dependencies.
+ // However, we have to be careful as the new path may make pub dependencies invalid.
+
+ // Triggering this case requires dependencies to be resolved in a specific order.
+ // Fuzzing found this unintuitive case, that triggers this unfortunate order of operations:
+ // 1. `d`'s dep on `c` is resolved
+ // 2. `d`'s dep on `a` is resolved with `0.1.1`
+ // 3. `c`'s dep on `b` is resolved with `0.0.2`
+    // 4. `b`'s dep on `a` is resolved with `0.0.6`; no pub dep conflict as `b` is private to `c`
+    // 5. `d`'s dep on `b` is resolved with `0.0.2`, triggering the optimization.
+    // Do we notice that `d` has a pub dep conflict on `a`? Let's try it and see.
+ let reg = registry(vec![
+ pkg!(("a", "0.0.6")),
+ pkg!(("a", "0.1.1")),
+ pkg!(("b", "0.0.0") => [dep("bad")]),
+ pkg!(("b", "0.0.1") => [dep("bad")]),
+ pkg!(("b", "0.0.2") => [dep_req_kind("a", "=0.0.6", DepKind::Normal, true)]),
+ pkg!("c" => [dep_req("b", ">=0.0.1")]),
+ pkg!("d" => [dep("c"), dep("a"), dep("b")]),
+ ]);
+
+ let res = resolve_and_validated(vec![dep("d")], &reg, None).unwrap();
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("d", "1.0.0"),
+ ("c", "1.0.0"),
+ ("b", "0.0.2"),
+ ("a", "0.0.6"),
+ ]),
+ );
+}
+
+#[test]
+fn public_dependency_filling_in_and_update() {
+ // The resolver has an optimization where if a candidate to resolve a dependency
+    // has already been activated then we skip looking at the candidate's dependencies.
+ // However, we have to be careful as the new path may make pub dependencies invalid.
+
+ // Triggering this case requires dependencies to be resolved in a specific order.
+ // Fuzzing found this unintuitive case, that triggers this unfortunate order of operations:
+ // 1. `D`'s dep on `B` is resolved
+ // 2. `D`'s dep on `C` is resolved
+ // 3. `B`'s dep on `A` is resolved with `0.0.0`
+    // 4. `C`'s dep on `B` is resolved, triggering the optimization.
+    // So did we add `A 0.0.0` to the deps `C` can see?
+    // Or are we going to resolve `C`'s dep on `A` with `0.0.2`?
+    // Let's try it and see.
+ let reg = registry(vec![
+ pkg!(("A", "0.0.0")),
+ pkg!(("A", "0.0.2")),
+ pkg!("B" => [dep_req_kind("A", "=0.0.0", DepKind::Normal, true),]),
+ pkg!("C" => [dep("A"),dep("B")]),
+ pkg!("D" => [dep("B"),dep("C")]),
+ ]);
+ let res = resolve_and_validated(vec![dep("D")], &reg, None).unwrap();
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("D", "1.0.0"),
+ ("C", "1.0.0"),
+ ("B", "1.0.0"),
+ ("A", "0.0.0"),
+ ]),
+ );
+}
+
+#[test]
+fn public_dependency_skipping() {
+ // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken that the effects of pub deps are accounted for.
+ let input = vec![
+ pkg!(("a", "0.2.0")),
+ pkg!(("a", "2.0.0")),
+ pkg!(("b", "0.0.0") => [dep("bad")]),
+ pkg!(("b", "0.2.1") => [dep_req_kind("a", "0.2.0", DepKind::Normal, true)]),
+ pkg!("c" => [dep("a"),dep("b")]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("c")], &reg, None).unwrap();
+}
+
+#[test]
+fn public_dependency_skipping_in_backtracking() {
+ // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken that the effects of pub deps are accounted for.
+ let input = vec![
+ pkg!(("A", "0.0.0") => [dep("bad")]),
+ pkg!(("A", "0.0.1") => [dep("bad")]),
+ pkg!(("A", "0.0.2") => [dep("bad")]),
+ pkg!(("A", "0.0.3") => [dep("bad")]),
+ pkg!(("A", "0.0.4")),
+ pkg!(("A", "0.0.5")),
+ pkg!("B" => [dep_req_kind("A", ">= 0.0.3", DepKind::Normal, true)]),
+ pkg!("C" => [dep_req("A", "<= 0.0.4"), dep("B")]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("C")], &reg, None).unwrap();
+}
+
+#[test]
+fn public_sat_topological_order() {
+ let input = vec![
+ pkg!(("a", "0.0.1")),
+ pkg!(("a", "0.0.0")),
+ pkg!(("b", "0.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]),
+ pkg!(("b", "0.0.0") => [dep("bad"),]),
+ pkg!("A" => [dep_req("a", "= 0.0.0"),dep_req_kind("b", "*", DepKind::Normal, true)]),
+ ];
+
+ let reg = registry(input);
+ assert!(resolve_and_validated(vec![dep("A")], &reg, None).is_err());
+}
+
+#[test]
+fn public_sat_unused_makes_things_pub() {
+ let input = vec![
+ pkg!(("a", "0.0.1")),
+ pkg!(("a", "0.0.0")),
+ pkg!(("b", "8.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]),
+ pkg!(("b", "8.0.0") => [dep_req("a", "= 0.0.1"),]),
+ pkg!("c" => [dep_req("b", "= 8.0.0"),dep_req("a", "= 0.0.0"),]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("c")], &reg, None).unwrap();
+}
+
+#[test]
+fn public_sat_unused_makes_things_pub_2() {
+ let input = vec![
+ pkg!(("c", "0.0.2")),
+ pkg!(("c", "0.0.1")),
+ pkg!(("a-sys", "0.0.2")),
+ pkg!(("a-sys", "0.0.1") => [dep_req_kind("c", "= 0.0.1", DepKind::Normal, true),]),
+ pkg!("P" => [dep_req_kind("a-sys", "*", DepKind::Normal, true),dep_req("c", "= 0.0.1"),]),
+ pkg!("A" => [dep("P"),dep_req("c", "= 0.0.2"),]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("A")], &reg, None).unwrap();
+}
+
+#[test]
+#[should_panic(expected = "assertion failed: !name.is_empty()")]
+fn test_dependency_with_empty_name() {
+ // Bug 5229, dependency-names must not be empty
+ "".to_dep();
+}
+
+#[test]
+fn test_resolving_empty_dependency_list() {
+ let res = resolve(Vec::new(), &registry(vec![])).unwrap();
+
+ assert_eq!(res, names(&["root"]));
+}
+
+#[test]
+fn test_resolving_only_package() {
+ let reg = registry(vec![pkg!("foo")]);
+ let res = resolve(vec![dep("foo")], &reg).unwrap();
+ assert_same(&res, &names(&["root", "foo"]));
+}
+
+#[test]
+fn test_resolving_one_dep() {
+ let reg = registry(vec![pkg!("foo"), pkg!("bar")]);
+ let res = resolve(vec![dep("foo")], &reg).unwrap();
+ assert_same(&res, &names(&["root", "foo"]));
+}
+
+#[test]
+fn test_resolving_multiple_deps() {
+ let reg = registry(vec![pkg!("foo"), pkg!("bar"), pkg!("baz")]);
+ let res = resolve(vec![dep("foo"), dep("baz")], &reg).unwrap();
+ assert_same(&res, &names(&["root", "foo", "baz"]));
+}
+
+#[test]
+fn test_resolving_transitive_deps() {
+ let reg = registry(vec![pkg!("foo"), pkg!("bar" => ["foo"])]);
+ let res = resolve(vec![dep("bar")], &reg).unwrap();
+
+ assert_same(&res, &names(&["root", "foo", "bar"]));
+}
+
+#[test]
+fn test_resolving_common_transitive_deps() {
+ let reg = registry(vec![pkg!("foo" => ["bar"]), pkg!("bar")]);
+ let res = resolve(vec![dep("foo"), dep("bar")], &reg).unwrap();
+
+ assert_same(&res, &names(&["root", "foo", "bar"]));
+}
+
+#[test]
+fn test_resolving_with_same_name() {
+ let list = vec![
+ pkg_loc("foo", "https://first.example.com"),
+ pkg_loc("bar", "https://second.example.com"),
+ ];
+
+ let reg = registry(list);
+ let res = resolve(
+ vec![
+ dep_loc("foo", "https://first.example.com"),
+ dep_loc("bar", "https://second.example.com"),
+ ],
+ &reg,
+ )
+ .unwrap();
+
+ let mut names = loc_names(&[
+ ("foo", "https://first.example.com"),
+ ("bar", "https://second.example.com"),
+ ]);
+
+ names.push(pkg_id("root"));
+ assert_same(&res, &names);
+}
+
+#[test]
+fn test_resolving_with_dev_deps() {
+ let reg = registry(vec![
+ pkg!("foo" => ["bar", dep_kind("baz", DepKind::Development)]),
+ pkg!("baz" => ["bat", dep_kind("bam", DepKind::Development)]),
+ pkg!("bar"),
+ pkg!("bat"),
+ ]);
+
+ let res = resolve(
+ vec![dep("foo"), dep_kind("baz", DepKind::Development)],
+ &reg,
+ )
+ .unwrap();
+
+ assert_same(&res, &names(&["root", "foo", "bar", "baz", "bat"]));
+}
+
+#[test]
+fn resolving_with_many_versions() {
+ let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
+
+ let res = resolve(vec![dep("foo")], &reg).unwrap();
+
+ assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.2")]));
+}
+
+#[test]
+fn resolving_with_specific_version() {
+ let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
+
+ let res = resolve(vec![dep_req("foo", "=1.0.1")], &reg).unwrap();
+
+ assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.1")]));
+}
+
+#[test]
+fn test_resolving_maximum_version_with_transitive_deps() {
+ let reg = registry(vec![
+ pkg!(("util", "1.2.2")),
+ pkg!(("util", "1.0.0")),
+ pkg!(("util", "1.1.1")),
+ pkg!("foo" => [dep_req("util", "1.0.0")]),
+ pkg!("bar" => [dep_req("util", ">=1.0.1")]),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("util", "1.2.2"),
+ ]),
+ );
+ assert!(!res.contains(&("util", "1.0.1").to_pkgid()));
+ assert!(!res.contains(&("util", "1.1.1").to_pkgid()));
+}
+
+#[test]
+fn test_resolving_minimum_version_with_transitive_deps() {
+ let reg = registry(vec![
+ pkg!(("util", "1.2.2")),
+ pkg!(("util", "1.0.0")),
+ pkg!(("util", "1.1.1")),
+ pkg!("foo" => [dep_req("util", "1.0.0")]),
+ pkg!("bar" => [dep_req("util", ">=1.0.1")]),
+ ]);
+
+ let mut config = Config::default().unwrap();
+ // -Z minimal-versions
+ // When the minimal-versions config option is specified then the lowest
+ // possible version of a package should be selected. "util 1.0.0" can't be
+ // selected because of the requirements of "bar", so the minimum version
+ // must be 1.1.1.
+ config.nightly_features_allowed = true;
+ config
+ .configure(
+ 1,
+ false,
+ None,
+ false,
+ false,
+ false,
+ &None,
+ &["minimal-versions".to_string()],
+ &[],
+ )
+ .unwrap();
+
+ let res = resolve_with_config(
+ vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")],
+ &reg,
+ &config,
+ )
+ .unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("util", "1.1.1"),
+ ]),
+ );
+ assert!(!res.contains(&("util", "1.2.2").to_pkgid()));
+ assert!(!res.contains(&("util", "1.0.0").to_pkgid()));
+}
+
+#[test]
+fn resolving_incompat_versions() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.1")),
+ pkg!(("foo", "1.0.2")),
+ pkg!("bar" => [dep_req("foo", "=1.0.2")]),
+ ]);
+
+ assert!(resolve(vec![dep_req("foo", "=1.0.1"), dep("bar")], &reg).is_err());
+}
+
+#[test]
+fn resolving_wrong_case_from_registry() {
+ // In the future we may #5678 allow this to happen.
+    // For backward compatibility reasons, we probably won't.
+    // But we may want to future-proof ourselves by understanding it.
+ // This test documents the current behavior.
+ let reg = registry(vec![pkg!(("foo", "1.0.0")), pkg!("bar" => ["Foo"])]);
+
+ assert!(resolve(vec![dep("bar")], &reg).is_err());
+}
+
+#[test]
+fn resolving_mis_hyphenated_from_registry() {
+ // In the future we may #2775 allow this to happen.
+    // For backward compatibility reasons, we probably won't.
+    // But we may want to future-proof ourselves by understanding it.
+ // This test documents the current behavior.
+ let reg = registry(vec![pkg!(("fo-o", "1.0.0")), pkg!("bar" => ["fo_o"])]);
+
+ assert!(resolve(vec![dep("bar")], &reg).is_err());
+}
+
+#[test]
+fn resolving_backtrack() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.2") => [dep("bar")]),
+ pkg!(("foo", "1.0.1") => [dep("baz")]),
+ pkg!("bar" => [dep_req("foo", "=2.0.2")]),
+ pkg!("baz"),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "^1")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("baz", "1.0.0")]),
+ );
+}
+
+#[test]
+fn resolving_backtrack_features() {
+ // test for cargo/issues/4347
+ let mut bad = dep("bar");
+ bad.set_features(vec!["bad"]);
+
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.2") => [bad]),
+ pkg!(("foo", "1.0.1") => [dep("bar")]),
+ pkg!("bar"),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "^1")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("bar", "1.0.0")]),
+ );
+}
+
+#[test]
+fn resolving_allows_multiple_compatible_versions() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.0")),
+ pkg!(("foo", "2.0.0")),
+ pkg!(("foo", "0.1.0")),
+ pkg!(("foo", "0.2.0")),
+ pkg!("bar" => ["d1", "d2", "d3", "d4"]),
+ pkg!("d1" => [dep_req("foo", "1")]),
+ pkg!("d2" => [dep_req("foo", "2")]),
+ pkg!("d3" => [dep_req("foo", "0.1")]),
+ pkg!("d4" => [dep_req("foo", "0.2")]),
+ ]);
+
+ let res = resolve(vec![dep("bar")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("foo", "2.0.0"),
+ ("foo", "0.1.0"),
+ ("foo", "0.2.0"),
+ ("d1", "1.0.0"),
+ ("d2", "1.0.0"),
+ ("d3", "1.0.0"),
+ ("d4", "1.0.0"),
+ ("bar", "1.0.0"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_deep_backtracking() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
+ pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
+ dep_req("other", "1")]),
+ pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
+ pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
+ pkg!(("baz", "1.0.1")),
+ pkg!(("dep_req", "1.0.0")),
+ pkg!(("dep_req", "2.0.0")),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "2.0.0"),
+ ("baz", "1.0.1"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_sys_crates() {
+ // This is based on issues/4902
+    // With `l`, a normal library, we get 2 copies so everyone gets the newest compatible version.
+    // But for `l-sys`, a library with a links attribute, we make sure there is only one.
+ let reg = registry(vec![
+ pkg!(("l-sys", "0.9.1")),
+ pkg!(("l-sys", "0.10.0")),
+ pkg!(("l", "0.9.1")),
+ pkg!(("l", "0.10.0")),
+ pkg!(("d", "1.0.0") => [dep_req("l-sys", ">=0.8.0, <=0.10.0"), dep_req("l", ">=0.8.0, <=0.10.0")]),
+ pkg!(("r", "1.0.0") => [dep_req("l-sys", "0.9"), dep_req("l", "0.9")]),
+ ]);
+
+ let res = resolve(vec![dep_req("d", "1"), dep_req("r", "1")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("d", "1.0.0"),
+ ("r", "1.0.0"),
+ ("l-sys", "0.9.1"),
+ ("l", "0.9.1"),
+ ("l", "0.10.0"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_constrained_sibling_backtrack_parent() {
+ // There is no point in considering all of the backtrack_trap{1,2}
+ // candidates since they can't change the result of failing to
+ // resolve 'constrained'. Cargo should (ideally) skip past them and resume
+ // resolution once the activation of the parent, 'bar', is rolled back.
+ // Note that the traps are slightly more constrained to make sure they
+ // get picked first.
+ let mut reglist = vec![
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "1.0"),
+ dep_req("constrained", "=1.0.0")]),
+ pkg!(("bar", "1.0.0") => [dep_req("backtrack_trap1", "1.0.2"),
+ dep_req("backtrack_trap2", "1.0.2"),
+ dep_req("constrained", "1.0.0")]),
+ pkg!(("constrained", "1.0.0")),
+ pkg!(("backtrack_trap1", "1.0.0")),
+ pkg!(("backtrack_trap2", "1.0.0")),
+ ];
+ // Bump this to make the test harder - it adds more versions of bar that will
+ // fail to resolve, and more versions of the traps to consider.
+ const NUM_BARS_AND_TRAPS: usize = 50; // minimum 2
+ for i in 1..NUM_BARS_AND_TRAPS {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(
+ pkg!(("bar", vsn.clone()) => [dep_req("backtrack_trap1", "1.0.2"),
+ dep_req("backtrack_trap2", "1.0.2"),
+ dep_req("constrained", "1.0.1")]),
+ );
+ reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
+ reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
+ reglist.push(pkg!(("constrained", vsn.clone())));
+ }
+ let reg = registry(reglist);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("constrained", "1.0.0"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_many_equivalent_backtracking() {
+ let mut reglist = Vec::new();
+
+ const DEPTH: usize = 200;
+ const BRANCHING_FACTOR: usize = 100;
+
+ // Each level depends on the next but the last level does not exist.
+    // Without caching we need to test every path to the last level, O(BRANCHING_FACTOR ^ DEPTH),
+    // and this test will time out. With caching we only need to discover that none of these
+    // can be activated, O(BRANCHING_FACTOR * DEPTH).
+ for l in 0..DEPTH {
+ let name = format!("level{}", l);
+ let next = format!("level{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep("level0")], &reg);
+
+ assert!(res.is_err());
+
+ // It is easy to write code that quickly returns an error.
+    // Let's make sure we can find a good answer if it is there.
+ reglist.push(pkg!(("level0", "1.0.0")));
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep("level0")], &reg).unwrap();
+
+ assert_contains(&res, &names(&[("root", "1.0.0"), ("level0", "1.0.0")]));
+
+    // Make sure we have not special-cased the no-candidates case.
+ reglist.push(pkg!(("constrained", "1.1.0")));
+ reglist.push(pkg!(("constrained", "1.0.0")));
+ reglist.push(
+ pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep_req("constrained", "=1.0.0")]),
+ );
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep("level0"), dep("constrained")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("level0", "1.0.0"),
+ ("constrained", "1.1.0"),
+ ]),
+ );
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep_req("level0", "1.0.1"), dep("constrained")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ (format!("level{}", DEPTH).as_str(), "1.0.0"),
+ ("constrained", "1.0.0"),
+ ]),
+ );
+
+ let reg = registry(reglist);
+
+ let res = resolve(
+ vec![dep_req("level0", "1.0.1"), dep_req("constrained", "1.1.0")],
+ &reg,
+ );
+
+ assert!(res.is_err());
+}
+
+#[test]
+fn resolving_with_deep_traps() {
+ let mut reglist = Vec::new();
+
+ const DEPTH: usize = 200;
+ const BRANCHING_FACTOR: usize = 100;
+
+ // Each backtrack_trap depends on the next, and adds a backtrack frame.
+    // None of which is going to help with `bad`.
+ for l in 0..DEPTH {
+ let name = format!("backtrack_trap{}", l);
+ let next = format!("backtrack_trap{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+ {
+ let name = format!("backtrack_trap{}", DEPTH);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str())));
+ }
+ }
+ {
+ // slightly less constrained to make sure `cloaking` gets picked last.
+ for i in 1..(BRANCHING_FACTOR + 10) {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("cloaking", vsn.as_str()) => [dep_req("bad", "1.0.1")]));
+ }
+ }
+
+ let reg = registry(reglist);
+
+ let res = resolve(vec![dep("backtrack_trap0"), dep("cloaking")], &reg);
+
+ assert!(res.is_err());
+}
+
+#[test]
+fn resolving_with_constrained_cousins_backtrack() {
+ let mut reglist = Vec::new();
+
+ const DEPTH: usize = 100;
+ const BRANCHING_FACTOR: usize = 50;
+
+ // Each backtrack_trap depends on the next.
+ // The last depends on a specific ver of constrained.
+ for l in 0..DEPTH {
+ let name = format!("backtrack_trap{}", l);
+ let next = format!("backtrack_trap{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+ {
+ let name = format!("backtrack_trap{}", DEPTH);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(
+ pkg!((name.as_str(), vsn.as_str()) => [dep_req("constrained", ">=1.1.0, <=2.0.0")]),
+ );
+ }
+ }
+ {
+ // slightly less constrained to make sure `constrained` gets picked last.
+ for i in 0..(BRANCHING_FACTOR + 10) {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("constrained", vsn.as_str())));
+ }
+ reglist.push(pkg!(("constrained", "1.1.0")));
+ reglist.push(pkg!(("constrained", "2.0.0")));
+ reglist.push(pkg!(("constrained", "2.0.1")));
+ }
+ reglist.push(pkg!(("cloaking", "1.0.0") => [dep_req("constrained", "~1.0.0")]));
+
+ let reg = registry(reglist.clone());
+
+    // `backtrack_trap0 = "*"` is a lot of ways of saying `constrained = ">=1.1.0, <=2.0.0"`,
+    // but `constrained = "2.0.1"` is already picked.
+    // Only then do we try to solve `constrained = "~1.0.0"`, which is incompatible.
+ let res = resolve(
+ vec![
+ dep("backtrack_trap0"),
+ dep_req("constrained", "2.0.1"),
+ dep("cloaking"),
+ ],
+ &reg,
+ );
+
+ assert!(res.is_err());
+
+ // Each level depends on the next but the last depends on incompatible deps.
+ // Let's make sure that we can cache that a dep has incompatible deps.
+ for l in 0..DEPTH {
+ let name = format!("level{}", l);
+ let next = format!("level{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+ reglist.push(
+ pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep("backtrack_trap0"),
+ dep("cloaking")
+ ]),
+ );
+
+ let reg = registry(reglist);
+
+ let res = resolve(vec![dep("level0"), dep_req("constrained", "2.0.1")], &reg);
+
+ assert!(res.is_err());
+
+ let res = resolve(vec![dep("level0"), dep_req("constrained", "2.0.0")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[("constrained", "2.0.0"), ("cloaking", "1.0.0")]),
+ );
+}
+
+#[test]
+fn resolving_with_constrained_sibling_backtrack_activation() {
+ // It makes sense to resolve most-constrained deps first, but
+ // with that logic the backtrack traps here come between the two
+ // attempted resolutions of 'constrained'. When backtracking,
+ // cargo should skip past them and resume resolution once the
+ // number of activations for 'constrained' changes.
+ let mut reglist = vec![
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"),
+ dep_req("backtrack_trap1", "1.0"),
+ dep_req("backtrack_trap2", "1.0"),
+ dep_req("constrained", "<=1.0.60")]),
+ pkg!(("bar", "1.0.0") => [dep_req("constrained", ">=1.0.60")]),
+ ];
+ // Bump these to make the test harder, but you'll also need to
+ // change the version constraints on `constrained` above. To correctly
+ // exercise Cargo, the relationship between the values is:
+ // NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn
+ // to make sure the traps are resolved between `constrained`.
+ const NUM_TRAPS: usize = 45; // min 1
+ const NUM_CONSTRAINED: usize = 100; // min 1
+ for i in 0..NUM_TRAPS {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
+ reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
+ }
+ for i in 0..NUM_CONSTRAINED {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("constrained", vsn.clone())));
+ }
+ let reg = registry(reglist);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("constrained", "1.0.60"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_public_constrained_sibling() {
+ // It makes sense to resolve most-constrained deps first, but
+ // with that logic the backtrack traps here come between the two
+ // attempted resolutions of 'constrained'. When backtracking,
+ // cargo should skip past them and resume resolution once the
+ // number of activations for 'constrained' changes.
+ let mut reglist = vec![
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"),
+ dep_req("backtrack_trap1", "1.0"),
+ dep_req("backtrack_trap2", "1.0"),
+ dep_req("constrained", "<=60")]),
+ pkg!(("bar", "1.0.0") => [dep_req_kind("constrained", ">=60", DepKind::Normal, true)]),
+ ];
+ // Bump these to make the test harder, but you'll also need to
+ // change the version constraints on `constrained` above. To correctly
+ // exercise Cargo, the relationship between the values is:
+ // NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn
+ // to make sure the traps are resolved between `constrained`.
+ const NUM_TRAPS: usize = 45; // min 1
+ const NUM_CONSTRAINED: usize = 100; // min 1
+ for i in 0..NUM_TRAPS {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
+ reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
+ }
+ for i in 0..NUM_CONSTRAINED {
+ let vsn = format!("{}.0.0", i);
+ reglist.push(pkg!(("constrained", vsn.clone())));
+ }
+ let reg = registry(reglist);
+
+ let _ = resolve_and_validated(vec![dep_req("foo", "1")], &reg, None);
+}
+
+#[test]
+fn resolving_with_constrained_sibling_transitive_dep_effects() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken to not miss the transitive effects of alternatives. E.g.
+ // in the right-to-left resolution of the graph below, B may
+ // affect whether D is successfully resolved.
+ //
+ // A
+ // / | \
+ // B C D
+ // | |
+ // C D
+ let reg = registry(vec![
+ pkg!(("A", "1.0.0") => [dep_req("B", "1.0"),
+ dep_req("C", "1.0"),
+ dep_req("D", "1.0.100")]),
+ pkg!(("B", "1.0.0") => [dep_req("C", ">=1.0.0")]),
+ pkg!(("B", "1.0.1") => [dep_req("C", ">=1.0.1")]),
+ pkg!(("C", "1.0.0") => [dep_req("D", "1.0.0")]),
+ pkg!(("C", "1.0.1") => [dep_req("D", ">=1.0.1,<1.0.100")]),
+ pkg!(("C", "1.0.2") => [dep_req("D", ">=1.0.2,<1.0.100")]),
+ pkg!(("D", "1.0.0")),
+ pkg!(("D", "1.0.1")),
+ pkg!(("D", "1.0.2")),
+ pkg!(("D", "1.0.100")),
+ pkg!(("D", "1.0.101")),
+ pkg!(("D", "1.0.102")),
+ pkg!(("D", "1.0.103")),
+ pkg!(("D", "1.0.104")),
+ pkg!(("D", "1.0.105")),
+ ]);
+
+ let res = resolve(vec![dep_req("A", "1")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("A", "1.0.0"),
+ ("B", "1.0.0"),
+ ("C", "1.0.0"),
+ ("D", "1.0.105"),
+ ]),
+ );
+}
+
+#[test]
+fn incomplete_information_skipping() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken to not miss the transitive effects of alternatives.
+ // Fuzzing discovered that for some reason cargo was skipping based
+ // on incomplete information in the following case:
+ // minimized bug found in:
+ // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+ let input = vec![
+ pkg!(("a", "1.0.0")),
+ pkg!(("a", "1.1.0")),
+ pkg!("b" => [dep("a")]),
+ pkg!(("c", "1.0.0")),
+ pkg!(("c", "1.1.0")),
+ pkg!("d" => [dep_req("c", "=1.0")]),
+ pkg!(("e", "1.0.0")),
+ pkg!(("e", "1.1.0") => [dep_req("c", "1.1")]),
+ pkg!("to_yank"),
+ pkg!(("f", "1.0.0") => [
+ dep("to_yank"),
+ dep("d"),
+ ]),
+ pkg!(("f", "1.1.0") => [dep("d")]),
+ pkg!("g" => [
+ dep("b"),
+ dep("e"),
+ dep("f"),
+ ]),
+ ];
+ let reg = registry(input.clone());
+
+ let res = resolve(vec![dep("g")], &reg).unwrap();
+ let package_to_yank = "to_yank".to_pkgid();
+ // this package is not used in the resolution.
+ assert!(!res.contains(&package_to_yank));
+ // so when we yank it
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| package_to_yank != x.package_id())
+ .collect(),
+ );
+ assert_eq!(input.len(), new_reg.len() + 1);
+ // it should still build
+ assert!(resolve(vec![dep("g")], &new_reg).is_ok());
+}
+
+#[test]
+fn incomplete_information_skipping_2() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken to not miss the transitive effects of alternatives.
+ // Fuzzing discovered that for some reason cargo was skipping based
+ // on incomplete information in the following case:
+ // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+ let input = vec![
+ pkg!(("b", "3.8.10")),
+ pkg!(("b", "8.7.4")),
+ pkg!(("b", "9.4.6")),
+ pkg!(("c", "1.8.8")),
+ pkg!(("c", "10.2.5")),
+ pkg!(("d", "4.1.2") => [
+ dep_req("bad", "=6.10.9"),
+ ]),
+ pkg!(("d", "5.5.6")),
+ pkg!(("d", "5.6.10")),
+ pkg!(("to_yank", "8.0.1")),
+ pkg!(("to_yank", "8.8.1")),
+ pkg!(("e", "4.7.8") => [
+ dep_req("d", ">=5.5.6, <=5.6.10"),
+ dep_req("to_yank", "=8.0.1"),
+ ]),
+ pkg!(("e", "7.4.9") => [
+ dep_req("bad", "=4.7.5"),
+ ]),
+ pkg!("f" => [
+ dep_req("d", ">=4.1.2, <=5.5.6"),
+ ]),
+ pkg!("g" => [
+ dep("bad"),
+ ]),
+ pkg!(("h", "3.8.3") => [
+ dep("g"),
+ ]),
+ pkg!(("h", "6.8.3") => [
+ dep("f"),
+ ]),
+ pkg!(("h", "8.1.9") => [
+ dep_req("to_yank", "=8.8.1"),
+ ]),
+ pkg!("i" => [
+ dep("b"),
+ dep("c"),
+ dep("e"),
+ dep("h"),
+ ]),
+ ];
+ let reg = registry(input.clone());
+
+ let res = resolve(vec![dep("i")], &reg).unwrap();
+ let package_to_yank = ("to_yank", "8.8.1").to_pkgid();
+ // this package is not used in the resolution.
+ assert!(!res.contains(&package_to_yank));
+ // so when we yank it
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| package_to_yank != x.package_id())
+ .collect(),
+ );
+ assert_eq!(input.len(), new_reg.len() + 1);
+ // it should still build
+ assert!(resolve(vec![dep("i")], &new_reg).is_ok());
+}
+
+#[test]
+fn incomplete_information_skipping_3() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken to not miss the transitive effects of alternatives.
+ // Fuzzing discovered that for some reason cargo was skipping based
+ // on incomplete information in the following case:
+ // minimized bug found in:
+ // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+ let input = vec![
+ pkg! {("to_yank", "3.0.3")},
+ pkg! {("to_yank", "3.3.0")},
+ pkg! {("to_yank", "3.3.1")},
+ pkg! {("a", "3.3.0") => [
+ dep_req("to_yank", "=3.0.3"),
+ ] },
+ pkg! {("a", "3.3.2") => [
+ dep_req("to_yank", "<=3.3.0"),
+ ] },
+ pkg! {("b", "0.1.3") => [
+ dep_req("a", "=3.3.0"),
+ ] },
+ pkg! {("b", "2.0.2") => [
+ dep_req("to_yank", "3.3.0"),
+ dep("a"),
+ ] },
+ pkg! {("b", "2.3.3") => [
+ dep_req("to_yank", "3.3.0"),
+ dep_req("a", "=3.3.0"),
+ ] },
+ ];
+ let reg = registry(input.clone());
+
+ let res = resolve(vec![dep("b")], &reg).unwrap();
+ let package_to_yank = ("to_yank", "3.0.3").to_pkgid();
+ // this package is not used in the resolution.
+ assert!(!res.contains(&package_to_yank));
+ // so when we yank it
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| package_to_yank != x.package_id())
+ .collect(),
+ );
+ assert_eq!(input.len(), new_reg.len() + 1);
+ // it should still build
+ assert!(resolve(vec![dep("b")], &new_reg).is_ok());
+}
+
+#[test]
+fn resolving_but_no_exists() {
+ let reg = registry(vec![]);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg);
+ assert!(res.is_err());
+
+ assert_eq!(
+ res.err().unwrap().to_string(),
+ "no matching package named `foo` found\n\
+ location searched: registry `https://example.com/`\n\
+ required by package `root v1.0.0 (registry `https://example.com/`)`\
+ "
+ );
+}
+
+#[test]
+fn resolving_cycle() {
+ let reg = registry(vec![pkg!("foo" => ["foo"])]);
+
+ let _ = resolve(vec![dep_req("foo", "1")], &reg);
+}
+
+#[test]
+fn hard_equality() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.1")),
+ pkg!(("foo", "1.0.0")),
+ pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]),
+ ]);
+
+ let res = resolve(vec![dep_req("bar", "1"), dep_req("foo", "=1.0.0")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "1.0.0")]),
+ );
+}
+
+#[test]
+fn large_conflict_cache() {
+ let mut input = vec![
+ pkg!(("last", "0.0.0") => [dep("bad")]), // just to make sure last is less constrained
+ ];
+ let mut root_deps = vec![dep("last")];
+ const NUM_VERSIONS: u8 = 20;
+ for name in 0..=NUM_VERSIONS {
+ // a large number of conflicts can easily be generated by a sys crate.
+ let sys_name = format!("{}-sys", (b'a' + name) as char);
+ let in_len = input.len();
+ input.push(pkg!(("last", format!("{}.0.0", in_len)) => [dep_req(&sys_name, "=0.0.0")]));
+ root_deps.push(dep_req(&sys_name, ">= 0.0.1"));
+
+ // a large number of conflicts can also easily be generated by a major release version.
+ let plane_name = format!("{}", (b'a' + name) as char);
+ let in_len = input.len();
+ input.push(pkg!(("last", format!("{}.0.0", in_len)) => [dep_req(&plane_name, "=1.0.0")]));
+ root_deps.push(dep_req(&plane_name, ">= 1.0.1"));
+
+ for i in 0..=NUM_VERSIONS {
+ input.push(pkg!((&sys_name, format!("{}.0.0", i))));
+ input.push(pkg!((&plane_name, format!("1.0.{}", i))));
+ }
+ }
+ let reg = registry(input);
+ let _ = resolve(root_deps, &reg);
+}
+
+#[test]
+fn off_by_one_bug() {
+ let input = vec![
+ pkg!(("A-sys", "0.0.1")),
+ pkg!(("A-sys", "0.0.4")),
+ pkg!(("A-sys", "0.0.6")),
+ pkg!(("A-sys", "0.0.7")),
+ pkg!(("NA", "0.0.0") => [dep_req("A-sys", "<= 0.0.5"),]),
+ pkg!(("NA", "0.0.1") => [dep_req("A-sys", ">= 0.0.6, <= 0.0.8"),]),
+ pkg!(("a", "0.0.1")),
+ pkg!(("a", "0.0.2")),
+ pkg!(("aa", "0.0.0") => [dep_req("A-sys", ">= 0.0.4, <= 0.0.6"),dep_req("NA", "<= 0.0.0"),]),
+ pkg!(("f", "0.0.3") => [dep("NA"),dep_req("a", "<= 0.0.2"),dep("aa"),]),
+ ];
+
+ let reg = registry(input);
+ let _ = resolve_and_validated(vec![dep("f")], &reg, None);
+}
+
+#[test]
+fn conflict_store_bug() {
+ let input = vec![
+ pkg!(("A", "0.0.3")),
+ pkg!(("A", "0.0.5")),
+ pkg!(("A", "0.0.9") => [dep("bad"),]),
+ pkg!(("A", "0.0.10") => [dep("bad"),]),
+ pkg!(("L-sys", "0.0.1") => [dep("bad"),]),
+ pkg!(("L-sys", "0.0.5")),
+ pkg!(("R", "0.0.4") => [
+ dep_req("L-sys", "= 0.0.5"),
+ ]),
+ pkg!(("R", "0.0.6")),
+ pkg!(("a-sys", "0.0.5")),
+ pkg!(("a-sys", "0.0.11")),
+ pkg!(("c", "0.0.12") => [
+ dep_req("R", ">= 0.0.3, <= 0.0.4"),
+ ]),
+ pkg!(("c", "0.0.13") => [
+ dep_req("a-sys", ">= 0.0.8, <= 0.0.11"),
+ ]),
+ pkg!(("c0", "0.0.6") => [
+ dep_req("L-sys", "<= 0.0.2"),
+ ]),
+ pkg!(("c0", "0.0.10") => [
+ dep_req("A", ">= 0.0.9, <= 0.0.10"),
+ dep_req("a-sys", "= 0.0.5"),
+ ]),
+ pkg!("j" => [
+ dep_req("A", ">= 0.0.3, <= 0.0.5"),
+ dep_req("R", ">=0.0.4, <= 0.0.6"),
+ dep_req("c", ">= 0.0.9"),
+ dep_req("c0", ">= 0.0.6"),
+ ]),
+ ];
+
+ let reg = registry(input);
+ let _ = resolve_and_validated(vec![dep("j")], &reg, None);
+}
+
+#[test]
+fn conflict_store_more_then_one_match() {
+ let input = vec![
+ pkg!(("A", "0.0.0")),
+ pkg!(("A", "0.0.1")),
+ pkg!(("A-sys", "0.0.0")),
+ pkg!(("A-sys", "0.0.1")),
+ pkg!(("A-sys", "0.0.2")),
+ pkg!(("A-sys", "0.0.3")),
+ pkg!(("A-sys", "0.0.12")),
+ pkg!(("A-sys", "0.0.16")),
+ pkg!(("B-sys", "0.0.0")),
+ pkg!(("B-sys", "0.0.1")),
+ pkg!(("B-sys", "0.0.2") => [dep_req("A-sys", "= 0.0.12"),]),
+ pkg!(("BA-sys", "0.0.0") => [dep_req("A-sys","= 0.0.16"),]),
+ pkg!(("BA-sys", "0.0.1") => [dep("bad"),]),
+ pkg!(("BA-sys", "0.0.2") => [dep("bad"),]),
+ pkg!("nA" => [
+ dep("A"),
+ dep_req("A-sys", "<= 0.0.3"),
+ dep("B-sys"),
+ dep("BA-sys"),
+ ]),
+ ];
+ let reg = registry(input);
+ let _ = resolve_and_validated(vec![dep("nA")], &reg, None);
+}
+
+#[test]
+fn bad_lockfile_from_8249() {
+ let input = vec![
+ pkg!(("a-sys", "0.2.0")),
+ pkg!(("a-sys", "0.1.0")),
+ pkg!(("b", "0.1.0") => [
+ dep_req("a-sys", "0.1"), // should be optional: true, but not deeded for now
+ ]),
+ pkg!(("c", "1.0.0") => [
+ dep_req("b", "=0.1.0"),
+ ]),
+ pkg!("foo" => [
+ dep_req("a-sys", "=0.2.0"),
+ {
+ let mut b = dep_req("b", "=0.1.0");
+ b.set_features(vec!["a-sys"]);
+ b
+ },
+ dep_req("c", "=1.0.0"),
+ ]),
+ ];
+ let reg = registry(input);
+ let _ = resolve_and_validated(vec![dep("foo")], &reg, None);
+}
+
+#[test]
+fn cyclic_good_error_message() {
+ let input = vec![
+ pkg!(("A", "0.0.0") => [dep("C")]),
+ pkg!(("B", "0.0.0") => [dep("C")]),
+ pkg!(("C", "0.0.0") => [dep("A")]),
+ ];
+ let reg = registry(input);
+ let error = resolve(vec![dep("A"), dep("B")], &reg).unwrap_err();
+ println!("{}", error);
+ assert_eq!("\
+cyclic package dependency: package `A v0.0.0 (registry `https://example.com/`)` depends on itself. Cycle:
+package `A v0.0.0 (registry `https://example.com/`)`
+ ... which satisfies dependency `A = \"*\"` of package `C v0.0.0 (registry `https://example.com/`)`
+ ... which satisfies dependency `C = \"*\"` of package `A v0.0.0 (registry `https://example.com/`)`\
+", error.to_string());
+}
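
Note: the tests above are written against the resolver test DSL (`pkg!`, `dep`, `dep_req`, `registry`, `resolve`, `resolve_and_validated`) from Cargo's resolver test support crate. As a rough sketch of the "major release" conflict pattern that `large_conflict_cache` generates in bulk, assuming those same helpers are in scope:

    // Minimal sketch, assuming the `pkg!`, `dep`, `dep_req`, `registry`, and
    // `resolve` helpers used in the tests above are in scope.
    #[test]
    fn tiny_major_release_conflict_sketch() {
        let input = vec![
            pkg!(("x", "1.0.0")),
            pkg!(("x", "1.0.1")),
            // `app` pins the older patch release of `x`...
            pkg!(("app", "1.0.0") => [dep_req("x", "=1.0.0")]),
        ];
        let reg = registry(input);
        // ...while the root asks for a newer one. Both requirements fall in the
        // same semver-compatible range, so only one version of `x` can be
        // selected and resolution has to fail with a conflict.
        assert!(resolve(vec![dep("app"), dep_req("x", ">=1.0.1")], &reg).is_err());
    }
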
diff --git a/src/tools/cargo/deny.toml b/src/tools/cargo/deny.toml
new file mode 100644
index 000000000..89d08eacc
--- /dev/null
+++ b/src/tools/cargo/deny.toml
@@ -0,0 +1,273 @@
+# This template contains all of the possible sections and their default values
+
+# Note that all fields that take a lint level have these possible values:
+# * deny - An error will be produced and the check will fail
+# * warn - A warning will be produced, but the check will not fail
+# * allow - No warning or error will be produced, though in some cases a note
+# will be printed
+
+# The values provided in this template are the default values that will be used
+# when any section or field is not specified in your own configuration
+
+# Root options
+
+# If 1 or more target triples (and optionally, target_features) are specified,
+# only the specified targets will be checked when running `cargo deny check`.
+# In other words, this list states which targets you are building for. If a
+# particular package is only ever used as a target-specific dependency (for
+# example, the `nix` crate only being used via the `target_family = "unix"`
+# configuration), then listing only Windows targets here would cause the nix
+# crate, as well as any of its exclusive dependencies not shared by other
+# crates, to be ignored.
+targets = [
+ # The triple can be any string, but only the target triples built in to
+ # rustc (as of 1.40) can be checked against actual config expressions
+ #{ triple = "x86_64-unknown-linux-musl" },
+ # You can also specify which target_features you promise are enabled for a
+ # particular target. target_features are currently not validated against
+ # the actual valid features supported by the target architecture.
+ #{ triple = "wasm32-unknown-unknown", features = ["atomics"] },
+]
+# When creating the dependency graph used as the source of truth when checks are
+# executed, this field can be used to prune crates from the graph, removing them
+# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate
+# is pruned from the graph, all of its dependencies will also be pruned unless
+# they are connected to another crate in the graph that hasn't been pruned,
+# so it should be used with care. The identifiers are [Package ID Specifications]
+# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html)
+#exclude = []
+# If true, metadata will be collected with `--all-features`. Note that this can't
+# be toggled off if true; if you want to conditionally enable `--all-features`, it
+# is recommended to pass `--all-features` on the command line instead.
+all-features = false
+# If true, metadata will be collected with `--no-default-features`. The same
+# caveat with `all-features` applies
+no-default-features = false
+# If set, these features will be enabled when collecting metadata. If `--features`
+# is specified on the command line, they will take precedence over this option.
+#features = []
+# When outputting inclusion graphs in diagnostics that include features, this
+# option can be used to specify the depth at which feature edges will be added.
+# This option is included since the graphs can be quite large and the addition
+# of features from the crate(s) to all of the graph roots can be far too verbose.
+# This option can be overridden via `--feature-depth` on the cmd line
+feature-depth = 1
+
+# This section is considered when running `cargo deny check advisories`
+# More documentation for the advisories section can be found here:
+# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html
+[advisories]
+# The path where the advisory database is cloned/fetched into
+db-path = "~/.cargo/advisory-db"
+# The url(s) of the advisory databases to use
+db-urls = ["https://github.com/rustsec/advisory-db"]
+# The lint level for security vulnerabilities
+vulnerability = "deny"
+# The lint level for unmaintained crates
+unmaintained = "warn"
+# The lint level for crates that have been yanked from their source registry
+yanked = "warn"
+# The lint level for crates with security notices. Note that as of
+# 2019-12-17 there are no security notice advisories in
+# https://github.com/rustsec/advisory-db
+notice = "warn"
+# A list of advisory IDs to ignore. Note that ignored advisories will still
+# output a note when they are encountered.
+ignore = [
+ #"RUSTSEC-0000-0000",
+]
+# Threshold for security vulnerabilities, any vulnerability with a CVSS score
+# lower than the range specified will be ignored. Note that ignored advisories
+# will still output a note when they are encountered.
+# * None - CVSS Score 0.0
+# * Low - CVSS Score 0.1 - 3.9
+# * Medium - CVSS Score 4.0 - 6.9
+# * High - CVSS Score 7.0 - 8.9
+# * Critical - CVSS Score 9.0 - 10.0
+#severity-threshold =
+
+# If this is true, then cargo deny will use the git executable to fetch the advisory database.
+# If this is false, then it uses a built-in git library.
+# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support.
+# See Git Authentication for more information about setting up git authentication.
+#git-fetch-with-cli = true
+
+# This section is considered when running `cargo deny check licenses`
+# More documentation for the licenses section can be found here:
+# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
+[licenses]
+# The lint level for crates which do not have a detectable license
+unlicensed = "deny"
+# List of explicitly allowed licenses
+# See https://spdx.org/licenses/ for list of possible licenses
+# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
+allow = [
+ "MIT",
+ "MIT-0",
+ "Apache-2.0",
+ "BSD-3-Clause",
+ "MPL-2.0",
+ "Unicode-DFS-2016",
+ "CC0-1.0",
+]
+# List of explicitly disallowed licenses
+# See https://spdx.org/licenses/ for list of possible licenses
+# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
+deny = [
+ #"Nokia",
+]
+# Lint level for licenses considered copyleft
+copyleft = "warn"
+# Blanket approval or denial for OSI-approved or FSF Free/Libre licenses
+# * both - The license will be approved if it is both OSI-approved *AND* FSF
+# * either - The license will be approved if it is either OSI-approved *OR* FSF
+# * osi-only - The license will be approved if it is OSI-approved *AND NOT* FSF
+# * fsf-only - The license will be approved if it is FSF *AND NOT* OSI-approved
+# * neither - This predicate is ignored and the default lint level is used
+allow-osi-fsf-free = "neither"
+# Lint level used when no other predicates are matched
+# 1. License isn't in the allow or deny lists
+# 2. License isn't copyleft
+# 3. License isn't OSI/FSF, or allow-osi-fsf-free = "neither"
+default = "deny"
+# The confidence threshold for detecting a license from license text.
+# The higher the value, the closer the license text must be to the
+# canonical license text of a valid SPDX license file.
+# [possible values: any between 0.0 and 1.0].
+confidence-threshold = 0.8
+# Allow 1 or more licenses on a per-crate basis, so that particular licenses
+# aren't accepted for every possible crate as with the normal allow list
+exceptions = [
+ # Each entry is the crate and version constraint, and its specific allow
+ # list
+ #{ allow = ["Zlib"], name = "adler32", version = "*" },
+]
+
+# Some crates don't have (easily) machine readable licensing information,
+# adding a clarification entry for it allows you to manually specify the
+# licensing information
+#[[licenses.clarify]]
+# The name of the crate the clarification applies to
+#name = "ring"
+# The optional version constraint for the crate
+#version = "*"
+# The SPDX expression for the license requirements of the crate
+#expression = "MIT AND ISC AND OpenSSL"
+# One or more files in the crate's source used as the "source of truth" for
+# the license expression. If the contents match, the clarification will be used
+# when running the license check, otherwise the clarification will be ignored
+# and the crate will be checked normally, which may produce warnings or errors
+# depending on the rest of your configuration
+#license-files = [
+ # Each entry is a crate relative path, and the (opaque) hash of its contents
+ #{ path = "LICENSE", hash = 0xbd0eed23 }
+#]
+
+[licenses.private]
+# If true, ignores workspace crates that aren't published, or are only
+# published to private registries.
+# To see how to mark a crate as unpublished (to the official registry),
+# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field.
+ignore = false
+# One or more private registries that you might publish crates to. If a crate
+# is only published to private registries and `ignore` is true, the crate will
+# not have its license(s) checked
+registries = [
+ #"https://sekretz.com/registry
+]
+
+# This section is considered when running `cargo deny check bans`.
+# More documentation about the 'bans' section can be found here:
+# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html
+[bans]
+# Lint level for when multiple versions of the same crate are detected
+multiple-versions = "warn"
+# Lint level for when a crate version requirement is `*`
+wildcards = "allow"
+# The graph highlighting used when creating dotgraphs for crates
+# with multiple versions
+# * lowest-version - The path to the lowest versioned duplicate is highlighted
+# * simplest-path - The path to the version with the fewest edges is highlighted
+# * all - Both lowest-version and simplest-path are used
+highlight = "all"
+# The default lint level for `default` features for crates that are members of
+# the workspace that is being checked. This can be overridden by allowing/denying
+# `default` on a crate-by-crate basis if desired.
+workspace-default-features = "allow"
+# The default lint level for `default` features for external crates that are not
+# members of the workspace. This can be overridden by allowing/denying `default`
+# on a crate-by-crate basis if desired.
+external-default-features = "allow"
+# List of crates that are allowed. Use with care!
+allow = [
+ #{ name = "ansi_term", version = "=0.11.0" },
+]
+# List of crates to deny
+deny = [
+ # Each entry is the name of a crate and a version range. If version is
+ # not specified, all versions will be matched.
+ #{ name = "ansi_term", version = "=0.11.0" },
+ #
+ # Wrapper crates can optionally be specified to allow the crate when it
+ # is a direct dependency of the otherwise banned crate
+ #{ name = "ansi_term", version = "=0.11.0", wrappers = [] },
+]
+
+# List of features to allow/deny
+# Each entry is the name of a crate and a version range. If version is
+# not specified, all versions will be matched.
+#[[bans.features]]
+#name = "reqwest"
+# Features to not allow
+#deny = ["json"]
+# Features to allow
+#allow = [
+# "rustls",
+# "__rustls",
+# "__tls",
+# "hyper-rustls",
+# "rustls",
+# "rustls-pemfile",
+# "rustls-tls-webpki-roots",
+# "tokio-rustls",
+# "webpki-roots",
+#]
+# If true, the allowed features must exactly match the enabled feature set. If
+# this is set, there is no point in setting `deny`
+#exact = true
+
+# Certain crates/versions that will be skipped when doing duplicate detection.
+skip = [
+ #{ name = "ansi_term", version = "=0.11.0" },
+]
+# Similar to `skip`, this allows you to skip certain crates during duplicate
+# detection. Unlike `skip`, it also includes the entire tree of transitive
+# dependencies starting at the specified crate, up to a certain depth, which is
+# infinite by default.
+skip-tree = [
+ #{ name = "ansi_term", version = "=0.11.0", depth = 20 },
+]
+
+# This section is considered when running `cargo deny check sources`.
+# More documentation about the 'sources' section can be found here:
+# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html
+[sources]
+# Lint level for what to happen when a crate from a crate registry that is not
+# in the allow list is encountered
+unknown-registry = "warn"
+# Lint level for what to happen when a crate from a git repository that is not
+# in the allow list is encountered
+unknown-git = "warn"
+# List of URLs for allowed crate registries. Defaults to the crates.io index
+# if not specified. If it is specified but empty, no registries are allowed.
+allow-registry = ["https://github.com/rust-lang/crates.io-index"]
+# List of URLs for allowed Git repositories
+allow-git = []
+
+[sources.allow-org]
+# 1 or more github.com organizations to allow git sources for
+github = []
+# 1 or more gitlab.com organizations to allow git sources for
+gitlab = []
+# 1 or more bitbucket.org organizations to allow git sources for
+bitbucket = []
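
The `severity-threshold` buckets documented in the advisories section map CVSS scores to named levels. As an illustration only (this is not cargo-deny code), the documented ranges correspond to:

    // Illustration of the CVSS ranges documented for `severity-threshold`
    // above; not part of cargo-deny.
    #[derive(Debug, PartialEq)]
    enum Severity {
        None,     // CVSS 0.0
        Low,      // 0.1 - 3.9
        Medium,   // 4.0 - 6.9
        High,     // 7.0 - 8.9
        Critical, // 9.0 - 10.0
    }

    fn severity_for_cvss(score: f32) -> Severity {
        match score {
            s if s <= 0.0 => Severity::None,
            s if s < 4.0 => Severity::Low,
            s if s < 7.0 => Severity::Medium,
            s if s < 9.0 => Severity::High,
            _ => Severity::Critical,
        }
    }

    fn main() {
        assert_eq!(severity_for_cvss(5.3), Severity::Medium);
        assert_eq!(severity_for_cvss(9.8), Severity::Critical);
    }
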
diff --git a/src/tools/cargo/publish.py b/src/tools/cargo/publish.py
new file mode 100755
index 000000000..5ace18f72
--- /dev/null
+++ b/src/tools/cargo/publish.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+
+# This script is used to publish Cargo to crates.io.
+
+import os
+import re
+import subprocess
+import time
+import urllib.request
+from urllib.error import HTTPError
+
+
+TO_PUBLISH = [
+ 'crates/cargo-platform',
+ 'crates/cargo-util',
+ 'crates/crates-io',
+ '.',
+]
+
+
+def already_published(name, version):
+ try:
+ urllib.request.urlopen('https://crates.io/api/v1/crates/%s/%s/download' % (name, version))
+ except HTTPError as e:
+ if e.code == 404:
+ return False
+ raise
+ return True
+
+
+def maybe_publish(path):
+ content = open(os.path.join(path, 'Cargo.toml')).read()
+ name = re.search('^name = "([^"]+)"', content, re.M).group(1)
+ version = re.search('^version = "([^"]+)"', content, re.M).group(1)
+ if already_published(name, version):
+ print('%s %s is already published, skipping' % (name, version))
+ return False
+ subprocess.check_call(['cargo', 'publish', '--no-verify'], cwd=path)
+ return True
+
+
+def main():
+ print('Starting publish...')
+ for i, path in enumerate(TO_PUBLISH):
+ if maybe_publish(path):
+ if i < len(TO_PUBLISH)-1:
+ # Sleep to allow the index to update. This should probably
+ # check that the index is updated, or use a retry loop
+ # instead.
+ time.sleep(5)
+ print('Publish complete!')
+
+
+if __name__ == '__main__':
+ main()
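
For comparison, the `already_published` check boils down to probing the crates.io download URL and treating a 404 as "not yet published". A rough Rust equivalent, assuming the third-party `ureq` HTTP client purely for illustration (the actual release flow uses this Python script):

    // Sketch only: mirrors publish.py's `already_published`, assuming the
    // `ureq` crate as a stand-in HTTP client. Not part of the release tooling.
    fn already_published(name: &str, version: &str) -> Result<bool, ureq::Error> {
        let url = format!("https://crates.io/api/v1/crates/{name}/{version}/download");
        match ureq::get(&url).call() {
            Ok(_) => Ok(true),                             // download URL exists
            Err(ureq::Error::Status(404, _)) => Ok(false), // not published yet
            Err(e) => Err(e),                              // network/other errors propagate
        }
    }
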
diff --git a/src/tools/cargo/src/bin/cargo/cli.rs b/src/tools/cargo/src/bin/cargo/cli.rs
new file mode 100644
index 000000000..17120a656
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/cli.rs
@@ -0,0 +1,566 @@
+use anyhow::{anyhow, Context as _};
+use cargo::core::shell::Shell;
+use cargo::core::{features, CliUnstable};
+use cargo::{self, drop_print, drop_println, CliResult, Config};
+use clap::{Arg, ArgMatches};
+use itertools::Itertools;
+use std::collections::HashMap;
+use std::ffi::OsStr;
+use std::ffi::OsString;
+use std::fmt::Write;
+
+use super::commands;
+use super::list_commands;
+use crate::command_prelude::*;
+use cargo::core::features::HIDDEN;
+
+lazy_static::lazy_static! {
+ // Maps from commonly known external commands (not builtin to cargo) to their
+ // description, for the help page. Reserved for external subcommands that are
+ // core within the rust ecosystem (esp ones that might become internal in the future).
+ static ref KNOWN_EXTERNAL_COMMAND_DESCRIPTIONS: HashMap<&'static str, &'static str> = HashMap::from([
+ ("clippy", "Checks a package to catch common mistakes and improve your Rust code."),
+ ("fmt", "Formats all bin and lib files of the current crate using rustfmt."),
+ ]);
+}
+
+pub fn main(config: &mut LazyConfig) -> CliResult {
+ let args = cli().try_get_matches()?;
+
+ // Update the process-level notion of cwd
+ // This must be completed before config is initialized
+ assert_eq!(config.is_init(), false);
+ if let Some(new_cwd) = args.get_one::<std::path::PathBuf>("directory") {
+ // This is a temporary hack. This cannot access `Config`, so this is a bit messy.
+ // This does not properly parse `-Z` flags that appear after the subcommand.
+ // The error message is not as helpful as the standard one.
+ let nightly_features_allowed = matches!(&*features::channel(), "nightly" | "dev");
+ if !nightly_features_allowed
+ || (nightly_features_allowed
+ && !args
+ .get_many("unstable-features")
+ .map(|mut z| z.any(|value: &String| value == "unstable-options"))
+ .unwrap_or(false))
+ {
+ return Err(anyhow::format_err!(
+ "the `-C` flag is unstable, \
+ pass `-Z unstable-options` on the nightly channel to enable it"
+ )
+ .into());
+ }
+ std::env::set_current_dir(&new_cwd).context("could not change to requested directory")?;
+ }
+
+ // CAUTION: Be careful with using `config` until it is configured below.
+ // In general, try to avoid loading config values unless necessary (like
+ // the [alias] table).
+ let config = config.get_mut();
+
+ let (expanded_args, global_args) = expand_aliases(config, args, vec![])?;
+
+ if expanded_args
+ .get_one::<String>("unstable-features")
+ .map(String::as_str)
+ == Some("help")
+ {
+ let options = CliUnstable::help();
+ let non_hidden_options: Vec<(String, String)> = options
+ .iter()
+ .filter(|(_, help_message)| *help_message != HIDDEN)
+ .map(|(name, help)| (name.to_string(), help.to_string()))
+ .collect();
+ let longest_option = non_hidden_options
+ .iter()
+ .map(|(option_name, _)| option_name.len())
+ .max()
+ .unwrap_or(0);
+ let help_lines: Vec<String> = non_hidden_options
+ .iter()
+ .map(|(option_name, option_help_message)| {
+ let option_name_kebab_case = option_name.replace("_", "-");
+ let padding = " ".repeat(longest_option - option_name.len()); // safe to subtract
+ format!(
+ " -Z {}{} -- {}",
+ option_name_kebab_case, padding, option_help_message
+ )
+ })
+ .collect();
+ let joined = help_lines.join("\n");
+ drop_println!(
+ config,
+ "
+Available unstable (nightly-only) flags:
+
+{}
+
+Run with 'cargo -Z [FLAG] [COMMAND]'",
+ joined
+ );
+ if !config.nightly_features_allowed {
+ drop_println!(
+ config,
+ "\nUnstable flags are only available on the nightly channel \
+ of Cargo, but this is the `{}` channel.\n\
+ {}",
+ features::channel(),
+ features::SEE_CHANNELS
+ );
+ }
+ drop_println!(
+ config,
+ "\nSee https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \
+ for more information about these flags."
+ );
+ return Ok(());
+ }
+
+ let is_verbose = expanded_args.verbose() > 0;
+ if expanded_args.flag("version") {
+ let version = get_version_string(is_verbose);
+ drop_print!(config, "{}", version);
+ return Ok(());
+ }
+
+ if let Some(code) = expanded_args.get_one::<String>("explain") {
+ let mut process = config.load_global_rustc(None)?.process();
+ process.arg("--explain").arg(code).exec()?;
+ return Ok(());
+ }
+
+ if expanded_args.flag("list") {
+ drop_println!(config, "Installed Commands:");
+ for (name, command) in list_commands(config) {
+ let known_external_desc = KNOWN_EXTERNAL_COMMAND_DESCRIPTIONS.get(name.as_str());
+ match command {
+ CommandInfo::BuiltIn { about } => {
+ assert!(
+ known_external_desc.is_none(),
+ "KNOWN_EXTERNAL_COMMANDS shouldn't contain builtin \"{}\"",
+ name
+ );
+ let summary = about.unwrap_or_default();
+ let summary = summary.lines().next().unwrap_or(&summary); // display only the first line
+ drop_println!(config, " {:<20} {}", name, summary);
+ }
+ CommandInfo::External { path } => {
+ if let Some(desc) = known_external_desc {
+ drop_println!(config, " {:<20} {}", name, desc);
+ } else if is_verbose {
+ drop_println!(config, " {:<20} {}", name, path.display());
+ } else {
+ drop_println!(config, " {}", name);
+ }
+ }
+ CommandInfo::Alias { target } => {
+ drop_println!(
+ config,
+ " {:<20} alias: {}",
+ name,
+ target.iter().join(" ")
+ );
+ }
+ }
+ }
+ return Ok(());
+ }
+
+ let (cmd, subcommand_args) = match expanded_args.subcommand() {
+ Some((cmd, args)) => (cmd, args),
+ _ => {
+ // No subcommand provided.
+ cli().print_help()?;
+ return Ok(());
+ }
+ };
+ config_configure(config, &expanded_args, subcommand_args, global_args)?;
+ super::init_git(config);
+
+ execute_subcommand(config, cmd, subcommand_args)
+}
+
+pub fn get_version_string(is_verbose: bool) -> String {
+ let version = cargo::version();
+ let mut version_string = format!("cargo {}\n", version);
+ if is_verbose {
+ version_string.push_str(&format!("release: {}\n", version.version));
+ if let Some(ref ci) = version.commit_info {
+ version_string.push_str(&format!("commit-hash: {}\n", ci.commit_hash));
+ version_string.push_str(&format!("commit-date: {}\n", ci.commit_date));
+ }
+ writeln!(version_string, "host: {}", env!("RUST_HOST_TARGET")).unwrap();
+ add_libgit2(&mut version_string);
+ add_curl(&mut version_string);
+ add_ssl(&mut version_string);
+ writeln!(version_string, "os: {}", os_info::get()).unwrap();
+ }
+ version_string
+}
+
+fn add_libgit2(version_string: &mut String) {
+ let git2_v = git2::Version::get();
+ let lib_v = git2_v.libgit2_version();
+ let vendored = if git2_v.vendored() {
+ format!("vendored")
+ } else {
+ format!("system")
+ };
+ writeln!(
+ version_string,
+ "libgit2: {}.{}.{} (sys:{} {})",
+ lib_v.0,
+ lib_v.1,
+ lib_v.2,
+ git2_v.crate_version(),
+ vendored
+ )
+ .unwrap();
+}
+
+fn add_curl(version_string: &mut String) {
+ let curl_v = curl::Version::get();
+ let vendored = if curl_v.vendored() {
+ format!("vendored")
+ } else {
+ format!("system")
+ };
+ writeln!(
+ version_string,
+ "libcurl: {} (sys:{} {} ssl:{})",
+ curl_v.version(),
+ curl_sys::rust_crate_version(),
+ vendored,
+ curl_v.ssl_version().unwrap_or("none")
+ )
+ .unwrap();
+}
+
+fn add_ssl(version_string: &mut String) {
+ #[cfg(feature = "openssl")]
+ {
+ writeln!(version_string, "ssl: {}", openssl::version::version()).unwrap();
+ }
+ #[cfg(not(feature = "openssl"))]
+ {
+ let _ = version_string; // Silence unused warning.
+ }
+}
+
+/// Expands aliases recursively to collect all the command line arguments.
+///
+/// [`GlobalArgs`] need to be extracted before expanding aliases because the
+/// clap code for extracting a subcommand discards global options
+/// (appearing before the subcommand).
+fn expand_aliases(
+ config: &mut Config,
+ args: ArgMatches,
+ mut already_expanded: Vec<String>,
+) -> Result<(ArgMatches, GlobalArgs), CliError> {
+ if let Some((cmd, args)) = args.subcommand() {
+ let exec = commands::builtin_exec(cmd);
+ let aliased_cmd = super::aliased_command(config, cmd);
+
+ match (exec, aliased_cmd) {
+ (Some(_), Ok(Some(_))) => {
+ // User alias conflicts with a built-in subcommand
+ config.shell().warn(format!(
+ "user-defined alias `{}` is ignored, because it is shadowed by a built-in command",
+ cmd,
+ ))?;
+ }
+ (Some(_), Ok(None) | Err(_)) => {
+ // Here we ignore errors from alias lookup because the built-in command
+ // takes precedence, so the alias is not relevant in this context.
+
+ if let Some(values) = args.get_many::<OsString>("") {
+ // Command is built-in and is not conflicting with alias, but contains ignored values.
+ return Err(anyhow::format_err!(
+ "\
+trailing arguments after built-in command `{}` are unsupported: `{}`
+
+To pass the arguments to the subcommand, remove `--`",
+ cmd,
+ values.map(|s| s.to_string_lossy()).join(" "),
+ )
+ .into());
+ }
+ }
+ (None, Ok(None)) => {}
+ (None, Ok(Some(alias))) => {
+ // Check if a user-defined alias is shadowing an external subcommand
+ // (binary of the form `cargo-<subcommand>`)
+ // Currently this is only a warning, but after a transition period this will become
+ // a hard error.
+ if super::builtin_aliases_execs(cmd).is_none() {
+ if let Some(path) = super::find_external_subcommand(config, cmd) {
+ config.shell().warn(format!(
+ "\
+user-defined alias `{}` is shadowing an external subcommand found at: `{}`
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #10049 <https://github.com/rust-lang/cargo/issues/10049>.",
+ cmd,
+ path.display(),
+ ))?;
+ }
+ }
+
+ let mut alias = alias
+ .into_iter()
+ .map(|s| OsString::from(s))
+ .collect::<Vec<_>>();
+ alias.extend(args.get_many::<OsString>("").unwrap_or_default().cloned());
+ // new_args strips out everything before the subcommand, so
+ // capture those global options now.
+ // Note that an alias to an external command will not receive
+ // these arguments. That may be confusing, but such is life.
+ let global_args = GlobalArgs::new(args);
+ let new_args = cli().no_binary_name(true).try_get_matches_from(alias)?;
+
+ let new_cmd = new_args.subcommand_name().expect("subcommand is required");
+ already_expanded.push(cmd.to_string());
+ if already_expanded.contains(&new_cmd.to_string()) {
+ // Bail out with an error if the aliases are corecursive / unresolvable
+ return Err(anyhow!(
+ "alias {} has unresolvable recursive definition: {} -> {}",
+ already_expanded[0],
+ already_expanded.join(" -> "),
+ new_cmd,
+ )
+ .into());
+ }
+
+ let (expanded_args, _) = expand_aliases(config, new_args, already_expanded)?;
+ return Ok((expanded_args, global_args));
+ }
+ (None, Err(e)) => return Err(e.into()),
+ }
+ };
+
+ Ok((args, GlobalArgs::default()))
+}
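
The `already_expanded` vector above is what prevents alias expansion from looping forever: every expanded name is recorded, and expansion aborts as soon as a name repeats. A standalone sketch of that check (hypothetical helper, not cargo code):

    // Standalone sketch of the cycle detection used by `expand_aliases`;
    // `chain` plays the role of `already_expanded`. Hypothetical helper.
    fn check_alias_step(chain: &mut Vec<String>, next: &str) -> Result<(), String> {
        if chain.iter().any(|seen| seen == next) {
            return Err(format!(
                "alias {} has unresolvable recursive definition: {} -> {}",
                chain[0],
                chain.join(" -> "),
                next
            ));
        }
        chain.push(next.to_string());
        Ok(())
    }

    fn main() {
        // `b` expanding to `build` is fine; `build` expanding back to `b` is not.
        let mut chain = vec!["b".to_string()];
        assert!(check_alias_step(&mut chain, "build").is_ok());
        assert!(check_alias_step(&mut chain, "b").is_err());
    }
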
+
+fn config_configure(
+ config: &mut Config,
+ args: &ArgMatches,
+ subcommand_args: &ArgMatches,
+ global_args: GlobalArgs,
+) -> CliResult {
+ let arg_target_dir = &subcommand_args.value_of_path("target-dir", config);
+ let verbose = global_args.verbose + args.verbose();
+ // quiet is unusual because it is redefined in some subcommands in order
+ // to provide custom help text.
+ let quiet = args.flag("quiet") || subcommand_args.flag("quiet") || global_args.quiet;
+ let global_color = global_args.color; // Extract so it can take reference.
+ let color = args
+ .get_one::<String>("color")
+ .map(String::as_str)
+ .or_else(|| global_color.as_deref());
+ let frozen = args.flag("frozen") || global_args.frozen;
+ let locked = args.flag("locked") || global_args.locked;
+ let offline = args.flag("offline") || global_args.offline;
+ let mut unstable_flags = global_args.unstable_flags;
+ if let Some(values) = args.get_many::<String>("unstable-features") {
+ unstable_flags.extend(values.cloned());
+ }
+ let mut config_args = global_args.config_args;
+ if let Some(values) = args.get_many::<String>("config") {
+ config_args.extend(values.cloned());
+ }
+ config.configure(
+ verbose,
+ quiet,
+ color,
+ frozen,
+ locked,
+ offline,
+ arg_target_dir,
+ &unstable_flags,
+ &config_args,
+ )?;
+ Ok(())
+}
+
+fn execute_subcommand(config: &mut Config, cmd: &str, subcommand_args: &ArgMatches) -> CliResult {
+ if let Some(exec) = commands::builtin_exec(cmd) {
+ return exec(config, subcommand_args);
+ }
+
+ let mut ext_args: Vec<&OsStr> = vec![OsStr::new(cmd)];
+ ext_args.extend(
+ subcommand_args
+ .get_many::<OsString>("")
+ .unwrap_or_default()
+ .map(OsString::as_os_str),
+ );
+ super::execute_external_subcommand(config, cmd, &ext_args)
+}
+
+#[derive(Default)]
+struct GlobalArgs {
+ verbose: u32,
+ quiet: bool,
+ color: Option<String>,
+ frozen: bool,
+ locked: bool,
+ offline: bool,
+ unstable_flags: Vec<String>,
+ config_args: Vec<String>,
+}
+
+impl GlobalArgs {
+ fn new(args: &ArgMatches) -> GlobalArgs {
+ GlobalArgs {
+ verbose: args.verbose(),
+ quiet: args.flag("quiet"),
+ color: args.get_one::<String>("color").cloned(),
+ frozen: args.flag("frozen"),
+ locked: args.flag("locked"),
+ offline: args.flag("offline"),
+ unstable_flags: args
+ .get_many::<String>("unstable-features")
+ .unwrap_or_default()
+ .cloned()
+ .collect(),
+ config_args: args
+ .get_many::<String>("config")
+ .unwrap_or_default()
+ .cloned()
+ .collect(),
+ }
+ }
+}
+
+pub fn cli() -> Command {
+ // ALLOWED: `RUSTUP_HOME` should only be read from process env, otherwise
+ // other tools may point to executables from incompatible distributions.
+ #[allow(clippy::disallowed_methods)]
+ let is_rustup = std::env::var_os("RUSTUP_HOME").is_some();
+ let usage = if is_rustup {
+ "cargo [+toolchain] [OPTIONS] [COMMAND]"
+ } else {
+ "cargo [OPTIONS] [COMMAND]"
+ };
+ Command::new("cargo")
+ .allow_external_subcommands(true)
+ // Doesn't mix well with our list of common cargo commands. See clap-rs/clap#3108 for
+ // opening clap up to allow us to style our help template
+ .disable_colored_help(true)
+ // Provide a custom help subcommand for calling into man pages
+ .disable_help_subcommand(true)
+ .override_usage(usage)
+ .help_template(
+ "\
+Rust's package manager
+
+Usage: {usage}
+
+Options:
+{options}
+
+Some common cargo commands are (see all commands with --list):
+ build, b Compile the current package
+ check, c Analyze the current package and report errors, but don't build object files
+ clean Remove the target directory
+ doc, d Build this package's and its dependencies' documentation
+ new Create a new cargo package
+ init Create a new cargo package in an existing directory
+ add Add dependencies to a manifest file
+ remove Remove dependencies from a manifest file
+ run, r Run a binary or example of the local package
+ test, t Run the tests
+ bench Run the benchmarks
+ update Update dependencies listed in Cargo.lock
+ search Search registry for crates
+ publish Package and upload this package to the registry
+ install Install a Rust binary. Default location is $HOME/.cargo/bin
+ uninstall Uninstall a Rust binary
+
+See 'cargo help <command>' for more information on a specific command.\n",
+ )
+ .arg(flag("version", "Print version info and exit").short('V'))
+ .arg(flag("list", "List installed commands"))
+ .arg(opt("explain", "Run `rustc --explain CODE`").value_name("CODE"))
+ .arg(
+ opt(
+ "verbose",
+ "Use verbose output (-vv very verbose/build.rs output)",
+ )
+ .short('v')
+ .action(ArgAction::Count)
+ .global(true),
+ )
+ .arg_quiet()
+ .arg(
+ opt("color", "Coloring: auto, always, never")
+ .value_name("WHEN")
+ .global(true),
+ )
+ .arg(
+ Arg::new("directory")
+ .help("Change to DIRECTORY before doing anything (nightly-only)")
+ .short('C')
+ .value_name("DIRECTORY")
+ .value_hint(clap::ValueHint::DirPath)
+ .value_parser(clap::builder::ValueParser::path_buf()),
+ )
+ .arg(flag("frozen", "Require Cargo.lock and cache are up to date").global(true))
+ .arg(flag("locked", "Require Cargo.lock is up to date").global(true))
+ .arg(flag("offline", "Run without accessing the network").global(true))
+ .arg(multi_opt("config", "KEY=VALUE", "Override a configuration value").global(true))
+ .arg(
+ Arg::new("unstable-features")
+ .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details")
+ .short('Z')
+ .value_name("FLAG")
+ .action(ArgAction::Append)
+ .global(true),
+ )
+ .subcommands(commands::builtin())
+}
+
+/// Delay loading [`Config`] until access.
+///
+/// In the common path, the [`Config`] is dependent on CLI parsing and shouldn't be loaded until
+/// after that is done, but some other paths (like fix or earlier errors) might need access to it,
+/// so this provides a way to share the instance and the implementation across these different
+/// accesses.
+pub struct LazyConfig {
+ config: Option<Config>,
+}
+
+impl LazyConfig {
+ pub fn new() -> Self {
+ Self { config: None }
+ }
+
+ /// Check whether the config is loaded
+ ///
+ /// This is useful for asserts in case the environment needs to be set up before loading
+ pub fn is_init(&self) -> bool {
+ self.config.is_some()
+ }
+
+ /// Get the config, loading it if needed
+ ///
+ /// On error, the process is terminated
+ pub fn get(&mut self) -> &Config {
+ self.get_mut()
+ }
+
+ /// Get the config, loading it if needed
+ ///
+ /// On error, the process is terminated
+ pub fn get_mut(&mut self) -> &mut Config {
+ self.config.get_or_insert_with(|| match Config::default() {
+ Ok(cfg) => cfg,
+ Err(e) => {
+ let mut shell = Shell::new();
+ cargo::exit_with_error(e.into(), &mut shell)
+ }
+ })
+ }
+}
+
+#[test]
+fn verify_cli() {
+ cli().debug_assert();
+}
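
`LazyConfig` is an instance of the usual lazy-initialization pattern over `Option<T>`: the value is built on first access and reused afterwards, and `is_init` lets callers assert about ordering. A minimal generic sketch of the same idea (illustration only, not cargo code):

    // Generic sketch of the lazy-initialization pattern behind `LazyConfig`.
    struct Lazy<T> {
        value: Option<T>,
    }

    impl<T> Lazy<T> {
        fn new() -> Self {
            Self { value: None }
        }

        fn is_init(&self) -> bool {
            self.value.is_some()
        }

        fn get_mut(&mut self, init: impl FnOnce() -> T) -> &mut T {
            // `init` only runs if the value has not been created yet.
            self.value.get_or_insert_with(init)
        }
    }

    fn main() {
        let mut cfg = Lazy::new();
        assert!(!cfg.is_init());
        cfg.get_mut(|| String::from("loaded")).push_str(" once");
        assert!(cfg.is_init());
        assert_eq!(cfg.get_mut(|| unreachable!()).as_str(), "loaded once");
    }
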
diff --git a/src/tools/cargo/src/bin/cargo/commands/add.rs b/src/tools/cargo/src/bin/cargo/commands/add.rs
new file mode 100644
index 000000000..39f0e189e
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/add.rs
@@ -0,0 +1,362 @@
+use cargo::sources::CRATES_IO_REGISTRY;
+use cargo::util::print_available_packages;
+use indexmap::IndexMap;
+use indexmap::IndexSet;
+
+use cargo::core::dependency::DepKind;
+use cargo::core::FeatureValue;
+use cargo::ops::cargo_add::add;
+use cargo::ops::cargo_add::AddOptions;
+use cargo::ops::cargo_add::DepOp;
+use cargo::ops::resolve_ws;
+use cargo::util::command_prelude::*;
+use cargo::util::interning::InternedString;
+use cargo::util::toml_mut::manifest::DepTable;
+use cargo::CargoResult;
+
+pub fn cli() -> Command {
+ clap::Command::new("add")
+ .about("Add dependencies to a Cargo.toml manifest file")
+ .override_usage(
+ "\
+ cargo add [OPTIONS] <DEP>[@<VERSION>] ...
+ cargo add [OPTIONS] --path <PATH> ...
+ cargo add [OPTIONS] --git <URL> ..."
+ )
+ .after_help("Run `cargo help add` for more detailed information.\n")
+ .group(clap::ArgGroup::new("selected").multiple(true).required(true))
+ .args([
+ clap::Arg::new("crates")
+ .value_name("DEP_ID")
+ .num_args(0..)
+ .help("Reference to a package to add as a dependency")
+ .long_help(
+ "Reference to a package to add as a dependency
+
+You can reference a package by:
+- `<name>`, like `cargo add serde` (latest version will be used)
+- `<name>@<version-req>`, like `cargo add serde@1` or `cargo add serde@=1.0.38`"
+ )
+ .group("selected"),
+ flag("no-default-features",
+ "Disable the default features"),
+ flag("default-features",
+ "Re-enable the default features")
+ .overrides_with("no-default-features"),
+ clap::Arg::new("features")
+ .short('F')
+ .long("features")
+ .value_name("FEATURES")
+ .action(ArgAction::Append)
+ .help("Space or comma separated list of features to activate"),
+ flag("optional",
+ "Mark the dependency as optional")
+ .long_help("Mark the dependency as optional
+
+The package name will be exposed as a feature of your crate.")
+ .conflicts_with("dev"),
+ flag("no-optional",
+ "Mark the dependency as required")
+ .long_help("Mark the dependency as required
+
+The package will be removed from your features.")
+ .conflicts_with("dev")
+ .overrides_with("optional"),
+ clap::Arg::new("rename")
+ .long("rename")
+ .action(ArgAction::Set)
+ .value_name("NAME")
+ .help("Rename the dependency")
+ .long_help("Rename the dependency
+
+Example uses:
+- Depending on multiple versions of a crate
+- Depend on crates with the same name from different registries"),
+ ])
+ .arg_manifest_path()
+ .arg_package("Package to modify")
+ .arg_quiet()
+ .arg_dry_run("Don't actually write the manifest")
+ .next_help_heading("Source")
+ .args([
+ clap::Arg::new("path")
+ .long("path")
+ .action(ArgAction::Set)
+ .value_name("PATH")
+ .help("Filesystem path to local crate to add")
+ .group("selected")
+ .conflicts_with("git"),
+ clap::Arg::new("git")
+ .long("git")
+ .action(ArgAction::Set)
+ .value_name("URI")
+ .help("Git repository location")
+ .long_help("Git repository location
+
+Without any other information, cargo will use the latest commit on the main branch.")
+ .group("selected"),
+ clap::Arg::new("branch")
+ .long("branch")
+ .action(ArgAction::Set)
+ .value_name("BRANCH")
+ .help("Git branch to download the crate from")
+ .requires("git")
+ .group("git-ref"),
+ clap::Arg::new("tag")
+ .long("tag")
+ .action(ArgAction::Set)
+ .value_name("TAG")
+ .help("Git tag to download the crate from")
+ .requires("git")
+ .group("git-ref"),
+ clap::Arg::new("rev")
+ .long("rev")
+ .action(ArgAction::Set)
+ .value_name("REV")
+ .help("Git reference to download the crate from")
+ .long_help("Git reference to download the crate from
+
+This is the catch-all, handling hashes to named references in remote repositories.")
+ .requires("git")
+ .group("git-ref"),
+ clap::Arg::new("registry")
+ .long("registry")
+ .action(ArgAction::Set)
+ .value_name("NAME")
+ .help("Package registry for this dependency"),
+ ])
+ .next_help_heading("Section")
+ .args([
+ flag("dev",
+ "Add as development dependency")
+ .long_help("Add as development dependency
+
+Dev-dependencies are not used when compiling a package for building, but are used for compiling tests, examples, and benchmarks.
+
+These dependencies are not propagated to other packages which depend on this package.")
+ .group("section"),
+ flag("build",
+ "Add as build dependency")
+ .long_help("Add as build dependency
+
+Build-dependencies are the only dependencies available for use by build scripts (`build.rs` files).")
+ .group("section"),
+ clap::Arg::new("target")
+ .long("target")
+ .action(ArgAction::Set)
+ .value_name("TARGET")
+ .value_parser(clap::builder::NonEmptyStringValueParser::new())
+ .help("Add as dependency to the given target platform")
+ ])
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let dry_run = args.dry_run();
+ let section = parse_section(args);
+
+ let ws = args.workspace(config)?;
+
+ if args.is_present_with_zero_values("package") {
+ print_available_packages(&ws)?;
+ }
+
+ let packages = args.packages_from_flags()?;
+ let packages = packages.get_packages(&ws)?;
+ let spec = match packages.len() {
+ 0 => {
+ return Err(CliError::new(
+ anyhow::format_err!(
+ "no packages selected to modify. Please specify one with `-p <PKGID>`"
+ ),
+ 101,
+ ));
+ }
+ 1 => packages[0],
+ _ => {
+ let names = packages.iter().map(|p| p.name()).collect::<Vec<_>>();
+ return Err(CliError::new(
+ anyhow::format_err!(
+ "`cargo add` could not determine which package to modify. \
+ Use the `--package` option to specify a package. \n\
+ available packages: {}",
+ names.join(", ")
+ ),
+ 101,
+ ));
+ }
+ };
+
+ let dependencies = parse_dependencies(config, args)?;
+
+ let options = AddOptions {
+ config,
+ spec,
+ dependencies,
+ section,
+ dry_run,
+ };
+ add(&ws, &options)?;
+
+ if !dry_run {
+ // Reload the workspace since we've changed dependencies
+ let ws = args.workspace(config)?;
+ resolve_ws(&ws)?;
+ }
+
+ Ok(())
+}
+
+fn parse_dependencies(config: &Config, matches: &ArgMatches) -> CargoResult<Vec<DepOp>> {
+ let path = matches.get_one::<String>("path");
+ let git = matches.get_one::<String>("git");
+ let branch = matches.get_one::<String>("branch");
+ let rev = matches.get_one::<String>("rev");
+ let tag = matches.get_one::<String>("tag");
+ let rename = matches.get_one::<String>("rename");
+ let registry = match matches.registry(config)? {
+ Some(reg) if reg == CRATES_IO_REGISTRY => None,
+ reg => reg,
+ };
+ let default_features = default_features(matches);
+ let optional = optional(matches);
+
+ let mut crates = matches
+ .get_many::<String>("crates")
+ .into_iter()
+ .flatten()
+ .map(|c| (Some(c.clone()), None))
+ .collect::<IndexMap<_, _>>();
+ let mut infer_crate_name = false;
+ if crates.is_empty() {
+ if path.is_some() || git.is_some() {
+ crates.insert(None, None);
+ infer_crate_name = true;
+ } else {
+ unreachable!("clap should ensure we have some source selected");
+ }
+ }
+ for feature in matches
+ .get_many::<String>("features")
+ .into_iter()
+ .flatten()
+ .map(String::as_str)
+ .flat_map(parse_feature)
+ {
+ let parsed_value = FeatureValue::new(InternedString::new(feature));
+ match parsed_value {
+ FeatureValue::Feature(_) => {
+ if 1 < crates.len() {
+ let candidates = crates
+ .keys()
+ .map(|c| {
+ format!(
+ "`{}/{}`",
+ c.as_deref().expect("only none when there is 1"),
+ feature
+ )
+ })
+ .collect::<Vec<_>>();
+ anyhow::bail!("feature `{feature}` must be qualified by the dependency it's being activated for, like {}", candidates.join(", "));
+ }
+ crates
+ .first_mut()
+ .expect("always at least one crate")
+ .1
+ .get_or_insert_with(IndexSet::new)
+ .insert(feature.to_owned());
+ }
+ FeatureValue::Dep { .. } => {
+ anyhow::bail!("feature `{feature}` is not allowed to use explicit `dep:` syntax",)
+ }
+ FeatureValue::DepFeature {
+ dep_name,
+ dep_feature,
+ ..
+ } => {
+ if infer_crate_name {
+ anyhow::bail!("`{feature}` is unsupported when inferring the crate name, use `{dep_feature}`");
+ }
+ if dep_feature.contains('/') {
+ anyhow::bail!("multiple slashes in feature `{feature}` is not allowed");
+ }
+ crates.get_mut(&Some(dep_name.as_str().to_owned())).ok_or_else(|| {
+ anyhow::format_err!("feature `{dep_feature}` activated for crate `{dep_name}` but the crate wasn't specified")
+ })?
+ .get_or_insert_with(IndexSet::new)
+ .insert(dep_feature.as_str().to_owned());
+ }
+ }
+ }
+
+ let mut deps: Vec<DepOp> = Vec::new();
+ for (crate_spec, features) in crates {
+ let dep = DepOp {
+ crate_spec,
+ rename: rename.map(String::from),
+ features,
+ default_features,
+ optional,
+ registry: registry.clone(),
+ path: path.map(String::from),
+ git: git.map(String::from),
+ branch: branch.map(String::from),
+ rev: rev.map(String::from),
+ tag: tag.map(String::from),
+ };
+ deps.push(dep);
+ }
+
+ if deps.len() > 1 && rename.is_some() {
+ anyhow::bail!("cannot specify multiple crates with `--rename`");
+ }
+
+ Ok(deps)
+}
+
+fn default_features(matches: &ArgMatches) -> Option<bool> {
+ resolve_bool_arg(
+ matches.flag("default-features"),
+ matches.flag("no-default-features"),
+ )
+}
+
+fn optional(matches: &ArgMatches) -> Option<bool> {
+ resolve_bool_arg(matches.flag("optional"), matches.flag("no-optional"))
+}
+
+fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
+ match (yes, no) {
+ (true, false) => Some(true),
+ (false, true) => Some(false),
+ (false, false) => None,
+ (_, _) => unreachable!("clap should make this impossible"),
+ }
+}
+
+fn parse_section(matches: &ArgMatches) -> DepTable {
+ let kind = if matches.flag("dev") {
+ DepKind::Development
+ } else if matches.flag("build") {
+ DepKind::Build
+ } else {
+ DepKind::Normal
+ };
+
+ let mut table = DepTable::new().set_kind(kind);
+
+ if let Some(target) = matches.get_one::<String>("target") {
+ assert!(!target.is_empty(), "Target specification may not be empty");
+ table = table.set_target(target);
+ }
+
+ table
+}
+
+/// Split feature flag list
+fn parse_feature(feature: &str) -> impl Iterator<Item = &str> {
+ // Not re-using `CliFeatures` because it uses a BTreeSet and loses the user's ordering
+ feature
+ .split_whitespace()
+ .flat_map(|s| s.split(','))
+ .filter(|s| !s.is_empty())
+}
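
As a quick illustration of `parse_feature`'s behavior, whitespace- and comma-separated feature lists flatten to the same ordered sequence, with empty entries dropped (the helper is copied verbatim here so the example is self-contained):

    // Self-contained copy of `parse_feature` above, plus a small usage example.
    fn parse_feature(feature: &str) -> impl Iterator<Item = &str> {
        feature
            .split_whitespace()
            .flat_map(|s| s.split(','))
            .filter(|s| !s.is_empty())
    }

    fn main() {
        let parsed: Vec<_> = parse_feature("derive,, rc serde_json/raw_value").collect();
        assert_eq!(parsed, ["derive", "rc", "serde_json/raw_value"]);
    }
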
diff --git a/src/tools/cargo/src/bin/cargo/commands/bench.rs b/src/tools/cargo/src/bin/cargo/commands/bench.rs
new file mode 100644
index 000000000..3739d880e
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/bench.rs
@@ -0,0 +1,77 @@
+use crate::command_prelude::*;
+use cargo::ops::{self, TestOptions};
+
+pub fn cli() -> Command {
+ subcommand("bench")
+ .about("Execute all benchmarks of a local package")
+ .arg_quiet()
+ .arg(
+ Arg::new("BENCHNAME")
+ .action(ArgAction::Set)
+ .help("If specified, only run benches containing this string in their names"),
+ )
+ .arg(
+ Arg::new("args")
+ .help("Arguments for the bench binary")
+ .num_args(0..)
+ .last(true),
+ )
+ .arg_targets_all(
+ "Benchmark only this package's library",
+ "Benchmark only the specified binary",
+ "Benchmark all binaries",
+ "Benchmark only the specified example",
+ "Benchmark all examples",
+ "Benchmark only the specified test target",
+ "Benchmark all tests",
+ "Benchmark only the specified bench target",
+ "Benchmark all benches",
+ "Benchmark all targets",
+ )
+ .arg(flag("no-run", "Compile, but don't run benchmarks"))
+ .arg_package_spec(
+ "Package to run benchmarks for",
+ "Benchmark all packages in the workspace",
+ "Exclude packages from the benchmark",
+ )
+ .arg_jobs()
+ .arg_profile("Build artifacts with the specified profile")
+ .arg_features()
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg_manifest_path()
+ .arg_ignore_rust_version()
+ .arg_message_format()
+ .arg(flag(
+ "no-fail-fast",
+ "Run all benchmarks regardless of failure",
+ ))
+ .arg_unit_graph()
+ .arg_timings()
+ .after_help("Run `cargo help bench` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+ let mut compile_opts = args.compile_options(
+ config,
+ CompileMode::Bench,
+ Some(&ws),
+ ProfileChecking::Custom,
+ )?;
+
+ compile_opts.build_config.requested_profile =
+ args.get_profile_name(config, "bench", ProfileChecking::Custom)?;
+
+ let ops = TestOptions {
+ no_run: args.flag("no-run"),
+ no_fail_fast: args.flag("no-fail-fast"),
+ compile_opts,
+ };
+
+ let bench_args = args.get_one::<String>("BENCHNAME").into_iter();
+ let bench_args = bench_args.chain(args.get_many::<String>("args").unwrap_or_default());
+ let bench_args = bench_args.map(String::as_str).collect::<Vec<_>>();
+
+ ops::run_benches(&ws, &ops, &bench_args)
+}
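
The `exec` above builds the bench binary's argument list by chaining the optional BENCHNAME filter with everything after `--`. A small sketch of that chaining (not cargo code):

    // Sketch of how `cargo bench <BENCHNAME> -- <args>` assembles the
    // arguments handed to the bench binaries; not cargo code.
    fn assemble_bench_args<'a>(benchname: Option<&'a str>, trailing: &[&'a str]) -> Vec<&'a str> {
        benchname.into_iter().chain(trailing.iter().copied()).collect()
    }

    fn main() {
        let args = assemble_bench_args(Some("parse"), &["--exact", "--nocapture"]);
        assert_eq!(args, ["parse", "--exact", "--nocapture"]);
    }
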
diff --git a/src/tools/cargo/src/bin/cargo/commands/build.rs b/src/tools/cargo/src/bin/cargo/commands/build.rs
new file mode 100644
index 000000000..a78da38a4
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/build.rs
@@ -0,0 +1,73 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> Command {
+ subcommand("build")
+ // subcommand aliases are handled in aliased_command()
+ // .alias("b")
+ .about("Compile a local package and all of its dependencies")
+ .arg_quiet()
+ .arg_package_spec(
+ "Package to build (see `cargo help pkgid`)",
+ "Build all packages in the workspace",
+ "Exclude packages from the build",
+ )
+ .arg_jobs()
+ .arg_targets_all(
+ "Build only this package's library",
+ "Build only the specified binary",
+ "Build all binaries",
+ "Build only the specified example",
+ "Build all examples",
+ "Build only the specified test target",
+ "Build all tests",
+ "Build only the specified bench target",
+ "Build all benches",
+ "Build all targets",
+ )
+ .arg_release("Build artifacts in release mode, with optimizations")
+ .arg_profile("Build artifacts with the specified profile")
+ .arg_features()
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg(
+ opt(
+ "out-dir",
+ "Copy final artifacts to this directory (unstable)",
+ )
+ .value_name("PATH"),
+ )
+ .arg_manifest_path()
+ .arg_ignore_rust_version()
+ .arg_message_format()
+ .arg_build_plan()
+ .arg_unit_graph()
+ .arg_future_incompat_report()
+ .arg_timings()
+ .after_help("Run `cargo help build` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+ let mut compile_opts = args.compile_options(
+ config,
+ CompileMode::Build,
+ Some(&ws),
+ ProfileChecking::Custom,
+ )?;
+
+ if let Some(out_dir) = args.value_of_path("out-dir", config) {
+ compile_opts.build_config.export_dir = Some(out_dir);
+ } else if let Some(out_dir) = config.build_config()?.out_dir.as_ref() {
+ let out_dir = out_dir.resolve_path(config);
+ compile_opts.build_config.export_dir = Some(out_dir);
+ }
+ if compile_opts.build_config.export_dir.is_some() {
+ config
+ .cli_unstable()
+ .fail_if_stable_opt("--out-dir", 6790)?;
+ }
+ ops::compile(&ws, &compile_opts)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/check.rs b/src/tools/cargo/src/bin/cargo/commands/check.rs
new file mode 100644
index 000000000..c9f6e0b38
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/check.rs
@@ -0,0 +1,56 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> Command {
+ subcommand("check")
+ // subcommand aliases are handled in aliased_command()
+ // .alias("c")
+ .about("Check a local package and all of its dependencies for errors")
+ .arg_quiet()
+ .arg_package_spec(
+ "Package(s) to check",
+ "Check all packages in the workspace",
+ "Exclude packages from the check",
+ )
+ .arg_jobs()
+ .arg_targets_all(
+ "Check only this package's library",
+ "Check only the specified binary",
+ "Check all binaries",
+ "Check only the specified example",
+ "Check all examples",
+ "Check only the specified test target",
+ "Check all tests",
+ "Check only the specified bench target",
+ "Check all benches",
+ "Check all targets",
+ )
+ .arg_release("Check artifacts in release mode, with optimizations")
+ .arg_profile("Check artifacts with the specified profile")
+ .arg_features()
+ .arg_target_triple("Check for the target triple")
+ .arg_target_dir()
+ .arg_manifest_path()
+ .arg_ignore_rust_version()
+ .arg_message_format()
+ .arg_unit_graph()
+ .arg_future_incompat_report()
+ .arg_timings()
+ .after_help("Run `cargo help check` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+ // This is a legacy behavior that causes `cargo check` to pass `--test`.
+ let test = matches!(
+ args.get_one::<String>("profile").map(String::as_str),
+ Some("test")
+ );
+ let mode = CompileMode::Check { test };
+ let compile_opts =
+ args.compile_options(config, mode, Some(&ws), ProfileChecking::LegacyTestOnly)?;
+
+ ops::compile(&ws, &compile_opts)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/clean.rs b/src/tools/cargo/src/bin/cargo/commands/clean.rs
new file mode 100644
index 000000000..162461c47
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/clean.rs
@@ -0,0 +1,37 @@
+use crate::command_prelude::*;
+
+use cargo::ops::{self, CleanOptions};
+use cargo::util::print_available_packages;
+
+pub fn cli() -> Command {
+ subcommand("clean")
+ .about("Remove artifacts that cargo has generated in the past")
+ .arg_quiet()
+ .arg_package_spec_simple("Package to clean artifacts for")
+ .arg_manifest_path()
+ .arg_target_triple("Target triple to clean output for")
+ .arg_target_dir()
+ .arg_release("Whether or not to clean release artifacts")
+ .arg_profile("Clean artifacts of the specified profile")
+ .arg_doc("Whether or not to clean just the documentation directory")
+ .after_help("Run `cargo help clean` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+
+ if args.is_present_with_zero_values("package") {
+ print_available_packages(&ws)?;
+ }
+
+ let opts = CleanOptions {
+ config,
+ spec: values(args, "package"),
+ targets: args.targets(),
+ requested_profile: args.get_profile_name(config, "dev", ProfileChecking::Custom)?,
+ profile_specified: args.contains_id("profile") || args.flag("release"),
+ doc: args.flag("doc"),
+ };
+ ops::clean(&ws, &opts)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/config.rs b/src/tools/cargo/src/bin/cargo/commands/config.rs
new file mode 100644
index 000000000..84c5e9209
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/config.rs
@@ -0,0 +1,55 @@
+use crate::command_prelude::*;
+use cargo::ops::cargo_config;
+
+pub fn cli() -> Command {
+ subcommand("config")
+ .about("Inspect configuration values")
+ .subcommand_required(true)
+ .arg_required_else_help(true)
+ .subcommand(
+ subcommand("get")
+ .arg(
+ Arg::new("key")
+ .action(ArgAction::Set)
+ .help("The config key to display"),
+ )
+ .arg(
+ opt("format", "Display format")
+ .value_parser(cargo_config::ConfigFormat::POSSIBLE_VALUES)
+ .default_value("toml"),
+ )
+ .arg(flag(
+ "show-origin",
+ "Display where the config value is defined",
+ ))
+ .arg(
+ opt("merged", "Whether or not to merge config values")
+ .value_parser(["yes", "no"])
+ .default_value("yes"),
+ ),
+ )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ config
+ .cli_unstable()
+ .fail_if_stable_command(config, "config", 9301)?;
+ match args.subcommand() {
+ Some(("get", args)) => {
+ let opts = cargo_config::GetOptions {
+ key: args.get_one::<String>("key").map(String::as_str),
+ format: args.get_one::<String>("format").unwrap().parse()?,
+ show_origin: args.flag("show-origin"),
+ merged: args.get_one::<String>("merged").map(String::as_str) == Some("yes"),
+ };
+ cargo_config::get(config, &opts)?;
+ }
+ Some((cmd, _)) => {
+ unreachable!("unexpected command {}", cmd)
+ }
+ None => {
+ unreachable!("unexpected command")
+ }
+ }
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/doc.rs b/src/tools/cargo/src/bin/cargo/commands/doc.rs
new file mode 100644
index 000000000..932058afb
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/doc.rs
@@ -0,0 +1,61 @@
+use crate::command_prelude::*;
+
+use cargo::ops::{self, DocOptions};
+
+pub fn cli() -> Command {
+ subcommand("doc")
+ // subcommand aliases are handled in aliased_command()
+ // .alias("d")
+ .about("Build a package's documentation")
+ .arg_quiet()
+ .arg(flag(
+ "open",
+ "Opens the docs in a browser after the operation",
+ ))
+ .arg_package_spec(
+ "Package to document",
+ "Document all packages in the workspace",
+ "Exclude packages from the build",
+ )
+ .arg(flag(
+ "no-deps",
+ "Don't build documentation for dependencies",
+ ))
+ .arg(flag("document-private-items", "Document private items"))
+ .arg_jobs()
+ .arg_targets_lib_bin_example(
+ "Document only this package's library",
+ "Document only the specified binary",
+ "Document all binaries",
+ "Document only the specified example",
+ "Document all examples",
+ )
+ .arg_release("Build artifacts in release mode, with optimizations")
+ .arg_profile("Build artifacts with the specified profile")
+ .arg_features()
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg_manifest_path()
+ .arg_message_format()
+ .arg_ignore_rust_version()
+ .arg_unit_graph()
+ .arg_timings()
+ .after_help("Run `cargo help doc` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+ let mode = CompileMode::Doc {
+ deps: !args.flag("no-deps"),
+ };
+ let mut compile_opts =
+ args.compile_options(config, mode, Some(&ws), ProfileChecking::Custom)?;
+ compile_opts.rustdoc_document_private_items = args.flag("document-private-items");
+
+ let doc_opts = DocOptions {
+ open_result: args.flag("open"),
+ compile_opts,
+ };
+ ops::doc(&ws, &doc_opts)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/fetch.rs b/src/tools/cargo/src/bin/cargo/commands/fetch.rs
new file mode 100644
index 000000000..2fbbc478c
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/fetch.rs
@@ -0,0 +1,24 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+use cargo::ops::FetchOptions;
+
+pub fn cli() -> Command {
+ subcommand("fetch")
+ .about("Fetch dependencies of a package from the network")
+ .arg_quiet()
+ .arg_manifest_path()
+ .arg_target_triple("Fetch dependencies for the target triple")
+ .after_help("Run `cargo help fetch` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+
+ let opts = FetchOptions {
+ config,
+ targets: args.targets(),
+ };
+ let _ = ops::fetch(&ws, &opts)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/fix.rs b/src/tools/cargo/src/bin/cargo/commands/fix.rs
new file mode 100644
index 000000000..5238d5852
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/fix.rs
@@ -0,0 +1,92 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> Command {
+ subcommand("fix")
+ .about("Automatically fix lint warnings reported by rustc")
+ .arg_quiet()
+ .arg_package_spec(
+ "Package(s) to fix",
+ "Fix all packages in the workspace",
+ "Exclude packages from the fixes",
+ )
+ .arg_jobs()
+ .arg_targets_all(
+ "Fix only this package's library",
+ "Fix only the specified binary",
+ "Fix all binaries",
+ "Fix only the specified example",
+ "Fix all examples",
+ "Fix only the specified test target",
+ "Fix all tests",
+ "Fix only the specified bench target",
+ "Fix all benches",
+ "Fix all targets (default)",
+ )
+ .arg_release("Fix artifacts in release mode, with optimizations")
+ .arg_profile("Build artifacts with the specified profile")
+ .arg_features()
+ .arg_target_triple("Fix for the target triple")
+ .arg_target_dir()
+ .arg_manifest_path()
+ .arg_message_format()
+ .arg(flag(
+ "broken-code",
+ "Fix code even if it already has compiler errors",
+ ))
+ .arg(flag("edition", "Fix in preparation for the next edition"))
+ .arg(flag(
+ "edition-idioms",
+ "Fix warnings to migrate to the idioms of an edition",
+ ))
+ .arg(flag(
+ "allow-no-vcs",
+ "Fix code even if a VCS was not detected",
+ ))
+ .arg(flag(
+ "allow-dirty",
+ "Fix code even if the working directory is dirty",
+ ))
+ .arg(flag(
+ "allow-staged",
+ "Fix code even if the working directory has staged changes",
+ ))
+ .arg_ignore_rust_version()
+ .arg_timings()
+ .after_help("Run `cargo help fix` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+ // This is a legacy behavior that causes `cargo fix` to pass `--test`.
+ let test = matches!(
+ args.get_one::<String>("profile").map(String::as_str),
+ Some("test")
+ );
+ let mode = CompileMode::Check { test };
+
+    // Unlike other commands, `cargo fix` defaults to all targets to fix as
+    // much code as we can.
+ let mut opts =
+ args.compile_options(config, mode, Some(&ws), ProfileChecking::LegacyTestOnly)?;
+
+ if !opts.filter.is_specific() {
+ // cargo fix with no target selection implies `--all-targets`.
+ opts.filter = ops::CompileFilter::new_all_targets();
+ }
+
+ ops::fix(
+ &ws,
+ &mut ops::FixOptions {
+ edition: args.flag("edition"),
+ idioms: args.flag("edition-idioms"),
+ compile_opts: opts,
+ allow_dirty: args.flag("allow-dirty"),
+ allow_no_vcs: args.flag("allow-no-vcs"),
+ allow_staged: args.flag("allow-staged"),
+ broken_code: args.flag("broken-code"),
+ },
+ )?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/generate_lockfile.rs b/src/tools/cargo/src/bin/cargo/commands/generate_lockfile.rs
new file mode 100644
index 000000000..7d06aad59
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/generate_lockfile.rs
@@ -0,0 +1,17 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> Command {
+ subcommand("generate-lockfile")
+ .about("Generate the lockfile for a package")
+ .arg_quiet()
+ .arg_manifest_path()
+ .after_help("Run `cargo help generate-lockfile` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+ ops::generate_lockfile(&ws)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/git_checkout.rs b/src/tools/cargo/src/bin/cargo/commands/git_checkout.rs
new file mode 100644
index 000000000..90be9bc55
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/git_checkout.rs
@@ -0,0 +1,14 @@
+use crate::command_prelude::*;
+
+const REMOVED: &str = "The `git-checkout` command has been removed.";
+
+pub fn cli() -> Command {
+ subcommand("git-checkout")
+ .about("This command has been removed")
+ .hide(true)
+ .override_help(REMOVED)
+}
+
+pub fn exec(_config: &mut Config, _args: &ArgMatches) -> CliResult {
+ Err(anyhow::format_err!(REMOVED).into())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/help.rs b/src/tools/cargo/src/bin/cargo/commands/help.rs
new file mode 100644
index 000000000..2839b931e
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/help.rs
@@ -0,0 +1,147 @@
+use crate::aliased_command;
+use crate::command_prelude::*;
+use cargo::util::errors::CargoResult;
+use cargo::{drop_println, Config};
+use cargo_util::paths::resolve_executable;
+use flate2::read::GzDecoder;
+use std::ffi::OsStr;
+use std::ffi::OsString;
+use std::io::Read;
+use std::io::Write;
+use std::path::Path;
+
+const COMPRESSED_MAN: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/man.tgz"));
+
+pub fn cli() -> Command {
+ subcommand("help")
+ .about("Displays help for a cargo subcommand")
+ .arg(Arg::new("COMMAND").action(ArgAction::Set))
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let subcommand = args.get_one::<String>("COMMAND");
+ if let Some(subcommand) = subcommand {
+ if !try_help(config, subcommand)? {
+ match check_builtin(&subcommand) {
+ Some(s) => {
+ crate::execute_internal_subcommand(
+ config,
+ &[OsStr::new(s), OsStr::new("--help")],
+ )?;
+ }
+ None => {
+ crate::execute_external_subcommand(
+ config,
+ subcommand,
+ &[OsStr::new(subcommand), OsStr::new("--help")],
+ )?;
+ }
+ }
+ }
+ } else {
+ let mut cmd = crate::cli::cli();
+ let _ = cmd.print_help();
+ }
+ Ok(())
+}
+
+fn try_help(config: &Config, subcommand: &str) -> CargoResult<bool> {
+ let subcommand = match check_alias(config, subcommand) {
+ // If this alias is more than a simple subcommand pass-through, show the alias.
+ Some(argv) if argv.len() > 1 => {
+ let alias = argv.join(" ");
+ drop_println!(config, "`{}` is aliased to `{}`", subcommand, alias);
+ return Ok(true);
+ }
+ // Otherwise, resolve the alias into its subcommand.
+ Some(argv) => {
+ // An alias with an empty argv can be created via `"empty-alias" = ""`.
+ let first = argv.get(0).map(String::as_str).unwrap_or(subcommand);
+ first.to_string()
+ }
+ None => subcommand.to_string(),
+ };
+
+ let subcommand = match check_builtin(&subcommand) {
+ Some(s) => s,
+ None => return Ok(false),
+ };
+
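+    // Prefer rendering the man page with `man` when it is available; otherwise
+    // fall back to the plain-text version via `less`/`more`, or print it
+    // directly to stdout.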
+ if resolve_executable(Path::new("man")).is_ok() {
+ let man = match extract_man(subcommand, "1") {
+ Some(man) => man,
+ None => return Ok(false),
+ };
+ write_and_spawn(subcommand, &man, "man")?;
+ } else {
+ let txt = match extract_man(subcommand, "txt") {
+ Some(txt) => txt,
+ None => return Ok(false),
+ };
+ if resolve_executable(Path::new("less")).is_ok() {
+ write_and_spawn(subcommand, &txt, "less")?;
+ } else if resolve_executable(Path::new("more")).is_ok() {
+ write_and_spawn(subcommand, &txt, "more")?;
+ } else {
+ drop(std::io::stdout().write_all(&txt));
+ }
+ }
+ Ok(true)
+}
+
+/// Checks if the given subcommand is an alias.
+///
+/// Returns None if it is not an alias.
+fn check_alias(config: &Config, subcommand: &str) -> Option<Vec<String>> {
+ aliased_command(config, subcommand).ok().flatten()
+}
+
+/// Checks if the given subcommand is a built-in command (not via an alias).
+///
+/// Returns None if it is not a built-in command.
+fn check_builtin(subcommand: &str) -> Option<&str> {
+ super::builtin_exec(subcommand).map(|_| subcommand)
+}
+
+/// Extracts the given man page from the compressed archive.
+///
+/// Returns None if the command wasn't found.
+fn extract_man(subcommand: &str, extension: &str) -> Option<Vec<u8>> {
+ let extract_name = OsString::from(format!("cargo-{}.{}", subcommand, extension));
+ let gz = GzDecoder::new(COMPRESSED_MAN);
+ let mut ar = tar::Archive::new(gz);
+ // Unwraps should be safe here, since this is a static archive generated
+ // by our build script. It should never be an invalid format!
+ for entry in ar.entries().unwrap() {
+ let mut entry = entry.unwrap();
+ let path = entry.path().unwrap();
+ if path.file_name().unwrap() != extract_name {
+ continue;
+ }
+ let mut result = Vec::new();
+ entry.read_to_end(&mut result).unwrap();
+ return Some(result);
+ }
+ None
+}
+
+/// Write the contents of a man page to disk and spawn the given command to
+/// display it.
+fn write_and_spawn(name: &str, contents: &[u8], command: &str) -> CargoResult<()> {
+ let prefix = format!("cargo-{}.", name);
+ let mut tmp = tempfile::Builder::new().prefix(&prefix).tempfile()?;
+ let f = tmp.as_file_mut();
+ f.write_all(contents)?;
+ f.flush()?;
+ let path = tmp.path();
+ // Use a path relative to the temp directory so that it can work on
+ // cygwin/msys systems which don't handle windows-style paths.
+ let mut relative_name = std::ffi::OsString::from("./");
+ relative_name.push(path.file_name().unwrap());
+ let mut cmd = std::process::Command::new(command)
+ .arg(relative_name)
+ .current_dir(path.parent().unwrap())
+ .spawn()?;
+ drop(cmd.wait());
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/init.rs b/src/tools/cargo/src/bin/cargo/commands/init.rs
new file mode 100644
index 000000000..b280d4fe4
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/init.rs
@@ -0,0 +1,22 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> Command {
+ subcommand("init")
+ .about("Create a new cargo package in an existing directory")
+ .arg_quiet()
+ .arg(Arg::new("path").action(ArgAction::Set).default_value("."))
+ .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .arg_new_opts()
+ .after_help("Run `cargo help init` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let opts = args.new_options(config)?;
+ let project_kind = ops::init(&opts, config)?;
+ config
+ .shell()
+ .status("Created", format!("{} package", project_kind))?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/install.rs b/src/tools/cargo/src/bin/cargo/commands/install.rs
new file mode 100644
index 000000000..8197a1690
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/install.rs
@@ -0,0 +1,198 @@
+use crate::command_prelude::*;
+
+use cargo::core::{GitReference, SourceId, Workspace};
+use cargo::ops;
+use cargo::util::IntoUrl;
+
+use cargo_util::paths;
+
+pub fn cli() -> Command {
+ subcommand("install")
+ .about("Install a Rust binary. Default location is $HOME/.cargo/bin")
+ .arg_quiet()
+ .arg(
+ Arg::new("crate")
+ .value_parser(clap::builder::NonEmptyStringValueParser::new())
+ .num_args(0..),
+ )
+ .arg(
+ opt("version", "Specify a version to install")
+ .alias("vers")
+ .value_name("VERSION")
+ .requires("crate"),
+ )
+ .arg(
+ opt("git", "Git URL to install the specified crate from")
+ .value_name("URL")
+ .conflicts_with_all(&["path", "index", "registry"]),
+ )
+ .arg(
+ opt("branch", "Branch to use when installing from git")
+ .value_name("BRANCH")
+ .requires("git"),
+ )
+ .arg(
+ opt("tag", "Tag to use when installing from git")
+ .value_name("TAG")
+ .requires("git"),
+ )
+ .arg(
+ opt("rev", "Specific commit to use when installing from git")
+ .value_name("SHA")
+ .requires("git"),
+ )
+ .arg(
+ opt("path", "Filesystem path to local crate to install")
+ .value_name("PATH")
+ .conflicts_with_all(&["git", "index", "registry"]),
+ )
+ .arg(flag(
+ "list",
+            "List all installed packages and their versions",
+ ))
+ .arg_jobs()
+ .arg(flag("force", "Force overwriting existing crates or binaries").short('f'))
+ .arg(flag("no-track", "Do not save tracking information"))
+ .arg_features()
+ .arg_profile("Install artifacts with the specified profile")
+ .arg(flag(
+ "debug",
+ "Build in debug mode (with the 'dev' profile) instead of release mode",
+ ))
+ .arg_targets_bins_examples(
+ "Install only the specified binary",
+ "Install all binaries",
+ "Install only the specified example",
+ "Install all examples",
+ )
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg(opt("root", "Directory to install packages into").value_name("DIR"))
+ .arg(
+ opt("index", "Registry index to install from")
+ .value_name("INDEX")
+ .requires("crate")
+ .conflicts_with_all(&["git", "path", "registry"]),
+ )
+ .arg(
+ opt("registry", "Registry to use")
+ .value_name("REGISTRY")
+ .requires("crate")
+ .conflicts_with_all(&["git", "path", "index"]),
+ )
+ .arg_ignore_rust_version()
+ .arg_message_format()
+ .arg_timings()
+ .after_help("Run `cargo help install` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let path = args.value_of_path("path", config);
+ if let Some(path) = &path {
+ config.reload_rooted_at(path)?;
+ } else {
+ // TODO: Consider calling set_search_stop_path(home).
+ config.reload_rooted_at(config.home().clone().into_path_unlocked())?;
+ }
+
+ // In general, we try to avoid normalizing paths in Cargo,
+ // but in these particular cases we need it to fix rust-lang/cargo#10283.
+ // (Handle `SourceId::for_path` and `Workspace::new`,
+ // but not `Config::reload_rooted_at` which is always cwd)
+ let path = path.map(|p| paths::normalize_path(&p));
+
+ let version = args.get_one::<String>("version").map(String::as_str);
+ let krates = args
+ .get_many::<String>("crate")
+ .unwrap_or_default()
+ .map(|k| resolve_crate(k, version))
+ .collect::<crate::CargoResult<Vec<_>>>()?;
+
+ let mut from_cwd = false;
+
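+    // Determine where to install from: a git URL, a local path, the current
+    // working directory (when no crate names are given), a registry index URL,
+    // an alternative registry, or crates.io by default.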
+ let source = if let Some(url) = args.get_one::<String>("git") {
+ let url = url.into_url()?;
+ let gitref = if let Some(branch) = args.get_one::<String>("branch") {
+ GitReference::Branch(branch.clone())
+ } else if let Some(tag) = args.get_one::<String>("tag") {
+ GitReference::Tag(tag.clone())
+ } else if let Some(rev) = args.get_one::<String>("rev") {
+ GitReference::Rev(rev.clone())
+ } else {
+ GitReference::DefaultBranch
+ };
+ SourceId::for_git(&url, gitref)?
+ } else if let Some(path) = &path {
+ SourceId::for_path(path)?
+ } else if krates.is_empty() {
+ from_cwd = true;
+ SourceId::for_path(config.cwd())?
+ } else if let Some(index) = args.get_one::<String>("index") {
+ SourceId::for_registry(&index.into_url()?)?
+ } else if let Some(registry) = args.registry(config)? {
+ SourceId::alt_registry(config, &registry)?
+ } else {
+ SourceId::crates_io(config)?
+ };
+
+ let root = args.get_one::<String>("root").map(String::as_str);
+
+ // We only provide workspace information for local crate installation from
+ // one of the following sources:
+    // - From the current working directory (only works for edition 2015).
+ // - From a specific local file path (from `--path` arg).
+ //
+ // This workspace information is for emitting helpful messages from
+ // `ArgMatchesExt::compile_options` and won't affect the actual compilation.
+ let workspace = if from_cwd {
+ args.workspace(config).ok()
+ } else if let Some(path) = &path {
+ Workspace::new(&path.join("Cargo.toml"), config).ok()
+ } else {
+ None
+ };
+
+ let mut compile_opts = args.compile_options(
+ config,
+ CompileMode::Build,
+ workspace.as_ref(),
+ ProfileChecking::Custom,
+ )?;
+
+ compile_opts.build_config.requested_profile =
+ args.get_profile_name(config, "release", ProfileChecking::Custom)?;
+
+ if args.flag("list") {
+ ops::install_list(root, config)?;
+ } else {
+ ops::install(
+ config,
+ root,
+ krates,
+ source,
+ from_cwd,
+ &compile_opts,
+ args.flag("force"),
+ args.flag("no-track"),
+ )?;
+ }
+ Ok(())
+}
+
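+/// Splits a `name@version` argument into the crate name and an optional
+/// version, rejecting empty names and conflicting `--version` usage.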
+fn resolve_crate<'k>(
+ mut krate: &'k str,
+ mut version: Option<&'k str>,
+) -> crate::CargoResult<(&'k str, Option<&'k str>)> {
+ if let Some((k, v)) = krate.split_once('@') {
+ if version.is_some() {
+ anyhow::bail!("cannot specify both `@{v}` and `--version`");
+ }
+ if k.is_empty() {
+ // by convention, arguments starting with `@` are response files
+ anyhow::bail!("missing crate name for `@{v}`");
+ }
+ krate = k;
+ version = Some(v);
+ }
+ Ok((krate, version))
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/locate_project.rs b/src/tools/cargo/src/bin/cargo/commands/locate_project.rs
new file mode 100644
index 000000000..26c35cd91
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/locate_project.rs
@@ -0,0 +1,93 @@
+use crate::command_prelude::*;
+use anyhow::bail;
+use cargo::{drop_println, CargoResult};
+use serde::Serialize;
+
+pub fn cli() -> Command {
+ subcommand("locate-project")
+ .about("Print a JSON representation of a Cargo.toml file's location")
+ .arg_quiet()
+ .arg_manifest_path()
+ .arg(
+ opt(
+ "message-format",
+ "Output representation [possible values: json, plain]",
+ )
+ .value_name("FMT"),
+ )
+ .arg(flag("workspace", "Locate Cargo.toml of the workspace root"))
+ .after_help("Run `cargo help locate-project` for more detailed information.\n")
+}
+
+#[derive(Serialize)]
+pub struct ProjectLocation<'a> {
+ root: &'a str,
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let root_manifest;
+ let workspace;
+ let root = match WhatToFind::parse(args) {
+ WhatToFind::CurrentManifest => {
+ root_manifest = args.root_manifest(config)?;
+ &root_manifest
+ }
+ WhatToFind::Workspace => {
+ workspace = args.workspace(config)?;
+ workspace.root_manifest()
+ }
+ };
+
+ let root = root
+ .to_str()
+ .ok_or_else(|| {
+ anyhow::format_err!(
+ "your package path contains characters \
+ not representable in Unicode"
+ )
+ })
+ .map_err(|e| CliError::new(e, 1))?;
+
+ let location = ProjectLocation { root };
+
+ match MessageFormat::parse(args)? {
+ MessageFormat::Json => config.shell().print_json(&location)?,
+ MessageFormat::Plain => drop_println!(config, "{}", location.root),
+ }
+
+ Ok(())
+}
+
+enum WhatToFind {
+ CurrentManifest,
+ Workspace,
+}
+
+impl WhatToFind {
+ fn parse(args: &ArgMatches) -> Self {
+ if args.flag("workspace") {
+ WhatToFind::Workspace
+ } else {
+ WhatToFind::CurrentManifest
+ }
+ }
+}
+
+enum MessageFormat {
+ Json,
+ Plain,
+}
+
+impl MessageFormat {
+ fn parse(args: &ArgMatches) -> CargoResult<Self> {
+ let fmt = match args.get_one::<String>("message-format") {
+ Some(fmt) => fmt,
+ None => return Ok(MessageFormat::Json),
+ };
+ match fmt.to_ascii_lowercase().as_str() {
+ "json" => Ok(MessageFormat::Json),
+ "plain" => Ok(MessageFormat::Plain),
+ s => bail!("invalid message format specifier: `{}`", s),
+ }
+ }
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/login.rs b/src/tools/cargo/src/bin/cargo/commands/login.rs
new file mode 100644
index 000000000..1c8d3ae4c
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/login.rs
@@ -0,0 +1,47 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> Command {
+ subcommand("login")
+ .about(
+            "Save an API token from the registry locally. \
+ If token is not specified, it will be read from stdin.",
+ )
+ .arg_quiet()
+ .arg(Arg::new("token").action(ArgAction::Set))
+ .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .arg(
+ flag(
+ "generate-keypair",
+ "Generate a public/secret keypair (unstable)",
+ )
+ .conflicts_with("token"),
+ )
+ .arg(
+ flag("secret-key", "Prompt for secret key (unstable)")
+ .conflicts_with_all(&["generate-keypair", "token"]),
+ )
+ .arg(
+ opt(
+ "key-subject",
+ "Set the key subject for this registry (unstable)",
+ )
+ .value_name("SUBJECT")
+ .conflicts_with("token"),
+ )
+ .after_help("Run `cargo help login` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let registry = args.registry(config)?;
+ ops::registry_login(
+ config,
+ args.get_one::<String>("token").map(|s| s.as_str().into()),
+ registry.as_deref(),
+ args.flag("generate-keypair"),
+ args.flag("secret-key"),
+ args.get_one("key-subject").map(String::as_str),
+ )?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/logout.rs b/src/tools/cargo/src/bin/cargo/commands/logout.rs
new file mode 100644
index 000000000..0b4d8b83f
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/logout.rs
@@ -0,0 +1,16 @@
+use crate::command_prelude::*;
+use cargo::ops;
+
+pub fn cli() -> Command {
+ subcommand("logout")
+ .about("Remove an API token from the registry locally")
+ .arg_quiet()
+ .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .after_help("Run `cargo help logout` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let registry = args.registry(config)?;
+ ops::registry_logout(config, registry.as_deref())?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/metadata.rs b/src/tools/cargo/src/bin/cargo/commands/metadata.rs
new file mode 100644
index 000000000..fdf59654c
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/metadata.rs
@@ -0,0 +1,56 @@
+use crate::command_prelude::*;
+use cargo::ops::{self, OutputMetadataOptions};
+
+pub fn cli() -> Command {
+ subcommand("metadata")
+ .about(
+ "Output the resolved dependencies of a package, \
+ the concrete used versions including overrides, \
+ in machine-readable format",
+ )
+ .arg_quiet()
+ .arg_features()
+ .arg(multi_opt(
+ "filter-platform",
+ "TRIPLE",
+            "Only include resolved dependencies matching the given target-triple",
+ ))
+ .arg(flag(
+ "no-deps",
+ "Output information only about the workspace members \
+ and don't fetch dependencies",
+ ))
+ .arg_manifest_path()
+ .arg(
+ opt("format-version", "Format version")
+ .value_name("VERSION")
+ .value_parser(["1"]),
+ )
+ .after_help("Run `cargo help metadata` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+
+ let version = match args.get_one::<String>("format-version") {
+ None => {
+ config.shell().warn(
+ "please specify `--format-version` flag explicitly \
+ to avoid compatibility problems",
+ )?;
+ 1
+ }
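+        // `--format-version` is restricted to "1" by `value_parser`, so this
+        // parse cannot fail.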
+ Some(version) => version.parse().unwrap(),
+ };
+
+ let options = OutputMetadataOptions {
+ cli_features: args.cli_features()?,
+ no_deps: args.flag("no-deps"),
+ filter_platforms: args._values_of("filter-platform"),
+ version,
+ };
+
+ let result = ops::output_metadata(&ws, &options)?;
+ config.shell().print_json(&result)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/mod.rs b/src/tools/cargo/src/bin/cargo/commands/mod.rs
new file mode 100644
index 000000000..da3109260
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/mod.rs
@@ -0,0 +1,128 @@
+use crate::command_prelude::*;
+
+pub fn builtin() -> Vec<Command> {
+ vec![
+ add::cli(),
+ bench::cli(),
+ build::cli(),
+ check::cli(),
+ clean::cli(),
+ config::cli(),
+ doc::cli(),
+ fetch::cli(),
+ fix::cli(),
+ generate_lockfile::cli(),
+ git_checkout::cli(),
+ help::cli(),
+ init::cli(),
+ install::cli(),
+ locate_project::cli(),
+ login::cli(),
+ logout::cli(),
+ metadata::cli(),
+ new::cli(),
+ owner::cli(),
+ package::cli(),
+ pkgid::cli(),
+ publish::cli(),
+ read_manifest::cli(),
+ remove::cli(),
+ report::cli(),
+ run::cli(),
+ rustc::cli(),
+ rustdoc::cli(),
+ search::cli(),
+ test::cli(),
+ tree::cli(),
+ uninstall::cli(),
+ update::cli(),
+ vendor::cli(),
+ verify_project::cli(),
+ version::cli(),
+ yank::cli(),
+ ]
+}
+
+pub fn builtin_exec(cmd: &str) -> Option<fn(&mut Config, &ArgMatches) -> CliResult> {
+ let f = match cmd {
+ "add" => add::exec,
+ "bench" => bench::exec,
+ "build" => build::exec,
+ "check" => check::exec,
+ "clean" => clean::exec,
+ "config" => config::exec,
+ "doc" => doc::exec,
+ "fetch" => fetch::exec,
+ "fix" => fix::exec,
+ "generate-lockfile" => generate_lockfile::exec,
+ "git-checkout" => git_checkout::exec,
+ "help" => help::exec,
+ "init" => init::exec,
+ "install" => install::exec,
+ "locate-project" => locate_project::exec,
+ "login" => login::exec,
+ "logout" => logout::exec,
+ "metadata" => metadata::exec,
+ "new" => new::exec,
+ "owner" => owner::exec,
+ "package" => package::exec,
+ "pkgid" => pkgid::exec,
+ "publish" => publish::exec,
+ "read-manifest" => read_manifest::exec,
+ "remove" => remove::exec,
+ "report" => report::exec,
+ "run" => run::exec,
+ "rustc" => rustc::exec,
+ "rustdoc" => rustdoc::exec,
+ "search" => search::exec,
+ "test" => test::exec,
+ "tree" => tree::exec,
+ "uninstall" => uninstall::exec,
+ "update" => update::exec,
+ "vendor" => vendor::exec,
+ "verify-project" => verify_project::exec,
+ "version" => version::exec,
+ "yank" => yank::exec,
+ _ => return None,
+ };
+ Some(f)
+}
+
+pub mod add;
+pub mod bench;
+pub mod build;
+pub mod check;
+pub mod clean;
+pub mod config;
+pub mod doc;
+pub mod fetch;
+pub mod fix;
+pub mod generate_lockfile;
+pub mod git_checkout;
+pub mod help;
+pub mod init;
+pub mod install;
+pub mod locate_project;
+pub mod login;
+pub mod logout;
+pub mod metadata;
+pub mod new;
+pub mod owner;
+pub mod package;
+pub mod pkgid;
+pub mod publish;
+pub mod read_manifest;
+pub mod remove;
+pub mod report;
+pub mod run;
+pub mod rustc;
+pub mod rustdoc;
+pub mod search;
+pub mod test;
+pub mod tree;
+pub mod uninstall;
+pub mod update;
+pub mod vendor;
+pub mod verify_project;
+pub mod version;
+pub mod yank;
diff --git a/src/tools/cargo/src/bin/cargo/commands/new.rs b/src/tools/cargo/src/bin/cargo/commands/new.rs
new file mode 100644
index 000000000..18cf93d2e
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/new.rs
@@ -0,0 +1,30 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> Command {
+ subcommand("new")
+ .about("Create a new cargo package at <path>")
+ .arg_quiet()
+ .arg(Arg::new("path").action(ArgAction::Set).required(true))
+ .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .arg_new_opts()
+ .after_help("Run `cargo help new` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let opts = args.new_options(config)?;
+
+ ops::new(&opts, config)?;
+ let path = args.get_one::<String>("path").unwrap();
+ let package_name = if let Some(name) = args.get_one::<String>("name") {
+ name
+ } else {
+ path
+ };
+ config.shell().status(
+ "Created",
+ format!("{} `{}` package", opts.kind, package_name),
+ )?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/owner.rs b/src/tools/cargo/src/bin/cargo/commands/owner.rs
new file mode 100644
index 000000000..493072b7b
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/owner.rs
@@ -0,0 +1,51 @@
+use crate::command_prelude::*;
+
+use cargo::ops::{self, OwnersOptions};
+use cargo::util::auth::Secret;
+
+pub fn cli() -> Command {
+ subcommand("owner")
+ .about("Manage the owners of a crate on the registry")
+ .arg_quiet()
+ .arg(Arg::new("crate").action(ArgAction::Set))
+ .arg(
+ multi_opt(
+ "add",
+ "LOGIN",
+ "Name of a user or team to invite as an owner",
+ )
+ .short('a'),
+ )
+ .arg(
+ multi_opt(
+ "remove",
+ "LOGIN",
+ "Name of a user or team to remove as an owner",
+ )
+ .short('r'),
+ )
+ .arg(flag("list", "List owners of a crate").short('l'))
+ .arg(opt("index", "Registry index to modify owners for").value_name("INDEX"))
+ .arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
+ .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .after_help("Run `cargo help owner` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let registry = args.registry(config)?;
+ let opts = OwnersOptions {
+ krate: args.get_one::<String>("crate").cloned(),
+ token: args.get_one::<String>("token").cloned().map(Secret::from),
+ index: args.get_one::<String>("index").cloned(),
+ to_add: args
+ .get_many::<String>("add")
+ .map(|xs| xs.cloned().collect()),
+ to_remove: args
+ .get_many::<String>("remove")
+ .map(|xs| xs.cloned().collect()),
+ list: args.flag("list"),
+ registry,
+ };
+ ops::modify_owners(config, &opts)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/package.rs b/src/tools/cargo/src/bin/cargo/commands/package.rs
new file mode 100644
index 000000000..ac6b1fe27
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/package.rs
@@ -0,0 +1,62 @@
+use crate::command_prelude::*;
+
+use cargo::ops::{self, PackageOpts};
+
+pub fn cli() -> Command {
+ subcommand("package")
+ .about("Assemble the local package into a distributable tarball")
+ .arg_quiet()
+ .arg(
+ flag(
+ "list",
+ "Print files included in a package without making one",
+ )
+ .short('l'),
+ )
+ .arg(flag(
+ "no-verify",
+ "Don't verify the contents by building them",
+ ))
+ .arg(flag(
+ "no-metadata",
+ "Ignore warnings about a lack of human-usable metadata",
+ ))
+ .arg(flag(
+ "allow-dirty",
+ "Allow dirty working directories to be packaged",
+ ))
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg_features()
+ .arg_package_spec_no_all(
+ "Package(s) to assemble",
+ "Assemble all packages in the workspace",
+ "Don't assemble specified packages",
+ )
+ .arg_manifest_path()
+ .arg_jobs()
+ .after_help("Run `cargo help package` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+ let specs = args.packages_from_flags()?;
+
+ ops::package(
+ &ws,
+ &PackageOpts {
+ config,
+ verify: !args.flag("no-verify"),
+ list: args.flag("list"),
+ check_metadata: !args.flag("no-metadata"),
+ allow_dirty: args.flag("allow-dirty"),
+ to_package: specs,
+ targets: args.targets(),
+ jobs: args.jobs()?,
+ keep_going: args.keep_going(),
+ cli_features: args.cli_features()?,
+ },
+ )?;
+
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/pkgid.rs b/src/tools/cargo/src/bin/cargo/commands/pkgid.rs
new file mode 100644
index 000000000..664db75bd
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/pkgid.rs
@@ -0,0 +1,28 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+use cargo::util::print_available_packages;
+
+pub fn cli() -> Command {
+ subcommand("pkgid")
+ .about("Print a fully qualified package specification")
+ .arg_quiet()
+ .arg(Arg::new("spec").action(ArgAction::Set))
+ .arg_package("Argument to get the package ID specifier for")
+ .arg_manifest_path()
+ .after_help("Run `cargo help pkgid` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+ if args.is_present_with_zero_values("package") {
+ print_available_packages(&ws)?
+ }
+ let spec = args
+ .get_one::<String>("spec")
+ .or_else(|| args.get_one::<String>("package"))
+ .map(String::as_str);
+ let spec = ops::pkgid(&ws, spec)?;
+ cargo::drop_println!(config, "{}", spec);
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/publish.rs b/src/tools/cargo/src/bin/cargo/commands/publish.rs
new file mode 100644
index 000000000..c831d399f
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/publish.rs
@@ -0,0 +1,55 @@
+use crate::command_prelude::*;
+
+use cargo::ops::{self, PublishOpts};
+
+pub fn cli() -> Command {
+ subcommand("publish")
+ .about("Upload a package to the registry")
+ .arg_quiet()
+ .arg_index()
+ .arg(opt("token", "Token to use when uploading").value_name("TOKEN"))
+ .arg(flag(
+ "no-verify",
+ "Don't verify the contents by building them",
+ ))
+ .arg(flag(
+ "allow-dirty",
+ "Allow dirty working directories to be packaged",
+ ))
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg_package("Package to publish")
+ .arg_manifest_path()
+ .arg_features()
+ .arg_jobs()
+ .arg_dry_run("Perform all checks without uploading")
+ .arg(opt("registry", "Registry to publish to").value_name("REGISTRY"))
+ .after_help("Run `cargo help publish` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let registry = args.registry(config)?;
+ let ws = args.workspace(config)?;
+ let index = args.index()?;
+
+ ops::publish(
+ &ws,
+ &PublishOpts {
+ config,
+ token: args
+ .get_one::<String>("token")
+ .map(|s| s.to_string().into()),
+ index,
+ verify: !args.flag("no-verify"),
+ allow_dirty: args.flag("allow-dirty"),
+ to_publish: args.packages_from_flags()?,
+ targets: args.targets(),
+ jobs: args.jobs()?,
+ keep_going: args.keep_going(),
+ dry_run: args.dry_run(),
+ registry,
+ cli_features: args.cli_features()?,
+ },
+ )?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/read_manifest.rs b/src/tools/cargo/src/bin/cargo/commands/read_manifest.rs
new file mode 100644
index 000000000..a1f42bfb0
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/read_manifest.rs
@@ -0,0 +1,20 @@
+use crate::command_prelude::*;
+
+pub fn cli() -> Command {
+ subcommand("read-manifest")
+ .about(
+ "\
+Print a JSON representation of a Cargo.toml manifest.
+
+Deprecated, use `cargo metadata --no-deps` instead.\
+",
+ )
+ .arg_quiet()
+ .arg_manifest_path()
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+ config.shell().print_json(&ws.current()?.serialized())?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/remove.rs b/src/tools/cargo/src/bin/cargo/commands/remove.rs
new file mode 100644
index 000000000..50bc8b7e6
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/remove.rs
@@ -0,0 +1,344 @@
+use cargo::core::dependency::DepKind;
+use cargo::core::PackageIdSpec;
+use cargo::core::Workspace;
+use cargo::ops::cargo_remove::remove;
+use cargo::ops::cargo_remove::RemoveOptions;
+use cargo::ops::resolve_ws;
+use cargo::util::command_prelude::*;
+use cargo::util::print_available_packages;
+use cargo::util::toml_mut::dependency::Dependency;
+use cargo::util::toml_mut::dependency::MaybeWorkspace;
+use cargo::util::toml_mut::dependency::Source;
+use cargo::util::toml_mut::manifest::DepTable;
+use cargo::util::toml_mut::manifest::LocalManifest;
+use cargo::CargoResult;
+
+pub fn cli() -> clap::Command {
+ clap::Command::new("remove")
+ // Subcommand aliases are handled in `aliased_command()`.
+ // .alias("rm")
+ .about("Remove dependencies from a Cargo.toml manifest file")
+ .args([clap::Arg::new("dependencies")
+ .action(clap::ArgAction::Append)
+ .required(true)
+ .num_args(1..)
+ .value_name("DEP_ID")
+ .help("Dependencies to be removed")])
+ .arg_package("Package to remove from")
+ .arg_manifest_path()
+ .arg_quiet()
+ .arg_dry_run("Don't actually write the manifest")
+ .next_help_heading("Section")
+ .args([
+ clap::Arg::new("dev")
+ .long("dev")
+ .conflicts_with("build")
+ .action(clap::ArgAction::SetTrue)
+ .group("section")
+ .help("Remove as development dependency"),
+ clap::Arg::new("build")
+ .long("build")
+ .conflicts_with("dev")
+ .action(clap::ArgAction::SetTrue)
+ .group("section")
+ .help("Remove as build dependency"),
+ clap::Arg::new("target")
+ .long("target")
+ .num_args(1)
+ .value_name("TARGET")
+ .value_parser(clap::builder::NonEmptyStringValueParser::new())
+ .help("Remove as dependency from the given target platform"),
+ ])
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let dry_run = args.dry_run();
+
+ let workspace = args.workspace(config)?;
+
+ if args.is_present_with_zero_values("package") {
+ print_available_packages(&workspace)?;
+ }
+
+ let packages = args.packages_from_flags()?;
+ let packages = packages.get_packages(&workspace)?;
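+    // `cargo remove` edits a single manifest, so exactly one package must be
+    // selected.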
+ let spec = match packages.len() {
+ 0 => {
+ return Err(CliError::new(
+ anyhow::format_err!(
+ "no packages selected to modify. Please specify one with `-p <PKGID>`"
+ ),
+ 101,
+ ));
+ }
+ 1 => packages[0],
+ _ => {
+ let names = packages.iter().map(|p| p.name()).collect::<Vec<_>>();
+ return Err(CliError::new(
+ anyhow::format_err!(
+ "`cargo remove` could not determine which package to modify. \
+ Use the `--package` option to specify a package. \n\
+ available packages: {}",
+ names.join(", ")
+ ),
+ 101,
+ ));
+ }
+ };
+
+ let dependencies = args
+ .get_many::<String>("dependencies")
+ .expect("required(true)")
+ .cloned()
+ .collect::<Vec<_>>();
+
+ let section = parse_section(args);
+
+ let options = RemoveOptions {
+ config,
+ spec,
+ dependencies,
+ section,
+ dry_run,
+ };
+ remove(&options)?;
+
+ if !dry_run {
+ // Clean up the workspace
+ gc_workspace(&workspace)?;
+
+ // Reload the workspace since we've changed dependencies
+ let ws = args.workspace(config)?;
+ resolve_ws(&ws)?;
+ }
+
+ Ok(())
+}
+
+fn parse_section(args: &ArgMatches) -> DepTable {
+ let dev = args.flag("dev");
+ let build = args.flag("build");
+
+ let kind = if dev {
+ DepKind::Development
+ } else if build {
+ DepKind::Build
+ } else {
+ DepKind::Normal
+ };
+
+ let mut table = DepTable::new().set_kind(kind);
+
+ if let Some(target) = args.get_one::<String>("target") {
+ assert!(!target.is_empty(), "Target specification may not be empty");
+ table = table.set_target(target);
+ }
+
+ table
+}
+
+/// Clean up the workspace.dependencies, profile, patch, and replace sections of the root manifest
+/// by removing dependencies which no longer have a reference to them.
+fn gc_workspace(workspace: &Workspace<'_>) -> CargoResult<()> {
+ let mut manifest: toml_edit::Document =
+ cargo_util::paths::read(workspace.root_manifest())?.parse()?;
+ let mut is_modified = true;
+
+ let members = workspace
+ .members()
+ .map(|p| LocalManifest::try_new(p.manifest_path()))
+ .collect::<CargoResult<Vec<_>>>()?;
+
+ let mut dependencies = members
+ .iter()
+ .flat_map(|manifest| {
+ manifest.get_sections().into_iter().flat_map(|(_, table)| {
+ table
+ .as_table_like()
+ .unwrap()
+ .iter()
+ .map(|(key, item)| Dependency::from_toml(&manifest.path, key, item))
+ .collect::<Vec<_>>()
+ })
+ })
+ .collect::<CargoResult<Vec<_>>>()?;
+
+ // Clean up the workspace.dependencies section and replace instances of
+ // workspace dependencies with their definitions
+ if let Some(toml_edit::Item::Table(deps_table)) = manifest
+ .get_mut("workspace")
+ .and_then(|t| t.get_mut("dependencies"))
+ {
+ deps_table.set_implicit(true);
+ for (key, item) in deps_table.iter_mut() {
+ let ws_dep = Dependency::from_toml(&workspace.root(), key.get(), item)?;
+
+ // search for uses of this workspace dependency
+ let mut is_used = false;
+ for dep in dependencies.iter_mut().filter(|d| {
+ d.toml_key() == key.get() && matches!(d.source(), Some(Source::Workspace(_)))
+ }) {
+ // HACK: Replace workspace references in `dependencies` to simplify later GC steps:
+ // 1. Avoid having to look it up again to determine the dependency source / spec
+ // 2. The entry might get deleted, preventing us from looking it up again
+ //
+ // This does lose extra information, like features enabled, but that shouldn't be a
+ // problem for GC
+ *dep = ws_dep.clone();
+
+ is_used = true;
+ }
+
+ if !is_used {
+ *item = toml_edit::Item::None;
+ is_modified = true;
+ }
+ }
+ }
+
+ // Clean up the profile section
+ //
+ // Example tables:
+ // - profile.dev.package.foo
+ // - profile.release.package."*"
+ // - profile.release.package."foo:2.1.0"
+ if let Some(toml_edit::Item::Table(profile_section_table)) = manifest.get_mut("profile") {
+ profile_section_table.set_implicit(true);
+
+ for (_, item) in profile_section_table.iter_mut() {
+ if let toml_edit::Item::Table(profile_table) = item {
+ profile_table.set_implicit(true);
+
+ if let Some(toml_edit::Item::Table(package_table)) =
+ profile_table.get_mut("package")
+ {
+ package_table.set_implicit(true);
+
+ for (key, item) in package_table.iter_mut() {
+ if !spec_has_match(
+ &PackageIdSpec::parse(key.get())?,
+ &dependencies,
+ workspace.config(),
+ )? {
+ *item = toml_edit::Item::None;
+ is_modified = true;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Clean up the patch section
+ if let Some(toml_edit::Item::Table(patch_section_table)) = manifest.get_mut("patch") {
+ patch_section_table.set_implicit(true);
+
+ // The key in each of the subtables is a source (either a registry or a URL)
+ for (source, item) in patch_section_table.iter_mut() {
+ if let toml_edit::Item::Table(patch_table) = item {
+ patch_table.set_implicit(true);
+
+ for (key, item) in patch_table.iter_mut() {
+ let package_name =
+ Dependency::from_toml(&workspace.root_manifest(), key.get(), item)?.name;
+ if !source_has_match(
+ &package_name,
+ source.get(),
+ &dependencies,
+ workspace.config(),
+ )? {
+ *item = toml_edit::Item::None;
+ }
+ }
+ }
+ }
+ }
+
+ // Clean up the replace section
+ if let Some(toml_edit::Item::Table(table)) = manifest.get_mut("replace") {
+ table.set_implicit(true);
+
+ for (key, item) in table.iter_mut() {
+ if !spec_has_match(
+ &PackageIdSpec::parse(key.get())?,
+ &dependencies,
+ workspace.config(),
+ )? {
+ *item = toml_edit::Item::None;
+ is_modified = true;
+ }
+ }
+ }
+
+ if is_modified {
+ cargo_util::paths::write(workspace.root_manifest(), manifest.to_string().as_bytes())?;
+ }
+
+ Ok(())
+}
+
+/// Check whether or not a package ID spec matches any non-workspace dependencies.
+fn spec_has_match(
+ spec: &PackageIdSpec,
+ dependencies: &[Dependency],
+ config: &Config,
+) -> CargoResult<bool> {
+ for dep in dependencies {
+ if spec.name().as_str() != &dep.name {
+ continue;
+ }
+
+ let version_matches = match (spec.version(), dep.version()) {
+ (Some(v), Some(vq)) => semver::VersionReq::parse(vq)?.matches(v),
+ (Some(_), None) => false,
+ (None, None | Some(_)) => true,
+ };
+ if !version_matches {
+ continue;
+ }
+
+ match dep.source_id(config)? {
+ MaybeWorkspace::Other(source_id) => {
+ if spec.url().map(|u| u == source_id.url()).unwrap_or(true) {
+ return Ok(true);
+ }
+ }
+ MaybeWorkspace::Workspace(_) => {}
+ }
+ }
+
+ Ok(false)
+}
+
+/// Check whether or not a source (URL or registry name) matches any non-workspace dependencies.
+fn source_has_match(
+ name: &str,
+ source: &str,
+ dependencies: &[Dependency],
+ config: &Config,
+) -> CargoResult<bool> {
+ for dep in dependencies {
+ if &dep.name != name {
+ continue;
+ }
+
+ match dep.source_id(config)? {
+ MaybeWorkspace::Other(source_id) => {
+ if source_id.is_registry() {
+ if source_id.display_registry_name() == source
+ || source_id.url().as_str() == source
+ {
+ return Ok(true);
+ }
+ } else if source_id.is_git() {
+ if source_id.url().as_str() == source {
+ return Ok(true);
+ }
+ }
+ }
+ MaybeWorkspace::Workspace(_) => {}
+ }
+ }
+
+ Ok(false)
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/report.rs b/src/tools/cargo/src/bin/cargo/commands/report.rs
new file mode 100644
index 000000000..275a8f7c0
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/report.rs
@@ -0,0 +1,49 @@
+use crate::command_prelude::*;
+use cargo::core::compiler::future_incompat::{OnDiskReports, REPORT_PREAMBLE};
+use cargo::drop_println;
+
+pub fn cli() -> Command {
+ subcommand("report")
+ .about("Generate and display various kinds of reports")
+ .after_help("Run `cargo help report` for more detailed information.\n")
+ .subcommand_required(true)
+ .arg_required_else_help(true)
+ .subcommand(
+ subcommand("future-incompatibilities")
+ .alias("future-incompat")
+ .about("Reports any crates which will eventually stop compiling")
+ .arg(
+ opt(
+ "id",
+ "identifier of the report generated by a Cargo command invocation",
+ )
+ .value_name("id"),
+ )
+ .arg_package("Package to display a report for"),
+ )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ match args.subcommand() {
+ Some(("future-incompatibilities", args)) => report_future_incompatibilities(config, args),
+ Some((cmd, _)) => {
+ unreachable!("unexpected command {}", cmd)
+ }
+ None => {
+ unreachable!("unexpected command")
+ }
+ }
+}
+
+fn report_future_incompatibilities(config: &Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+ let reports = OnDiskReports::load(&ws)?;
+ let id = args
+ .value_of_u32("id")?
+ .unwrap_or_else(|| reports.last_id());
+ let krate = args.get_one::<String>("package").map(String::as_str);
+ let report = reports.get_report(id, config, krate)?;
+ drop_println!(config, "{}", REPORT_PREAMBLE);
+ drop(config.shell().print_ansi_stdout(report.as_bytes()));
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/run.rs b/src/tools/cargo/src/bin/cargo/commands/run.rs
new file mode 100644
index 000000000..cde754c7a
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/run.rs
@@ -0,0 +1,103 @@
+use crate::command_prelude::*;
+use crate::util::restricted_names::is_glob_pattern;
+use cargo::core::Verbosity;
+use cargo::ops::{self, CompileFilter, Packages};
+use cargo_util::ProcessError;
+
+pub fn cli() -> Command {
+ subcommand("run")
+ // subcommand aliases are handled in aliased_command()
+ // .alias("r")
+ .about("Run a binary or example of the local package")
+ .arg_quiet()
+ .arg(
+ Arg::new("args")
+ .help("Arguments for the binary or example to run")
+ .value_parser(value_parser!(std::ffi::OsString))
+ .num_args(0..)
+ .trailing_var_arg(true),
+ )
+ .arg_targets_bin_example(
+ "Name of the bin target to run",
+ "Name of the example target to run",
+ )
+ .arg_package("Package with the target to run")
+ .arg_jobs()
+ .arg_release("Build artifacts in release mode, with optimizations")
+ .arg_profile("Build artifacts with the specified profile")
+ .arg_features()
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg_manifest_path()
+ .arg_message_format()
+ .arg_unit_graph()
+ .arg_ignore_rust_version()
+ .arg_timings()
+ .after_help("Run `cargo help run` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+
+ let mut compile_opts = args.compile_options(
+ config,
+ CompileMode::Build,
+ Some(&ws),
+ ProfileChecking::Custom,
+ )?;
+
+    // Disallow `spec` from being a glob pattern
+ if let Packages::Packages(opt_in) = &compile_opts.spec {
+ if let Some(pattern) = opt_in.iter().find(|s| is_glob_pattern(s)) {
+ return Err(anyhow::anyhow!(
+ "`cargo run` does not support glob pattern `{}` on package selection",
+ pattern,
+ )
+ .into());
+ }
+ }
+
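+    // With no explicit --bin or --example, honor the `default-run` manifest
+    // key when exactly one of the selected packages defines it.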
+ if !args.contains_id("example") && !args.contains_id("bin") {
+ let default_runs: Vec<_> = compile_opts
+ .spec
+ .get_packages(&ws)?
+ .iter()
+ .filter_map(|pkg| pkg.manifest().default_run())
+ .collect();
+ if let [bin] = &default_runs[..] {
+ compile_opts.filter = CompileFilter::single_bin(bin.to_string());
+ } else {
+ // ops::run will take care of errors if len pkgs != 1.
+ compile_opts.filter = CompileFilter::Default {
+ // Force this to false because the code in ops::run is not
+ // able to pre-check features before compilation starts to
+ // enforce that only 1 binary is built.
+ required_features_filterable: false,
+ };
+ }
+ };
+
+ ops::run(&ws, &compile_opts, &values_os(args, "args")).map_err(|err| {
+ let proc_err = match err.downcast_ref::<ProcessError>() {
+ Some(e) => e,
+ None => return CliError::new(err, 101),
+ };
+
+ // If we never actually spawned the process then that sounds pretty
+ // bad and we always want to forward that up.
+ let exit_code = match proc_err.code {
+ Some(exit) => exit,
+ None => return CliError::new(err, 101),
+ };
+
+        // If `-q` was passed, then we suppress extra error information about
+        // a failed process; we assume the process itself printed out enough
+        // information about why it failed, so we don't repeat it here.
+ let is_quiet = config.shell().verbosity() == Verbosity::Quiet;
+ if is_quiet {
+ CliError::code(exit_code)
+ } else {
+ CliError::new(err, exit_code)
+ }
+ })
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/rustc.rs b/src/tools/cargo/src/bin/cargo/commands/rustc.rs
new file mode 100644
index 000000000..de73eb80c
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/rustc.rs
@@ -0,0 +1,100 @@
+use crate::command_prelude::*;
+use cargo::ops;
+use cargo::util::interning::InternedString;
+
+const PRINT_ARG_NAME: &str = "print";
+const CRATE_TYPE_ARG_NAME: &str = "crate-type";
+
+pub fn cli() -> Command {
+ subcommand("rustc")
+ .about("Compile a package, and pass extra options to the compiler")
+ .arg_quiet()
+ .arg(
+ Arg::new("args")
+ .num_args(0..)
+ .help("Extra rustc flags")
+ .trailing_var_arg(true),
+ )
+ .arg_package("Package to build")
+ .arg_jobs()
+ .arg_targets_all(
+ "Build only this package's library",
+ "Build only the specified binary",
+ "Build all binaries",
+ "Build only the specified example",
+ "Build all examples",
+ "Build only the specified test target",
+ "Build all tests",
+ "Build only the specified bench target",
+ "Build all benches",
+ "Build all targets",
+ )
+ .arg_release("Build artifacts in release mode, with optimizations")
+ .arg_profile("Build artifacts with the specified profile")
+ .arg_features()
+ .arg_target_triple("Target triple which compiles will be for")
+ .arg(
+ opt(
+ PRINT_ARG_NAME,
+ "Output compiler information without compiling",
+ )
+ .value_name("INFO"),
+ )
+ .arg(multi_opt(
+ CRATE_TYPE_ARG_NAME,
+ "CRATE-TYPE",
+ "Comma separated list of types of crates for the compiler to emit",
+ ))
+ .arg_target_dir()
+ .arg_manifest_path()
+ .arg_message_format()
+ .arg_unit_graph()
+ .arg_ignore_rust_version()
+ .arg_future_incompat_report()
+ .arg_timings()
+ .after_help("Run `cargo help rustc` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+    // This is legacy behavior: the compile mode is chosen based on the profile
+    // name. If we want to support this more formally, I think adding a --mode
+    // flag would be warranted.
+ let mode = match args.get_one::<String>("profile").map(String::as_str) {
+ Some("test") => CompileMode::Test,
+ Some("bench") => CompileMode::Bench,
+ Some("check") => CompileMode::Check { test: false },
+ _ => CompileMode::Build,
+ };
+ let mut compile_opts = args.compile_options_for_single_package(
+ config,
+ mode,
+ Some(&ws),
+ ProfileChecking::LegacyRustc,
+ )?;
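+    // The "check" profile only selects the compile mode above; build with the
+    // dev profile in that case.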
+ if compile_opts.build_config.requested_profile == "check" {
+ compile_opts.build_config.requested_profile = InternedString::new("dev");
+ }
+ let target_args = values(args, "args");
+ compile_opts.target_rustc_args = if target_args.is_empty() {
+ None
+ } else {
+ Some(target_args)
+ };
+ if let Some(opt_value) = args.get_one::<String>(PRINT_ARG_NAME) {
+ config
+ .cli_unstable()
+ .fail_if_stable_opt(PRINT_ARG_NAME, 9357)?;
+ ops::print(&ws, &compile_opts, opt_value)?;
+ return Ok(());
+ }
+ let crate_types = values(args, CRATE_TYPE_ARG_NAME);
+ compile_opts.target_rustc_crate_types = if crate_types.is_empty() {
+ None
+ } else {
+ Some(crate_types)
+ };
+ ops::compile(&ws, &compile_opts)?;
+
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/rustdoc.rs b/src/tools/cargo/src/bin/cargo/commands/rustdoc.rs
new file mode 100644
index 000000000..e87f435fd
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/rustdoc.rs
@@ -0,0 +1,66 @@
+use cargo::ops::{self, DocOptions};
+
+use crate::command_prelude::*;
+
+pub fn cli() -> Command {
+ subcommand("rustdoc")
+ .about("Build a package's documentation, using specified custom flags.")
+ .arg_quiet()
+ .arg(
+ Arg::new("args")
+ .help("Extra rustdoc flags")
+ .num_args(0..)
+ .trailing_var_arg(true),
+ )
+ .arg(flag(
+ "open",
+ "Opens the docs in a browser after the operation",
+ ))
+ .arg_package("Package to document")
+ .arg_jobs()
+ .arg_targets_all(
+ "Build only this package's library",
+ "Build only the specified binary",
+ "Build all binaries",
+ "Build only the specified example",
+ "Build all examples",
+ "Build only the specified test target",
+ "Build all tests",
+ "Build only the specified bench target",
+ "Build all benches",
+ "Build all targets",
+ )
+ .arg_release("Build artifacts in release mode, with optimizations")
+ .arg_profile("Build artifacts with the specified profile")
+ .arg_features()
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg_manifest_path()
+ .arg_message_format()
+ .arg_unit_graph()
+ .arg_ignore_rust_version()
+ .arg_timings()
+ .after_help("Run `cargo help rustdoc` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+ let mut compile_opts = args.compile_options_for_single_package(
+ config,
+ CompileMode::Doc { deps: false },
+ Some(&ws),
+ ProfileChecking::Custom,
+ )?;
+ let target_args = values(args, "args");
+ compile_opts.target_rustdoc_args = if target_args.is_empty() {
+ None
+ } else {
+ Some(target_args)
+ };
+ let doc_opts = DocOptions {
+ open_result: args.flag("open"),
+ compile_opts,
+ };
+ ops::doc(&ws, &doc_opts)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/search.rs b/src/tools/cargo/src/bin/cargo/commands/search.rs
new file mode 100644
index 000000000..c55d932cc
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/search.rs
@@ -0,0 +1,37 @@
+use crate::command_prelude::*;
+
+use std::cmp::min;
+
+use cargo::ops;
+
+pub fn cli() -> Command {
+ subcommand("search")
+ .about("Search packages in crates.io")
+ .arg_quiet()
+ .arg(Arg::new("query").num_args(0..))
+ .arg_index()
+ .arg(
+ opt(
+ "limit",
+ "Limit the number of results (default: 10, max: 100)",
+ )
+ .value_name("LIMIT"),
+ )
+ .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .after_help("Run `cargo help search` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let registry = args.registry(config)?;
+ let index = args.index()?;
+ let limit = args.value_of_u32("limit")?;
+ let limit = min(100, limit.unwrap_or(10));
+ let query: Vec<&str> = args
+ .get_many::<String>("query")
+ .unwrap_or_default()
+ .map(String::as_str)
+ .collect();
+ let query: String = query.join("+");
+ ops::search(&query, config, index, limit, registry)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/test.rs b/src/tools/cargo/src/bin/cargo/commands/test.rs
new file mode 100644
index 000000000..607655aaf
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/test.rs
@@ -0,0 +1,113 @@
+use crate::command_prelude::*;
+use cargo::ops;
+
+pub fn cli() -> Command {
+ subcommand("test")
+ // Subcommand aliases are handled in `aliased_command()`.
+ // .alias("t")
+ .about("Execute all unit and integration tests and build examples of a local package")
+ .arg(
+ Arg::new("TESTNAME")
+ .action(ArgAction::Set)
+ .help("If specified, only run tests containing this string in their names"),
+ )
+ .arg(
+ Arg::new("args")
+ .help("Arguments for the test binary")
+ .num_args(0..)
+ .last(true),
+ )
+ .arg(
+ flag(
+ "quiet",
+ "Display one character per test instead of one line",
+ )
+ .short('q'),
+ )
+ .arg_targets_all(
+ "Test only this package's library unit tests",
+ "Test only the specified binary",
+ "Test all binaries",
+ "Test only the specified example",
+ "Test all examples",
+ "Test only the specified test target",
+ "Test all tests",
+ "Test only the specified bench target",
+ "Test all benches",
+ "Test all targets",
+ )
+ .arg(flag("doc", "Test only this library's documentation"))
+ .arg(flag("no-run", "Compile, but don't run tests"))
+ .arg(flag("no-fail-fast", "Run all tests regardless of failure"))
+ .arg_package_spec(
+ "Package to run tests for",
+ "Test all packages in the workspace",
+ "Exclude packages from the test",
+ )
+ .arg_jobs()
+ .arg_release("Build artifacts in release mode, with optimizations")
+ .arg_profile("Build artifacts with the specified profile")
+ .arg_features()
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg_manifest_path()
+ .arg_ignore_rust_version()
+ .arg_message_format()
+ .arg_unit_graph()
+ .arg_future_incompat_report()
+ .arg_timings()
+ .after_help(
+ "Run `cargo help test` for more detailed information.\n\
+ Run `cargo test -- --help` for test binary options.\n",
+ )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+
+ let mut compile_opts = args.compile_options(
+ config,
+ CompileMode::Test,
+ Some(&ws),
+ ProfileChecking::Custom,
+ )?;
+
+ compile_opts.build_config.requested_profile =
+ args.get_profile_name(config, "test", ProfileChecking::Custom)?;
+
+ // `TESTNAME` is actually an argument of the test binary, but it's
+ // important, so we explicitly mention it and reconfigure.
+ let test_name = args.get_one::<String>("TESTNAME");
+ let test_args = args.get_one::<String>("TESTNAME").into_iter();
+ let test_args = test_args.chain(args.get_many::<String>("args").unwrap_or_default());
+ let test_args = test_args.map(String::as_str).collect::<Vec<_>>();
+
+ let no_run = args.flag("no-run");
+ let doc = args.flag("doc");
+ if doc {
+ if compile_opts.filter.is_specific() {
+ return Err(
+ anyhow::format_err!("Can't mix --doc with other target selecting options").into(),
+ );
+ }
+ if no_run {
+ return Err(anyhow::format_err!("Can't skip running doc tests with --no-run").into());
+ }
+ compile_opts.build_config.mode = CompileMode::Doctest;
+ compile_opts.filter = ops::CompileFilter::lib_only();
+ } else if test_name.is_some() && !compile_opts.filter.is_specific() {
+        // If the `TESTNAME` argument is provided, assume that the user knows
+        // exactly what they want to test, so we use `all_test_targets` to
+        // avoid compiling unnecessary targets such as examples, which the
+        // default target filter would otherwise include.
+ compile_opts.filter = ops::CompileFilter::all_test_targets();
+ }
+
+ let ops = ops::TestOptions {
+ no_run,
+ no_fail_fast: args.flag("no-fail-fast"),
+ compile_opts,
+ };
+
+ ops::run_tests(&ws, &ops, &test_args)
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/tree.rs b/src/tools/cargo/src/bin/cargo/commands/tree.rs
new file mode 100644
index 000000000..94bf3fff1
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/tree.rs
@@ -0,0 +1,305 @@
+use crate::cli;
+use crate::command_prelude::*;
+use anyhow::{bail, format_err};
+use cargo::core::dependency::DepKind;
+use cargo::ops::tree::{self, EdgeKind};
+use cargo::ops::Packages;
+use cargo::util::print_available_packages;
+use cargo::util::CargoResult;
+use std::collections::HashSet;
+use std::str::FromStr;
+
+pub fn cli() -> Command {
+ subcommand("tree")
+ .about("Display a tree visualization of a dependency graph")
+ .arg_quiet()
+ .arg_manifest_path()
+ .arg_package_spec_no_all(
+ "Package to be used as the root of the tree",
+ "Display the tree for all packages in the workspace",
+ "Exclude specific workspace members",
+ )
+ .arg(
+ flag("all", "Deprecated, use --no-dedupe instead")
+ .short('a')
+ .hide(true),
+ )
+ .arg(flag("all-targets", "Deprecated, use --target=all instead").hide(true))
+ .arg_features()
+ .arg_target_triple(
+ "Filter dependencies matching the given target-triple (default host platform). \
+ Pass `all` to include all targets.",
+ )
+ .arg(flag("no-dev-dependencies", "Deprecated, use -e=no-dev instead").hide(true))
+ .arg(
+ multi_opt(
+ "edges",
+ "KINDS",
+ "The kinds of dependencies to display \
+ (features, normal, build, dev, all, \
+ no-normal, no-build, no-dev, no-proc-macro)",
+ )
+ .short('e'),
+ )
+ .arg(
+ optional_multi_opt(
+ "invert",
+ "SPEC",
+ "Invert the tree direction and focus on the given package",
+ )
+ .short('i'),
+ )
+ .arg(multi_opt(
+ "prune",
+ "SPEC",
+ "Prune the given package from the display of the dependency tree",
+ ))
+ .arg(opt("depth", "Maximum display depth of the dependency tree").value_name("DEPTH"))
+ .arg(flag("no-indent", "Deprecated, use --prefix=none instead").hide(true))
+ .arg(flag("prefix-depth", "Deprecated, use --prefix=depth instead").hide(true))
+ .arg(
+ opt(
+ "prefix",
+ "Change the prefix (indentation) of how each entry is displayed",
+ )
+ .value_name("PREFIX")
+ .value_parser(["depth", "indent", "none"])
+ .default_value("indent"),
+ )
+ .arg(flag(
+ "no-dedupe",
+ "Do not de-duplicate (repeats all shared dependencies)",
+ ))
+ .arg(
+ flag(
+ "duplicates",
+ "Show only dependencies which come in multiple versions (implies -i)",
+ )
+ .short('d')
+ .alias("duplicate"),
+ )
+ .arg(
+ opt("charset", "Character set to use in output")
+ .value_name("CHARSET")
+ .value_parser(["utf8", "ascii"])
+ .default_value("utf8"),
+ )
+ .arg(
+ opt("format", "Format string used for printing dependencies")
+ .value_name("FORMAT")
+ .short('f')
+ .default_value("{p}"),
+ )
+ .arg(
+ // Backwards compatibility with old cargo-tree.
+ flag("version", "Print version info and exit")
+ .short('V')
+ .hide(true),
+ )
+ .after_help("Run `cargo help tree` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ if args.flag("version") {
+ let verbose = args.verbose() > 0;
+ let version = cli::get_version_string(verbose);
+ cargo::drop_print!(config, "{}", version);
+ return Ok(());
+ }
+ let prefix = if args.flag("no-indent") {
+ config
+ .shell()
+ .warn("the --no-indent flag has been changed to --prefix=none")?;
+ "none"
+ } else if args.flag("prefix-depth") {
+ config
+ .shell()
+ .warn("the --prefix-depth flag has been changed to --prefix=depth")?;
+ "depth"
+ } else {
+ args.get_one::<String>("prefix").unwrap().as_str()
+ };
+ let prefix = tree::Prefix::from_str(prefix).map_err(|e| anyhow::anyhow!("{}", e))?;
+
+ let no_dedupe = args.flag("no-dedupe") || args.flag("all");
+ if args.flag("all") {
+ config.shell().warn(
+ "The `cargo tree` --all flag has been changed to --no-dedupe, \
+ and may be removed in a future version.\n\
+ If you are looking to display all workspace members, use the --workspace flag.",
+ )?;
+ }
+
+ let targets = if args.flag("all-targets") {
+ config
+ .shell()
+ .warn("the --all-targets flag has been changed to --target=all")?;
+ vec!["all".to_string()]
+ } else {
+ args._values_of("target")
+ };
+ let target = tree::Target::from_cli(targets);
+
+ let (edge_kinds, no_proc_macro) = parse_edge_kinds(config, args)?;
+ let graph_features = edge_kinds.contains(&EdgeKind::Feature);
+
+ let pkgs_to_prune = args._values_of("prune");
+
+ let packages = args.packages_from_flags()?;
+ let mut invert = args
+ .get_many::<String>("invert")
+ .map_or_else(|| Vec::new(), |is| is.map(|s| s.to_string()).collect());
+ if args.is_present_with_zero_values("invert") {
+ match &packages {
+ Packages::Packages(ps) => {
+ // Backwards compatibility with old syntax of `cargo tree -i -p foo`.
+ invert.extend(ps.clone());
+ }
+ _ => {
+ return Err(format_err!(
+ "The `-i` flag requires a package name.\n\
+\n\
+The `-i` flag is used to inspect the reverse dependencies of a specific\n\
+package. It will invert the tree and display the packages that depend on the\n\
+given package.\n\
+\n\
+Note that in a workspace, by default it will only display the package's\n\
+reverse dependencies inside the tree of the workspace member in the current\n\
+directory. The --workspace flag can be used to extend it so that it will show\n\
+the package's reverse dependencies across the entire workspace. The -p flag\n\
+can be used to display the package's reverse dependencies only with the\n\
+subtree of the package given to -p.\n\
+"
+ )
+ .into());
+ }
+ }
+ }
+
+ let ws = args.workspace(config)?;
+
+ if args.is_present_with_zero_values("package") {
+ print_available_packages(&ws)?;
+ }
+
+ let charset = tree::Charset::from_str(args.get_one::<String>("charset").unwrap())
+ .map_err(|e| anyhow::anyhow!("{}", e))?;
+ let opts = tree::TreeOptions {
+ cli_features: args.cli_features()?,
+ packages,
+ target,
+ edge_kinds,
+ invert,
+ pkgs_to_prune,
+ prefix,
+ no_dedupe,
+ duplicates: args.flag("duplicates"),
+ charset,
+ format: args.get_one::<String>("format").cloned().unwrap(),
+ graph_features,
+ max_display_depth: args.value_of_u32("depth")?.unwrap_or(u32::MAX),
+ no_proc_macro,
+ };
+
+ if opts.graph_features && opts.duplicates {
+ return Err(format_err!("the `-e features` flag does not support `--duplicates`").into());
+ }
+
+ tree::build_and_print(&ws, &opts)?;
+ Ok(())
+}
+
+/// Parses `--edges` option.
+///
+/// Returns a tuple of the selected `EdgeKind` set and the `no_proc_macro` flag.
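+///
+/// A few illustrative cases of how the kinds combine (derived from the logic below):
+///
+/// * `-e normal,build` selects exactly the normal and build dependency edges.
+/// * `-e no-dev` starts from the default set (normal, build, dev) and removes
+///   the dev edges.
+/// * `-e features` on its own keeps the default dependency edges and
+///   additionally shows feature edges.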
+fn parse_edge_kinds(config: &Config, args: &ArgMatches) -> CargoResult<(HashSet<EdgeKind>, bool)> {
+ let (kinds, no_proc_macro) = {
+ let mut no_proc_macro = false;
+ let mut kinds = args.get_many::<String>("edges").map_or_else(
+ || Vec::new(),
+ |es| {
+ es.flat_map(|e| e.split(','))
+ .filter(|e| {
+ no_proc_macro = *e == "no-proc-macro";
+ !no_proc_macro
+ })
+ .collect()
+ },
+ );
+
+ if args.flag("no-dev-dependencies") {
+ config
+ .shell()
+ .warn("the --no-dev-dependencies flag has changed to -e=no-dev")?;
+ kinds.push("no-dev");
+ }
+
+ if kinds.is_empty() {
+ kinds.extend(&["normal", "build", "dev"]);
+ }
+
+ (kinds, no_proc_macro)
+ };
+
+ let mut result = HashSet::new();
+ let insert_defaults = |result: &mut HashSet<EdgeKind>| {
+ result.insert(EdgeKind::Dep(DepKind::Normal));
+ result.insert(EdgeKind::Dep(DepKind::Build));
+ result.insert(EdgeKind::Dep(DepKind::Development));
+ };
+ let unknown = |k| {
+ bail!(
+ "unknown edge kind `{}`, valid values are \
+ \"normal\", \"build\", \"dev\", \
+ \"no-normal\", \"no-build\", \"no-dev\", \"no-proc-macro\", \
+ \"features\", or \"all\"",
+ k
+ )
+ };
+ if kinds.iter().any(|k| k.starts_with("no-")) {
+ insert_defaults(&mut result);
+ for kind in &kinds {
+ match *kind {
+ "no-normal" => result.remove(&EdgeKind::Dep(DepKind::Normal)),
+ "no-build" => result.remove(&EdgeKind::Dep(DepKind::Build)),
+ "no-dev" => result.remove(&EdgeKind::Dep(DepKind::Development)),
+ "features" => result.insert(EdgeKind::Feature),
+ "normal" | "build" | "dev" | "all" => {
+ bail!(
+ "`{}` dependency kind cannot be mixed with \
+ \"no-normal\", \"no-build\", or \"no-dev\" \
+ dependency kinds",
+ kind
+ )
+ }
+ k => return unknown(k),
+ };
+ }
+ return Ok((result, no_proc_macro));
+ }
+ for kind in &kinds {
+ match *kind {
+ "all" => {
+ insert_defaults(&mut result);
+ result.insert(EdgeKind::Feature);
+ }
+ "features" => {
+ result.insert(EdgeKind::Feature);
+ }
+ "normal" => {
+ result.insert(EdgeKind::Dep(DepKind::Normal));
+ }
+ "build" => {
+ result.insert(EdgeKind::Dep(DepKind::Build));
+ }
+ "dev" => {
+ result.insert(EdgeKind::Dep(DepKind::Development));
+ }
+ k => return unknown(k),
+ }
+ }
+ if kinds.len() == 1 && kinds[0] == "features" {
+ insert_defaults(&mut result);
+ }
+ Ok((result, no_proc_macro))
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/uninstall.rs b/src/tools/cargo/src/bin/cargo/commands/uninstall.rs
new file mode 100644
index 000000000..46654b668
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/uninstall.rs
@@ -0,0 +1,34 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> Command {
+ subcommand("uninstall")
+ .about("Remove a Rust binary")
+ .arg_quiet()
+ .arg(Arg::new("spec").num_args(0..))
+ .arg_package_spec_simple("Package to uninstall")
+ .arg(multi_opt("bin", "NAME", "Only uninstall the binary NAME"))
+ .arg(opt("root", "Directory to uninstall packages from").value_name("DIR"))
+ .after_help("Run `cargo help uninstall` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let root = args.get_one::<String>("root").map(String::as_str);
+
+ if args.is_present_with_zero_values("package") {
+ return Err(anyhow::anyhow!(
+ "\"--package <SPEC>\" requires a SPEC format value.\n\
+ Run `cargo help pkgid` for more information about SPEC format."
+ )
+ .into());
+ }
+
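+ // Positional `spec` arguments take precedence; `--package` values are
+ // only consulted when no positional spec is given.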
+ let specs = args
+ .get_many::<String>("spec")
+ .unwrap_or_else(|| args.get_many::<String>("package").unwrap_or_default())
+ .map(String::as_str)
+ .collect();
+ ops::uninstall(root, specs, &values(args, "bin"), config)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/update.rs b/src/tools/cargo/src/bin/cargo/commands/update.rs
new file mode 100644
index 000000000..da33e8d30
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/update.rs
@@ -0,0 +1,46 @@
+use crate::command_prelude::*;
+
+use cargo::ops::{self, UpdateOptions};
+use cargo::util::print_available_packages;
+
+pub fn cli() -> Command {
+ subcommand("update")
+ .about("Update dependencies as recorded in the local lock file")
+ .arg_quiet()
+ .arg(flag("workspace", "Only update the workspace packages").short('w'))
+ .arg_package_spec_simple("Package to update")
+ .arg(flag(
+ "aggressive",
+ "Force updating all dependencies of SPEC as well when used with -p",
+ ))
+ .arg_dry_run("Don't actually write the lockfile")
+ .arg(
+ opt(
+ "precise",
+ "Update a single dependency to exactly PRECISE when used with -p",
+ )
+ .value_name("PRECISE")
+ .requires("package"),
+ )
+ .arg_manifest_path()
+ .after_help("Run `cargo help update` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let ws = args.workspace(config)?;
+
+ if args.is_present_with_zero_values("package") {
+ print_available_packages(&ws)?;
+ }
+
+ let update_opts = UpdateOptions {
+ aggressive: args.flag("aggressive"),
+ precise: args.get_one::<String>("precise").map(String::as_str),
+ to_update: values(args, "package"),
+ dry_run: args.dry_run(),
+ workspace: args.flag("workspace"),
+ config,
+ };
+ ops::update_lockfile(&ws, &update_opts)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/vendor.rs b/src/tools/cargo/src/bin/cargo/commands/vendor.rs
new file mode 100644
index 000000000..1fd79ec51
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/vendor.rs
@@ -0,0 +1,100 @@
+use crate::command_prelude::*;
+use cargo::ops;
+use std::path::PathBuf;
+
+pub fn cli() -> Command {
+ subcommand("vendor")
+ .about("Vendor all dependencies for a project locally")
+ .arg_quiet()
+ .arg_manifest_path()
+ .arg(
+ Arg::new("path")
+ .action(ArgAction::Set)
+ .value_parser(clap::value_parser!(PathBuf))
+ .help("Where to vendor crates (`vendor` by default)"),
+ )
+ .arg(flag(
+ "no-delete",
+ "Don't delete older crates in the vendor directory",
+ ))
+ .arg(
+ Arg::new("tomls")
+ .short('s')
+ .long("sync")
+ .help("Additional `Cargo.toml` to sync and vendor")
+ .value_name("TOML")
+ .value_parser(clap::value_parser!(PathBuf))
+ .action(clap::ArgAction::Append),
+ )
+ .arg(flag(
+ "respect-source-config",
+ "Respect `[source]` config in `.cargo/config`",
+ ))
+ .arg(flag(
+ "versioned-dirs",
+ "Always include version in subdir name",
+ ))
+ .arg(flag("no-merge-sources", "Not supported").hide(true))
+ .arg(flag("relative-path", "Not supported").hide(true))
+ .arg(flag("only-git-deps", "Not supported").hide(true))
+ .arg(flag("disallow-duplicates", "Not supported").hide(true))
+ .after_help("Run `cargo help vendor` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ // We're doing the vendoring operation ourselves, so we don't actually want
+ // to respect any of the `source` configuration in Cargo itself. That's
+ // intended for other consumers of Cargo, but we want to go straight to the
+ // source, e.g. crates.io, to fetch crates.
+ if !args.flag("respect-source-config") {
+ config.values_mut()?.remove("source");
+ }
+
+ // When we moved `cargo vendor` into Cargo itself we didn't stabilize a few
+ // flags, so try to provide a helpful error message in that case to ensure
+ // that users currently using the flag aren't tripped up.
+ let crates_io_cargo_vendor_flag = if args.flag("no-merge-sources") {
+ Some("--no-merge-sources")
+ } else if args.flag("relative-path") {
+ Some("--relative-path")
+ } else if args.flag("only-git-deps") {
+ Some("--only-git-deps")
+ } else if args.flag("disallow-duplicates") {
+ Some("--disallow-duplicates")
+ } else {
+ None
+ };
+ if let Some(flag) = crates_io_cargo_vendor_flag {
+ return Err(anyhow::format_err!(
+ "\
+the crates.io `cargo vendor` command has now been merged into Cargo itself
+and does not support the flag `{}` currently; to continue using the flag you
+can execute `cargo-vendor vendor ...`, and if you would like to see this flag
+supported in Cargo itself please feel free to file an issue at
+https://github.com/rust-lang/cargo/issues/new
+",
+ flag
+ )
+ .into());
+ }
+
+ let ws = args.workspace(config)?;
+ let path = args
+ .get_one::<PathBuf>("path")
+ .cloned()
+ .unwrap_or_else(|| PathBuf::from("vendor"));
+ ops::vendor(
+ &ws,
+ &ops::VendorOptions {
+ no_delete: args.flag("no-delete"),
+ destination: &path,
+ versioned_dirs: args.flag("versioned-dirs"),
+ extra: args
+ .get_many::<PathBuf>("tomls")
+ .unwrap_or_default()
+ .cloned()
+ .collect(),
+ },
+ )?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/verify_project.rs b/src/tools/cargo/src/bin/cargo/commands/verify_project.rs
new file mode 100644
index 000000000..4d5492606
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/verify_project.rs
@@ -0,0 +1,26 @@
+use crate::command_prelude::*;
+
+use std::collections::HashMap;
+use std::process;
+
+pub fn cli() -> Command {
+ subcommand("verify-project")
+ .about("Check correctness of crate manifest")
+ .arg_quiet()
+ .arg_manifest_path()
+ .after_help("Run `cargo help verify-project` for more detailed information.\n")
+}
+
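+/// Prints a single JSON object to stdout: `{"success": "true"}` when the
+/// manifest loads cleanly, or `{"invalid": "<error>"}` followed by exit
+/// status 1 when it does not.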
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ if let Err(e) = args.workspace(config) {
+ config
+ .shell()
+ .print_json(&HashMap::from([("invalid", e.to_string())]))?;
+ process::exit(1)
+ }
+
+ config
+ .shell()
+ .print_json(&HashMap::from([("success", "true")]))?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/version.rs b/src/tools/cargo/src/bin/cargo/commands/version.rs
new file mode 100644
index 000000000..ac1681f5b
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/version.rs
@@ -0,0 +1,16 @@
+use crate::cli;
+use crate::command_prelude::*;
+
+pub fn cli() -> Command {
+ subcommand("version")
+ .about("Show version information")
+ .arg_quiet()
+ .after_help("Run `cargo help version` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let verbose = args.verbose() > 0;
+ let version = cli::get_version_string(verbose);
+ cargo::drop_print!(config, "{}", version);
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/yank.rs b/src/tools/cargo/src/bin/cargo/commands/yank.rs
new file mode 100644
index 000000000..3dee52279
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/commands/yank.rs
@@ -0,0 +1,65 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+use cargo::util::auth::Secret;
+
+pub fn cli() -> Command {
+ subcommand("yank")
+ .about("Remove a pushed crate from the index")
+ .arg_quiet()
+ .arg(Arg::new("crate").action(ArgAction::Set))
+ .arg(
+ opt("version", "The version to yank or un-yank")
+ .alias("vers")
+ .value_name("VERSION"),
+ )
+ .arg(flag(
+ "undo",
+ "Undo a yank, putting a version back into the index",
+ ))
+ .arg(opt("index", "Registry index to yank from").value_name("INDEX"))
+ .arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
+ .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .after_help("Run `cargo help yank` for more detailed information.\n")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let registry = args.registry(config)?;
+
+ let (krate, version) = resolve_crate(
+ args.get_one::<String>("crate").map(String::as_str),
+ args.get_one::<String>("version").map(String::as_str),
+ )?;
+ if version.is_none() {
+ return Err(anyhow::format_err!("`--version` is required").into());
+ }
+
+ ops::yank(
+ config,
+ krate.map(|s| s.to_string()),
+ version.map(|s| s.to_string()),
+ args.get_one::<String>("token").cloned().map(Secret::from),
+ args.get_one::<String>("index").cloned(),
+ args.flag("undo"),
+ registry,
+ )?;
+ Ok(())
+}
+
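+/// Splits a `name@version` specifier into its crate-name and version parts.
+///
+/// For example, `foo@1.0.0` resolves to `(Some("foo"), Some("1.0.0"))`;
+/// combining `foo@1.0.0` with `--version` is rejected as ambiguous.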
+fn resolve_crate<'k>(
+ mut krate: Option<&'k str>,
+ mut version: Option<&'k str>,
+) -> crate::CargoResult<(Option<&'k str>, Option<&'k str>)> {
+ if let Some((k, v)) = krate.and_then(|k| k.split_once('@')) {
+ if version.is_some() {
+ anyhow::bail!("cannot specify both `@{v}` and `--version`");
+ }
+ if k.is_empty() {
+ // by convention, arguments starting with `@` are response files
+ anyhow::bail!("missing crate name for `@{v}`");
+ }
+ krate = Some(k);
+ version = Some(v);
+ }
+ Ok((krate, version))
+}
diff --git a/src/tools/cargo/src/bin/cargo/main.rs b/src/tools/cargo/src/bin/cargo/main.rs
new file mode 100644
index 000000000..55da2997f
--- /dev/null
+++ b/src/tools/cargo/src/bin/cargo/main.rs
@@ -0,0 +1,323 @@
+#![warn(rust_2018_idioms)] // while we're getting used to 2018
+#![allow(clippy::all)]
+#![warn(clippy::disallowed_methods)]
+
+use cargo::util::toml::StringOrVec;
+use cargo::util::CliError;
+use cargo::util::{self, closest_msg, command_prelude, CargoResult, CliResult, Config};
+use cargo_util::{ProcessBuilder, ProcessError};
+use std::collections::BTreeMap;
+use std::env;
+use std::ffi::OsStr;
+use std::fs;
+use std::path::{Path, PathBuf};
+
+mod cli;
+mod commands;
+
+use crate::command_prelude::*;
+
+fn main() {
+ #[cfg(feature = "pretty-env-logger")]
+ pretty_env_logger::init_custom_env("CARGO_LOG");
+ #[cfg(not(feature = "pretty-env-logger"))]
+ env_logger::init_from_env("CARGO_LOG");
+
+ let mut config = cli::LazyConfig::new();
+
+ let result = if let Some(lock_addr) = cargo::ops::fix_get_proxy_lock_addr() {
+ cargo::ops::fix_exec_rustc(config.get(), &lock_addr).map_err(|e| CliError::from(e))
+ } else {
+ let _token = cargo::util::job::setup();
+ cli::main(&mut config)
+ };
+
+ match result {
+ Err(e) => cargo::exit_with_error(e, &mut config.get_mut().shell()),
+ Ok(()) => {}
+ }
+}
+
+/// Table defining the aliases that are built into Cargo.
+/// The contents are structured as: `(alias, aliased_command, description)`.
+const BUILTIN_ALIASES: [(&str, &str, &str); 6] = [
+ ("b", "build", "alias: build"),
+ ("c", "check", "alias: check"),
+ ("d", "doc", "alias: doc"),
+ ("r", "run", "alias: run"),
+ ("t", "test", "alias: test"),
+ ("rm", "remove", "alias: remove"),
+];
+
+/// Looks up a built-in alias by name, returning its entry from
+/// [`BUILTIN_ALIASES`] if one exists.
+fn builtin_aliases_execs(cmd: &str) -> Option<&(&str, &str, &str)> {
+ BUILTIN_ALIASES.iter().find(|alias| alias.0 == cmd)
+}
+
+/// Resolve the aliased command from the [`Config`] with a given command string.
+///
+/// The search fallback chain is:
+///
+/// 1. Get the aliased command as a string.
+/// 2. If an `Err` occurs (missing key, type mismatch, or any possible error),
+///    try to get it as an array instead.
+/// 3. If nothing is found, fall back to [`BUILTIN_ALIASES`].
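+///
+/// For example, with an alias like this in the configuration (illustrative):
+///
+/// ```toml
+/// [alias]
+/// br = "build --release"
+/// ```
+///
+/// `aliased_command(config, "br")` resolves to `Some(vec!["build", "--release"])`.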
+fn aliased_command(config: &Config, command: &str) -> CargoResult<Option<Vec<String>>> {
+ let alias_name = format!("alias.{}", command);
+ let user_alias = match config.get_string(&alias_name) {
+ Ok(Some(record)) => Some(
+ record
+ .val
+ .split_whitespace()
+ .map(|s| s.to_string())
+ .collect(),
+ ),
+ Ok(None) => None,
+ Err(_) => config.get::<Option<Vec<String>>>(&alias_name)?,
+ };
+
+ let result = user_alias.or_else(|| {
+ builtin_aliases_execs(command).map(|command_str| vec![command_str.1.to_string()])
+ });
+ Ok(result)
+}
+
+/// List all runnable commands
+fn list_commands(config: &Config) -> BTreeMap<String, CommandInfo> {
+ let prefix = "cargo-";
+ let suffix = env::consts::EXE_SUFFIX;
+ let mut commands = BTreeMap::new();
+ for dir in search_directories(config) {
+ let entries = match fs::read_dir(dir) {
+ Ok(entries) => entries,
+ _ => continue,
+ };
+ for entry in entries.filter_map(|e| e.ok()) {
+ let path = entry.path();
+ let filename = match path.file_name().and_then(|s| s.to_str()) {
+ Some(filename) => filename,
+ _ => continue,
+ };
+ if !filename.starts_with(prefix) || !filename.ends_with(suffix) {
+ continue;
+ }
+ if is_executable(entry.path()) {
+ let end = filename.len() - suffix.len();
+ commands.insert(
+ filename[prefix.len()..end].to_string(),
+ CommandInfo::External { path: path.clone() },
+ );
+ }
+ }
+ }
+
+ for cmd in commands::builtin() {
+ commands.insert(
+ cmd.get_name().to_string(),
+ CommandInfo::BuiltIn {
+ about: cmd.get_about().map(|s| s.to_string()),
+ },
+ );
+ }
+
+ // Add the builtin aliases and their descriptions to the
+ // `commands` `BTreeMap`.
+ for command in &BUILTIN_ALIASES {
+ commands.insert(
+ command.0.to_string(),
+ CommandInfo::BuiltIn {
+ about: Some(command.2.to_string()),
+ },
+ );
+ }
+
+ // Add the user-defined aliases
+ if let Ok(aliases) = config.get::<BTreeMap<String, StringOrVec>>("alias") {
+ for (name, target) in aliases.iter() {
+ commands.insert(
+ name.to_string(),
+ CommandInfo::Alias {
+ target: target.clone(),
+ },
+ );
+ }
+ }
+
+ // `help` is special, so it needs to be inserted separately.
+ commands.insert(
+ "help".to_string(),
+ CommandInfo::BuiltIn {
+ about: Some("Displays help for a cargo subcommand".to_string()),
+ },
+ );
+
+ commands
+}
+
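+/// Looks for an external subcommand executable named `cargo-<cmd>` (for
+/// example, `cargo-expand` for `cargo expand`) in the search directories,
+/// returning the first match.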
+fn find_external_subcommand(config: &Config, cmd: &str) -> Option<PathBuf> {
+ let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX);
+ search_directories(config)
+ .iter()
+ .map(|dir| dir.join(&command_exe))
+ .find(|file| is_executable(file))
+}
+
+fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&OsStr]) -> CliResult {
+ let path = find_external_subcommand(config, cmd);
+ let command = match path {
+ Some(command) => command,
+ None => {
+ let err = if cmd.starts_with('+') {
+ anyhow::format_err!(
+ "no such command: `{}`\n\n\t\
+ Cargo does not handle `+toolchain` directives.\n\t\
+ Did you mean to invoke `cargo` through `rustup` instead?",
+ cmd
+ )
+ } else {
+ let suggestions = list_commands(config);
+ let did_you_mean = closest_msg(cmd, suggestions.keys(), |c| c);
+
+ anyhow::format_err!(
+ "no such command: `{}`{}\n\n\t\
+ View all installed commands with `cargo --list`",
+ cmd,
+ did_you_mean
+ )
+ };
+
+ return Err(CliError::new(err, 101));
+ }
+ };
+ execute_subcommand(config, Some(&command), args)
+}
+
+fn execute_internal_subcommand(config: &Config, args: &[&OsStr]) -> CliResult {
+ execute_subcommand(config, None, args)
+}
+
+// Executes a subcommand, either internal or external.
+// If `cmd_path` is `None`, the subcommand is internal and this `cargo`
+// binary is re-invoked to handle it.
+fn execute_subcommand(config: &Config, cmd_path: Option<&PathBuf>, args: &[&OsStr]) -> CliResult {
+ let cargo_exe = config.cargo_exe()?;
+ let mut cmd = match cmd_path {
+ Some(cmd_path) => ProcessBuilder::new(cmd_path),
+ None => ProcessBuilder::new(&cargo_exe),
+ };
+ cmd.env(cargo::CARGO_ENV, cargo_exe).args(args);
+ if let Some(client) = config.jobserver_from_env() {
+ cmd.inherit_jobserver(client);
+ }
+ let err = match cmd.exec_replace() {
+ Ok(()) => return Ok(()),
+ Err(e) => e,
+ };
+
+ if let Some(perr) = err.downcast_ref::<ProcessError>() {
+ if let Some(code) = perr.code {
+ return Err(CliError::code(code));
+ }
+ }
+ Err(CliError::new(err, 101))
+}
+
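+// On Unix, a file counts as executable if any of the execute permission
+// bits (owner, group, or other) is set; on Windows, any regular file does.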
+#[cfg(unix)]
+fn is_executable<P: AsRef<Path>>(path: P) -> bool {
+ use std::os::unix::prelude::*;
+ fs::metadata(path)
+ .map(|metadata| metadata.is_file() && metadata.permissions().mode() & 0o111 != 0)
+ .unwrap_or(false)
+}
+#[cfg(windows)]
+fn is_executable<P: AsRef<Path>>(path: P) -> bool {
+ path.as_ref().is_file()
+}
+
+fn search_directories(config: &Config) -> Vec<PathBuf> {
+ let mut path_dirs = if let Some(val) = config.get_env_os("PATH") {
+ env::split_paths(&val).collect()
+ } else {
+ vec![]
+ };
+
+ let home_bin = config.home().clone().into_path_unlocked().join("bin");
+
+ // If any of the PATH elements is already `home_bin`, do not
+ // add it again. This lets users control its priority via PATH,
+ // while preserving the historical behavior of preferring it over
+ // system-global directories even when it is not in PATH at all.
+ // See https://github.com/rust-lang/cargo/issues/11020 for details.
+ //
+ // Note: `p == home_bin` will ignore trailing slash, but we don't
+ // `canonicalize` the paths.
+ if !path_dirs.iter().any(|p| p == &home_bin) {
+ path_dirs.insert(0, home_bin);
+ };
+
+ path_dirs
+}
+
+/// Initialize libgit2.
+fn init_git(config: &Config) {
+ // Disabling the owner validation in git can, in theory, lead to code execution
+ // vulnerabilities. However, libgit2 does not launch executables, which is the foundation of
+ // the original security issue. Meanwhile, issues with refusing to load git repos in
+ // `CARGO_HOME` for example will likely be very frustrating for users. So, we disable the
+ // validation.
+ //
+ // For further discussion of Cargo's current interactions with git, see
+ //
+ // https://github.com/rust-lang/rfcs/pull/3279
+ //
+ // and in particular the subsection on "Git support".
+ //
+ // Note that we only disable this when Cargo is run as a binary. If Cargo is used as a library,
+ // this code won't be invoked. Instead, developers will need to explicitly disable the
+ // validation in their code. This is inconvenient, but won't accidentally open consuming
+ // applications up to security issues if they use git2 to open repositories elsewhere in their
+ // code.
+ unsafe {
+ git2::opts::set_verify_owner_validation(false)
+ .expect("set_verify_owner_validation should never fail");
+ }
+
+ init_git_transports(config);
+}
+
+/// Configure libgit2 to use libcurl if necessary.
+///
+/// If the user has a non-default network configuration, then libgit2 will be
+/// configured to use libcurl instead of the built-in networking support so
+/// that those configuration settings can be used.
+fn init_git_transports(config: &Config) {
+ // Only use a custom transport if any HTTP options are specified,
+ // such as proxies or custom certificate authorities. The custom
+ // transport, however, is not as well battle-tested.
+
+ match cargo::ops::needs_custom_http_transport(config) {
+ Ok(true) => {}
+ _ => return,
+ }
+
+ let handle = match cargo::ops::http_handle(config) {
+ Ok(handle) => handle,
+ Err(..) => return,
+ };
+
+ // The unsafety of the registration function derives from two aspects:
+ //
+ // 1. This call must be synchronized with all other registration calls as
+ // well as construction of new transports.
+ // 2. The argument is leaked.
+ //
+ // We're clear on point (1) because this is only called at the start of this
+ // binary (we know what the state of the world looks like) and we're mostly
+ // clear on point (2) because we'd only free it after everything is done
+ // anyway.
+ unsafe {
+ git2_curl::register(handle);
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/artifact.rs b/src/tools/cargo/src/cargo/core/compiler/artifact.rs
new file mode 100644
index 000000000..1f3b12b5c
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/artifact.rs
@@ -0,0 +1,98 @@
+//! Generate artifact information from unit dependencies for configuring the compiler environment.
+
+use crate::core::compiler::unit_graph::UnitDep;
+use crate::core::compiler::{Context, CrateType, FileFlavor, Unit};
+use crate::core::dependency::ArtifactKind;
+use crate::core::{Dependency, Target, TargetKind};
+use crate::CargoResult;
+use std::collections::{HashMap, HashSet};
+use std::ffi::OsString;
+
+/// Return all environment variables for the given unit-dependencies
+/// if artifacts are present.
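+///
+/// For an artifact dependency on a binary named `bar`, the variables set
+/// look like the following (illustrative):
+///
+/// * `CARGO_BIN_DIR_BAR` - directory containing the built artifact
+/// * `CARGO_BIN_FILE_BAR_bar` - full path to the built `bar` binary
+/// * `CARGO_BIN_FILE_BAR` - shorthand emitted when the target name matches
+///   the dependency name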
+pub fn get_env(
+ cx: &Context<'_, '_>,
+ dependencies: &[UnitDep],
+) -> CargoResult<HashMap<String, OsString>> {
+ let mut env = HashMap::new();
+ for unit_dep in dependencies.iter().filter(|d| d.unit.artifact.is_true()) {
+ for artifact_path in cx
+ .outputs(&unit_dep.unit)?
+ .iter()
+ .filter_map(|f| (f.flavor == FileFlavor::Normal).then(|| &f.path))
+ {
+ let artifact_type_upper = unit_artifact_type_name_upper(&unit_dep.unit);
+ let dep_name = unit_dep.dep_name.unwrap_or(unit_dep.unit.pkg.name());
+ let dep_name_upper = dep_name.to_uppercase().replace("-", "_");
+
+ let var = format!("CARGO_{}_DIR_{}", artifact_type_upper, dep_name_upper);
+ let path = artifact_path.parent().expect("parent dir for artifacts");
+ env.insert(var, path.to_owned().into());
+
+ let var = format!(
+ "CARGO_{}_FILE_{}_{}",
+ artifact_type_upper,
+ dep_name_upper,
+ unit_dep.unit.target.name()
+ );
+ env.insert(var, artifact_path.to_owned().into());
+
+ if unit_dep.unit.target.name() == dep_name.as_str() {
+ let var = format!("CARGO_{}_FILE_{}", artifact_type_upper, dep_name_upper,);
+ env.insert(var, artifact_path.to_owned().into());
+ }
+ }
+ }
+ Ok(env)
+}
+
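+/// Maps the unit's target kind to the upper-cased name used in the
+/// artifact environment variables above: `CDYLIB`, `STATICLIB`, or `BIN`.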
+fn unit_artifact_type_name_upper(unit: &Unit) -> &'static str {
+ match unit.target.kind() {
+ TargetKind::Lib(kinds) => match kinds.as_slice() {
+ &[CrateType::Cdylib] => "CDYLIB",
+ &[CrateType::Staticlib] => "STATICLIB",
+ invalid => unreachable!("BUG: artifacts cannot be of type {:?}", invalid),
+ },
+ TargetKind::Bin => "BIN",
+ invalid => unreachable!("BUG: artifacts cannot be of type {:?}", invalid),
+ }
+}
+
+/// Given a dependency with an artifact `artifact_dep` and a set of available `targets`
+/// of its package, find a target for each kind of artifact that is to be built.
+///
+/// Failure to match any target results in an error mentioning the parent manifest's
+/// `parent_package` name.
+pub(crate) fn match_artifacts_kind_with_targets<'t, 'd>(
+ artifact_dep: &'d Dependency,
+ targets: &'t [Target],
+ parent_package: &str,
+) -> CargoResult<HashSet<(&'d ArtifactKind, &'t Target)>> {
+ let mut out = HashSet::new();
+ let artifact_requirements = artifact_dep.artifact().expect("artifact present");
+ for artifact_kind in artifact_requirements.kinds() {
+ let mut extend = |kind, filter: &dyn Fn(&&Target) -> bool| {
+ let mut iter = targets.iter().filter(filter).peekable();
+ let found = iter.peek().is_some();
+ out.extend(std::iter::repeat(kind).zip(iter));
+ found
+ };
+ let found = match artifact_kind {
+ ArtifactKind::Cdylib => extend(artifact_kind, &|t| t.is_cdylib()),
+ ArtifactKind::Staticlib => extend(artifact_kind, &|t| t.is_staticlib()),
+ ArtifactKind::AllBinaries => extend(artifact_kind, &|t| t.is_bin()),
+ ArtifactKind::SelectedBinary(bin_name) => extend(artifact_kind, &|t| {
+ t.is_bin() && t.name() == bin_name.as_str()
+ }),
+ };
+ if !found {
+ anyhow::bail!(
+ "dependency `{}` in package `{}` requires a `{}` artifact to be present.",
+ artifact_dep.name_in_toml(),
+ parent_package,
+ artifact_kind
+ );
+ }
+ }
+ Ok(out)
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/build_config.rs b/src/tools/cargo/src/cargo/core/compiler/build_config.rs
new file mode 100644
index 000000000..885b124b9
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/build_config.rs
@@ -0,0 +1,261 @@
+use crate::core::compiler::CompileKind;
+use crate::util::interning::InternedString;
+use crate::util::{CargoResult, Config, RustfixDiagnosticServer};
+use anyhow::{bail, Context as _};
+use cargo_util::ProcessBuilder;
+use serde::ser;
+use std::cell::RefCell;
+use std::path::PathBuf;
+use std::sync::Arc;
+use std::thread::available_parallelism;
+
+/// Configuration information for a rustc build.
+#[derive(Debug, Clone)]
+pub struct BuildConfig {
+ /// The requested kind of compilation for this session
+ pub requested_kinds: Vec<CompileKind>,
+ /// Number of rustc jobs to run in parallel.
+ pub jobs: u32,
+ /// Do not abort the build as soon as there is an error.
+ pub keep_going: bool,
+ /// Build profile
+ pub requested_profile: InternedString,
+ /// The mode we are compiling in.
+ pub mode: CompileMode,
+ /// `true` to print stdout in JSON format (for machine reading).
+ pub message_format: MessageFormat,
+ /// Force Cargo to do a full rebuild and treat each target as changed.
+ pub force_rebuild: bool,
+ /// Output a build plan to stdout instead of actually compiling.
+ pub build_plan: bool,
+ /// Output the unit graph to stdout instead of actually compiling.
+ pub unit_graph: bool,
+ /// An optional override of the rustc process for primary units
+ pub primary_unit_rustc: Option<ProcessBuilder>,
+ /// A thread used by `cargo fix` to receive messages on a socket regarding
+ /// the success/failure of applying fixes.
+ pub rustfix_diagnostic_server: Arc<RefCell<Option<RustfixDiagnosticServer>>>,
+ /// The directory to copy final artifacts to. Note that even if `out_dir` is
+ /// set, a copy of the artifacts can still be found at `target/(debug|release)`
+ /// as usual.
+ // Note that, although the cmd-line flag name is `out-dir`, in code we use
+ // `export_dir`, to avoid confusion with out dir at `target/debug/deps`.
+ pub export_dir: Option<PathBuf>,
+ /// `true` to output a future incompatibility report at the end of the build
+ pub future_incompat_report: bool,
+ /// Which kinds of build timings to output (empty if none).
+ pub timing_outputs: Vec<TimingOutput>,
+}
+
+fn default_parallelism() -> CargoResult<u32> {
+ Ok(available_parallelism()
+ .context("failed to determine the amount of parallelism available")?
+ .get() as u32)
+}
+
+impl BuildConfig {
+ /// Parses all config files to learn about build configuration. Currently
+ /// configured options are:
+ ///
+ /// * `build.jobs`
+ /// * `build.target`
+ /// * `target.$target.ar`
+ /// * `target.$target.linker`
+ /// * `target.$target.libfoo.metadata`
+ pub fn new(
+ config: &Config,
+ jobs: Option<i32>,
+ keep_going: bool,
+ requested_targets: &[String],
+ mode: CompileMode,
+ ) -> CargoResult<BuildConfig> {
+ let cfg = config.build_config()?;
+ let requested_kinds = CompileKind::from_requested_targets(config, requested_targets)?;
+ if jobs.is_some() && config.jobserver_from_env().is_some() {
+ config.shell().warn(
+ "a `-j` argument was passed to Cargo but Cargo is \
+ also configured with an external jobserver in \
+ its environment, ignoring the `-j` parameter",
+ )?;
+ }
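+ // A negative `--jobs` value is interpreted relative to the available
+ // parallelism, clamped to at least one job (e.g. `-j -2` on a machine
+ // with 8 logical CPUs yields 6 jobs).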
+ let jobs = match jobs.or(cfg.jobs) {
+ None => default_parallelism()?,
+ Some(0) => anyhow::bail!("jobs may not be 0"),
+ Some(j) if j < 0 => (default_parallelism()? as i32 + j).max(1) as u32,
+ Some(j) => j as u32,
+ };
+
+ if config.cli_unstable().build_std.is_some() && requested_kinds[0].is_host() {
+ // TODO: This should eventually be fixed.
+ anyhow::bail!("-Zbuild-std requires --target");
+ }
+
+ Ok(BuildConfig {
+ requested_kinds,
+ jobs,
+ keep_going,
+ requested_profile: InternedString::new("dev"),
+ mode,
+ message_format: MessageFormat::Human,
+ force_rebuild: false,
+ build_plan: false,
+ unit_graph: false,
+ primary_unit_rustc: None,
+ rustfix_diagnostic_server: Arc::new(RefCell::new(None)),
+ export_dir: None,
+ future_incompat_report: false,
+ timing_outputs: Vec::new(),
+ })
+ }
+
+ /// Whether or not the *user* wants JSON output. Whether or not rustc
+ /// actually uses JSON is decided in `add_error_format`.
+ pub fn emit_json(&self) -> bool {
+ matches!(self.message_format, MessageFormat::Json { .. })
+ }
+
+ pub fn test(&self) -> bool {
+ self.mode == CompileMode::Test || self.mode == CompileMode::Bench
+ }
+
+ pub fn single_requested_kind(&self) -> CargoResult<CompileKind> {
+ match self.requested_kinds.len() {
+ 1 => Ok(self.requested_kinds[0]),
+ _ => bail!("only one `--target` argument is supported"),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum MessageFormat {
+ Human,
+ Json {
+ /// Whether rustc diagnostics are rendered by cargo or included into the
+ /// output stream.
+ render_diagnostics: bool,
+ /// Whether the `rendered` field of rustc diagnostics are using the
+ /// "short" rendering.
+ short: bool,
+ /// Whether the `rendered` field of rustc diagnostics embed ansi color
+ /// codes.
+ ansi: bool,
+ },
+ Short,
+}
+
+/// The general "mode" for what to do.
+/// This is used for two purposes. The commands themselves pass this in to
+/// `compile_ws` to tell it the general execution strategy. This influences
+/// the default targets selected. The other use is in the `Unit` struct
+/// to indicate what is being done with a specific target.
+#[derive(Clone, Copy, PartialEq, Debug, Eq, Hash, PartialOrd, Ord)]
+pub enum CompileMode {
+ /// A target being built for a test.
+ Test,
+ /// Building a target with `rustc` (lib or bin).
+ Build,
+ /// Building a target with `rustc` to emit `rmeta` metadata only. If
+ /// `test` is true, then it is also compiled with `--test` to check it like
+ /// a test.
+ Check { test: bool },
+ /// Used to indicate benchmarks should be built. This is not used in
+ /// `Unit`, because it is essentially the same as `Test` (indicating
+ /// `--test` should be passed to rustc) and by using `Test` instead it
+ /// allows some de-duping of Units to occur.
+ Bench,
+ /// A target that will be documented with `rustdoc`.
+ /// If `deps` is true, then it will also document all dependencies.
+ Doc { deps: bool },
+ /// A target that will be tested with `rustdoc`.
+ Doctest,
+ /// An example or library that will be scraped for function calls by `rustdoc`.
+ Docscrape,
+ /// A marker for Units that represent the execution of a `build.rs` script.
+ RunCustomBuild,
+}
+
+impl ser::Serialize for CompileMode {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ use self::CompileMode::*;
+ match *self {
+ Test => "test".serialize(s),
+ Build => "build".serialize(s),
+ Check { .. } => "check".serialize(s),
+ Bench => "bench".serialize(s),
+ Doc { .. } => "doc".serialize(s),
+ Doctest => "doctest".serialize(s),
+ Docscrape => "docscrape".serialize(s),
+ RunCustomBuild => "run-custom-build".serialize(s),
+ }
+ }
+}
+
+impl CompileMode {
+ /// Returns `true` if the unit is being checked.
+ pub fn is_check(self) -> bool {
+ matches!(self, CompileMode::Check { .. })
+ }
+
+ /// Returns `true` if this is generating documentation.
+ pub fn is_doc(self) -> bool {
+ matches!(self, CompileMode::Doc { .. })
+ }
+
+ /// Returns `true` if this is a doc test.
+ pub fn is_doc_test(self) -> bool {
+ self == CompileMode::Doctest
+ }
+
+ /// Returns `true` if this is scraping examples for documentation.
+ pub fn is_doc_scrape(self) -> bool {
+ self == CompileMode::Docscrape
+ }
+
+ /// Returns `true` if this is any type of test (test, benchmark, doc test, or
+ /// check test).
+ pub fn is_any_test(self) -> bool {
+ matches!(
+ self,
+ CompileMode::Test
+ | CompileMode::Bench
+ | CompileMode::Check { test: true }
+ | CompileMode::Doctest
+ )
+ }
+
+ /// Returns `true` if this is something that passes `--test` to rustc.
+ pub fn is_rustc_test(self) -> bool {
+ matches!(
+ self,
+ CompileMode::Test | CompileMode::Bench | CompileMode::Check { test: true }
+ )
+ }
+
+ /// Returns `true` if this is the *execution* of a `build.rs` script.
+ pub fn is_run_custom_build(self) -> bool {
+ self == CompileMode::RunCustomBuild
+ }
+
+ /// Returns `true` if this mode may generate an executable.
+ ///
+ /// Note that this also returns `true` for building libraries, so you also
+ /// have to check the target.
+ pub fn generates_executable(self) -> bool {
+ matches!(
+ self,
+ CompileMode::Test | CompileMode::Bench | CompileMode::Build
+ )
+ }
+}
+
+/// Kinds of build timings we can output.
+#[derive(Clone, Copy, PartialEq, Debug, Eq, Hash, PartialOrd, Ord)]
+pub enum TimingOutput {
+ /// Human-readable HTML report
+ Html,
+ /// Machine-readable JSON (unstable)
+ Json,
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/build_context/mod.rs b/src/tools/cargo/src/cargo/core/compiler/build_context/mod.rs
new file mode 100644
index 000000000..f35084e2b
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/build_context/mod.rs
@@ -0,0 +1,180 @@
+//! [`BuildContext`] is a (mostly) static information about a build task.
+
+use crate::core::compiler::unit_graph::UnitGraph;
+use crate::core::compiler::{BuildConfig, CompileKind, Unit};
+use crate::core::profiles::Profiles;
+use crate::core::PackageSet;
+use crate::core::Workspace;
+use crate::util::config::Config;
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+use crate::util::Rustc;
+use std::collections::{HashMap, HashSet};
+use std::path::PathBuf;
+
+mod target_info;
+pub use self::target_info::{
+ FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo,
+};
+
+/// The build context, containing complete information needed for a build task
+/// before it gets started.
+///
+/// It is intended that this is mostly static information. Stuff that mutates
+/// during the build can be found in the parent [`Context`]. (I say mostly,
+/// because this has internal caching, but nothing that should be observable
+/// or require &mut.)
+///
+/// As a result, almost every field on `BuildContext` is public, including
+///
+/// * a resolved [`UnitGraph`] of your dependencies,
+/// * a [`Profiles`] containing compiler flags presets,
+/// * a [`RustcTargetData`] containing host and target platform information,
+/// * and a [`PackageSet`] for further package downloads,
+///
+/// just to name a few. Learn more in each field's own documentation.
+///
+/// # How to use
+///
+/// To prepare a build task, you may not want to use [`BuildContext::new`] directly,
+/// since it is often too low-level.
+/// Instead, [`ops::create_bcx`] is usually what you are looking for.
+///
+/// After a `BuildContext` is built, the next stage of building is handled in [`Context`].
+///
+/// [`Context`]: crate::core::compiler::Context
+/// [`ops::create_bcx`]: crate::ops::create_bcx
+pub struct BuildContext<'a, 'cfg> {
+ /// The workspace the build is for.
+ pub ws: &'a Workspace<'cfg>,
+
+ /// The cargo configuration.
+ pub config: &'cfg Config,
+
+ /// This contains a collection of compiler flags presets.
+ pub profiles: Profiles,
+
+ /// Configuration information for a rustc build.
+ pub build_config: &'a BuildConfig,
+
+ /// Extra compiler args for either `rustc` or `rustdoc`.
+ pub extra_compiler_args: HashMap<Unit, Vec<String>>,
+
+ /// Package downloader.
+ ///
+ /// This holds ownership of the `Package` objects.
+ pub packages: PackageSet<'cfg>,
+
+ /// Information about rustc and the target platform.
+ pub target_data: RustcTargetData<'cfg>,
+
+ /// The root units of `unit_graph` (units requested on the command-line).
+ pub roots: Vec<Unit>,
+
+ /// The dependency graph of units to compile.
+ pub unit_graph: UnitGraph,
+
+ /// Reverse-dependencies of documented units, used by the `rustdoc --scrape-examples` flag.
+ pub scrape_units: Vec<Unit>,
+
+ /// The list of all kinds that are involved in this build
+ pub all_kinds: HashSet<CompileKind>,
+}
+
+impl<'a, 'cfg> BuildContext<'a, 'cfg> {
+ pub fn new(
+ ws: &'a Workspace<'cfg>,
+ packages: PackageSet<'cfg>,
+ build_config: &'a BuildConfig,
+ profiles: Profiles,
+ extra_compiler_args: HashMap<Unit, Vec<String>>,
+ target_data: RustcTargetData<'cfg>,
+ roots: Vec<Unit>,
+ unit_graph: UnitGraph,
+ scrape_units: Vec<Unit>,
+ ) -> CargoResult<BuildContext<'a, 'cfg>> {
+ let all_kinds = unit_graph
+ .keys()
+ .map(|u| u.kind)
+ .chain(build_config.requested_kinds.iter().copied())
+ .chain(std::iter::once(CompileKind::Host))
+ .collect();
+
+ Ok(BuildContext {
+ ws,
+ config: ws.config(),
+ packages,
+ build_config,
+ profiles,
+ extra_compiler_args,
+ target_data,
+ roots,
+ unit_graph,
+ scrape_units,
+ all_kinds,
+ })
+ }
+
+ /// Information of the `rustc` this build task will use.
+ pub fn rustc(&self) -> &Rustc {
+ &self.target_data.rustc
+ }
+
+ /// Gets the user-specified linker for a particular host or target.
+ pub fn linker(&self, kind: CompileKind) -> Option<PathBuf> {
+ self.target_data
+ .target_config(kind)
+ .linker
+ .as_ref()
+ .map(|l| l.val.clone().resolve_program(self.config))
+ }
+
+ /// Gets the host architecture triple.
+ ///
+ /// For example, x86_64-unknown-linux-gnu, would be
+ /// - machine: x86_64,
+ /// - hardware-platform: unknown,
+ /// - operating system: linux-gnu.
+ pub fn host_triple(&self) -> InternedString {
+ self.target_data.rustc.host
+ }
+
+ /// Gets the number of jobs specified for this build.
+ pub fn jobs(&self) -> u32 {
+ self.build_config.jobs
+ }
+
+ /// Extra compiler flags to pass to `rustc` for a given unit.
+ ///
+ /// Although it depends on the caller, in the current Cargo implementation,
+ /// these flags take precedence over those from [`BuildContext::extra_args_for`].
+ ///
+ /// As of now, these flags come from environment variables and configurations.
+ /// See [`TargetInfo.rustflags`] for more on how Cargo collects them.
+ ///
+ /// [`TargetInfo.rustflags`]: TargetInfo::rustflags
+ pub fn rustflags_args(&self, unit: &Unit) -> &[String] {
+ &self.target_data.info(unit.kind).rustflags
+ }
+
+ /// Extra compiler flags to pass to `rustdoc` for a given unit.
+ ///
+ /// Although it depends on the caller, in the current Cargo implementation,
+ /// these flags take precedence over those from [`BuildContext::extra_args_for`].
+ ///
+ /// As of now, these flags come from environment variables and configurations.
+ /// See [`TargetInfo.rustdocflags`] for more on how Cargo collects them.
+ ///
+ /// [`TargetInfo.rustdocflags`]: TargetInfo::rustdocflags
+ pub fn rustdocflags_args(&self, unit: &Unit) -> &[String] {
+ &self.target_data.info(unit.kind).rustdocflags
+ }
+
+ /// Extra compiler args for either `rustc` or `rustdoc`.
+ ///
+ /// As of now, these flags come from the trailing args of either
+ /// `cargo rustc` or `cargo rustdoc`.
+ pub fn extra_args_for(&self, unit: &Unit) -> Option<&Vec<String>> {
+ self.extra_compiler_args.get(unit)
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/build_context/target_info.rs b/src/tools/cargo/src/cargo/core/compiler/build_context/target_info.rs
new file mode 100644
index 000000000..bf8bb1578
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/build_context/target_info.rs
@@ -0,0 +1,1116 @@
+//! This module contains types storing information about target platforms.
+//!
+//! Normally, call [`RustcTargetData::new`] once to construct information for
+//! all the target platforms, and then query it on demand. For example,
+//!
+//! * [`RustcTargetData::dep_platform_activated`] to check if platform is activated.
+//! * [`RustcTargetData::info`] to get a [`TargetInfo`] for an in-depth query.
+//! * [`TargetInfo::rustc_outputs`] to get a list of supported file types.
+
+use crate::core::compiler::{
+ BuildOutput, CompileKind, CompileMode, CompileTarget, Context, CrateType,
+};
+use crate::core::{Dependency, Package, Target, TargetKind, Workspace};
+use crate::util::config::{Config, StringList, TargetConfig};
+use crate::util::interning::InternedString;
+use crate::util::{CargoResult, Rustc};
+use anyhow::Context as _;
+use cargo_platform::{Cfg, CfgExpr};
+use cargo_util::{paths, ProcessBuilder};
+use serde::{Deserialize, Serialize};
+use std::cell::RefCell;
+use std::collections::hash_map::{Entry, HashMap};
+use std::path::{Path, PathBuf};
+use std::str::{self, FromStr};
+
+/// Information about the platform target gleaned from querying rustc.
+///
+/// [`RustcTargetData`] keeps several of these, one for the host and the others
+/// for other specified targets. If no target is specified, it uses a clone from
+/// the host.
+#[derive(Clone)]
+pub struct TargetInfo {
+ /// A base process builder for discovering crate type information. In
+ /// particular, this is used to determine the output filename prefix and
+ /// suffix for a crate type.
+ crate_type_process: ProcessBuilder,
+ /// Cache of output filename prefixes and suffixes.
+ ///
+ /// The key is the crate type name (like `cdylib`) and the value is
+ /// `Some((prefix, suffix))`, for example `libcargo.so` would be
+ /// `Some(("lib", ".so")). The value is `None` if the crate type is not
+ /// supported.
+ crate_types: RefCell<HashMap<CrateType, Option<(String, String)>>>,
+ /// `cfg` information extracted from `rustc --print=cfg`.
+ cfg: Vec<Cfg>,
+ /// Supported values for `-Csplit-debuginfo=` flag, queried from rustc
+ support_split_debuginfo: Vec<String>,
+ /// Path to the sysroot.
+ pub sysroot: PathBuf,
+ /// Path to the "lib" or "bin" directory that rustc uses for its dynamic
+ /// libraries.
+ pub sysroot_host_libdir: PathBuf,
+ /// Path to the "lib" directory in the sysroot which rustc uses for linking
+ /// target libraries.
+ pub sysroot_target_libdir: PathBuf,
+ /// Extra flags to pass to `rustc`, see [`extra_args`].
+ pub rustflags: Vec<String>,
+ /// Extra flags to pass to `rustdoc`, see [`extra_args`].
+ pub rustdocflags: Vec<String>,
+}
+
+/// Kind of each file generated by a Unit, part of `FileType`.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub enum FileFlavor {
+ /// Not a special file type.
+ Normal,
+ /// Like `Normal`, but not directly executable.
+ /// For example, a `.wasm` file paired with the "normal" `.js` file.
+ Auxiliary,
+ /// Something you can link against (e.g., a library).
+ Linkable,
+ /// An `.rmeta` Rust metadata file.
+ Rmeta,
+ /// Piece of external debug information (e.g., `.dSYM`/`.pdb` file).
+ DebugInfo,
+}
+
+/// Type of each file generated by a Unit.
+#[derive(Debug)]
+pub struct FileType {
+ /// The kind of file.
+ pub flavor: FileFlavor,
+ /// The crate-type that generates this file.
+ ///
+ /// `None` for things that aren't associated with a specific crate type,
+ /// for example `rmeta` files.
+ pub crate_type: Option<CrateType>,
+ /// The suffix for the file (for example, `.rlib`).
+ /// This is an empty string for executables on Unix-like platforms.
+ suffix: String,
+ /// The prefix for the file (for example, `lib`).
+ /// This is an empty string for things like executables.
+ prefix: String,
+ /// Flag to convert hyphen to underscore when uplifting.
+ should_replace_hyphens: bool,
+}
+
+impl FileType {
+ /// The filename for this FileType created by rustc.
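+ ///
+ /// For example, a library crate `foo` with metadata hash `abc123` and the
+ /// `.rlib` file type yields `libfoo-abc123.rlib`.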
+ pub fn output_filename(&self, target: &Target, metadata: Option<&str>) -> String {
+ match metadata {
+ Some(metadata) => format!(
+ "{}{}-{}{}",
+ self.prefix,
+ target.crate_name(),
+ metadata,
+ self.suffix
+ ),
+ None => format!("{}{}{}", self.prefix, target.crate_name(), self.suffix),
+ }
+ }
+
+ /// The filename for this FileType that Cargo should use when "uplifting"
+ /// it to the destination directory.
+ pub fn uplift_filename(&self, target: &Target) -> String {
+ let name = match target.binary_filename() {
+ Some(name) => name,
+ None => {
+ // For binary crate type, `should_replace_hyphens` will always be false.
+ if self.should_replace_hyphens {
+ target.crate_name()
+ } else {
+ target.name().to_string()
+ }
+ }
+ };
+
+ format!("{}{}{}", self.prefix, name, self.suffix)
+ }
+
+ /// Creates a new instance representing a `.rmeta` file.
+ pub fn new_rmeta() -> FileType {
+ // Note that even binaries use the `lib` prefix.
+ FileType {
+ flavor: FileFlavor::Rmeta,
+ crate_type: None,
+ suffix: ".rmeta".to_string(),
+ prefix: "lib".to_string(),
+ should_replace_hyphens: true,
+ }
+ }
+}
+
+impl TargetInfo {
+ /// Learns information about the target platform from `rustc` invocation(s).
+ ///
+ /// Generally, the first call to this function is expensive, as it may
+ /// query `rustc` several times. To reduce the cost, the output of each `rustc`
+ /// invocation is cached by [`Rustc::cached_output`].
+ ///
+ /// Search `Tricky` to learn why querying `rustc` several times is needed.
+ pub fn new(
+ config: &Config,
+ requested_kinds: &[CompileKind],
+ rustc: &Rustc,
+ kind: CompileKind,
+ ) -> CargoResult<TargetInfo> {
+ let mut rustflags = extra_args(
+ config,
+ requested_kinds,
+ &rustc.host,
+ None,
+ kind,
+ Flags::Rust,
+ )?;
+ let mut turn = 0;
+ loop {
+ let extra_fingerprint = kind.fingerprint_hash();
+
+ // Query rustc for several kinds of info from each line of output:
+ // 0) file-names (to determine output file prefix/suffix for given crate type)
+ // 1) sysroot
+ // 2) split-debuginfo
+ // 3) cfg
+ //
+ // Search `--print` to see what we query so far.
+ let mut process = rustc.workspace_process();
+ process
+ .arg("-")
+ .arg("--crate-name")
+ .arg("___")
+ .arg("--print=file-names")
+ .args(&rustflags)
+ .env_remove("RUSTC_LOG");
+
+ if let CompileKind::Target(target) = kind {
+ process.arg("--target").arg(target.rustc_target());
+ }
+
+ let crate_type_process = process.clone();
+ const KNOWN_CRATE_TYPES: &[CrateType] = &[
+ CrateType::Bin,
+ CrateType::Rlib,
+ CrateType::Dylib,
+ CrateType::Cdylib,
+ CrateType::Staticlib,
+ CrateType::ProcMacro,
+ ];
+ for crate_type in KNOWN_CRATE_TYPES.iter() {
+ process.arg("--crate-type").arg(crate_type.as_str());
+ }
+
+ process.arg("--print=sysroot");
+ process.arg("--print=split-debuginfo");
+ process.arg("--print=crate-name"); // `___` as a delimiter.
+ process.arg("--print=cfg");
+
+ let (output, error) = rustc
+ .cached_output(&process, extra_fingerprint)
+ .with_context(|| {
+ "failed to run `rustc` to learn about target-specific information"
+ })?;
+
+ let mut lines = output.lines();
+ let mut map = HashMap::new();
+ for crate_type in KNOWN_CRATE_TYPES {
+ let out = parse_crate_type(crate_type, &process, &output, &error, &mut lines)?;
+ map.insert(crate_type.clone(), out);
+ }
+
+ let Some(line) = lines.next() else {
+ return error_missing_print_output("sysroot", &process, &output, &error);
+ };
+ let sysroot = PathBuf::from(line);
+ let sysroot_host_libdir = if cfg!(windows) {
+ sysroot.join("bin")
+ } else {
+ sysroot.join("lib")
+ };
+ let mut sysroot_target_libdir = sysroot.clone();
+ sysroot_target_libdir.push("lib");
+ sysroot_target_libdir.push("rustlib");
+ sysroot_target_libdir.push(match &kind {
+ CompileKind::Host => rustc.host.as_str(),
+ CompileKind::Target(target) => target.short_name(),
+ });
+ sysroot_target_libdir.push("lib");
+
+ let support_split_debuginfo = {
+ // HACK: abuse `--print=crate-name` to use `___` as a delimiter.
+ let mut res = Vec::new();
+ loop {
+ match lines.next() {
+ Some(line) if line == "___" => break,
+ Some(line) => res.push(line.into()),
+ None => {
+ return error_missing_print_output(
+ "split-debuginfo",
+ &process,
+ &output,
+ &error,
+ )
+ }
+ }
+ }
+ res
+ };
+
+ let cfg = lines
+ .map(|line| Ok(Cfg::from_str(line)?))
+ .filter(TargetInfo::not_user_specific_cfg)
+ .collect::<CargoResult<Vec<_>>>()
+ .with_context(|| {
+ format!(
+ "failed to parse the cfg from `rustc --print=cfg`, got:\n{}",
+ output
+ )
+ })?;
+
+ // recalculate `rustflags` from above now that we have `cfg`
+ // information
+ let new_flags = extra_args(
+ config,
+ requested_kinds,
+ &rustc.host,
+ Some(&cfg),
+ kind,
+ Flags::Rust,
+ )?;
+
+ // Tricky: `RUSTFLAGS` defines the set of active `cfg` flags, active
+ // `cfg` flags define which `.cargo/config` sections apply, and they
+ // in turn can affect `RUSTFLAGS`! This is a bona fide mutual
+ // dependency, and it can even diverge (see `cfg_paradox` test).
+ //
+ // So what we do here is running at most *two* iterations of
+ // fixed-point iteration, which should be enough to cover
+ // practically useful cases, and warn if that's not enough for
+ // convergence.
+ let reached_fixed_point = new_flags == rustflags;
+ if !reached_fixed_point && turn == 0 {
+ turn += 1;
+ rustflags = new_flags;
+ continue;
+ }
+ if !reached_fixed_point {
+ config.shell().warn("non-trivial mutual dependency between target-specific configuration and RUSTFLAGS")?;
+ }
+
+ return Ok(TargetInfo {
+ crate_type_process,
+ crate_types: RefCell::new(map),
+ sysroot,
+ sysroot_host_libdir,
+ sysroot_target_libdir,
+ rustflags,
+ rustdocflags: extra_args(
+ config,
+ requested_kinds,
+ &rustc.host,
+ Some(&cfg),
+ kind,
+ Flags::Rustdoc,
+ )?,
+ cfg,
+ support_split_debuginfo,
+ });
+ }
+ }
+
+ fn not_user_specific_cfg(cfg: &CargoResult<Cfg>) -> bool {
+ if let Ok(Cfg::Name(cfg_name)) = cfg {
+ // This should also include "debug_assertions", but it causes
+ // regressions. Maybe some day in the distant future it can be
+ // added (and possibly change the warning to an error).
+ if cfg_name == "proc_macro" {
+ return false;
+ }
+ }
+ true
+ }
+
+ /// All the target [`Cfg`] settings.
+ pub fn cfg(&self) -> &[Cfg] {
+ &self.cfg
+ }
+
+ /// Returns the list of file types generated by the given crate type.
+ ///
+ /// Returns `None` if the target does not support the given crate type.
+ fn file_types(
+ &self,
+ crate_type: &CrateType,
+ flavor: FileFlavor,
+ target_triple: &str,
+ ) -> CargoResult<Option<Vec<FileType>>> {
+ let crate_type = if *crate_type == CrateType::Lib {
+ CrateType::Rlib
+ } else {
+ crate_type.clone()
+ };
+
+ let mut crate_types = self.crate_types.borrow_mut();
+ let entry = crate_types.entry(crate_type.clone());
+ let crate_type_info = match entry {
+ Entry::Occupied(o) => &*o.into_mut(),
+ Entry::Vacant(v) => {
+ let value = self.discover_crate_type(v.key())?;
+ &*v.insert(value)
+ }
+ };
+ let (prefix, suffix) = match *crate_type_info {
+ Some((ref prefix, ref suffix)) => (prefix, suffix),
+ None => return Ok(None),
+ };
+ let mut ret = vec![FileType {
+ suffix: suffix.clone(),
+ prefix: prefix.clone(),
+ flavor,
+ crate_type: Some(crate_type.clone()),
+ should_replace_hyphens: crate_type != CrateType::Bin,
+ }];
+
+        // Windows shared library import/export files.
+ if crate_type.is_dynamic() {
+ // Note: Custom JSON specs can alter the suffix. For now, we'll
+ // just ignore non-DLL suffixes.
+ if target_triple.ends_with("-windows-msvc") && suffix == ".dll" {
+ // See https://docs.microsoft.com/en-us/cpp/build/reference/working-with-import-libraries-and-export-files
+ // for more information about DLL import/export files.
+ ret.push(FileType {
+ suffix: ".dll.lib".to_string(),
+ prefix: prefix.clone(),
+ flavor: FileFlavor::Auxiliary,
+ crate_type: Some(crate_type.clone()),
+ should_replace_hyphens: true,
+ });
+ // NOTE: lld does not produce these
+ ret.push(FileType {
+ suffix: ".dll.exp".to_string(),
+ prefix: prefix.clone(),
+ flavor: FileFlavor::Auxiliary,
+ crate_type: Some(crate_type.clone()),
+ should_replace_hyphens: true,
+ });
+ } else if target_triple.ends_with("windows-gnu") && suffix == ".dll" {
+ // See https://cygwin.com/cygwin-ug-net/dll.html for more
+ // information about GNU import libraries.
+ // LD can link DLL directly, but LLD requires the import library.
+ ret.push(FileType {
+ suffix: ".dll.a".to_string(),
+ prefix: "lib".to_string(),
+ flavor: FileFlavor::Auxiliary,
+ crate_type: Some(crate_type.clone()),
+ should_replace_hyphens: true,
+ })
+ }
+ }
+
+ if target_triple.starts_with("wasm32-") && crate_type == CrateType::Bin && suffix == ".js" {
+ // emscripten binaries generate a .js file, which loads a .wasm
+ // file.
+ ret.push(FileType {
+ suffix: ".wasm".to_string(),
+ prefix: prefix.clone(),
+ flavor: FileFlavor::Auxiliary,
+ crate_type: Some(crate_type.clone()),
+ // Name `foo-bar` will generate a `foo_bar.js` and
+ // `foo_bar.wasm`. Cargo will translate the underscore and
+ // copy `foo_bar.js` to `foo-bar.js`. However, the wasm
+ // filename is embedded in the .js file with an underscore, so
+ // it should not contain hyphens.
+ should_replace_hyphens: true,
+ });
+ // And a map file for debugging. This is only emitted with debug=2
+ // (-g4 for emcc).
+ ret.push(FileType {
+ suffix: ".wasm.map".to_string(),
+ prefix: prefix.clone(),
+ flavor: FileFlavor::DebugInfo,
+ crate_type: Some(crate_type.clone()),
+ should_replace_hyphens: true,
+ });
+ }
+
+ // Handle separate debug files.
+ let is_apple = target_triple.contains("-apple-");
+ if matches!(
+ crate_type,
+ CrateType::Bin | CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro
+ ) {
+ if is_apple {
+ let suffix = if crate_type == CrateType::Bin {
+ ".dSYM".to_string()
+ } else {
+ ".dylib.dSYM".to_string()
+ };
+ ret.push(FileType {
+ suffix,
+ prefix: prefix.clone(),
+ flavor: FileFlavor::DebugInfo,
+ crate_type: Some(crate_type),
+ // macOS tools like lldb use all sorts of magic to locate
+ // dSYM files. See https://lldb.llvm.org/use/symbols.html
+ // for some details. It seems like a `.dSYM` located next
+ // to the executable with the same name is one method. The
+ // dSYM should have the same hyphens as the executable for
+ // the names to match.
+ should_replace_hyphens: false,
+ })
+ } else if target_triple.ends_with("-msvc") {
+ ret.push(FileType {
+ suffix: ".pdb".to_string(),
+ prefix: prefix.clone(),
+ flavor: FileFlavor::DebugInfo,
+ crate_type: Some(crate_type),
+ // The absolute path to the pdb file is embedded in the
+ // executable. If the exe/pdb pair is moved to another
+ // machine, then debuggers will look in the same directory
+ // of the exe with the original pdb filename. Since the
+ // original name contains underscores, they need to be
+ // preserved.
+ should_replace_hyphens: true,
+ })
+ } else {
+ // Because DWARF Package (dwp) files are produced after the
+ // fact by another tool, there is nothing in the binary that
+ // provides a means to locate them. By convention, debuggers
+ // take the binary filename and append ".dwp" (including to
+ // binaries that already have an extension such as shared libs)
+ // to find the dwp.
+ ret.push(FileType {
+ // It is important to preserve the existing suffix for
+ // e.g. shared libraries, where the dwp for libfoo.so is
+ // expected to be at libfoo.so.dwp.
+ suffix: format!("{suffix}.dwp"),
+ prefix: prefix.clone(),
+ flavor: FileFlavor::DebugInfo,
+ crate_type: Some(crate_type.clone()),
+ // Likewise, the dwp needs to match the primary artifact's
+ // hyphenation exactly.
+ should_replace_hyphens: crate_type != CrateType::Bin,
+ })
+ }
+ }
+
+ Ok(Some(ret))
+ }
+
+ fn discover_crate_type(&self, crate_type: &CrateType) -> CargoResult<Option<(String, String)>> {
+ let mut process = self.crate_type_process.clone();
+
+ process.arg("--crate-type").arg(crate_type.as_str());
+
+ let output = process.exec_with_output().with_context(|| {
+ format!(
+ "failed to run `rustc` to learn about crate-type {} information",
+ crate_type
+ )
+ })?;
+
+ let error = str::from_utf8(&output.stderr).unwrap();
+ let output = str::from_utf8(&output.stdout).unwrap();
+ parse_crate_type(crate_type, &process, output, error, &mut output.lines())
+ }
+
+ /// Returns all the file types generated by rustc for the given mode/target_kind.
+ ///
+ /// The first value is a Vec of file types generated, the second value is
+ /// a list of CrateTypes that are not supported by the given target.
+ pub fn rustc_outputs(
+ &self,
+ mode: CompileMode,
+ target_kind: &TargetKind,
+ target_triple: &str,
+ ) -> CargoResult<(Vec<FileType>, Vec<CrateType>)> {
+ match mode {
+ CompileMode::Build => self.calc_rustc_outputs(target_kind, target_triple),
+ CompileMode::Test | CompileMode::Bench => {
+ match self.file_types(&CrateType::Bin, FileFlavor::Normal, target_triple)? {
+ Some(fts) => Ok((fts, Vec::new())),
+ None => Ok((Vec::new(), vec![CrateType::Bin])),
+ }
+ }
+ CompileMode::Check { .. } => Ok((vec![FileType::new_rmeta()], Vec::new())),
+ CompileMode::Doc { .. }
+ | CompileMode::Doctest
+ | CompileMode::Docscrape
+ | CompileMode::RunCustomBuild => {
+ panic!("asked for rustc output for non-rustc mode")
+ }
+ }
+ }
+
+ fn calc_rustc_outputs(
+ &self,
+ target_kind: &TargetKind,
+ target_triple: &str,
+ ) -> CargoResult<(Vec<FileType>, Vec<CrateType>)> {
+ let mut unsupported = Vec::new();
+ let mut result = Vec::new();
+ let crate_types = target_kind.rustc_crate_types();
+ for crate_type in &crate_types {
+ let flavor = if crate_type.is_linkable() {
+ FileFlavor::Linkable
+ } else {
+ FileFlavor::Normal
+ };
+ let file_types = self.file_types(crate_type, flavor, target_triple)?;
+ match file_types {
+ Some(types) => {
+ result.extend(types);
+ }
+ None => {
+ unsupported.push(crate_type.clone());
+ }
+ }
+ }
+ if !result.is_empty() && !crate_types.iter().any(|ct| ct.requires_upstream_objects()) {
+ // Only add rmeta if pipelining.
+ result.push(FileType::new_rmeta());
+ }
+ Ok((result, unsupported))
+ }
+
+ /// Checks if the debuginfo-split value is supported by this target
+ pub fn supports_debuginfo_split(&self, split: InternedString) -> bool {
+ self.support_split_debuginfo
+ .iter()
+ .any(|sup| sup.as_str() == split.as_str())
+ }
+}
+
+/// Takes rustc output (using specialized command line args) and calculates the file prefix and
+/// suffix for the given crate type (e.g., for a Rust library like `libcargo.rlib`, the prefix is
+/// "lib" and the suffix is ".rlib"), or returns `None` if the type is not supported.
+///
+/// The caller needs to ensure that the lines object is at the correct line for the given crate
+/// type: this is not checked.
+///
+/// This function cannot handle more than one file per type (with wasm32-unknown-emscripten, there
+/// are two files for bin (`.wasm` and `.js`)).
+fn parse_crate_type(
+ crate_type: &CrateType,
+ cmd: &ProcessBuilder,
+ output: &str,
+ error: &str,
+ lines: &mut str::Lines<'_>,
+) -> CargoResult<Option<(String, String)>> {
+ let not_supported = error.lines().any(|line| {
+ (line.contains("unsupported crate type") || line.contains("unknown crate type"))
+ && line.contains(&format!("crate type `{}`", crate_type))
+ });
+ if not_supported {
+ return Ok(None);
+ }
+ let line = match lines.next() {
+ Some(line) => line,
+ None => anyhow::bail!(
+ "malformed output when learning about crate-type {} information\n{}",
+ crate_type,
+ output_err_info(cmd, output, error)
+ ),
+ };
+ let mut parts = line.trim().split("___");
+ let prefix = parts.next().unwrap();
+ let Some(suffix) = parts.next() else {
+ return error_missing_print_output("file-names", cmd, output, error);
+ };
+
+ Ok(Some((prefix.to_string(), suffix.to_string())))
+}
+
+/// Helper for creating an error message for missing output from a certain `--print` request.
+fn error_missing_print_output<T>(
+ request: &str,
+ cmd: &ProcessBuilder,
+ stdout: &str,
+ stderr: &str,
+) -> CargoResult<T> {
+ let err_info = output_err_info(cmd, stdout, stderr);
+ anyhow::bail!(
+ "output of --print={request} missing when learning about \
+ target-specific information from rustc\n{err_info}",
+ )
+}
+
+/// Helper for creating an error message when parsing rustc output fails.
+fn output_err_info(cmd: &ProcessBuilder, stdout: &str, stderr: &str) -> String {
+ let mut result = format!("command was: {}\n", cmd);
+ if !stdout.is_empty() {
+ result.push_str("\n--- stdout\n");
+ result.push_str(stdout);
+ }
+ if !stderr.is_empty() {
+ result.push_str("\n--- stderr\n");
+ result.push_str(stderr);
+ }
+ if stdout.is_empty() && stderr.is_empty() {
+ result.push_str("(no output received)");
+ }
+ result
+}
+
+/// Compiler flags for either rustc or rustdoc.
+#[derive(Debug, Copy, Clone)]
+enum Flags {
+ Rust,
+ Rustdoc,
+}
+
+impl Flags {
+ fn as_key(self) -> &'static str {
+ match self {
+ Flags::Rust => "rustflags",
+ Flags::Rustdoc => "rustdocflags",
+ }
+ }
+
+ fn as_env(self) -> &'static str {
+ match self {
+ Flags::Rust => "RUSTFLAGS",
+ Flags::Rustdoc => "RUSTDOCFLAGS",
+ }
+ }
+}
+
+/// Acquire extra flags to pass to the compiler from various locations.
+///
+/// The locations are:
+///
+/// - the `CARGO_ENCODED_RUSTFLAGS` environment variable
+/// - the `RUSTFLAGS` environment variable
+///
+/// then if none of those were found
+///
+/// - `target.*.rustflags` from the config (.cargo/config)
+/// - `target.cfg(..).rustflags` from the config
+/// - `host.*.rustflags` from the config if compiling a host artifact or without `--target`
+/// (requires `-Zhost-config`)
+///
+/// then if none of those were found
+///
+/// - `build.rustflags` from the config
+///
+/// The behavior differs slightly when cross-compiling (or, specifically, when `--target` is
+/// provided) for artifacts that are always built for the host (plugins, build scripts, ...).
+/// For those artifacts, _only_ `host.*.rustflags` is respected, and no other configuration
+/// sources, _regardless of the value of `target-applies-to-host`_. This is counterintuitive, but
+/// necessary to retain backwards compatibility with older versions of Cargo.
+fn extra_args(
+ config: &Config,
+ requested_kinds: &[CompileKind],
+ host_triple: &str,
+ target_cfg: Option<&[Cfg]>,
+ kind: CompileKind,
+ flags: Flags,
+) -> CargoResult<Vec<String>> {
+ let target_applies_to_host = config.target_applies_to_host()?;
+
+ // Host artifacts should not generally pick up rustflags from anywhere except [host].
+ //
+ // The one exception to this is if `target-applies-to-host = true`, which opts into a
+ // particular (inconsistent) past Cargo behavior where host artifacts _do_ pick up rustflags
+ // set elsewhere when `--target` isn't passed.
+ if kind.is_host() {
+ if target_applies_to_host && requested_kinds == [CompileKind::Host] {
+ // This is the past Cargo behavior where we fall back to the same logic as for other
+ // artifacts without --target.
+ } else {
+ // In all other cases, host artifacts just get flags from [host], regardless of
+ // --target. Or, phrased differently, no `--target` behaves the same as `--target
+ // <host>`, and host artifacts are always "special" (they don't pick up `RUSTFLAGS` for
+ // example).
+ return Ok(rustflags_from_host(config, flags, host_triple)?.unwrap_or_else(Vec::new));
+ }
+ }
+
+ // All other artifacts pick up the RUSTFLAGS, [target.*], and [build], in that order.
+ // NOTE: It is impossible to have a [host] section and reach this logic with kind.is_host(),
+ // since [host] implies `target-applies-to-host = false`, which always early-returns above.
+
+ if let Some(rustflags) = rustflags_from_env(config, flags) {
+ Ok(rustflags)
+ } else if let Some(rustflags) =
+ rustflags_from_target(config, host_triple, target_cfg, kind, flags)?
+ {
+ Ok(rustflags)
+ } else if let Some(rustflags) = rustflags_from_build(config, flags)? {
+ Ok(rustflags)
+ } else {
+ Ok(Vec::new())
+ }
+}
+
+/// Gets compiler flags from environment variables.
+/// See [`extra_args`] for more.
+fn rustflags_from_env(config: &Config, flags: Flags) -> Option<Vec<String>> {
+ // First try CARGO_ENCODED_RUSTFLAGS from the environment.
+ // Prefer this over RUSTFLAGS since it's less prone to encoding errors.
+ if let Ok(a) = config.get_env(format!("CARGO_ENCODED_{}", flags.as_env())) {
+ if a.is_empty() {
+ return Some(Vec::new());
+ }
+ return Some(a.split('\x1f').map(str::to_string).collect());
+ }
+
+ // Then try RUSTFLAGS from the environment
+ if let Ok(a) = config.get_env(flags.as_env()) {
+ let args = a
+ .split(' ')
+ .map(str::trim)
+ .filter(|s| !s.is_empty())
+ .map(str::to_string);
+ return Some(args.collect());
+ }
+
+ // No rustflags to be collected from the environment
+ None
+}
+
+/// Gets compiler flags from `[target]` section in the config.
+/// See [`extra_args`] for more.
+fn rustflags_from_target(
+ config: &Config,
+ host_triple: &str,
+ target_cfg: Option<&[Cfg]>,
+ kind: CompileKind,
+ flag: Flags,
+) -> CargoResult<Option<Vec<String>>> {
+ let mut rustflags = Vec::new();
+
+ // Then the target.*.rustflags value...
+ let target = match &kind {
+ CompileKind::Host => host_triple,
+ CompileKind::Target(target) => target.short_name(),
+ };
+ let key = format!("target.{}.{}", target, flag.as_key());
+ if let Some(args) = config.get::<Option<StringList>>(&key)? {
+ rustflags.extend(args.as_slice().iter().cloned());
+ }
+ // ...including target.'cfg(...)'.rustflags
+ if let Some(target_cfg) = target_cfg {
+ config
+ .target_cfgs()?
+ .iter()
+ .filter_map(|(key, cfg)| {
+ match flag {
+ Flags::Rust => cfg
+ .rustflags
+ .as_ref()
+ .map(|rustflags| (key, &rustflags.val)),
+ // `target.cfg(…).rustdocflags` is currently not supported.
+ // In fact, neither is `target.<triple>.rustdocflags`.
+ Flags::Rustdoc => None,
+ }
+ })
+ .filter(|(key, _rustflags)| CfgExpr::matches_key(key, target_cfg))
+ .for_each(|(_key, cfg_rustflags)| {
+ rustflags.extend(cfg_rustflags.as_slice().iter().cloned());
+ });
+ }
+
+ if rustflags.is_empty() {
+ Ok(None)
+ } else {
+ Ok(Some(rustflags))
+ }
+}
+
+/// Gets compiler flags from `[host]` section in the config.
+/// See [`extra_args`] for more.
+fn rustflags_from_host(
+ config: &Config,
+ flag: Flags,
+ host_triple: &str,
+) -> CargoResult<Option<Vec<String>>> {
+ let target_cfg = config.host_cfg_triple(host_triple)?;
+ let list = match flag {
+ Flags::Rust => &target_cfg.rustflags,
+ Flags::Rustdoc => {
+ // host.rustdocflags is not a thing, since it does not make sense
+ return Ok(None);
+ }
+ };
+ Ok(list.as_ref().map(|l| l.val.as_slice().to_vec()))
+}
+
+/// Gets compiler flags from `[build]` section in the config.
+/// See [`extra_args`] for more.
+fn rustflags_from_build(config: &Config, flag: Flags) -> CargoResult<Option<Vec<String>>> {
+ // Then the `build.rustflags` value.
+ let build = config.build_config()?;
+ let list = match flag {
+ Flags::Rust => &build.rustflags,
+ Flags::Rustdoc => &build.rustdocflags,
+ };
+ Ok(list.as_ref().map(|l| l.as_slice().to_vec()))
+}
+
+/// Collection of information about `rustc` and the host and target.
+pub struct RustcTargetData<'cfg> {
+ /// Information about `rustc` itself.
+ pub rustc: Rustc,
+
+ /// Config
+ pub config: &'cfg Config,
+ requested_kinds: Vec<CompileKind>,
+
+ /// Build information for the "host", which is information about when
+ /// `rustc` is invoked without a `--target` flag. This is used for
+ /// procedural macros, build scripts, etc.
+ host_config: TargetConfig,
+ /// Information about the host platform.
+ host_info: TargetInfo,
+
+ /// Build information for targets that we're building for.
+ target_config: HashMap<CompileTarget, TargetConfig>,
+ /// Information about the target platform that we're building for.
+ target_info: HashMap<CompileTarget, TargetInfo>,
+}
+
+impl<'cfg> RustcTargetData<'cfg> {
+ pub fn new(
+ ws: &Workspace<'cfg>,
+ requested_kinds: &[CompileKind],
+ ) -> CargoResult<RustcTargetData<'cfg>> {
+ let config = ws.config();
+ let rustc = config.load_global_rustc(Some(ws))?;
+ let mut target_config = HashMap::new();
+ let mut target_info = HashMap::new();
+ let target_applies_to_host = config.target_applies_to_host()?;
+ let host_info = TargetInfo::new(config, requested_kinds, &rustc, CompileKind::Host)?;
+ let host_config = if target_applies_to_host {
+ config.target_cfg_triple(&rustc.host)?
+ } else {
+ config.host_cfg_triple(&rustc.host)?
+ };
+
+ // This is a hack. The unit_dependency graph builder "pretends" that
+ // `CompileKind::Host` is `CompileKind::Target(host)` if the
+ // `--target` flag is not specified. Since the unit_dependency code
+ // needs access to the target config data, create a copy so that it
+ // can be found. See `rebuild_unit_graph_shared` for why this is done.
+ if requested_kinds.iter().any(CompileKind::is_host) {
+ let ct = CompileTarget::new(&rustc.host)?;
+ target_info.insert(ct, host_info.clone());
+ target_config.insert(ct, config.target_cfg_triple(&rustc.host)?);
+ };
+
+ let mut res = RustcTargetData {
+ rustc,
+ config,
+ requested_kinds: requested_kinds.into(),
+ host_config,
+ host_info,
+ target_config,
+ target_info,
+ };
+
+ // Get all kinds we currently know about.
+ //
+ // For now, targets can only ever come from the root workspace
+ // units and artifact dependencies, so this
+ // correctly represents all the kinds that can happen. When we have
+ // other ways for targets to appear at places that are not the root units,
+ // we may have to revisit this.
+ fn artifact_targets(package: &Package) -> impl Iterator<Item = CompileKind> + '_ {
+ package
+ .manifest()
+ .dependencies()
+ .iter()
+ .filter_map(|d| d.artifact()?.target()?.to_compile_kind())
+ }
+ let all_kinds = requested_kinds
+ .iter()
+ .copied()
+ .chain(ws.members().flat_map(|p| {
+ p.manifest()
+ .default_kind()
+ .into_iter()
+ .chain(p.manifest().forced_kind())
+ .chain(artifact_targets(p))
+ }));
+ for kind in all_kinds {
+ res.merge_compile_kind(kind)?;
+ }
+
+ Ok(res)
+ }
+
+ /// Insert `kind` into our `target_info` and `target_config` members if it isn't present yet.
+ fn merge_compile_kind(&mut self, kind: CompileKind) -> CargoResult<()> {
+ if let CompileKind::Target(target) = kind {
+ if !self.target_config.contains_key(&target) {
+ self.target_config
+ .insert(target, self.config.target_cfg_triple(target.short_name())?);
+ }
+ if !self.target_info.contains_key(&target) {
+ self.target_info.insert(
+ target,
+ TargetInfo::new(self.config, &self.requested_kinds, &self.rustc, kind)?,
+ );
+ }
+ }
+ Ok(())
+ }
+
+ /// Returns a "short" name for the given kind, suitable for keying off
+ /// configuration in Cargo or presenting to users.
+ pub fn short_name<'a>(&'a self, kind: &'a CompileKind) -> &'a str {
+ match kind {
+ CompileKind::Host => &self.rustc.host,
+ CompileKind::Target(target) => target.short_name(),
+ }
+ }
+
+ /// Whether a dependency should be compiled for the host or target platform,
+ /// specified by `CompileKind`.
+ pub fn dep_platform_activated(&self, dep: &Dependency, kind: CompileKind) -> bool {
+ // If this dependency is only available for certain platforms,
+ // make sure we're only enabling it for that platform.
+ let platform = match dep.platform() {
+ Some(p) => p,
+ None => return true,
+ };
+ let name = self.short_name(&kind);
+ platform.matches(name, self.cfg(kind))
+ }
+
+ /// Gets the list of `cfg`s printed out from the compiler for the specified kind.
+ pub fn cfg(&self, kind: CompileKind) -> &[Cfg] {
+ self.info(kind).cfg()
+ }
+
+ /// Information about the given target platform, learned by querying rustc.
+ ///
+ /// # Panics
+ ///
+    /// Panics if the target platform described by `kind` can't be found.
+ /// See [`get_info`](Self::get_info) for a non-panicking alternative.
+ pub fn info(&self, kind: CompileKind) -> &TargetInfo {
+ self.get_info(kind).unwrap()
+ }
+
+ /// Information about the given target platform, learned by querying rustc.
+ ///
+ /// Returns `None` if the target platform described by `kind` can't be found.
+ pub fn get_info(&self, kind: CompileKind) -> Option<&TargetInfo> {
+ match kind {
+ CompileKind::Host => Some(&self.host_info),
+ CompileKind::Target(s) => self.target_info.get(&s),
+ }
+ }
+
+ /// Gets the target configuration for a particular host or target.
+ pub fn target_config(&self, kind: CompileKind) -> &TargetConfig {
+ match kind {
+ CompileKind::Host => &self.host_config,
+ CompileKind::Target(s) => &self.target_config[&s],
+ }
+ }
+
+ /// If a build script is overridden, this returns the `BuildOutput` to use.
+ ///
+ /// `lib_name` is the `links` library name and `kind` is whether it is for
+ /// Host or Target.
+ pub fn script_override(&self, lib_name: &str, kind: CompileKind) -> Option<&BuildOutput> {
+ self.target_config(kind).links_overrides.get(lib_name)
+ }
+}
+
+/// Structure used to deal with Rustdoc fingerprinting
+#[derive(Debug, Serialize, Deserialize)]
+pub struct RustDocFingerprint {
+ pub rustc_vv: String,
+}
+
+impl RustDocFingerprint {
+    /// This function checks whether the version of `Rustc` last used to compile this
+    /// `Workspace`'s docs is the same as the one currently being used in this `cargo doc`
+    /// call.
+    ///
+    /// If it's not, it removes the `doc/` folder and overwrites the rustdoc fingerprint
+    /// info, to guarantee that we won't end up with mixed versions of the `js/html/css`
+    /// files that `rustdoc` autogenerates and which do not carry any versioning.
+ pub fn check_rustdoc_fingerprint(cx: &Context<'_, '_>) -> CargoResult<()> {
+ if cx.bcx.config.cli_unstable().skip_rustdoc_fingerprint {
+ return Ok(());
+ }
+ let actual_rustdoc_target_data = RustDocFingerprint {
+ rustc_vv: cx.bcx.rustc().verbose_version.clone(),
+ };
+
+ let fingerprint_path = cx.files().host_root().join(".rustdoc_fingerprint.json");
+ let write_fingerprint = || -> CargoResult<()> {
+ paths::write(
+ &fingerprint_path,
+ serde_json::to_string(&actual_rustdoc_target_data)?,
+ )
+ };
+ let rustdoc_data = match paths::read(&fingerprint_path) {
+ Ok(rustdoc_data) => rustdoc_data,
+ // If the fingerprint does not exist, do not clear out the doc
+ // directories. Otherwise this ran into problems where projects
+ // like rustbuild were creating the doc directory before running
+ // `cargo doc` in a way that deleting it would break it.
+ Err(_) => return write_fingerprint(),
+ };
+ match serde_json::from_str::<RustDocFingerprint>(&rustdoc_data) {
+ Ok(fingerprint) => {
+ if fingerprint.rustc_vv == actual_rustdoc_target_data.rustc_vv {
+ return Ok(());
+ } else {
+ log::debug!(
+ "doc fingerprint changed:\noriginal:\n{}\nnew:\n{}",
+ fingerprint.rustc_vv,
+ actual_rustdoc_target_data.rustc_vv
+ );
+ }
+ }
+ Err(e) => {
+ log::debug!("could not deserialize {:?}: {}", fingerprint_path, e);
+ }
+ };
+ // Fingerprint does not match, delete the doc directories and write a new fingerprint.
+ log::debug!(
+ "fingerprint {:?} mismatch, clearing doc directories",
+ fingerprint_path
+ );
+ cx.bcx
+ .all_kinds
+ .iter()
+ .map(|kind| cx.files().layout(*kind).doc())
+ .filter(|path| path.exists())
+ .try_for_each(|path| clean_doc(path))?;
+ write_fingerprint()?;
+ return Ok(());
+
+ fn clean_doc(path: &Path) -> CargoResult<()> {
+ let entries = path
+ .read_dir()
+ .with_context(|| format!("failed to read directory `{}`", path.display()))?;
+ for entry in entries {
+ let entry = entry?;
+ // Don't remove hidden files. Rustdoc does not create them,
+ // but the user might have.
+ if entry
+ .file_name()
+ .to_str()
+ .map_or(false, |name| name.starts_with('.'))
+ {
+ continue;
+ }
+ let path = entry.path();
+ if entry.file_type()?.is_dir() {
+ paths::remove_dir_all(path)?;
+ } else {
+ paths::remove_file(path)?;
+ }
+ }
+ Ok(())
+ }
+ }
+}
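
As an illustrative aside for `parse_crate_type` above (not part of the vendored source): the crate-type lines it consumes come from invoking rustc with the literal crate name `___`, so `--print=file-names` reports names such as `lib___.rlib`, and splitting on `___` recovers the prefix/suffix pair. A minimal, self-contained sketch of that split, with a hypothetical helper name and made-up inputs:

fn split_prefix_suffix(line: &str) -> Option<(String, String)> {
    // Everything before the `___` marker is the prefix, everything after is
    // the suffix (which keeps its leading dot, e.g. ".rlib").
    let mut parts = line.trim().split("___");
    let prefix = parts.next()?;
    let suffix = parts.next()?;
    Some((prefix.to_string(), suffix.to_string()))
}

fn main() {
    assert_eq!(
        split_prefix_suffix("lib___.rlib"),
        Some(("lib".to_string(), ".rlib".to_string()))
    );
    // A plain binary on Linux has neither prefix nor suffix.
    assert_eq!(
        split_prefix_suffix("___"),
        Some((String::new(), String::new()))
    );
}
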
diff --git a/src/tools/cargo/src/cargo/core/compiler/build_plan.rs b/src/tools/cargo/src/cargo/core/compiler/build_plan.rs
new file mode 100644
index 000000000..a823aa952
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/build_plan.rs
@@ -0,0 +1,163 @@
+//! A graph-like structure used to represent the rustc commands to build the package and the
+//! interdependencies between them.
+//!
+//! The BuildPlan structure is used to store the dependency graph of a dry run so that it can be
+//! shared with an external build system. Each Invocation in the BuildPlan comprises a single
+//! subprocess and defines the build environment, the outputs produced by the subprocess, and the
+//! dependencies on other Invocations.
+
+use std::collections::BTreeMap;
+use std::path::{Path, PathBuf};
+
+use serde::Serialize;
+
+use super::context::OutputFile;
+use super::{CompileKind, CompileMode, Context, Unit};
+use crate::core::TargetKind;
+use crate::util::{internal, CargoResult, Config};
+use cargo_util::ProcessBuilder;
+
+#[derive(Debug, Serialize)]
+struct Invocation {
+ package_name: String,
+ package_version: semver::Version,
+ target_kind: TargetKind,
+ kind: CompileKind,
+ compile_mode: CompileMode,
+ deps: Vec<usize>,
+ outputs: Vec<PathBuf>,
+ links: BTreeMap<PathBuf, PathBuf>,
+ program: String,
+ args: Vec<String>,
+ env: BTreeMap<String, String>,
+ cwd: Option<PathBuf>,
+}
+
+#[derive(Debug)]
+pub struct BuildPlan {
+ invocation_map: BTreeMap<String, usize>,
+ plan: SerializedBuildPlan,
+}
+
+#[derive(Debug, Serialize)]
+struct SerializedBuildPlan {
+ invocations: Vec<Invocation>,
+ inputs: Vec<PathBuf>,
+}
+
+impl Invocation {
+ pub fn new(unit: &Unit, deps: Vec<usize>) -> Invocation {
+ let id = unit.pkg.package_id();
+ Invocation {
+ package_name: id.name().to_string(),
+ package_version: id.version().clone(),
+ kind: unit.kind,
+ target_kind: unit.target.kind().clone(),
+ compile_mode: unit.mode,
+ deps,
+ outputs: Vec::new(),
+ links: BTreeMap::new(),
+ program: String::new(),
+ args: Vec::new(),
+ env: BTreeMap::new(),
+ cwd: None,
+ }
+ }
+
+ pub fn add_output(&mut self, path: &Path, link: &Option<PathBuf>) {
+ self.outputs.push(path.to_path_buf());
+ if let Some(ref link) = *link {
+ self.links.insert(link.clone(), path.to_path_buf());
+ }
+ }
+
+ pub fn update_cmd(&mut self, cmd: &ProcessBuilder) -> CargoResult<()> {
+ self.program = cmd
+ .get_program()
+ .to_str()
+ .ok_or_else(|| anyhow::format_err!("unicode program string required"))?
+ .to_string();
+ self.cwd = Some(cmd.get_cwd().unwrap().to_path_buf());
+ for arg in cmd.get_args() {
+ self.args.push(
+ arg.to_str()
+ .ok_or_else(|| anyhow::format_err!("unicode argument string required"))?
+ .to_string(),
+ );
+ }
+ for (var, value) in cmd.get_envs() {
+ let value = match value {
+ Some(s) => s,
+ None => continue,
+ };
+ self.env.insert(
+ var.clone(),
+ value
+ .to_str()
+ .ok_or_else(|| anyhow::format_err!("unicode environment value required"))?
+ .to_string(),
+ );
+ }
+ Ok(())
+ }
+}
+
+impl BuildPlan {
+ pub fn new() -> BuildPlan {
+ BuildPlan {
+ invocation_map: BTreeMap::new(),
+ plan: SerializedBuildPlan::new(),
+ }
+ }
+
+ pub fn add(&mut self, cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<()> {
+ let id = self.plan.invocations.len();
+ self.invocation_map.insert(unit.buildkey(), id);
+ let deps = cx
+ .unit_deps(unit)
+ .iter()
+ .map(|dep| self.invocation_map[&dep.unit.buildkey()])
+ .collect();
+ let invocation = Invocation::new(unit, deps);
+ self.plan.invocations.push(invocation);
+ Ok(())
+ }
+
+ pub fn update(
+ &mut self,
+ invocation_name: &str,
+ cmd: &ProcessBuilder,
+ outputs: &[OutputFile],
+ ) -> CargoResult<()> {
+ let id = self.invocation_map[invocation_name];
+ let invocation =
+ self.plan.invocations.get_mut(id).ok_or_else(|| {
+ internal(format!("couldn't find invocation for {}", invocation_name))
+ })?;
+
+ invocation.update_cmd(cmd)?;
+ for output in outputs.iter() {
+ invocation.add_output(&output.path, &output.hardlink);
+ }
+
+ Ok(())
+ }
+
+ pub fn set_inputs(&mut self, inputs: Vec<PathBuf>) {
+ self.plan.inputs = inputs;
+ }
+
+ pub fn output_plan(self, config: &Config) {
+ let encoded = serde_json::to_string(&self.plan).unwrap();
+ crate::drop_println!(config, "{}", encoded);
+ }
+}
+
+impl SerializedBuildPlan {
+ pub fn new() -> SerializedBuildPlan {
+ SerializedBuildPlan {
+ invocations: Vec::new(),
+ inputs: Vec::new(),
+ }
+ }
+}
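
For a rough sense of the JSON that `output_plan` emits, here is a minimal sketch using stand-in structs with a reduced field set (field names mirror `Invocation` and `SerializedBuildPlan` above; the values are invented, and the sketch assumes `serde` with the derive feature and `serde_json` as dependencies):

use serde::Serialize;

#[derive(Serialize)]
struct InvocationSketch {
    package_name: String,
    program: String,
    args: Vec<String>,
    // Indices into the `invocations` array, mirroring `Invocation::deps`.
    deps: Vec<usize>,
    outputs: Vec<String>,
}

#[derive(Serialize)]
struct PlanSketch {
    invocations: Vec<InvocationSketch>,
    inputs: Vec<String>,
}

fn main() {
    let plan = PlanSketch {
        invocations: vec![InvocationSketch {
            package_name: "foo".into(),
            program: "rustc".into(),
            args: vec!["--crate-name".into(), "foo".into()],
            deps: vec![],
            outputs: vec!["target/debug/deps/libfoo.rlib".into()],
        }],
        inputs: vec!["Cargo.toml".into()],
    };
    println!("{}", serde_json::to_string_pretty(&plan).unwrap());
}
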
diff --git a/src/tools/cargo/src/cargo/core/compiler/compilation.rs b/src/tools/cargo/src/cargo/core/compiler/compilation.rs
new file mode 100644
index 000000000..29642f13d
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/compilation.rs
@@ -0,0 +1,452 @@
+//! Type definitions for the result of a compilation.
+
+use std::collections::{BTreeSet, HashMap};
+use std::ffi::{OsStr, OsString};
+use std::path::PathBuf;
+
+use cargo_platform::CfgExpr;
+use cargo_util::{paths, ProcessBuilder};
+
+use super::BuildContext;
+use crate::core::compiler::{CompileKind, Metadata, Unit};
+use crate::core::Package;
+use crate::util::{config, CargoResult, Config};
+
+/// Structure with enough information to run `rustdoc --test`.
+pub struct Doctest {
+ /// What's being doctested
+ pub unit: Unit,
+ /// Arguments needed to pass to rustdoc to run this test.
+ pub args: Vec<OsString>,
+ /// Whether or not -Zunstable-options is needed.
+ pub unstable_opts: bool,
+ /// The -Clinker value to use.
+ pub linker: Option<PathBuf>,
+ /// The script metadata, if this unit's package has a build script.
+ ///
+ /// This is used for indexing [`Compilation::extra_env`].
+ pub script_meta: Option<Metadata>,
+
+ /// Environment variables to set in the rustdoc process.
+ pub env: HashMap<String, OsString>,
+}
+
+/// Information about the output of a unit.
+#[derive(Ord, PartialOrd, Eq, PartialEq)]
+pub struct UnitOutput {
+ /// The unit that generated this output.
+ pub unit: Unit,
+ /// Path to the unit's primary output (an executable or cdylib).
+ pub path: PathBuf,
+ /// The script metadata, if this unit's package has a build script.
+ ///
+ /// This is used for indexing [`Compilation::extra_env`].
+ pub script_meta: Option<Metadata>,
+}
+
+/// A structure returning the result of a compilation.
+pub struct Compilation<'cfg> {
+ /// An array of all tests created during this compilation.
+ pub tests: Vec<UnitOutput>,
+
+ /// An array of all binaries created.
+ pub binaries: Vec<UnitOutput>,
+
+ /// An array of all cdylibs created.
+ pub cdylibs: Vec<UnitOutput>,
+
+ /// The crate names of the root units specified on the command-line.
+ pub root_crate_names: Vec<String>,
+
+ /// All directories for the output of native build commands.
+ ///
+ /// This is currently used to drive some entries which are added to the
+ /// LD_LIBRARY_PATH as appropriate.
+ ///
+ /// The order should be deterministic.
+ pub native_dirs: BTreeSet<PathBuf>,
+
+ /// Root output directory (for the local package's artifacts)
+ pub root_output: HashMap<CompileKind, PathBuf>,
+
+ /// Output directory for rust dependencies.
+ /// May be for the host or for a specific target.
+ pub deps_output: HashMap<CompileKind, PathBuf>,
+
+ /// The path to the host libdir for the compiler used
+ sysroot_host_libdir: PathBuf,
+
+ /// The path to libstd for each target
+ sysroot_target_libdir: HashMap<CompileKind, PathBuf>,
+
+ /// Extra environment variables that were passed to compilations and should
+ /// be passed to future invocations of programs.
+ ///
+ /// The key is the build script metadata for uniquely identifying the
+ /// `RunCustomBuild` unit that generated these env vars.
+ pub extra_env: HashMap<Metadata, Vec<(String, String)>>,
+
+ /// Libraries to test with rustdoc.
+ pub to_doc_test: Vec<Doctest>,
+
+ /// The target host triple.
+ pub host: String,
+
+ config: &'cfg Config,
+
+ /// Rustc process to be used by default
+ rustc_process: ProcessBuilder,
+ /// Rustc process to be used for workspace crates instead of rustc_process
+ rustc_workspace_wrapper_process: ProcessBuilder,
+ /// Optional rustc process to be used for primary crates instead of either rustc_process or
+ /// rustc_workspace_wrapper_process
+ primary_rustc_process: Option<ProcessBuilder>,
+
+ target_runners: HashMap<CompileKind, Option<(PathBuf, Vec<String>)>>,
+}
+
+impl<'cfg> Compilation<'cfg> {
+ pub fn new<'a>(bcx: &BuildContext<'a, 'cfg>) -> CargoResult<Compilation<'cfg>> {
+ let mut rustc = bcx.rustc().process();
+ let mut primary_rustc_process = bcx.build_config.primary_unit_rustc.clone();
+ let mut rustc_workspace_wrapper_process = bcx.rustc().workspace_process();
+
+ if bcx.config.extra_verbose() {
+ rustc.display_env_vars();
+ rustc_workspace_wrapper_process.display_env_vars();
+
+ if let Some(rustc) = primary_rustc_process.as_mut() {
+ rustc.display_env_vars();
+ }
+ }
+
+ Ok(Compilation {
+ // TODO: deprecated; remove.
+ native_dirs: BTreeSet::new(),
+ root_output: HashMap::new(),
+ deps_output: HashMap::new(),
+ sysroot_host_libdir: bcx
+ .target_data
+ .info(CompileKind::Host)
+ .sysroot_host_libdir
+ .clone(),
+ sysroot_target_libdir: get_sysroot_target_libdir(bcx)?,
+ tests: Vec::new(),
+ binaries: Vec::new(),
+ cdylibs: Vec::new(),
+ root_crate_names: Vec::new(),
+ extra_env: HashMap::new(),
+ to_doc_test: Vec::new(),
+ config: bcx.config,
+ host: bcx.host_triple().to_string(),
+ rustc_process: rustc,
+ rustc_workspace_wrapper_process,
+ primary_rustc_process,
+ target_runners: bcx
+ .build_config
+ .requested_kinds
+ .iter()
+ .chain(Some(&CompileKind::Host))
+ .map(|kind| Ok((*kind, target_runner(bcx, *kind)?)))
+ .collect::<CargoResult<HashMap<_, _>>>()?,
+ })
+ }
+
+ /// Returns a [`ProcessBuilder`] for running `rustc`.
+ ///
+ /// `is_primary` is true if this is a "primary package", which means it
+ /// was selected by the user on the command-line (such as with a `-p`
+ /// flag), see [`crate::core::compiler::Context::primary_packages`].
+ ///
+ /// `is_workspace` is true if this is a workspace member.
+ pub fn rustc_process(
+ &self,
+ unit: &Unit,
+ is_primary: bool,
+ is_workspace: bool,
+ ) -> CargoResult<ProcessBuilder> {
+ let rustc = if is_primary && self.primary_rustc_process.is_some() {
+ self.primary_rustc_process.clone().unwrap()
+ } else if is_workspace {
+ self.rustc_workspace_wrapper_process.clone()
+ } else {
+ self.rustc_process.clone()
+ };
+
+ let cmd = fill_rustc_tool_env(rustc, unit);
+ self.fill_env(cmd, &unit.pkg, None, unit.kind, true)
+ }
+
+ /// Returns a [`ProcessBuilder`] for running `rustdoc`.
+ pub fn rustdoc_process(
+ &self,
+ unit: &Unit,
+ script_meta: Option<Metadata>,
+ ) -> CargoResult<ProcessBuilder> {
+ let rustdoc = ProcessBuilder::new(&*self.config.rustdoc()?);
+ let cmd = fill_rustc_tool_env(rustdoc, unit);
+ let mut cmd = self.fill_env(cmd, &unit.pkg, script_meta, unit.kind, true)?;
+ cmd.retry_with_argfile(true);
+ unit.target.edition().cmd_edition_arg(&mut cmd);
+
+ for crate_type in unit.target.rustc_crate_types() {
+ cmd.arg("--crate-type").arg(crate_type.as_str());
+ }
+
+ Ok(cmd)
+ }
+
+ /// Returns a [`ProcessBuilder`] appropriate for running a process for the
+ /// host platform.
+ ///
+ /// This is currently only used for running build scripts. If you use this
+    /// for anything else, please be extra careful about how environment
+ /// variables are set!
+ pub fn host_process<T: AsRef<OsStr>>(
+ &self,
+ cmd: T,
+ pkg: &Package,
+ ) -> CargoResult<ProcessBuilder> {
+ self.fill_env(
+ ProcessBuilder::new(cmd),
+ pkg,
+ None,
+ CompileKind::Host,
+ false,
+ )
+ }
+
+ pub fn target_runner(&self, kind: CompileKind) -> Option<&(PathBuf, Vec<String>)> {
+ self.target_runners.get(&kind).and_then(|x| x.as_ref())
+ }
+
+ /// Returns a [`ProcessBuilder`] appropriate for running a process for the
+ /// target platform. This is typically used for `cargo run` and `cargo
+ /// test`.
+ ///
+ /// `script_meta` is the metadata for the `RunCustomBuild` unit that this
+ /// unit used for its build script. Use `None` if the package did not have
+ /// a build script.
+ pub fn target_process<T: AsRef<OsStr>>(
+ &self,
+ cmd: T,
+ kind: CompileKind,
+ pkg: &Package,
+ script_meta: Option<Metadata>,
+ ) -> CargoResult<ProcessBuilder> {
+ let builder = if let Some((runner, args)) = self.target_runner(kind) {
+ let mut builder = ProcessBuilder::new(runner);
+ builder.args(args);
+ builder.arg(cmd);
+ builder
+ } else {
+ ProcessBuilder::new(cmd)
+ };
+ self.fill_env(builder, pkg, script_meta, kind, false)
+ }
+
+ /// Prepares a new process with an appropriate environment to run against
+ /// the artifacts produced by the build process.
+ ///
+ /// The package argument is also used to configure environment variables as
+ /// well as the working directory of the child process.
+ fn fill_env(
+ &self,
+ mut cmd: ProcessBuilder,
+ pkg: &Package,
+ script_meta: Option<Metadata>,
+ kind: CompileKind,
+ is_rustc_tool: bool,
+ ) -> CargoResult<ProcessBuilder> {
+ let mut search_path = Vec::new();
+ if is_rustc_tool {
+ search_path.push(self.deps_output[&CompileKind::Host].clone());
+ search_path.push(self.sysroot_host_libdir.clone());
+ } else {
+ search_path.extend(super::filter_dynamic_search_path(
+ self.native_dirs.iter(),
+ &self.root_output[&kind],
+ ));
+ search_path.push(self.deps_output[&kind].clone());
+ search_path.push(self.root_output[&kind].clone());
+ // For build-std, we don't want to accidentally pull in any shared
+ // libs from the sysroot that ships with rustc. This may not be
+ // required (at least I cannot craft a situation where it
+ // matters), but is here to be safe.
+ if self.config.cli_unstable().build_std.is_none() {
+ search_path.push(self.sysroot_target_libdir[&kind].clone());
+ }
+ }
+
+ let dylib_path = paths::dylib_path();
+ let dylib_path_is_empty = dylib_path.is_empty();
+ search_path.extend(dylib_path.into_iter());
+ if cfg!(target_os = "macos") && dylib_path_is_empty {
+ // These are the defaults when DYLD_FALLBACK_LIBRARY_PATH isn't
+ // set or set to an empty string. Since Cargo is explicitly setting
+ // the value, make sure the defaults still work.
+ if let Some(home) = self.config.get_env_os("HOME") {
+ search_path.push(PathBuf::from(home).join("lib"));
+ }
+ search_path.push(PathBuf::from("/usr/local/lib"));
+ search_path.push(PathBuf::from("/usr/lib"));
+ }
+ let search_path = paths::join_paths(&search_path, paths::dylib_path_envvar())?;
+
+ cmd.env(paths::dylib_path_envvar(), &search_path);
+ if let Some(meta) = script_meta {
+ if let Some(env) = self.extra_env.get(&meta) {
+ for (k, v) in env {
+ cmd.env(k, v);
+ }
+ }
+ }
+
+ let metadata = pkg.manifest().metadata();
+
+ let cargo_exe = self.config.cargo_exe()?;
+ cmd.env(crate::CARGO_ENV, cargo_exe);
+
+ // When adding new environment variables depending on
+ // crate properties which might require rebuild upon change
+ // consider adding the corresponding properties to the hash
+ // in BuildContext::target_metadata()
+ cmd.env("CARGO_MANIFEST_DIR", pkg.root())
+ .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string())
+ .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string())
+ .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string())
+ .env("CARGO_PKG_VERSION_PRE", pkg.version().pre.as_str())
+ .env("CARGO_PKG_VERSION", &pkg.version().to_string())
+ .env("CARGO_PKG_NAME", &*pkg.name())
+ .env(
+ "CARGO_PKG_DESCRIPTION",
+ metadata.description.as_ref().unwrap_or(&String::new()),
+ )
+ .env(
+ "CARGO_PKG_HOMEPAGE",
+ metadata.homepage.as_ref().unwrap_or(&String::new()),
+ )
+ .env(
+ "CARGO_PKG_REPOSITORY",
+ metadata.repository.as_ref().unwrap_or(&String::new()),
+ )
+ .env(
+ "CARGO_PKG_LICENSE",
+ metadata.license.as_ref().unwrap_or(&String::new()),
+ )
+ .env(
+ "CARGO_PKG_LICENSE_FILE",
+ metadata.license_file.as_ref().unwrap_or(&String::new()),
+ )
+ .env("CARGO_PKG_AUTHORS", &pkg.authors().join(":"))
+ .env(
+ "CARGO_PKG_RUST_VERSION",
+ &pkg.rust_version().unwrap_or(&String::new()),
+ )
+ .env(
+ "CARGO_PKG_README",
+ metadata.readme.as_ref().unwrap_or(&String::new()),
+ )
+ .cwd(pkg.root());
+
+ // Apply any environment variables from the config
+ for (key, value) in self.config.env_config()?.iter() {
+ // never override a value that has already been set by cargo
+ if cmd.get_envs().contains_key(key) {
+ continue;
+ }
+
+ if value.is_force() || self.config.get_env_os(key).is_none() {
+ cmd.env(key, value.resolve(self.config));
+ }
+ }
+
+ Ok(cmd)
+ }
+}
+
+/// Prepares a rustc tool process (rustc or rustdoc) with additional environment
+/// variables that are only relevant in a context that has a unit.
+fn fill_rustc_tool_env(mut cmd: ProcessBuilder, unit: &Unit) -> ProcessBuilder {
+ if unit.target.is_executable() {
+ let name = unit
+ .target
+ .binary_filename()
+ .unwrap_or(unit.target.name().to_string());
+
+ cmd.env("CARGO_BIN_NAME", name);
+ }
+ cmd.env("CARGO_CRATE_NAME", unit.target.crate_name());
+ cmd
+}
+
+fn get_sysroot_target_libdir(
+ bcx: &BuildContext<'_, '_>,
+) -> CargoResult<HashMap<CompileKind, PathBuf>> {
+ bcx.all_kinds
+ .iter()
+ .map(|&kind| {
+ let Some(info) = bcx.target_data.get_info(kind) else {
+ let target = match kind {
+ CompileKind::Host => "host".to_owned(),
+ CompileKind::Target(s) => s.short_name().to_owned(),
+ };
+
+ let dependency = bcx
+ .unit_graph
+ .iter()
+ .find_map(|(u, _)| (u.kind == kind).then_some(u.pkg.summary().package_id()))
+ .unwrap();
+
+ anyhow::bail!(
+ "could not find specification for target `{target}`.\n \
+                     Dependency `{dependency}` requires building for target `{target}`."
+ )
+ };
+
+ Ok((kind, info.sysroot_target_libdir.clone()))
+ })
+ .collect()
+}
+
+fn target_runner(
+ bcx: &BuildContext<'_, '_>,
+ kind: CompileKind,
+) -> CargoResult<Option<(PathBuf, Vec<String>)>> {
+ let target = bcx.target_data.short_name(&kind);
+
+ // try target.{}.runner
+ let key = format!("target.{}.runner", target);
+
+ if let Some(v) = bcx.config.get::<Option<config::PathAndArgs>>(&key)? {
+ let path = v.path.resolve_program(bcx.config);
+ return Ok(Some((path, v.args)));
+ }
+
+ // try target.'cfg(...)'.runner
+ let target_cfg = bcx.target_data.info(kind).cfg();
+ let mut cfgs = bcx
+ .config
+ .target_cfgs()?
+ .iter()
+ .filter_map(|(key, cfg)| cfg.runner.as_ref().map(|runner| (key, runner)))
+ .filter(|(key, _runner)| CfgExpr::matches_key(key, target_cfg));
+ let matching_runner = cfgs.next();
+ if let Some((key, runner)) = cfgs.next() {
+ anyhow::bail!(
+ "several matching instances of `target.'cfg(..)'.runner` in configurations\n\
+ first match `{}` located in {}\n\
+ second match `{}` located in {}",
+ matching_runner.unwrap().0,
+ matching_runner.unwrap().1.definition,
+ key,
+ runner.definition
+ );
+ }
+ Ok(matching_runner.map(|(_k, runner)| {
+ (
+ runner.val.path.clone().resolve_program(bcx.config),
+ runner.val.args.clone(),
+ )
+ }))
+}
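
To make the runner resolution above concrete: once a runner has been found via `target.<triple>.runner` or a matching `target.'cfg(..)'.runner`, `target_process` makes the runner the program and passes the actual binary as an argument. A sketch of that wrapping using `std::process::Command` instead of cargo's `ProcessBuilder` (the runner, its arguments, and the binary path are invented):

use std::process::Command;

fn wrap_with_runner(runner: Option<(&str, &[&str])>, bin: &str) -> Command {
    match runner {
        Some((program, extra_args)) => {
            // e.g. `target.aarch64-unknown-linux-gnu.runner = "qemu-aarch64"`
            let mut cmd = Command::new(program);
            cmd.args(extra_args);
            cmd.arg(bin);
            cmd
        }
        // No runner configured: run the binary directly.
        None => Command::new(bin),
    }
}

fn main() {
    let runner_args: &[&str] = &["-L", "/usr/aarch64-linux-gnu"];
    let cmd = wrap_with_runner(Some(("qemu-aarch64", runner_args)), "target/debug/foo");
    println!("{:?}", cmd);
}
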
diff --git a/src/tools/cargo/src/cargo/core/compiler/compile_kind.rs b/src/tools/cargo/src/cargo/core/compiler/compile_kind.rs
new file mode 100644
index 000000000..73d8f89cc
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/compile_kind.rs
@@ -0,0 +1,200 @@
+//! Type definitions for cross-compilation.
+
+use crate::core::Target;
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+use crate::util::{try_canonicalize, Config, StableHasher};
+use anyhow::Context as _;
+use serde::Serialize;
+use std::collections::BTreeSet;
+use std::fs;
+use std::hash::{Hash, Hasher};
+use std::path::Path;
+
+/// Indicator for how a unit is being compiled.
+///
+/// This is used primarily for organizing cross compilations vs host
+/// compilations, where cross compilations happen at the request of `--target`
+/// and host compilations happen for things like build scripts and procedural
+/// macros.
+#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord)]
+pub enum CompileKind {
+ /// Attached to a unit that is compiled for the "host" system or otherwise
+ /// is compiled without a `--target` flag. This is used for procedural
+ /// macros and build scripts, or if the `--target` flag isn't passed.
+ Host,
+
+ /// Attached to a unit to be compiled for a particular target. This is used
+ /// for units when the `--target` flag is passed.
+ Target(CompileTarget),
+}
+
+impl CompileKind {
+ pub fn is_host(&self) -> bool {
+ matches!(self, CompileKind::Host)
+ }
+
+ pub fn for_target(self, target: &Target) -> CompileKind {
+ // Once we start compiling for the `Host` kind we continue doing so, but
+ // if we are a `Target` kind and then we start compiling for a target
+ // that needs to be on the host we lift ourselves up to `Host`.
+ match self {
+ CompileKind::Host => CompileKind::Host,
+ CompileKind::Target(_) if target.for_host() => CompileKind::Host,
+ CompileKind::Target(n) => CompileKind::Target(n),
+ }
+ }
+
+ /// Creates a new list of `CompileKind` based on the requested list of
+ /// targets.
+ ///
+ /// If no targets are given then this returns a single-element vector with
+ /// `CompileKind::Host`.
+ pub fn from_requested_targets(
+ config: &Config,
+ targets: &[String],
+ ) -> CargoResult<Vec<CompileKind>> {
+ let dedup = |targets: &[String]| {
+ Ok(targets
+ .iter()
+ .map(|value| Ok(CompileKind::Target(CompileTarget::new(value)?)))
+ // First collect into a set to deduplicate any `--target` passed
+ // more than once...
+ .collect::<CargoResult<BTreeSet<_>>>()?
+ // ... then generate a flat list for everything else to use.
+ .into_iter()
+ .collect())
+ };
+
+ if !targets.is_empty() {
+ return dedup(targets);
+ }
+
+ let kinds = match &config.build_config()?.target {
+ None => Ok(vec![CompileKind::Host]),
+ Some(build_target_config) => dedup(&build_target_config.values(config)?),
+ };
+
+ kinds
+ }
+
+ /// Hash used for fingerprinting.
+ ///
+ /// Metadata hashing uses the normal Hash trait, which does not
+ /// differentiate on `.json` file contents. The fingerprint hash does
+ /// check the contents.
+ pub fn fingerprint_hash(&self) -> u64 {
+ match self {
+ CompileKind::Host => 0,
+ CompileKind::Target(target) => target.fingerprint_hash(),
+ }
+ }
+}
+
+impl serde::ser::Serialize for CompileKind {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::ser::Serializer,
+ {
+ match self {
+ CompileKind::Host => None::<&str>.serialize(s),
+ CompileKind::Target(t) => Some(t.name).serialize(s),
+ }
+ }
+}
+
+/// Abstraction for the representation of a compilation target that Cargo has.
+///
+/// Compilation targets are one of two things right now:
+///
+/// 1. A raw target string, like `x86_64-unknown-linux-gnu`.
+/// 2. The path to a JSON file, such as `/path/to/my-target.json`.
+///
+/// Raw target strings are typically dictated by `rustc` itself and represent
+/// built-in targets. Custom JSON files are somewhat unstable, but supported
+/// here in Cargo. Note that for JSON target files this `CompileTarget` stores a
+/// full canonicalized path to the target.
+///
+/// The main reason this type exists is to handle JSON target files: when we
+/// call rustc we pass the full path, but when we use the target for Cargo's
+/// purposes (like naming directories or looking up configuration keys) we only
+/// use the file stem of the JSON file. For built-in rustc targets this is
+/// basically just an uninterpreted string.
+#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord, Serialize)]
+pub struct CompileTarget {
+ name: InternedString,
+}
+
+impl CompileTarget {
+ pub fn new(name: &str) -> CargoResult<CompileTarget> {
+ let name = name.trim();
+ if name.is_empty() {
+ anyhow::bail!("target was empty");
+ }
+ if !name.ends_with(".json") {
+ return Ok(CompileTarget { name: name.into() });
+ }
+
+ // If `name` ends in `.json` then it's likely a custom target
+ // specification. Canonicalize the path to ensure that different builds
+ // with different paths always produce the same result.
+ let path = try_canonicalize(Path::new(name))
+ .with_context(|| format!("target path {:?} is not a valid file", name))?;
+
+ let name = path
+ .into_os_string()
+ .into_string()
+ .map_err(|_| anyhow::format_err!("target path is not valid unicode"))?;
+ Ok(CompileTarget { name: name.into() })
+ }
+
+ /// Returns the full unqualified name of this target, suitable for passing
+ /// to `rustc` directly.
+ ///
+ /// Typically this is pretty much the same as `short_name`, but for the case
+ /// of JSON target files this will be a full canonicalized path name for the
+ /// current filesystem.
+ pub fn rustc_target(&self) -> InternedString {
+ self.name
+ }
+
+ /// Returns a "short" version of the target name suitable for usage within
+ /// Cargo for configuration and such.
+ ///
+ /// This is typically the same as `rustc_target`, or the full name, but for
+ /// JSON target files this returns just the file stem (e.g. `foo` out of
+ /// `foo.json`) instead of the full path.
+ pub fn short_name(&self) -> &str {
+ // Flexible target specifications often point at json files, so if it
+ // looks like we've got one of those just use the file stem (the file
+ // name without ".json") as a short name for this target. Note that the
+ // `unwrap()` here should never trigger since we have a nonempty name
+        // and it started out as UTF-8, so it's always UTF-8.
+ if self.name.ends_with(".json") {
+ Path::new(&self.name).file_stem().unwrap().to_str().unwrap()
+ } else {
+ &self.name
+ }
+ }
+
+ /// See [`CompileKind::fingerprint_hash`].
+ pub fn fingerprint_hash(&self) -> u64 {
+ let mut hasher = StableHasher::new();
+ match self
+ .name
+ .ends_with(".json")
+ .then(|| fs::read_to_string(self.name))
+ {
+ Some(Ok(contents)) => {
+ // This may have some performance concerns, since it is called
+ // fairly often. If that ever seems worth fixing, consider
+ // embedding this in `CompileTarget`.
+ contents.hash(&mut hasher);
+ }
+ _ => {
+ self.name.hash(&mut hasher);
+ }
+ }
+ hasher.finish()
+ }
+}
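
To make the short-name rule concrete, here is a tiny standalone sketch (the free function is hypothetical; `CompileTarget::short_name` itself works on the stored, canonicalized name):

use std::path::Path;

fn short_name(target: &str) -> &str {
    if target.ends_with(".json") {
        // Custom target spec: use the file stem, e.g. `my-target` for
        // `/path/to/my-target.json`.
        Path::new(target).file_stem().unwrap().to_str().unwrap()
    } else {
        // Built-in target triple: used as-is.
        target
    }
}

fn main() {
    assert_eq!(short_name("x86_64-unknown-linux-gnu"), "x86_64-unknown-linux-gnu");
    assert_eq!(short_name("/path/to/my-target.json"), "my-target");
}
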
diff --git a/src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs b/src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs
new file mode 100644
index 000000000..1c9d28461
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs
@@ -0,0 +1,724 @@
+//! See [`CompilationFiles`].
+
+use std::collections::HashMap;
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+
+use lazycell::LazyCell;
+use log::debug;
+
+use super::{BuildContext, CompileKind, Context, FileFlavor, Layout};
+use crate::core::compiler::{CompileMode, CompileTarget, CrateType, FileType, Unit};
+use crate::core::{Target, TargetKind, Workspace};
+use crate::util::{self, CargoResult, StableHasher};
+
+/// This is a generic version number that can be changed to make
+/// backwards-incompatible changes to any file structures in the output
+/// directory. For example, the fingerprint files or the build-script
+/// output files.
+///
+/// Normally cargo updates ship with rustc updates which will
+/// cause a new hash due to the rustc version changing, but this allows
+/// cargo to be extra careful to deal with different versions of cargo that
+/// use the same rustc version.
+const METADATA_VERSION: u8 = 2;
+
+/// The `Metadata` is a hash used to make unique file names for each unit in a
+/// build. It is also used for symbol mangling.
+///
+/// For example:
+/// - A project may depend on crate `A` and crate `B`, so the package name must be in the file name.
+/// - Similarly a project may depend on two versions of `A`, so the version must be in the file name.
+///
+/// In general this must include all things that need to be distinguished in different parts of
+/// the same build. This is absolutely required or we would overwrite things before
+/// we get a chance to use them.
+///
+/// It is also used for symbol mangling, because if you have two versions of
+/// the same crate linked together, their symbols need to be differentiated.
+///
+/// We use a hash because it is an easy way to guarantee
+/// that all the inputs can be converted to a valid path.
+///
+/// This also acts as the main layer of caching provided by Cargo.
+/// For example, we want to cache `cargo build` and `cargo doc` separately, so that running one
+/// does not invalidate the artifacts for the other. We do this by including [`CompileMode`] in the
+/// hash, so the artifacts go in different folders and do not overwrite each other.
+/// If we forget to include something here that should have been included, we still get
+/// correct output but rebuild more than is needed.
+///
+/// Some things that need to be tracked to ensure the correct output should definitely *not*
+/// go in the `Metadata`. For example, the modification time of a file should be tracked so that a
+/// rebuild happens when the file changes. However, it would be wasteful to include it in the `Metadata`:
+/// the old artifacts are never going to be needed again, so we can save space by just overwriting them.
+/// If we include something here that we should not have, we still get the correct output but take
+/// more space than needed. This makes not including something in `Metadata`
+/// a form of cache invalidation.
+///
+/// You should also avoid anything that would interfere with reproducible
+/// builds. For example, *any* absolute path should be avoided. This is one
+/// reason that `RUSTFLAGS` is not in `Metadata`, because it often has
+/// absolute paths (like `--remap-path-prefix` which is fundamentally used for
+/// reproducible builds and has absolute paths in it). Also, in some cases the
+/// mangled symbols need to be stable between different builds with different
+/// settings. For example, profile-guided optimizations need to swap
+/// `RUSTFLAGS` between runs, but needs to keep the same symbol names.
+///
+/// Note that the `Fingerprint` is in charge of tracking everything needed to determine if a
+/// rebuild is needed.
+#[derive(Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd)]
+pub struct Metadata(u64);
+
+impl fmt::Display for Metadata {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{:016x}", self.0)
+ }
+}
+
+impl fmt::Debug for Metadata {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Metadata({:016x})", self.0)
+ }
+}
+
+/// Information about the metadata hashes used for a `Unit`.
+struct MetaInfo {
+ /// The symbol hash to use.
+ meta_hash: Metadata,
+ /// Whether or not the `-C extra-filename` flag is used to generate unique
+ /// output filenames for this `Unit`.
+ ///
+ /// If this is `true`, the `meta_hash` is used for the filename.
+ use_extra_filename: bool,
+}
+
+/// Collection of information about the files emitted by the compiler, and the
+/// output directory structure.
+pub struct CompilationFiles<'a, 'cfg> {
+ /// The target directory layout for the host (and target if it is the same as host).
+ pub(super) host: Layout,
+    /// The target directory layout for the target (if different from the host).
+ pub(super) target: HashMap<CompileTarget, Layout>,
+ /// Additional directory to include a copy of the outputs.
+ export_dir: Option<PathBuf>,
+ /// The root targets requested by the user on the command line (does not
+ /// include dependencies).
+ roots: Vec<Unit>,
+ ws: &'a Workspace<'cfg>,
+ /// Metadata hash to use for each unit.
+ metas: HashMap<Unit, MetaInfo>,
+    /// For each Unit, a list of all files produced.
+ outputs: HashMap<Unit, LazyCell<Arc<Vec<OutputFile>>>>,
+}
+
+/// Info about a single file emitted by the compiler.
+#[derive(Debug)]
+pub struct OutputFile {
+ /// Absolute path to the file that will be produced by the build process.
+ pub path: PathBuf,
+ /// If it should be linked into `target`, and what it should be called
+ /// (e.g., without metadata).
+ pub hardlink: Option<PathBuf>,
+ /// If `--out-dir` is specified, the absolute path to the exported file.
+ pub export_path: Option<PathBuf>,
+ /// Type of the file (library / debug symbol / else).
+ pub flavor: FileFlavor,
+}
+
+impl OutputFile {
+ /// Gets the hard link if present; otherwise, returns the path.
+ pub fn bin_dst(&self) -> &PathBuf {
+ match self.hardlink {
+ Some(ref link_dst) => link_dst,
+ None => &self.path,
+ }
+ }
+}
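+
+// Illustrative note (an assumption-laden sketch, not from Cargo's sources): for a
+// binary compiled to `target/debug/deps/foo-<hash>`, the `hardlink` is typically
+// the uplifted `target/debug/foo`, so `bin_dst()` returns that path; units that
+// are never uplifted (for example dependency rlibs) have no hardlink and fall
+// back to `path`.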
+
+impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
+ pub(super) fn new(
+ cx: &Context<'a, 'cfg>,
+ host: Layout,
+ target: HashMap<CompileTarget, Layout>,
+ ) -> CompilationFiles<'a, 'cfg> {
+ let mut metas = HashMap::new();
+ for unit in &cx.bcx.roots {
+ metadata_of(unit, cx, &mut metas);
+ }
+ let outputs = metas
+ .keys()
+ .cloned()
+ .map(|unit| (unit, LazyCell::new()))
+ .collect();
+ CompilationFiles {
+ ws: cx.bcx.ws,
+ host,
+ target,
+ export_dir: cx.bcx.build_config.export_dir.clone(),
+ roots: cx.bcx.roots.clone(),
+ metas,
+ outputs,
+ }
+ }
+
+    /// Returns the appropriate directory layout for the given compile kind (host or target).
+ pub fn layout(&self, kind: CompileKind) -> &Layout {
+ match kind {
+ CompileKind::Host => &self.host,
+ CompileKind::Target(target) => &self.target[&target],
+ }
+ }
+
+ /// Gets the metadata for the given unit.
+ ///
+ /// See [`Metadata`] and [`fingerprint`] module for more.
+ ///
+ /// [`fingerprint`]: ../../fingerprint/index.html#fingerprints-and-metadata
+ pub fn metadata(&self, unit: &Unit) -> Metadata {
+ self.metas[unit].meta_hash
+ }
+
+ /// Returns whether or not `-C extra-filename` is used to extend the
+ /// output filenames to make them unique.
+ pub fn use_extra_filename(&self, unit: &Unit) -> bool {
+ self.metas[unit].use_extra_filename
+ }
+
+ /// Gets the short hash based only on the `PackageId`.
+    /// Used in place of the metadata hash when the unit does not use `-C extra-filename`.
+ pub fn target_short_hash(&self, unit: &Unit) -> String {
+ let hashable = unit.pkg.package_id().stable_hash(self.ws.root());
+ util::short_hash(&(METADATA_VERSION, hashable))
+ }
+
+ /// Returns the directory where the artifacts for the given unit are
+ /// initially created.
+ pub fn out_dir(&self, unit: &Unit) -> PathBuf {
+ // Docscrape units need to have doc/ set as the out_dir so sources for reverse-dependencies
+ // will be put into doc/ and not into deps/ where the *.examples files are stored.
+ if unit.mode.is_doc() || unit.mode.is_doc_scrape() {
+ self.layout(unit.kind).doc().to_path_buf()
+ } else if unit.mode.is_doc_test() {
+ panic!("doc tests do not have an out dir");
+ } else if unit.target.is_custom_build() {
+ self.build_script_dir(unit)
+ } else if unit.target.is_example() {
+ self.layout(unit.kind).examples().to_path_buf()
+ } else if unit.artifact.is_true() {
+ self.artifact_dir(unit)
+ } else {
+ self.deps_dir(unit).to_path_buf()
+ }
+ }
+
+ /// Additional export directory from `--out-dir`.
+ pub fn export_dir(&self) -> Option<PathBuf> {
+ self.export_dir.clone()
+ }
+
+ /// Directory name to use for a package in the form `NAME-HASH`.
+ ///
+ /// Note that some units may share the same directory, so care should be
+ /// taken in those cases!
+ fn pkg_dir(&self, unit: &Unit) -> String {
+ let name = unit.pkg.package_id().name();
+ let meta = &self.metas[unit];
+ if meta.use_extra_filename {
+ format!("{}-{}", name, meta.meta_hash)
+ } else {
+ format!("{}-{}", name, self.target_short_hash(unit))
+ }
+ }
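+
+    // Illustrative sketch (hypothetical values): with the metadata hash this yields
+    // directory names such as `serde-17f48d53ab4c1f5a`; when `-C extra-filename`
+    // is not used (see `should_use_metadata`), the short `PackageId` hash is used
+    // instead.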
+
+ /// Returns the final artifact path for the host (`/…/target/debug`)
+ pub fn host_dest(&self) -> &Path {
+ self.host.dest()
+ }
+
+ /// Returns the root of the build output tree for the host (`/…/target`)
+ pub fn host_root(&self) -> &Path {
+ self.host.root()
+ }
+
+ /// Returns the host `deps` directory path.
+ pub fn host_deps(&self) -> &Path {
+ self.host.deps()
+ }
+
+ /// Returns the directories where Rust crate dependencies are found for the
+ /// specified unit.
+ pub fn deps_dir(&self, unit: &Unit) -> &Path {
+ self.layout(unit.kind).deps()
+ }
+
+ /// Directory where the fingerprint for the given unit should go.
+ pub fn fingerprint_dir(&self, unit: &Unit) -> PathBuf {
+ let dir = self.pkg_dir(unit);
+ self.layout(unit.kind).fingerprint().join(dir)
+ }
+
+ /// Returns the path for a file in the fingerprint directory.
+ ///
+ /// The "prefix" should be something to distinguish the file from other
+ /// files in the fingerprint directory.
+ pub fn fingerprint_file_path(&self, unit: &Unit, prefix: &str) -> PathBuf {
+        // Different targets need to be distinguished in the fingerprint file name.
+ let kind = unit.target.kind().description();
+ let flavor = if unit.mode.is_any_test() {
+ "test-"
+ } else if unit.mode.is_doc() {
+ "doc-"
+ } else if unit.mode.is_run_custom_build() {
+ "run-"
+ } else {
+ ""
+ };
+ let name = format!("{}{}{}-{}", prefix, flavor, kind, unit.target.name());
+ self.fingerprint_dir(unit).join(name)
+ }
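+
+    // Illustrative sketch (hypothetical unit): for a test build of a bin target named
+    // `foo` with the prefix `"output-"`, this yields a file name along the lines of
+    // `output-test-bin-foo` inside the unit's fingerprint directory.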
+
+ /// Path where compiler output is cached.
+ pub fn message_cache_path(&self, unit: &Unit) -> PathBuf {
+ self.fingerprint_file_path(unit, "output-")
+ }
+
+ /// Returns the directory where a compiled build script is stored.
+ /// `/path/to/target/{debug,release}/build/PKG-HASH`
+ pub fn build_script_dir(&self, unit: &Unit) -> PathBuf {
+ assert!(unit.target.is_custom_build());
+ assert!(!unit.mode.is_run_custom_build());
+ assert!(self.metas.contains_key(unit));
+ let dir = self.pkg_dir(unit);
+ self.layout(CompileKind::Host).build().join(dir)
+ }
+
+ /// Returns the directory for compiled artifacts files.
+ /// `/path/to/target/{debug,release}/deps/artifact/KIND/PKG-HASH`
+ fn artifact_dir(&self, unit: &Unit) -> PathBuf {
+ assert!(self.metas.contains_key(unit));
+ assert!(unit.artifact.is_true());
+ let dir = self.pkg_dir(unit);
+ let kind = match unit.target.kind() {
+ TargetKind::Bin => "bin",
+ TargetKind::Lib(lib_kinds) => match lib_kinds.as_slice() {
+ &[CrateType::Cdylib] => "cdylib",
+ &[CrateType::Staticlib] => "staticlib",
+ invalid => unreachable!(
+ "BUG: unexpected artifact library type(s): {:?} - these should have been split",
+ invalid
+ ),
+ },
+ invalid => unreachable!(
+ "BUG: {:?} are not supposed to be used as artifacts",
+ invalid
+ ),
+ };
+ self.layout(unit.kind).artifact().join(dir).join(kind)
+ }
+
+ /// Returns the directory where information about running a build script
+ /// is stored.
+ /// `/path/to/target/{debug,release}/build/PKG-HASH`
+ pub fn build_script_run_dir(&self, unit: &Unit) -> PathBuf {
+ assert!(unit.target.is_custom_build());
+ assert!(unit.mode.is_run_custom_build());
+ let dir = self.pkg_dir(unit);
+ self.layout(unit.kind).build().join(dir)
+ }
+
+ /// Returns the "OUT_DIR" directory for running a build script.
+ /// `/path/to/target/{debug,release}/build/PKG-HASH/out`
+ pub fn build_script_out_dir(&self, unit: &Unit) -> PathBuf {
+ self.build_script_run_dir(unit).join("out")
+ }
+
+ /// Returns the path to the executable binary for the given bin target.
+ ///
+    /// This should only be used when a `Unit` is not available.
+ pub fn bin_link_for_target(
+ &self,
+ target: &Target,
+ kind: CompileKind,
+ bcx: &BuildContext<'_, '_>,
+ ) -> CargoResult<PathBuf> {
+ assert!(target.is_bin());
+ let dest = self.layout(kind).dest();
+ let info = bcx.target_data.info(kind);
+ let (file_types, _) = info
+ .rustc_outputs(
+ CompileMode::Build,
+ &TargetKind::Bin,
+ bcx.target_data.short_name(&kind),
+ )
+ .expect("target must support `bin`");
+
+ let file_type = file_types
+ .iter()
+ .find(|file_type| file_type.flavor == FileFlavor::Normal)
+ .expect("target must support `bin`");
+
+ Ok(dest.join(file_type.uplift_filename(target)))
+ }
+
+ /// Returns the filenames that the given unit will generate.
+ ///
+ /// Note: It is not guaranteed that all of the files will be generated.
+ pub(super) fn outputs(
+ &self,
+ unit: &Unit,
+ bcx: &BuildContext<'a, 'cfg>,
+ ) -> CargoResult<Arc<Vec<OutputFile>>> {
+ self.outputs[unit]
+ .try_borrow_with(|| self.calc_outputs(unit, bcx))
+ .map(Arc::clone)
+ }
+
+ /// Returns the path where the output for the given unit and FileType
+ /// should be uplifted to.
+ ///
+ /// Returns `None` if the unit shouldn't be uplifted (for example, a
+ /// dependent rlib).
+ fn uplift_to(&self, unit: &Unit, file_type: &FileType, from_path: &Path) -> Option<PathBuf> {
+ // Tests, check, doc, etc. should not be uplifted.
+ if unit.mode != CompileMode::Build || file_type.flavor == FileFlavor::Rmeta {
+ return None;
+ }
+
+ // Artifact dependencies are never uplifted.
+ if unit.artifact.is_true() {
+ return None;
+ }
+
+ // - Binaries: The user always wants to see these, even if they are
+ // implicitly built (for example for integration tests).
+ // - dylibs: This ensures that the dynamic linker pulls in all the
+ // latest copies (even if the dylib was built from a previous cargo
+ // build). There are complex reasons for this, see #8139, #6167, #6162.
+ // - Things directly requested from the command-line (the "roots").
+ // This one is a little questionable for rlibs (see #6131), but is
+ // historically how Cargo has operated. This is primarily useful to
+ // give the user access to staticlibs and cdylibs.
+ if !unit.target.is_bin()
+ && !unit.target.is_custom_build()
+ && file_type.crate_type != Some(CrateType::Dylib)
+ && !self.roots.contains(unit)
+ {
+ return None;
+ }
+
+ let filename = file_type.uplift_filename(&unit.target);
+ let uplift_path = if unit.target.is_example() {
+ // Examples live in their own little world.
+ self.layout(unit.kind).examples().join(filename)
+ } else if unit.target.is_custom_build() {
+ self.build_script_dir(unit).join(filename)
+ } else {
+ self.layout(unit.kind).dest().join(filename)
+ };
+ if from_path == uplift_path {
+ // This can happen with things like examples that reside in the
+ // same directory, do not have a metadata hash (like on Windows),
+ // and do not have hyphens.
+ return None;
+ }
+ Some(uplift_path)
+ }
+
+ /// Calculates the filenames that the given unit will generate.
+    /// Callers should use [`CompilationFiles::outputs`] instead,
+    /// as it caches the result of this function.
+ fn calc_outputs(
+ &self,
+ unit: &Unit,
+ bcx: &BuildContext<'a, 'cfg>,
+ ) -> CargoResult<Arc<Vec<OutputFile>>> {
+ let ret = match unit.mode {
+ CompileMode::Doc { .. } => {
+ let path = self
+ .out_dir(unit)
+ .join(unit.target.crate_name())
+ .join("index.html");
+ vec![OutputFile {
+ path,
+ hardlink: None,
+ export_path: None,
+ flavor: FileFlavor::Normal,
+ }]
+ }
+ CompileMode::RunCustomBuild => {
+ // At this time, this code path does not handle build script
+ // outputs.
+ vec![]
+ }
+ CompileMode::Doctest => {
+ // Doctests are built in a temporary directory and then
+ // deleted. There is the `--persist-doctests` unstable flag,
+ // but Cargo does not know about that.
+ vec![]
+ }
+ CompileMode::Docscrape => {
+ // The file name needs to be stable across Cargo sessions.
+ // This originally used unit.buildkey(), but that isn't stable,
+ // so we use metadata instead (prefixed with name for debugging).
+ let file_name = format!("{}-{}.examples", unit.pkg.name(), self.metadata(unit));
+ let path = self.deps_dir(unit).join(file_name);
+ vec![OutputFile {
+ path,
+ hardlink: None,
+ export_path: None,
+ flavor: FileFlavor::Normal,
+ }]
+ }
+ CompileMode::Test
+ | CompileMode::Build
+ | CompileMode::Bench
+ | CompileMode::Check { .. } => self.calc_outputs_rustc(unit, bcx)?,
+ };
+ debug!("Target filenames: {:?}", ret);
+
+ Ok(Arc::new(ret))
+ }
+
+ /// Computes the actual, full pathnames for all the files generated by rustc.
+ ///
+ /// The `OutputFile` also contains the paths where those files should be
+ /// "uplifted" to.
+ fn calc_outputs_rustc(
+ &self,
+ unit: &Unit,
+ bcx: &BuildContext<'a, 'cfg>,
+ ) -> CargoResult<Vec<OutputFile>> {
+ let out_dir = self.out_dir(unit);
+
+ let info = bcx.target_data.info(unit.kind);
+ let triple = bcx.target_data.short_name(&unit.kind);
+ let (file_types, unsupported) =
+ info.rustc_outputs(unit.mode, unit.target.kind(), triple)?;
+ if file_types.is_empty() {
+ if !unsupported.is_empty() {
+ let unsupported_strs: Vec<_> = unsupported.iter().map(|ct| ct.as_str()).collect();
+ anyhow::bail!(
+ "cannot produce {} for `{}` as the target `{}` \
+ does not support these crate types",
+ unsupported_strs.join(", "),
+ unit.pkg,
+ triple,
+ )
+ }
+ anyhow::bail!(
+ "cannot compile `{}` as the target `{}` does not \
+ support any of the output crate types",
+ unit.pkg,
+ triple,
+ );
+ }
+
+ // Convert FileType to OutputFile.
+ let mut outputs = Vec::new();
+ for file_type in file_types {
+ let meta = &self.metas[unit];
+ let meta_opt = meta.use_extra_filename.then(|| meta.meta_hash.to_string());
+ let path = out_dir.join(file_type.output_filename(&unit.target, meta_opt.as_deref()));
+
+        // If the `different_binary_name` feature is enabled, the name of the hardlink will
+ // be the name of the binary provided by the user in `Cargo.toml`.
+ let hardlink = self.uplift_to(unit, &file_type, &path);
+ let export_path = if unit.target.is_custom_build() {
+ None
+ } else {
+ self.export_dir.as_ref().and_then(|export_dir| {
+ hardlink
+ .as_ref()
+ .map(|hardlink| export_dir.join(hardlink.file_name().unwrap()))
+ })
+ };
+ outputs.push(OutputFile {
+ path,
+ hardlink,
+ export_path,
+ flavor: file_type.flavor,
+ });
+ }
+ Ok(outputs)
+ }
+}
+
+/// Gets the metadata hash for the given [`Unit`].
+///
+/// When a metadata hash doesn't exist for the given unit,
+/// this calls itself recursively to compute metadata hashes of all its dependencies.
+/// See [`compute_metadata`] for how a single metadata hash is computed.
+fn metadata_of<'a>(
+ unit: &Unit,
+ cx: &Context<'_, '_>,
+ metas: &'a mut HashMap<Unit, MetaInfo>,
+) -> &'a MetaInfo {
+ if !metas.contains_key(unit) {
+ let meta = compute_metadata(unit, cx, metas);
+ metas.insert(unit.clone(), meta);
+ for dep in cx.unit_deps(unit) {
+ metadata_of(&dep.unit, cx, metas);
+ }
+ }
+ &metas[unit]
+}
+
+/// Computes the metadata hash for the given [`Unit`].
+fn compute_metadata(
+ unit: &Unit,
+ cx: &Context<'_, '_>,
+ metas: &mut HashMap<Unit, MetaInfo>,
+) -> MetaInfo {
+ let bcx = &cx.bcx;
+ let mut hasher = StableHasher::new();
+
+ METADATA_VERSION.hash(&mut hasher);
+
+ // Unique metadata per (name, source, version) triple. This'll allow us
+ // to pull crates from anywhere without worrying about conflicts.
+ unit.pkg
+ .package_id()
+ .stable_hash(bcx.ws.root())
+ .hash(&mut hasher);
+
+ // Also mix in enabled features to our metadata. This'll ensure that
+ // when changing feature sets each lib is separately cached.
+ unit.features.hash(&mut hasher);
+
+ // Mix in the target-metadata of all the dependencies of this target.
+ let mut deps_metadata = cx
+ .unit_deps(unit)
+ .iter()
+ .map(|dep| metadata_of(&dep.unit, cx, metas).meta_hash)
+ .collect::<Vec<_>>();
+ deps_metadata.sort();
+ deps_metadata.hash(&mut hasher);
+
+    // Throw in the profile we're compiling with. This helps keep
+    // `panic=abort` and `panic=unwind` artifacts cached separately, along with
+    // various settings like debuginfo and whatnot.
+ unit.profile.hash(&mut hasher);
+ unit.mode.hash(&mut hasher);
+ cx.lto[unit].hash(&mut hasher);
+
+ // Artifacts compiled for the host should have a different
+ // metadata piece than those compiled for the target, so make sure
+ // we throw in the unit's `kind` as well. Use `fingerprint_hash`
+ // so that the StableHash doesn't change based on the pathnames
+ // of the custom target JSON spec files.
+ unit.kind.fingerprint_hash().hash(&mut hasher);
+
+ // Finally throw in the target name/kind. This ensures that concurrent
+ // compiles of targets in the same crate don't collide.
+ unit.target.name().hash(&mut hasher);
+ unit.target.kind().hash(&mut hasher);
+
+ hash_rustc_version(bcx, &mut hasher);
+
+ if cx.bcx.ws.is_member(&unit.pkg) {
+ // This is primarily here for clippy. This ensures that the clippy
+ // artifacts are separate from the `check` ones.
+ if let Some(path) = &cx.bcx.rustc().workspace_wrapper {
+ path.hash(&mut hasher);
+ }
+ }
+
+ // Seed the contents of `__CARGO_DEFAULT_LIB_METADATA` to the hasher if present.
+ // This should be the release channel, to get a different hash for each channel.
+ if let Ok(ref channel) = cx.bcx.config.get_env("__CARGO_DEFAULT_LIB_METADATA") {
+ channel.hash(&mut hasher);
+ }
+
+ // std units need to be kept separate from user dependencies. std crates
+ // are differentiated in the Unit with `is_std` (for things like
+ // `-Zforce-unstable-if-unmarked`), so they are always built separately.
+ // This isn't strictly necessary for build dependencies which probably
+ // don't need unstable support. A future experiment might be to set
+ // `is_std` to false for build dependencies so that they can be shared
+ // with user dependencies.
+ unit.is_std.hash(&mut hasher);
+
+ MetaInfo {
+ meta_hash: Metadata(hasher.finish()),
+ use_extra_filename: should_use_metadata(bcx, unit),
+ }
+}
+
+/// Hash the version of rustc being used during the build process.
+fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) {
+ let vers = &bcx.rustc().version;
+ if vers.pre.is_empty() || bcx.config.cli_unstable().separate_nightlies {
+ // For stable, keep the artifacts separate. This helps if someone is
+ // testing multiple versions, to avoid recompiles.
+ bcx.rustc().verbose_version.hash(hasher);
+ return;
+ }
+ // On "nightly"/"beta"/"dev"/etc, keep each "channel" separate. Don't hash
+ // the date/git information, so that whenever someone updates "nightly",
+ // they won't have a bunch of stale artifacts in the target directory.
+ //
+ // This assumes that the first segment is the important bit ("nightly",
+ // "beta", "dev", etc.). Skip other parts like the `.3` in `-beta.3`.
+ vers.pre.split('.').next().hash(hasher);
+ // Keep "host" since some people switch hosts to implicitly change
+    // targets (like gnu vs musl or gnu vs msvc). In the future, we may want
+ // to consider hashing `unit.kind.short_name()` instead.
+ bcx.rustc().host.hash(hasher);
+ // None of the other lines are important. Currently they are:
+ // binary: rustc <-- or "rustdoc"
+ // commit-hash: 38114ff16e7856f98b2b4be7ab4cd29b38bed59a
+ // commit-date: 2020-03-21
+ // host: x86_64-apple-darwin
+ // release: 1.44.0-nightly
+ // LLVM version: 9.0
+ //
+ // The backend version ("LLVM version") might become more relevant in
+ // the future when cranelift sees more use, and people want to switch
+ // between different backends without recompiling.
+}
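+
+// Illustrative sketch: the channel extraction above only keeps the first segment
+// of the semver pre-release component, for example:
+//
+//     assert_eq!("beta.3".split('.').next(), Some("beta"));
+//     assert_eq!("nightly".split('.').next(), Some("nightly"));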
+
+/// Returns whether or not this unit should use a metadata hash.
+fn should_use_metadata(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool {
+ if unit.mode.is_doc_test() || unit.mode.is_doc() {
+ // Doc tests do not have metadata.
+ return false;
+ }
+ if unit.mode.is_any_test() || unit.mode.is_check() {
+ // These always use metadata.
+ return true;
+ }
+ // No metadata in these cases:
+ //
+ // - dylibs:
+    //   - dylib names are encoded in executables, so they can't be renamed.
+ // - TODO: Maybe use `-install-name` on macOS or `-soname` on other UNIX systems
+ // to specify the dylib name to be used by the linker instead of the filename.
+ // - Windows MSVC executables: The path to the PDB is embedded in the
+ // executable, and we don't want the PDB path to include the hash in it.
+ // - wasm32-unknown-emscripten executables: When using emscripten, the path to the
+ // .wasm file is embedded in the .js file, so we don't want the hash in there.
+ //
+ // This is only done for local packages, as we don't expect to export
+ // dependencies.
+ //
+ // The __CARGO_DEFAULT_LIB_METADATA env var is used to override this to
+ // force metadata in the hash. This is only used for building libstd. For
+ // example, if libstd is placed in a common location, we don't want a file
+ // named /usr/lib/libstd.so which could conflict with other rustc
+ // installs. In addition it prevents accidentally loading a libstd of a
+ // different compiler at runtime.
+ // See https://github.com/rust-lang/cargo/issues/3005
+ let short_name = bcx.target_data.short_name(&unit.kind);
+ if (unit.target.is_dylib()
+ || unit.target.is_cdylib()
+ || (unit.target.is_executable() && short_name == "wasm32-unknown-emscripten")
+ || (unit.target.is_executable() && short_name.contains("msvc")))
+ && unit.pkg.package_id().source_id().is_path()
+ && bcx.config.get_env("__CARGO_DEFAULT_LIB_METADATA").is_err()
+ {
+ return false;
+ }
+ true
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/context/mod.rs b/src/tools/cargo/src/cargo/core/compiler/context/mod.rs
new file mode 100644
index 000000000..3f13f086c
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/context/mod.rs
@@ -0,0 +1,646 @@
+//! [`Context`] is the mutable state used during the build process.
+
+use std::collections::{BTreeSet, HashMap, HashSet};
+use std::path::{Path, PathBuf};
+use std::sync::{Arc, Mutex};
+
+use crate::core::compiler::compilation::{self, UnitOutput};
+use crate::core::compiler::{self, artifact, Unit};
+use crate::core::PackageId;
+use crate::util::errors::CargoResult;
+use crate::util::profile;
+use anyhow::{bail, Context as _};
+use filetime::FileTime;
+use jobserver::Client;
+
+use super::build_plan::BuildPlan;
+use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
+use super::fingerprint::Fingerprint;
+use super::job_queue::JobQueue;
+use super::layout::Layout;
+use super::lto::Lto;
+use super::unit_graph::UnitDep;
+use super::{
+ BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint,
+};
+
+mod compilation_files;
+use self::compilation_files::CompilationFiles;
+pub use self::compilation_files::{Metadata, OutputFile};
+
+/// Collection of all the stuff that is needed to perform a build.
+///
+/// Unlike the mostly-static [`BuildContext`], `Context` holds the _mutable_ state used
+/// throughout the entire build process. Everything is coordinated through this.
+///
+/// [`BuildContext`]: crate::core::compiler::BuildContext
+pub struct Context<'a, 'cfg> {
+ /// Mostly static information about the build task.
+ pub bcx: &'a BuildContext<'a, 'cfg>,
+ /// A large collection of information about the result of the entire compilation.
+ pub compilation: Compilation<'cfg>,
+ /// Output from build scripts, updated after each build script runs.
+ pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
+ /// Dependencies (like rerun-if-changed) declared by a build script.
+ /// This is *only* populated from the output from previous runs.
+ /// If the build script hasn't ever been run, then it must be run.
+ pub build_explicit_deps: HashMap<Unit, BuildDeps>,
+ /// Fingerprints used to detect if a unit is out-of-date.
+ pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
+ /// Cache of file mtimes to reduce filesystem hits.
+ pub mtime_cache: HashMap<PathBuf, FileTime>,
+ /// A set used to track which units have been compiled.
+ /// A unit may appear in the job graph multiple times as a dependency of
+ /// multiple packages, but it only needs to run once.
+ pub compiled: HashSet<Unit>,
+ /// Linking information for each `Unit`.
+ /// See `build_map` for details.
+ pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
+ /// Job server client to manage concurrency with other processes.
+ pub jobserver: Client,
+ /// "Primary" packages are the ones the user selected on the command-line
+ /// with `-p` flags. If no flags are specified, then it is the defaults
+ /// based on the current directory and the default workspace members.
+ primary_packages: HashSet<PackageId>,
+ /// An abstraction of the files and directories that will be generated by
+ /// the compilation. This is `None` until after `unit_dependencies` has
+ /// been computed.
+ files: Option<CompilationFiles<'a, 'cfg>>,
+
+ /// A set of units which are compiling rlibs and are expected to produce
+ /// metadata files in addition to the rlib itself.
+ rmeta_required: HashSet<Unit>,
+
+ /// Map of the LTO-status of each unit. This indicates what sort of
+ /// compilation is happening (only object, only bitcode, both, etc), and is
+ /// precalculated early on.
+ pub lto: HashMap<Unit, Lto>,
+
+ /// Map of Doc/Docscrape units to metadata for their -Cmetadata flag.
+    /// See [`Context::compute_metadata_for_doc_units`] for more details.
+ pub metadata_for_doc_units: HashMap<Unit, Metadata>,
+
+ /// Set of metadata of Docscrape units that fail before completion, e.g.
+ /// because the target has a type error. This is in an Arc<Mutex<..>>
+ /// because it is continuously updated as the job progresses.
+ pub failed_scrape_units: Arc<Mutex<HashSet<Metadata>>>,
+}
+
+impl<'a, 'cfg> Context<'a, 'cfg> {
+ pub fn new(bcx: &'a BuildContext<'a, 'cfg>) -> CargoResult<Self> {
+ // Load up the jobserver that we'll use to manage our parallelism. This
+ // is the same as the GNU make implementation of a jobserver, and
+ // intentionally so! It's hoped that we can interact with GNU make and
+ // all share the same jobserver.
+ //
+ // Note that if we don't have a jobserver in our environment then we
+ // create our own, and we create it with `n` tokens, but immediately
+ // acquire one, because one token is ourself, a running process.
+ let jobserver = match bcx.config.jobserver_from_env() {
+ Some(c) => c.clone(),
+ None => {
+ let client = Client::new(bcx.jobs() as usize)
+ .with_context(|| "failed to create jobserver")?;
+ client.acquire_raw()?;
+ client
+ }
+ };
+
+ Ok(Self {
+ bcx,
+ compilation: Compilation::new(bcx)?,
+ build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
+ fingerprints: HashMap::new(),
+ mtime_cache: HashMap::new(),
+ compiled: HashSet::new(),
+ build_scripts: HashMap::new(),
+ build_explicit_deps: HashMap::new(),
+ jobserver,
+ primary_packages: HashSet::new(),
+ files: None,
+ rmeta_required: HashSet::new(),
+ lto: HashMap::new(),
+ metadata_for_doc_units: HashMap::new(),
+ failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
+ })
+ }
+
+ /// Starts compilation, waits for it to finish, and returns information
+ /// about the result of compilation.
+ ///
+ /// See [`ops::cargo_compile`] for a higher-level view of the compile process.
+ ///
+ /// [`ops::cargo_compile`]: ../../../ops/cargo_compile/index.html
+ pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'cfg>> {
+ let mut queue = JobQueue::new(self.bcx);
+ let mut plan = BuildPlan::new();
+ let build_plan = self.bcx.build_config.build_plan;
+ self.lto = super::lto::generate(self.bcx)?;
+ self.prepare_units()?;
+ self.prepare()?;
+ custom_build::build_map(&mut self)?;
+ self.check_collisions()?;
+ self.compute_metadata_for_doc_units();
+
+ // We need to make sure that if there were any previous docs
+ // already compiled, they were compiled with the same Rustc version that we're currently
+        // using. Otherwise we must remove the `doc/` folder and compile again, forcing a rebuild.
+ //
+ // This is important because the `.js`/`.html` & `.css` files that are generated by Rustc don't have
+ // any versioning (See https://github.com/rust-lang/cargo/issues/8461).
+ // Therefore, we can end up with weird bugs and behaviours if we mix different
+ // versions of these files.
+ if self.bcx.build_config.mode.is_doc() {
+ RustDocFingerprint::check_rustdoc_fingerprint(&self)?
+ }
+
+ for unit in &self.bcx.roots {
+ let force_rebuild = self.bcx.build_config.force_rebuild;
+ super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
+ }
+
+ // Now that we've got the full job queue and we've done all our
+ // fingerprint analysis to determine what to run, bust all the memoized
+ // fingerprint hashes to ensure that during the build they all get the
+ // most up-to-date values. In theory we only need to bust hashes that
+ // transitively depend on a dirty build script, but it shouldn't matter
+ // that much for performance anyway.
+ for fingerprint in self.fingerprints.values() {
+ fingerprint.clear_memoized();
+ }
+
+ // Now that we've figured out everything that we're going to do, do it!
+ queue.execute(&mut self, &mut plan)?;
+
+ if build_plan {
+ plan.set_inputs(self.build_plan_inputs()?);
+ plan.output_plan(self.bcx.config);
+ }
+
+ // Collect the result of the build into `self.compilation`.
+ for unit in &self.bcx.roots {
+ // Collect tests and executables.
+ for output in self.outputs(unit)?.iter() {
+ if output.flavor == FileFlavor::DebugInfo || output.flavor == FileFlavor::Auxiliary
+ {
+ continue;
+ }
+
+ let bindst = output.bin_dst();
+
+ if unit.mode == CompileMode::Test {
+ self.compilation
+ .tests
+ .push(self.unit_output(unit, &output.path));
+ } else if unit.target.is_executable() {
+ self.compilation
+ .binaries
+ .push(self.unit_output(unit, bindst));
+ } else if unit.target.is_cdylib()
+ && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
+ {
+ self.compilation
+ .cdylibs
+ .push(self.unit_output(unit, bindst));
+ }
+ }
+
+ // If the unit has a build script, add `OUT_DIR` to the
+ // environment variables.
+ if unit.target.is_lib() {
+ for dep in &self.bcx.unit_graph[unit] {
+ if dep.unit.mode.is_run_custom_build() {
+ let out_dir = self
+ .files()
+ .build_script_out_dir(&dep.unit)
+ .display()
+ .to_string();
+ let script_meta = self.get_run_build_script_metadata(&dep.unit);
+ self.compilation
+ .extra_env
+ .entry(script_meta)
+ .or_insert_with(Vec::new)
+ .push(("OUT_DIR".to_string(), out_dir));
+ }
+ }
+ }
+
+ // Collect information for `rustdoc --test`.
+ if unit.mode.is_doc_test() {
+ let mut unstable_opts = false;
+ let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
+ args.extend(compiler::lto_args(&self, unit));
+ args.extend(compiler::features_args(unit));
+ args.extend(compiler::check_cfg_args(&self, unit));
+
+ let script_meta = self.find_build_script_metadata(unit);
+ if let Some(meta) = script_meta {
+ if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
+ for cfg in &output.cfgs {
+ args.push("--cfg".into());
+ args.push(cfg.into());
+ }
+
+ if !output.check_cfgs.is_empty() {
+ args.push("-Zunstable-options".into());
+ for check_cfg in &output.check_cfgs {
+ args.push("--check-cfg".into());
+ args.push(check_cfg.into());
+ }
+ }
+
+ for (lt, arg) in &output.linker_args {
+ if lt.applies_to(&unit.target) {
+ args.push("-C".into());
+ args.push(format!("link-arg={}", arg).into());
+ }
+ }
+ }
+ }
+ args.extend(self.bcx.rustdocflags_args(unit).iter().map(Into::into));
+
+ use super::MessageFormat;
+ let format = match self.bcx.build_config.message_format {
+ MessageFormat::Short => "short",
+ MessageFormat::Human => "human",
+ MessageFormat::Json { .. } => "json",
+ };
+ args.push("--error-format".into());
+ args.push(format.into());
+
+ self.compilation.to_doc_test.push(compilation::Doctest {
+ unit: unit.clone(),
+ args,
+ unstable_opts,
+ linker: self.bcx.linker(unit.kind),
+ script_meta,
+ env: artifact::get_env(&self, self.unit_deps(unit))?,
+ });
+ }
+
+ super::output_depinfo(&mut self, unit)?;
+ }
+
+ for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
+ self.compilation
+ .extra_env
+ .entry(*script_meta)
+ .or_insert_with(Vec::new)
+ .extend(output.env.iter().cloned());
+
+ for dir in output.library_paths.iter() {
+ self.compilation.native_dirs.insert(dir.clone());
+ }
+ }
+ Ok(self.compilation)
+ }
+
+ /// Returns the executable for the specified unit (if any).
+ pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
+ let is_binary = unit.target.is_executable();
+ let is_test = unit.mode.is_any_test();
+ if !unit.mode.generates_executable() || !(is_binary || is_test) {
+ return Ok(None);
+ }
+ Ok(self
+ .outputs(unit)?
+ .iter()
+ .find(|o| o.flavor == FileFlavor::Normal)
+ .map(|output| output.bin_dst().clone()))
+ }
+
+ pub fn prepare_units(&mut self) -> CargoResult<()> {
+ let dest = self.bcx.profiles.get_dir_name();
+ let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
+ let mut targets = HashMap::new();
+ for kind in self.bcx.all_kinds.iter() {
+ if let CompileKind::Target(target) = *kind {
+ let layout = Layout::new(self.bcx.ws, Some(target), &dest)?;
+ targets.insert(target, layout);
+ }
+ }
+ self.primary_packages
+ .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
+ self.compilation
+ .root_crate_names
+ .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));
+
+ self.record_units_requiring_metadata();
+
+ let files = CompilationFiles::new(self, host_layout, targets);
+ self.files = Some(files);
+ Ok(())
+ }
+
+ /// Prepare this context, ensuring that all filesystem directories are in
+ /// place.
+ pub fn prepare(&mut self) -> CargoResult<()> {
+ let _p = profile::start("preparing layout");
+
+ self.files
+ .as_mut()
+ .unwrap()
+ .host
+ .prepare()
+ .with_context(|| "couldn't prepare build directories")?;
+ for target in self.files.as_mut().unwrap().target.values_mut() {
+ target
+ .prepare()
+ .with_context(|| "couldn't prepare build directories")?;
+ }
+
+ let files = self.files.as_ref().unwrap();
+ for &kind in self.bcx.all_kinds.iter() {
+ let layout = files.layout(kind);
+ self.compilation
+ .root_output
+ .insert(kind, layout.dest().to_path_buf());
+ self.compilation
+ .deps_output
+ .insert(kind, layout.deps().to_path_buf());
+ }
+ Ok(())
+ }
+
+ pub fn files(&self) -> &CompilationFiles<'a, 'cfg> {
+ self.files.as_ref().unwrap()
+ }
+
+ /// Returns the filenames that the given unit will generate.
+ pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
+ self.files.as_ref().unwrap().outputs(unit, self.bcx)
+ }
+
+ /// Direct dependencies for the given unit.
+ pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
+ &self.bcx.unit_graph[unit]
+ }
+
+ /// Returns the RunCustomBuild Unit associated with the given Unit.
+ ///
+ /// If the package does not have a build script, this returns None.
+ pub fn find_build_script_unit(&self, unit: &Unit) -> Option<Unit> {
+ if unit.mode.is_run_custom_build() {
+ return Some(unit.clone());
+ }
+ self.bcx.unit_graph[unit]
+ .iter()
+ .find(|unit_dep| {
+ unit_dep.unit.mode.is_run_custom_build()
+ && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
+ })
+ .map(|unit_dep| unit_dep.unit.clone())
+ }
+
+ /// Returns the metadata hash for the RunCustomBuild Unit associated with
+ /// the given unit.
+ ///
+ /// If the package does not have a build script, this returns None.
+ pub fn find_build_script_metadata(&self, unit: &Unit) -> Option<Metadata> {
+ let script_unit = self.find_build_script_unit(unit)?;
+ Some(self.get_run_build_script_metadata(&script_unit))
+ }
+
+ /// Returns the metadata hash for a RunCustomBuild unit.
+ pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata {
+ assert!(unit.mode.is_run_custom_build());
+ self.files().metadata(unit)
+ }
+
+ pub fn is_primary_package(&self, unit: &Unit) -> bool {
+ self.primary_packages.contains(&unit.pkg.package_id())
+ }
+
+ /// Returns the list of filenames read by cargo to generate the [`BuildContext`]
+ /// (all `Cargo.toml`, etc.).
+ pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
+ // Keep sorted for consistency.
+ let mut inputs = BTreeSet::new();
+ // Note: dev-deps are skipped if they are not present in the unit graph.
+ for unit in self.bcx.unit_graph.keys() {
+ inputs.insert(unit.pkg.manifest_path().to_path_buf());
+ }
+ Ok(inputs.into_iter().collect())
+ }
+
+ /// Returns a [`UnitOutput`] which represents some information about the
+ /// output of a unit.
+ pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
+ let script_meta = self.find_build_script_metadata(unit);
+ UnitOutput {
+ unit: unit.clone(),
+ path: path.to_path_buf(),
+ script_meta,
+ }
+ }
+
+ /// Check if any output file name collision happens.
+ /// See <https://github.com/rust-lang/cargo/issues/6313> for more.
+ fn check_collisions(&self) -> CargoResult<()> {
+ let mut output_collisions = HashMap::new();
+ let describe_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> String {
+ format!(
+ "The {} target `{}` in package `{}` has the same output \
+ filename as the {} target `{}` in package `{}`.\n\
+ Colliding filename is: {}\n",
+ unit.target.kind().description(),
+ unit.target.name(),
+ unit.pkg.package_id(),
+ other_unit.target.kind().description(),
+ other_unit.target.name(),
+ other_unit.pkg.package_id(),
+ path.display()
+ )
+ };
+ let suggestion =
+ "Consider changing their names to be unique or compiling them separately.\n\
+ This may become a hard error in the future; see \
+ <https://github.com/rust-lang/cargo/issues/6313>.";
+ let rustdoc_suggestion =
+ "This is a known bug where multiple crates with the same name use\n\
+ the same path; see <https://github.com/rust-lang/cargo/issues/6313>.";
+ let report_collision = |unit: &Unit,
+ other_unit: &Unit,
+ path: &PathBuf,
+ suggestion: &str|
+ -> CargoResult<()> {
+ if unit.target.name() == other_unit.target.name() {
+ self.bcx.config.shell().warn(format!(
+ "output filename collision.\n\
+ {}\
+ The targets should have unique names.\n\
+ {}",
+ describe_collision(unit, other_unit, path),
+ suggestion
+ ))
+ } else {
+ self.bcx.config.shell().warn(format!(
+ "output filename collision.\n\
+ {}\
+ The output filenames should be unique.\n\
+ {}\n\
+ If this looks unexpected, it may be a bug in Cargo. Please file a bug report at\n\
+ https://github.com/rust-lang/cargo/issues/ with as much information as you\n\
+ can provide.\n\
+ cargo {} running on `{}` target `{}`\n\
+ First unit: {:?}\n\
+ Second unit: {:?}",
+ describe_collision(unit, other_unit, path),
+ suggestion,
+ crate::version(),
+ self.bcx.host_triple(),
+ self.bcx.target_data.short_name(&unit.kind),
+ unit,
+ other_unit))
+ }
+ };
+
+ fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
+ bail!(
+ "document output filename collision\n\
+ The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
+ Only one may be documented at once since they output to the same path.\n\
+ Consider documenting only one, renaming one, \
+ or marking one with `doc = false` in Cargo.toml.",
+ unit.target.kind().description(),
+ unit.target.name(),
+ unit.pkg,
+ other_unit.target.kind().description(),
+ other_unit.target.name(),
+ other_unit.pkg,
+ );
+ }
+
+ let mut keys = self
+ .bcx
+ .unit_graph
+ .keys()
+ .filter(|unit| !unit.mode.is_run_custom_build())
+ .collect::<Vec<_>>();
+ // Sort for consistent error messages.
+ keys.sort_unstable();
+ // These are kept separate to retain compatibility with older
+ // versions, which generated an error when there was a duplicate lib
+ // or bin (but the old code did not check bin<->lib collisions). To
+ // retain backwards compatibility, this only generates an error for
+ // duplicate libs or duplicate bins (but not both). Ideally this
+        // shouldn't be here, but since there isn't a complete workaround yet,
+        // this retains the old behavior.
+ let mut doc_libs = HashMap::new();
+ let mut doc_bins = HashMap::new();
+ for unit in keys {
+ if unit.mode.is_doc() && self.is_primary_package(unit) {
+ // These situations have been an error since before 1.0, so it
+ // is not a warning like the other situations.
+ if unit.target.is_lib() {
+ if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
+ {
+ doc_collision_error(unit, prev)?;
+ }
+ } else if let Some(prev) =
+ doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
+ {
+ doc_collision_error(unit, prev)?;
+ }
+ }
+ for output in self.outputs(unit)?.iter() {
+ if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
+ if unit.mode.is_doc() {
+ // See https://github.com/rust-lang/rust/issues/56169
+ // and https://github.com/rust-lang/rust/issues/61378
+ report_collision(unit, other_unit, &output.path, rustdoc_suggestion)?;
+ } else {
+ report_collision(unit, other_unit, &output.path, suggestion)?;
+ }
+ }
+ if let Some(hardlink) = output.hardlink.as_ref() {
+ if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
+ report_collision(unit, other_unit, hardlink, suggestion)?;
+ }
+ }
+ if let Some(ref export_path) = output.export_path {
+ if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
+ self.bcx.config.shell().warn(format!(
+ "`--out-dir` filename collision.\n\
+ {}\
+ The exported filenames should be unique.\n\
+ {}",
+ describe_collision(unit, other_unit, export_path),
+ suggestion
+ ))?;
+ }
+ }
+ }
+ }
+ Ok(())
+ }
+
+ /// Records the list of units which are required to emit metadata.
+ ///
+    /// Units which depend only on the metadata of others require the others to
+    /// actually produce metadata, so we'll record that here.
+ fn record_units_requiring_metadata(&mut self) {
+ for (key, deps) in self.bcx.unit_graph.iter() {
+ for dep in deps {
+ if self.only_requires_rmeta(key, &dep.unit) {
+ self.rmeta_required.insert(dep.unit.clone());
+ }
+ }
+ }
+ }
+
+    /// Returns whether, when `parent` depends on `dep`, only the metadata file
+    /// from `dep` is required.
+ pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
+ // We're only a candidate for requiring an `rmeta` file if we
+ // ourselves are building an rlib,
+ !parent.requires_upstream_objects()
+ && parent.mode == CompileMode::Build
+ // Our dependency must also be built as an rlib, otherwise the
+ // object code must be useful in some fashion
+ && !dep.requires_upstream_objects()
+ && dep.mode == CompileMode::Build
+ }
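+
+    // Illustrative note: this is what enables pipelined compilation; e.g. if rlib `a`
+    // depends on rlib `b`, compiling `a` can start as soon as `b`'s `.rmeta` is
+    // emitted, without waiting for `b`'s `.rlib` to be finished.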
+
+    /// Returns whether `unit` should also emit metadata when it is built,
+    /// because some compilations rely on that.
+ pub fn rmeta_required(&self, unit: &Unit) -> bool {
+ self.rmeta_required.contains(unit)
+ }
+
+ /// Finds metadata for Doc/Docscrape units.
+ ///
+ /// rustdoc needs a -Cmetadata flag in order to recognize StableCrateIds that refer to
+ /// items in the crate being documented. The -Cmetadata flag used by reverse-dependencies
+ /// will be the metadata of the Cargo unit that generated the current library's rmeta file,
+ /// which should be a Check unit.
+ ///
+ /// If the current crate has reverse-dependencies, such a Check unit should exist, and so
+ /// we use that crate's metadata. If not, we use the crate's Doc unit so at least examples
+ /// scraped from the current crate can be used when documenting the current crate.
+ pub fn compute_metadata_for_doc_units(&mut self) {
+ for unit in self.bcx.unit_graph.keys() {
+ if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
+ continue;
+ }
+
+ let matching_units = self
+ .bcx
+ .unit_graph
+ .keys()
+ .filter(|other| {
+ unit.pkg == other.pkg
+ && unit.target == other.target
+ && !other.mode.is_doc_scrape()
+ })
+ .collect::<Vec<_>>();
+ let metadata_unit = matching_units
+ .iter()
+ .find(|other| other.mode.is_check())
+ .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
+ .unwrap_or(&unit);
+ self.metadata_for_doc_units
+ .insert(unit.clone(), self.files().metadata(metadata_unit));
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/crate_type.rs b/src/tools/cargo/src/cargo/core/compiler/crate_type.rs
new file mode 100644
index 000000000..a36ef6c0f
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/crate_type.rs
@@ -0,0 +1,115 @@
+use std::fmt;
+
+/// Types of the output artifact that the compiler emits.
+/// Usually distributable or linkable either statically or dynamically.
+///
+/// See <https://doc.rust-lang.org/nightly/reference/linkage.html>.
+#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum CrateType {
+ Bin,
+ Lib,
+ Rlib,
+ Dylib,
+ Cdylib,
+ Staticlib,
+ ProcMacro,
+ Other(String),
+}
+
+impl CrateType {
+ pub fn as_str(&self) -> &str {
+ match self {
+ CrateType::Bin => "bin",
+ CrateType::Lib => "lib",
+ CrateType::Rlib => "rlib",
+ CrateType::Dylib => "dylib",
+ CrateType::Cdylib => "cdylib",
+ CrateType::Staticlib => "staticlib",
+ CrateType::ProcMacro => "proc-macro",
+ CrateType::Other(s) => s,
+ }
+ }
+
+ pub fn can_lto(&self) -> bool {
+ match self {
+ CrateType::Bin | CrateType::Staticlib | CrateType::Cdylib => true,
+ CrateType::Lib
+ | CrateType::Rlib
+ | CrateType::Dylib
+ | CrateType::ProcMacro
+ | CrateType::Other(..) => false,
+ }
+ }
+
+ pub fn is_linkable(&self) -> bool {
+ match self {
+ CrateType::Lib | CrateType::Rlib | CrateType::Dylib | CrateType::ProcMacro => true,
+ CrateType::Bin | CrateType::Cdylib | CrateType::Staticlib | CrateType::Other(..) => {
+ false
+ }
+ }
+ }
+
+ pub fn is_dynamic(&self) -> bool {
+ match self {
+ CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro => true,
+ CrateType::Lib
+ | CrateType::Rlib
+ | CrateType::Bin
+ | CrateType::Staticlib
+ | CrateType::Other(..) => false,
+ }
+ }
+
+ /// Returns whether production of this crate type requires the object files
+ /// from dependencies to be available.
+ ///
+ /// See also [`TargetKind::requires_upstream_objects`].
+ ///
+ /// [`TargetKind::requires_upstream_objects`]: crate::core::manifest::TargetKind::requires_upstream_objects
+ pub fn requires_upstream_objects(&self) -> bool {
+ // "lib" == "rlib" and is a compilation that doesn't actually
+ // require upstream object files to exist, only upstream metadata
+        // files. As a result, it doesn't require upstream artifacts.
+
+ !matches!(self, CrateType::Lib | CrateType::Rlib)
+ // Everything else, however, is some form of "linkable output" or
+ // something that requires upstream object files.
+ }
+}
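+
+// Illustrative sketch: as the match above implies, only `lib`/`rlib` outputs can
+// be built from upstream metadata alone, for example:
+//
+//     assert!(!CrateType::Rlib.requires_upstream_objects());
+//     assert!(CrateType::Cdylib.requires_upstream_objects());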
+
+impl fmt::Display for CrateType {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.as_str().fmt(f)
+ }
+}
+
+impl<'a> From<&'a String> for CrateType {
+ fn from(s: &'a String) -> Self {
+ match s.as_str() {
+ "bin" => CrateType::Bin,
+ "lib" => CrateType::Lib,
+ "rlib" => CrateType::Rlib,
+ "dylib" => CrateType::Dylib,
+ "cdylib" => CrateType::Cdylib,
+ "staticlib" => CrateType::Staticlib,
+ "procmacro" => CrateType::ProcMacro,
+ _ => CrateType::Other(s.clone()),
+ }
+ }
+}
+
+impl fmt::Debug for CrateType {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.to_string().fmt(f)
+ }
+}
+
+impl serde::Serialize for CrateType {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::ser::Serializer,
+ {
+ self.to_string().serialize(s)
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/custom_build.rs b/src/tools/cargo/src/cargo/core/compiler/custom_build.rs
new file mode 100644
index 000000000..5728d0c85
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/custom_build.rs
@@ -0,0 +1,1060 @@
+use super::{fingerprint, Context, Job, LinkType, Unit, Work};
+use crate::core::compiler::artifact;
+use crate::core::compiler::context::Metadata;
+use crate::core::compiler::job_queue::JobState;
+use crate::core::{profiles::ProfileRoot, PackageId, Target};
+use crate::util::errors::CargoResult;
+use crate::util::machine_message::{self, Message};
+use crate::util::{internal, profile};
+use anyhow::{bail, Context as _};
+use cargo_platform::Cfg;
+use cargo_util::paths;
+use std::collections::hash_map::{Entry, HashMap};
+use std::collections::{BTreeSet, HashSet};
+use std::path::{Path, PathBuf};
+use std::str;
+use std::sync::{Arc, Mutex};
+
+const CARGO_WARNING: &str = "cargo:warning=";
+
+/// Contains the parsed output of a custom build script.
+#[derive(Clone, Debug, Hash, Default)]
+pub struct BuildOutput {
+ /// Paths to pass to rustc with the `-L` flag.
+ pub library_paths: Vec<PathBuf>,
+ /// Names and link kinds of libraries, suitable for the `-l` flag.
+ pub library_links: Vec<String>,
+ /// Linker arguments suitable to be passed to `-C link-arg=<args>`
+ pub linker_args: Vec<(LinkType, String)>,
+ /// Various `--cfg` flags to pass to the compiler.
+ pub cfgs: Vec<String>,
+ /// Various `--check-cfg` flags to pass to the compiler.
+ pub check_cfgs: Vec<String>,
+ /// Additional environment variables to run the compiler with.
+ pub env: Vec<(String, String)>,
+    /// Metadata key/value pairs to pass to the build scripts of immediate dependents (as `DEP_*` environment variables).
+ pub metadata: Vec<(String, String)>,
+ /// Paths to trigger a rerun of this build script.
+ /// May be absolute or relative paths (relative to package root).
+ pub rerun_if_changed: Vec<PathBuf>,
+ /// Environment variables which, when changed, will cause a rebuild.
+ pub rerun_if_env_changed: Vec<String>,
+ /// Warnings generated by this build.
+ ///
+ /// These are only displayed if this is a "local" package, `-vv` is used,
+ /// or there is a build error for any target in this package.
+ pub warnings: Vec<String>,
+}
+
+/// Map of packages to build script output.
+///
+/// This initially starts out as empty. Overridden build scripts get
+/// inserted during `build_map`. The rest of the entries are added
+/// immediately after each build script runs.
+///
+/// The `Metadata` is the unique metadata hash for the RunCustomBuild Unit of
+/// the package. It needs a unique key, since the build script can be run
+/// multiple times with different profiles or features. We can't embed a
+/// `Unit` because this structure needs to be shareable between threads.
+#[derive(Default)]
+pub struct BuildScriptOutputs {
+ outputs: HashMap<Metadata, BuildOutput>,
+}
+
+/// Linking information for a `Unit`.
+///
+/// See `build_map` for more details.
+#[derive(Default)]
+pub struct BuildScripts {
+ /// List of build script outputs this Unit needs to include for linking. Each
+ /// element is an index into `BuildScriptOutputs`.
+ ///
+ /// Cargo will use this `to_link` vector to add `-L` flags to compiles as we
+ /// propagate them upwards towards the final build. Note, however, that we
+ /// need to preserve the ordering of `to_link` to be topologically sorted.
+ /// This will ensure that build scripts which print their paths properly will
+ /// correctly pick up the files they generated (if there are duplicates
+ /// elsewhere).
+ ///
+ /// To preserve this ordering, the (id, metadata) is stored in two places, once
+ /// in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain
+ /// this as we're building interactively below to ensure that the memory
+ /// usage here doesn't blow up too much.
+ ///
+ /// For more information, see #2354.
+ pub to_link: Vec<(PackageId, Metadata)>,
+ /// This is only used while constructing `to_link` to avoid duplicates.
+ seen_to_link: HashSet<(PackageId, Metadata)>,
+ /// Host-only dependencies that have build scripts. Each element is an
+ /// index into `BuildScriptOutputs`.
+ ///
+ /// This is the set of transitive dependencies that are host-only
+ /// (proc-macro, plugin, build-dependency) that contain a build script.
+ /// Any `BuildOutput::library_paths` path relative to `target` will be
+ /// added to LD_LIBRARY_PATH so that the compiler can find any dynamic
+ /// libraries a build script may have generated.
+ pub plugins: BTreeSet<(PackageId, Metadata)>,
+}
+
+/// Dependency information as declared by a build script.
+#[derive(Debug)]
+pub struct BuildDeps {
+ /// Absolute path to the file in the target directory that stores the
+ /// output of the build script.
+ pub build_script_output: PathBuf,
+ /// Files that trigger a rebuild if they change.
+ pub rerun_if_changed: Vec<PathBuf>,
+ /// Environment variables that trigger a rebuild if they change.
+ pub rerun_if_env_changed: Vec<String>,
+}
+
+/// Prepares a `Work` that executes the target as a custom build script.
+pub fn prepare(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Job> {
+ let _p = profile::start(format!(
+ "build script prepare: {}/{}",
+ unit.pkg,
+ unit.target.name()
+ ));
+
+ let metadata = cx.get_run_build_script_metadata(unit);
+ if cx
+ .build_script_outputs
+ .lock()
+ .unwrap()
+ .contains_key(metadata)
+ {
+ // The output is already set, thus the build script is overridden.
+ fingerprint::prepare_target(cx, unit, false)
+ } else {
+ build_work(cx, unit)
+ }
+}
+
+fn emit_build_output(
+ state: &JobState<'_, '_>,
+ output: &BuildOutput,
+ out_dir: &Path,
+ package_id: PackageId,
+) -> CargoResult<()> {
+ let library_paths = output
+ .library_paths
+ .iter()
+ .map(|l| l.display().to_string())
+ .collect::<Vec<_>>();
+
+ let msg = machine_message::BuildScript {
+ package_id,
+ linked_libs: &output.library_links,
+ linked_paths: &library_paths,
+ cfgs: &output.cfgs,
+ env: &output.env,
+ out_dir,
+ }
+ .to_json_string();
+ state.stdout(msg)?;
+ Ok(())
+}
+
+fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Job> {
+ assert!(unit.mode.is_run_custom_build());
+ let bcx = &cx.bcx;
+ let dependencies = cx.unit_deps(unit);
+ let build_script_unit = dependencies
+ .iter()
+ .find(|d| !d.unit.mode.is_run_custom_build() && d.unit.target.is_custom_build())
+ .map(|d| &d.unit)
+ .expect("running a script not depending on an actual script");
+ let script_dir = cx.files().build_script_dir(build_script_unit);
+ let script_out_dir = cx.files().build_script_out_dir(unit);
+ let script_run_dir = cx.files().build_script_run_dir(unit);
+ let build_plan = bcx.build_config.build_plan;
+ let invocation_name = unit.buildkey();
+
+ if let Some(deps) = unit.pkg.manifest().metabuild() {
+ prepare_metabuild(cx, build_script_unit, deps)?;
+ }
+
+ // Building the command to execute
+ let to_exec = script_dir.join(unit.target.name());
+
+ // Start preparing the process to execute, starting out with some
+ // environment variables. Note that the profile-related environment
+    // variables are not set with the build script's profile but rather the
+ // package's library profile.
+ // NOTE: if you add any profile flags, be sure to update
+ // `Profiles::get_profile_run_custom_build` so that those flags get
+ // carried over.
+ let to_exec = to_exec.into_os_string();
+ let mut cmd = cx.compilation.host_process(to_exec, &unit.pkg)?;
+ let debug = unit.profile.debuginfo.is_turned_on();
+ cmd.env("OUT_DIR", &script_out_dir)
+ .env("CARGO_MANIFEST_DIR", unit.pkg.root())
+ .env("NUM_JOBS", &bcx.jobs().to_string())
+ .env("TARGET", bcx.target_data.short_name(&unit.kind))
+ .env("DEBUG", debug.to_string())
+ .env("OPT_LEVEL", &unit.profile.opt_level.to_string())
+ .env(
+ "PROFILE",
+ match unit.profile.root {
+ ProfileRoot::Release => "release",
+ ProfileRoot::Debug => "debug",
+ },
+ )
+ .env("HOST", &bcx.host_triple())
+ .env("RUSTC", &bcx.rustc().path)
+ .env("RUSTDOC", &*bcx.config.rustdoc()?)
+ .inherit_jobserver(&cx.jobserver);
+
+ // Find all artifact dependencies and make their file and containing directory discoverable using environment variables.
+ for (var, value) in artifact::get_env(cx, dependencies)? {
+ cmd.env(&var, value);
+ }
+
+ if let Some(linker) = &bcx.target_data.target_config(unit.kind).linker {
+ cmd.env(
+ "RUSTC_LINKER",
+ linker.val.clone().resolve_program(bcx.config),
+ );
+ }
+
+ if let Some(links) = unit.pkg.manifest().links() {
+ cmd.env("CARGO_MANIFEST_LINKS", links);
+ }
+
+ // Be sure to pass along all enabled features for this package, this is the
+ // last piece of statically known information that we have.
+ for feat in &unit.features {
+ cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
+ }
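+
+    // Illustrative note: `envify` upper-cases the name and maps `-` to `_`, so a
+    // hypothetical feature `serde-derive` is seen by the build script as
+    // `CARGO_FEATURE_SERDE_DERIVE=1`.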
+
+ let mut cfg_map = HashMap::new();
+ for cfg in bcx.target_data.cfg(unit.kind) {
+ match *cfg {
+ Cfg::Name(ref n) => {
+ cfg_map.insert(n.clone(), Vec::new());
+ }
+ Cfg::KeyPair(ref k, ref v) => {
+ let values = cfg_map.entry(k.clone()).or_default();
+ values.push(v.clone());
+ }
+ }
+ }
+ for (k, v) in cfg_map {
+ if k == "debug_assertions" {
+ // This cfg is always true and misleading, so avoid setting it.
+ // That is because Cargo queries rustc without any profile settings.
+ continue;
+ }
+ let k = format!("CARGO_CFG_{}", super::envify(&k));
+ cmd.env(&k, v.join(","));
+ }
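+
+    // Illustrative note: this mirrors the documented `CARGO_CFG_*` variables, e.g.
+    // `target_os = "linux"` becomes `CARGO_CFG_TARGET_OS=linux`, and multi-valued
+    // cfgs are comma-joined, e.g. `CARGO_CFG_TARGET_FEATURE=fxsr,sse,sse2`.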
+
+ // Also inform the build script of the rustc compiler context.
+ if let Some(wrapper) = bcx.rustc().wrapper.as_ref() {
+ cmd.env("RUSTC_WRAPPER", wrapper);
+ } else {
+ cmd.env_remove("RUSTC_WRAPPER");
+ }
+ cmd.env_remove("RUSTC_WORKSPACE_WRAPPER");
+ if cx.bcx.ws.is_member(&unit.pkg) {
+ if let Some(wrapper) = bcx.rustc().workspace_wrapper.as_ref() {
+ cmd.env("RUSTC_WORKSPACE_WRAPPER", wrapper);
+ }
+ }
+ cmd.env(
+ "CARGO_ENCODED_RUSTFLAGS",
+ bcx.rustflags_args(unit).join("\x1f"),
+ );
+ cmd.env_remove("RUSTFLAGS");
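+
+    // Illustrative sketch: the flags above are joined with the ASCII unit separator
+    // (0x1f); a consumer such as a build script could recover them with something
+    // like the following:
+    //
+    //     let flags: Vec<String> = std::env::var("CARGO_ENCODED_RUSTFLAGS")
+    //         .unwrap_or_default()
+    //         .split('\x1f')
+    //         .filter(|s| !s.is_empty())
+    //         .map(str::to_string)
+    //         .collect();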
+
+ // Gather the set of native dependencies that this package has along with
+ // some other variables to close over.
+ //
+ // This information will be used at build-time later on to figure out which
+ // sorts of variables need to be discovered at that time.
+ let lib_deps = dependencies
+ .iter()
+ .filter_map(|dep| {
+ if dep.unit.mode.is_run_custom_build() {
+ let dep_metadata = cx.get_run_build_script_metadata(&dep.unit);
+ Some((
+ dep.unit.pkg.manifest().links().unwrap().to_string(),
+ dep.unit.pkg.package_id(),
+ dep_metadata,
+ ))
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>();
+ let library_name = unit.pkg.library().map(|t| t.crate_name());
+ let pkg_descr = unit.pkg.to_string();
+ let build_script_outputs = Arc::clone(&cx.build_script_outputs);
+ let id = unit.pkg.package_id();
+ let output_file = script_run_dir.join("output");
+ let err_file = script_run_dir.join("stderr");
+ let root_output_file = script_run_dir.join("root-output");
+ let host_target_root = cx.files().host_dest().to_path_buf();
+ let all = (
+ id,
+ library_name.clone(),
+ pkg_descr.clone(),
+ Arc::clone(&build_script_outputs),
+ output_file.clone(),
+ script_out_dir.clone(),
+ );
+ let build_scripts = cx.build_scripts.get(unit).cloned();
+ let json_messages = bcx.build_config.emit_json();
+ let extra_verbose = bcx.config.extra_verbose();
+ let (prev_output, prev_script_out_dir) = prev_build_output(cx, unit);
+ let metadata_hash = cx.get_run_build_script_metadata(unit);
+
+ paths::create_dir_all(&script_dir)?;
+ paths::create_dir_all(&script_out_dir)?;
+
+ let nightly_features_allowed = cx.bcx.config.nightly_features_allowed;
+ let extra_check_cfg = match cx.bcx.config.cli_unstable().check_cfg {
+ Some((_, _, _, output)) => output,
+ None => false,
+ };
+ let targets: Vec<Target> = unit.pkg.targets().to_vec();
+ // Need a separate copy for the fresh closure.
+ let targets_fresh = targets.clone();
+
+ let env_profile_name = unit.profile.name.to_uppercase();
+ let built_with_debuginfo = cx
+ .bcx
+ .unit_graph
+ .get(unit)
+ .and_then(|deps| deps.iter().find(|dep| dep.unit.target == unit.target))
+ .map(|dep| dep.unit.profile.debuginfo.is_turned_on())
+ .unwrap_or(false);
+
+ // Prepare the unit of "dirty work" which will actually run the custom build
+ // command.
+ //
+ // Note that this has to do some extra work just before running the command
+ // to determine extra environment variables and such.
+ let dirty = Work::new(move |state| {
+ // Make sure that OUT_DIR exists.
+ //
+ // If we have an old build directory, then just move it into place,
+ // otherwise create it!
+ paths::create_dir_all(&script_out_dir)
+ .with_context(|| "failed to create script output directory for build command")?;
+
+ // For all our native lib dependencies, pick up their metadata to pass
+ // along to this custom build command. We're also careful to augment our
+ // dynamic library search path in case the build script depended on any
+ // native dynamic libraries.
+ if !build_plan {
+ let build_script_outputs = build_script_outputs.lock().unwrap();
+ for (name, dep_id, dep_metadata) in lib_deps {
+ let script_output = build_script_outputs.get(dep_metadata).ok_or_else(|| {
+ internal(format!(
+ "failed to locate build state for env vars: {}/{}",
+ dep_id, dep_metadata
+ ))
+ })?;
+ let data = &script_output.metadata;
+ for &(ref key, ref value) in data.iter() {
+ cmd.env(
+ &format!("DEP_{}_{}", super::envify(&name), super::envify(key)),
+ value,
+ );
+ }
+ }
+ if let Some(build_scripts) = build_scripts {
+ super::add_plugin_deps(
+ &mut cmd,
+ &build_script_outputs,
+ &build_scripts,
+ &host_target_root,
+ )?;
+ }
+ }
+
+ if build_plan {
+ state.build_plan(invocation_name, cmd.clone(), Arc::new(Vec::new()));
+ return Ok(());
+ }
+
+ // And now finally, run the build command itself!
+ state.running(&cmd);
+ let timestamp = paths::set_invocation_time(&script_run_dir)?;
+ let prefix = format!("[{} {}] ", id.name(), id.version());
+ let mut warnings_in_case_of_panic = Vec::new();
+ let output = cmd
+ .exec_with_streaming(
+ &mut |stdout| {
+ if let Some(warning) = stdout.strip_prefix(CARGO_WARNING) {
+ warnings_in_case_of_panic.push(warning.to_owned());
+ }
+ if extra_verbose {
+ state.stdout(format!("{}{}", prefix, stdout))?;
+ }
+ Ok(())
+ },
+ &mut |stderr| {
+ if extra_verbose {
+ state.stderr(format!("{}{}", prefix, stderr))?;
+ }
+ Ok(())
+ },
+ true,
+ )
+ .with_context(|| {
+ let mut build_error_context =
+ format!("failed to run custom build command for `{}`", pkg_descr);
+
+ // If we're opting into backtraces, mention that build dependencies' backtraces can
+ // be improved by requesting debuginfo to be built, if we're not building with
+ // debuginfo already.
+ //
+ // ALLOWED: Other tools like `rustc` might read it directly
+ // through `std::env`. We should make their behavior consistent.
+ #[allow(clippy::disallowed_methods)]
+ if let Ok(show_backtraces) = std::env::var("RUST_BACKTRACE") {
+ if !built_with_debuginfo && show_backtraces != "0" {
+ build_error_context.push_str(&format!(
+ "\n\
+ note: To improve backtraces for build dependencies, set the \
+ CARGO_PROFILE_{env_profile_name}_BUILD_OVERRIDE_DEBUG=true environment \
+ variable to enable debug information generation.",
+ ));
+ }
+ }
+
+ build_error_context
+ });
+
+ if let Err(error) = output {
+ insert_warnings_in_build_outputs(
+ build_script_outputs,
+ id,
+ metadata_hash,
+ warnings_in_case_of_panic,
+ );
+ return Err(error);
+ }
+
+ let output = output.unwrap();
+
+ // After the build command has finished running, we need to be sure to
+ // remember all of its output so we can later discover precisely what it
+ // was, even if we don't run the build command again (due to freshness).
+ //
+ // This is also the location where we provide feedback into the build
+ // state informing what variables were discovered via our script as
+ // well.
+ paths::write(&output_file, &output.stdout)?;
+ // This mtime shift allows Cargo to detect if a source file was
+ // modified in the middle of the build.
+ paths::set_file_time_no_err(output_file, timestamp);
+ paths::write(&err_file, &output.stderr)?;
+ paths::write(&root_output_file, paths::path2bytes(&script_out_dir)?)?;
+ let parsed_output = BuildOutput::parse(
+ &output.stdout,
+ library_name,
+ &pkg_descr,
+ &script_out_dir,
+ &script_out_dir,
+ extra_check_cfg,
+ nightly_features_allowed,
+ &targets,
+ )?;
+
+ if json_messages {
+ emit_build_output(state, &parsed_output, script_out_dir.as_path(), id)?;
+ }
+ build_script_outputs
+ .lock()
+ .unwrap()
+ .insert(id, metadata_hash, parsed_output);
+ Ok(())
+ });
+
+ // Now that we've prepared our work-to-do, we need to prepare the fresh work
+ // itself to run when we actually end up just discarding what we calculated
+ // above.
+ let fresh = Work::new(move |state| {
+ let (id, library_name, pkg_descr, build_script_outputs, output_file, script_out_dir) = all;
+ let output = match prev_output {
+ Some(output) => output,
+ None => BuildOutput::parse_file(
+ &output_file,
+ library_name,
+ &pkg_descr,
+ &prev_script_out_dir,
+ &script_out_dir,
+ extra_check_cfg,
+ nightly_features_allowed,
+ &targets_fresh,
+ )?,
+ };
+
+ if json_messages {
+ emit_build_output(state, &output, script_out_dir.as_path(), id)?;
+ }
+
+ build_script_outputs
+ .lock()
+ .unwrap()
+ .insert(id, metadata_hash, output);
+ Ok(())
+ });
+
+ let mut job = if cx.bcx.build_config.build_plan {
+ Job::new_dirty(Work::noop(), None)
+ } else {
+ fingerprint::prepare_target(cx, unit, false)?
+ };
+ if job.freshness().is_dirty() {
+ job.before(dirty);
+ } else {
+ job.before(fresh);
+ }
+ Ok(job)
+}
+
+fn insert_warnings_in_build_outputs(
+ build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
+ id: PackageId,
+ metadata_hash: Metadata,
+ warnings: Vec<String>,
+) {
+ let build_output_with_only_warnings = BuildOutput {
+ warnings,
+ ..BuildOutput::default()
+ };
+ build_script_outputs
+ .lock()
+ .unwrap()
+ .insert(id, metadata_hash, build_output_with_only_warnings);
+}
+
+impl BuildOutput {
+ pub fn parse_file(
+ path: &Path,
+ library_name: Option<String>,
+ pkg_descr: &str,
+ script_out_dir_when_generated: &Path,
+ script_out_dir: &Path,
+ extra_check_cfg: bool,
+ nightly_features_allowed: bool,
+ targets: &[Target],
+ ) -> CargoResult<BuildOutput> {
+ let contents = paths::read_bytes(path)?;
+ BuildOutput::parse(
+ &contents,
+ library_name,
+ pkg_descr,
+ script_out_dir_when_generated,
+ script_out_dir,
+ extra_check_cfg,
+ nightly_features_allowed,
+ targets,
+ )
+ }
+
+ // Parses the output of a script.
+ // The `pkg_descr` is used for error messages.
+ // The `library_name` is used for determining if RUSTC_BOOTSTRAP should be allowed.
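+    // For illustration, typical input looks like the following (values are
+    // hypothetical):
+    //
+    //     cargo:rustc-link-lib=z
+    //     cargo:rustc-link-search=native=/opt/zlib/lib
+    //     cargo:rerun-if-changed=build.rs
+    //     cargo:warning=using the bundled copy of zlib
+    //
+    // Unrecognized keys fall through to `metadata` and are later exposed to
+    // dependents' build scripts as `DEP_<LINKS>_<KEY>` environment variables.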
+ pub fn parse(
+ input: &[u8],
+ // Takes String instead of InternedString so passing `unit.pkg.name()` will give a compile error.
+ library_name: Option<String>,
+ pkg_descr: &str,
+ script_out_dir_when_generated: &Path,
+ script_out_dir: &Path,
+ extra_check_cfg: bool,
+ nightly_features_allowed: bool,
+ targets: &[Target],
+ ) -> CargoResult<BuildOutput> {
+ let mut library_paths = Vec::new();
+ let mut library_links = Vec::new();
+ let mut linker_args = Vec::new();
+ let mut cfgs = Vec::new();
+ let mut check_cfgs = Vec::new();
+ let mut env = Vec::new();
+ let mut metadata = Vec::new();
+ let mut rerun_if_changed = Vec::new();
+ let mut rerun_if_env_changed = Vec::new();
+ let mut warnings = Vec::new();
+ let whence = format!("build script of `{}`", pkg_descr);
+
+ for line in input.split(|b| *b == b'\n') {
+ let line = match str::from_utf8(line) {
+ Ok(line) => line.trim(),
+ Err(..) => continue,
+ };
+ let mut iter = line.splitn(2, ':');
+ if iter.next() != Some("cargo") {
+ // skip this line since it doesn't start with "cargo:"
+ continue;
+ }
+ let data = match iter.next() {
+ Some(val) => val,
+ None => continue,
+ };
+
+ // getting the `key=value` part of the line
+ let mut iter = data.splitn(2, '=');
+ let key = iter.next();
+ let value = iter.next();
+ let (key, value) = match (key, value) {
+ (Some(a), Some(b)) => (a, b.trim_end()),
+ // Line started with `cargo:` but didn't match `key=value`.
+ _ => bail!("invalid output in {}: `{}`\n\
+ Expected a line with `cargo:key=value` with an `=` character, \
+ but none was found.\n\
+ See https://doc.rust-lang.org/cargo/reference/build-scripts.html#outputs-of-the-build-script \
+ for more information about build script outputs.", whence, line),
+ };
+
+ // This will rewrite paths if the target directory has been moved.
+ let value = value.replace(
+ script_out_dir_when_generated.to_str().unwrap(),
+ script_out_dir.to_str().unwrap(),
+ );
+
+ macro_rules! check_and_add_target {
+ ($target_kind: expr, $is_target_kind: expr, $link_type: expr) => {
+ if !targets.iter().any(|target| $is_target_kind(target)) {
+ bail!(
+ "invalid instruction `cargo:{}` from {}\n\
+ The package {} does not have a {} target.",
+ key,
+ whence,
+ pkg_descr,
+ $target_kind
+ );
+ }
+ linker_args.push(($link_type, value));
+ };
+ }
+
+ // Keep in sync with TargetConfig::parse_links_overrides.
+ match key {
+ "rustc-flags" => {
+ let (paths, links) = BuildOutput::parse_rustc_flags(&value, &whence)?;
+ library_links.extend(links.into_iter());
+ library_paths.extend(paths.into_iter());
+ }
+ "rustc-link-lib" => library_links.push(value.to_string()),
+ "rustc-link-search" => library_paths.push(PathBuf::from(value)),
+ "rustc-link-arg-cdylib" | "rustc-cdylib-link-arg" => {
+ if !targets.iter().any(|target| target.is_cdylib()) {
+ warnings.push(format!(
+ "cargo:{} was specified in the build script of {}, \
+ but that package does not contain a cdylib target\n\
+ \n\
+ Allowing this was an unintended change in the 1.50 \
+ release, and may become an error in the future. \
+ For more information, see \
+ <https://github.com/rust-lang/cargo/issues/9562>.",
+ key, pkg_descr
+ ));
+ }
+ linker_args.push((LinkType::Cdylib, value))
+ }
+ "rustc-link-arg-bins" => {
+ check_and_add_target!("bin", Target::is_bin, LinkType::Bin);
+ }
+ "rustc-link-arg-bin" => {
+ let mut parts = value.splitn(2, '=');
+ let bin_name = parts.next().unwrap().to_string();
+ let arg = parts.next().ok_or_else(|| {
+ anyhow::format_err!(
+ "invalid instruction `cargo:{}={}` from {}\n\
+ The instruction should have the form cargo:{}=BIN=ARG",
+ key,
+ value,
+ whence,
+ key
+ )
+ })?;
+ if !targets
+ .iter()
+ .any(|target| target.is_bin() && target.name() == bin_name)
+ {
+ bail!(
+ "invalid instruction `cargo:{}` from {}\n\
+ The package {} does not have a bin target with the name `{}`.",
+ key,
+ whence,
+ pkg_descr,
+ bin_name
+ );
+ }
+ linker_args.push((LinkType::SingleBin(bin_name), arg.to_string()));
+ }
+ "rustc-link-arg-tests" => {
+ check_and_add_target!("test", Target::is_test, LinkType::Test);
+ }
+ "rustc-link-arg-benches" => {
+ check_and_add_target!("benchmark", Target::is_bench, LinkType::Bench);
+ }
+ "rustc-link-arg-examples" => {
+ check_and_add_target!("example", Target::is_example, LinkType::Example);
+ }
+ "rustc-link-arg" => {
+ linker_args.push((LinkType::All, value));
+ }
+ "rustc-cfg" => cfgs.push(value.to_string()),
+ "rustc-check-cfg" => {
+ if extra_check_cfg {
+ check_cfgs.push(value.to_string());
+ } else {
+ warnings.push(format!("cargo:{} requires -Zcheck-cfg=output flag", key));
+ }
+ }
+ "rustc-env" => {
+ let (key, val) = BuildOutput::parse_rustc_env(&value, &whence)?;
+ // Build scripts aren't allowed to set RUSTC_BOOTSTRAP.
+ // See https://github.com/rust-lang/cargo/issues/7088.
+ if key == "RUSTC_BOOTSTRAP" {
+ // If RUSTC_BOOTSTRAP is already set, the user of Cargo knows about
+ // bootstrap and still wants to override the channel. Give them a way to do
+ // so, but still emit a warning that the current crate shouldn't be trying
+ // to set RUSTC_BOOTSTRAP.
+ // If this is a nightly build, setting RUSTC_BOOTSTRAP wouldn't affect the
+ // behavior, so still only give a warning.
+ // NOTE: cargo only allows nightly features on RUSTC_BOOTSTRAP=1, but we
+ // want setting any value of RUSTC_BOOTSTRAP to downgrade this to a warning
+ // (so that `RUSTC_BOOTSTRAP=library_name` will work)
+ let rustc_bootstrap_allows = |name: Option<&str>| {
+ let name = match name {
+ // as of 2021, no binaries on crates.io use RUSTC_BOOTSTRAP, so
+ // fine-grained opt-outs aren't needed. end-users can always use
+ // RUSTC_BOOTSTRAP=1 from the top-level if it's really a problem.
+ None => return false,
+ Some(n) => n,
+ };
+                        // ALLOWED: the process of rustc bootstrapping reads this through
+ // `std::env`. We should make the behavior consistent. Also, we
+ // don't advertise this for bypassing nightly.
+ #[allow(clippy::disallowed_methods)]
+ std::env::var("RUSTC_BOOTSTRAP")
+ .map_or(false, |var| var.split(',').any(|s| s == name))
+ };
+ if nightly_features_allowed
+ || rustc_bootstrap_allows(library_name.as_deref())
+ {
+ warnings.push(format!("Cannot set `RUSTC_BOOTSTRAP={}` from {}.\n\
+ note: Crates cannot set `RUSTC_BOOTSTRAP` themselves, as doing so would subvert the stability guarantees of Rust for your project.",
+ val, whence
+ ));
+ } else {
+ // Setting RUSTC_BOOTSTRAP would change the behavior of the crate.
+ // Abort with an error.
+ bail!("Cannot set `RUSTC_BOOTSTRAP={}` from {}.\n\
+ note: Crates cannot set `RUSTC_BOOTSTRAP` themselves, as doing so would subvert the stability guarantees of Rust for your project.\n\
+ help: If you're sure you want to do this in your project, set the environment variable `RUSTC_BOOTSTRAP={}` before running cargo instead.",
+ val,
+ whence,
+ library_name.as_deref().unwrap_or("1"),
+ );
+ }
+ } else {
+ env.push((key, val));
+ }
+ }
+ "warning" => warnings.push(value.to_string()),
+ "rerun-if-changed" => rerun_if_changed.push(PathBuf::from(value)),
+ "rerun-if-env-changed" => rerun_if_env_changed.push(value.to_string()),
+ _ => metadata.push((key.to_string(), value.to_string())),
+ }
+ }
+
+ Ok(BuildOutput {
+ library_paths,
+ library_links,
+ linker_args,
+ cfgs,
+ check_cfgs,
+ env,
+ metadata,
+ rerun_if_changed,
+ rerun_if_env_changed,
+ warnings,
+ })
+ }
+
+ pub fn parse_rustc_flags(
+ value: &str,
+ whence: &str,
+ ) -> CargoResult<(Vec<PathBuf>, Vec<String>)> {
+ let value = value.trim();
+ let mut flags_iter = value
+ .split(|c: char| c.is_whitespace())
+ .filter(|w| w.chars().any(|c| !c.is_whitespace()));
+ let (mut library_paths, mut library_links) = (Vec::new(), Vec::new());
+
+ while let Some(flag) = flags_iter.next() {
+ if flag.starts_with("-l") || flag.starts_with("-L") {
+ // Check if this flag has no space before the value as is
+ // common with tools like pkg-config
+ // e.g. -L/some/dir/local/lib or -licui18n
+ let (flag, mut value) = flag.split_at(2);
+ if value.is_empty() {
+ value = match flags_iter.next() {
+ Some(v) => v,
+ None => bail! {
+ "Flag in rustc-flags has no value in {}: {}",
+ whence,
+ value
+ },
+ }
+ }
+
+ match flag {
+ "-l" => library_links.push(value.to_string()),
+ "-L" => library_paths.push(PathBuf::from(value)),
+
+ // This was already checked above
+ _ => unreachable!(),
+ };
+ } else {
+ bail!(
+ "Only `-l` and `-L` flags are allowed in {}: `{}`",
+ whence,
+ value
+ )
+ }
+ }
+ Ok((library_paths, library_links))
+ }
+
+ pub fn parse_rustc_env(value: &str, whence: &str) -> CargoResult<(String, String)> {
+ let mut iter = value.splitn(2, '=');
+ let name = iter.next();
+ let val = iter.next();
+ match (name, val) {
+ (Some(n), Some(v)) => Ok((n.to_owned(), v.to_owned())),
+ _ => bail!("Variable rustc-env has no value in {}: {}", whence, value),
+ }
+ }
+}
+
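+// Writes the generated metabuild entry point. For a package whose manifest
+// declares, say, `metabuild = ["foo", "bar"]`, the generated source would look
+// roughly like:
+//
+//     use foo;
+//     use bar;
+//     fn main() {
+//         foo::metabuild();
+//         bar::metabuild();
+//     }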
+fn prepare_metabuild(cx: &Context<'_, '_>, unit: &Unit, deps: &[String]) -> CargoResult<()> {
+ let mut output = Vec::new();
+ let available_deps = cx.unit_deps(unit);
+ // Filter out optional dependencies, and look up the actual lib name.
+ let meta_deps: Vec<_> = deps
+ .iter()
+ .filter_map(|name| {
+ available_deps
+ .iter()
+ .find(|d| d.unit.pkg.name().as_str() == name.as_str())
+ .map(|d| d.unit.target.crate_name())
+ })
+ .collect();
+ for dep in &meta_deps {
+ output.push(format!("use {};\n", dep));
+ }
+ output.push("fn main() {\n".to_string());
+ for dep in &meta_deps {
+ output.push(format!(" {}::metabuild();\n", dep));
+ }
+ output.push("}\n".to_string());
+ let output = output.join("");
+ let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir());
+ paths::create_dir_all(path.parent().unwrap())?;
+ paths::write_if_changed(path, &output)?;
+ Ok(())
+}
+
+impl BuildDeps {
+ pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps {
+ BuildDeps {
+ build_script_output: output_file.to_path_buf(),
+ rerun_if_changed: output
+ .map(|p| &p.rerun_if_changed)
+ .cloned()
+ .unwrap_or_default(),
+ rerun_if_env_changed: output
+ .map(|p| &p.rerun_if_env_changed)
+ .cloned()
+ .unwrap_or_default(),
+ }
+ }
+}
+
+/// Computes several maps in `Context`:
+/// - `build_scripts`: A map that tracks which build scripts each package
+/// depends on.
+/// - `build_explicit_deps`: Dependency statements emitted by build scripts
+/// from a previous run.
+/// - `build_script_outputs`: Pre-populates this with any overridden build
+/// scripts.
+///
+/// The important one here is `build_scripts`, which for each `(package,
+/// metadata)` stores a `BuildScripts` object which contains a list of
+/// dependencies with build scripts that the unit should consider when
+/// linking. For example this lists all dependencies' `-L` flags which need to
+/// be propagated transitively.
+///
+/// The given set of units to this function is the initial set of
+/// targets/profiles which are being built.
+pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> {
+ let mut ret = HashMap::new();
+ for unit in &cx.bcx.roots {
+ build(&mut ret, cx, unit)?;
+ }
+ cx.build_scripts
+ .extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v))));
+ return Ok(());
+
+ // Recursive function to build up the map we're constructing. This function
+ // memoizes all of its return values as it goes along.
+ fn build<'a>(
+ out: &'a mut HashMap<Unit, BuildScripts>,
+ cx: &mut Context<'_, '_>,
+ unit: &Unit,
+ ) -> CargoResult<&'a BuildScripts> {
+ // Do a quick pre-flight check to see if we've already calculated the
+ // set of dependencies.
+ if out.contains_key(unit) {
+ return Ok(&out[unit]);
+ }
+
+ // If there is a build script override, pre-fill the build output.
+ if unit.mode.is_run_custom_build() {
+ if let Some(links) = unit.pkg.manifest().links() {
+ if let Some(output) = cx.bcx.target_data.script_override(links, unit.kind) {
+ let metadata = cx.get_run_build_script_metadata(unit);
+ cx.build_script_outputs.lock().unwrap().insert(
+ unit.pkg.package_id(),
+ metadata,
+ output.clone(),
+ );
+ }
+ }
+ }
+
+ let mut ret = BuildScripts::default();
+
+ // If a package has a build script, add itself as something to inspect for linking.
+ if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
+ let script_meta = cx
+ .find_build_script_metadata(unit)
+ .expect("has_custom_build should have RunCustomBuild");
+ add_to_link(&mut ret, unit.pkg.package_id(), script_meta);
+ }
+
+ // Load any dependency declarations from a previous run.
+ if unit.mode.is_run_custom_build() {
+ parse_previous_explicit_deps(cx, unit);
+ }
+
+ // We want to invoke the compiler deterministically to be cache-friendly
+ // to rustc invocation caching schemes, so be sure to generate the same
+ // set of build script dependency orderings via sorting the targets that
+ // come out of the `Context`.
+ let mut dependencies: Vec<Unit> =
+ cx.unit_deps(unit).iter().map(|d| d.unit.clone()).collect();
+ dependencies.sort_by_key(|u| u.pkg.package_id());
+
+ for dep_unit in dependencies.iter() {
+ let dep_scripts = build(out, cx, dep_unit)?;
+
+ if dep_unit.target.for_host() {
+ ret.plugins.extend(dep_scripts.to_link.iter().cloned());
+ } else if dep_unit.target.is_linkable() {
+ for &(pkg, metadata) in dep_scripts.to_link.iter() {
+ add_to_link(&mut ret, pkg, metadata);
+ }
+ }
+ }
+
+ match out.entry(unit.clone()) {
+ Entry::Vacant(entry) => Ok(entry.insert(ret)),
+ Entry::Occupied(_) => panic!("cyclic dependencies in `build_map`"),
+ }
+ }
+
+ // When adding an entry to 'to_link' we only actually push it on if the
+    // script hasn't seen it yet (i.e., we don't push on duplicates).
+ fn add_to_link(scripts: &mut BuildScripts, pkg: PackageId, metadata: Metadata) {
+ if scripts.seen_to_link.insert((pkg, metadata)) {
+ scripts.to_link.push((pkg, metadata));
+ }
+ }
+
+ fn parse_previous_explicit_deps(cx: &mut Context<'_, '_>, unit: &Unit) {
+ let script_run_dir = cx.files().build_script_run_dir(unit);
+ let output_file = script_run_dir.join("output");
+ let (prev_output, _) = prev_build_output(cx, unit);
+ let deps = BuildDeps::new(&output_file, prev_output.as_ref());
+ cx.build_explicit_deps.insert(unit.clone(), deps);
+ }
+}
+
+/// Returns the previous parsed `BuildOutput`, if any, from a previous
+/// execution.
+///
+/// Also returns the directory containing the output, typically used later in
+/// processing.
+fn prev_build_output(cx: &mut Context<'_, '_>, unit: &Unit) -> (Option<BuildOutput>, PathBuf) {
+ let script_out_dir = cx.files().build_script_out_dir(unit);
+ let script_run_dir = cx.files().build_script_run_dir(unit);
+ let root_output_file = script_run_dir.join("root-output");
+ let output_file = script_run_dir.join("output");
+
+ let prev_script_out_dir = paths::read_bytes(&root_output_file)
+ .and_then(|bytes| paths::bytes2path(&bytes))
+ .unwrap_or_else(|_| script_out_dir.clone());
+
+ (
+ BuildOutput::parse_file(
+ &output_file,
+ unit.pkg.library().map(|t| t.crate_name()),
+ &unit.pkg.to_string(),
+ &prev_script_out_dir,
+ &script_out_dir,
+ match cx.bcx.config.cli_unstable().check_cfg {
+ Some((_, _, _, output)) => output,
+ None => false,
+ },
+ cx.bcx.config.nightly_features_allowed,
+ unit.pkg.targets(),
+ )
+ .ok(),
+ prev_script_out_dir,
+ )
+}
+
+impl BuildScriptOutputs {
+ /// Inserts a new entry into the map.
+ fn insert(&mut self, pkg_id: PackageId, metadata: Metadata, parsed_output: BuildOutput) {
+ match self.outputs.entry(metadata) {
+ Entry::Vacant(entry) => {
+ entry.insert(parsed_output);
+ }
+ Entry::Occupied(entry) => panic!(
+ "build script output collision for {}/{}\n\
+ old={:?}\nnew={:?}",
+ pkg_id,
+ metadata,
+ entry.get(),
+ parsed_output
+ ),
+ }
+ }
+
+ /// Returns `true` if the given key already exists.
+ fn contains_key(&self, metadata: Metadata) -> bool {
+ self.outputs.contains_key(&metadata)
+ }
+
+ /// Gets the build output for the given key.
+ pub fn get(&self, meta: Metadata) -> Option<&BuildOutput> {
+ self.outputs.get(&meta)
+ }
+
+ /// Returns an iterator over all entries.
+ pub fn iter(&self) -> impl Iterator<Item = (&Metadata, &BuildOutput)> {
+ self.outputs.iter()
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/fingerprint/dirty_reason.rs b/src/tools/cargo/src/cargo/core/compiler/fingerprint/dirty_reason.rs
new file mode 100644
index 000000000..363aab7e6
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/fingerprint/dirty_reason.rs
@@ -0,0 +1,255 @@
+use std::fmt;
+use std::fmt::Debug;
+
+use super::*;
+use crate::core::Shell;
+
+/// Tells a better story of why a build is considered "dirty", leading to a
+/// recompile. Usually constructed via [`Fingerprint::compare`].
+///
+/// [`Fingerprint::compare`]: super::Fingerprint::compare
+#[derive(Clone, Debug)]
+pub enum DirtyReason {
+ RustcChanged,
+ FeaturesChanged {
+ old: String,
+ new: String,
+ },
+ TargetConfigurationChanged,
+ PathToSourceChanged,
+ ProfileConfigurationChanged,
+ RustflagsChanged {
+ old: Vec<String>,
+ new: Vec<String>,
+ },
+ MetadataChanged,
+ ConfigSettingsChanged,
+ CompileKindChanged,
+ LocalLengthsChanged,
+ PrecalculatedComponentsChanged {
+ old: String,
+ new: String,
+ },
+ DepInfoOutputChanged {
+ old: PathBuf,
+ new: PathBuf,
+ },
+ RerunIfChangedOutputFileChanged {
+ old: PathBuf,
+ new: PathBuf,
+ },
+ RerunIfChangedOutputPathsChanged {
+ old: Vec<PathBuf>,
+ new: Vec<PathBuf>,
+ },
+ EnvVarsChanged {
+ old: String,
+ new: String,
+ },
+ EnvVarChanged {
+ name: String,
+ old_value: Option<String>,
+ new_value: Option<String>,
+ },
+ LocalFingerprintTypeChanged {
+ old: &'static str,
+ new: &'static str,
+ },
+ NumberOfDependenciesChanged {
+ old: usize,
+ new: usize,
+ },
+ UnitDependencyNameChanged {
+ old: InternedString,
+ new: InternedString,
+ },
+ UnitDependencyInfoChanged {
+ old_name: InternedString,
+ old_fingerprint: u64,
+
+ new_name: InternedString,
+ new_fingerprint: u64,
+ },
+ FsStatusOutdated(FsStatus),
+ NothingObvious,
+ Forced,
+}
+
+trait ShellExt {
+ fn dirty_because(&mut self, unit: &Unit, s: impl fmt::Display) -> CargoResult<()>;
+}
+
+impl ShellExt for Shell {
+ fn dirty_because(&mut self, unit: &Unit, s: impl fmt::Display) -> CargoResult<()> {
+ self.status("Dirty", format_args!("{}: {s}", &unit.pkg))
+ }
+}
+
+struct FileTimeDiff {
+ old_time: FileTime,
+ new_time: FileTime,
+}
+
+impl fmt::Display for FileTimeDiff {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let s_diff = self.new_time.seconds() - self.old_time.seconds();
+ if s_diff >= 1 {
+ fmt::Display::fmt(
+ &humantime::Duration::from(std::time::Duration::from_secs(s_diff as u64)),
+ f,
+ )
+ } else {
+            // format nanoseconds as-is; humantime would display ms, us and ns
+ let ns_diff = self.new_time.nanoseconds() - self.old_time.nanoseconds();
+ write!(f, "{ns_diff}ns")
+ }
+ }
+}
+
+#[derive(Copy, Clone)]
+struct After {
+ old_time: FileTime,
+ new_time: FileTime,
+ what: &'static str,
+}
+
+impl fmt::Display for After {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let Self {
+ old_time,
+ new_time,
+ what,
+ } = *self;
+ let diff = FileTimeDiff { old_time, new_time };
+
+ write!(f, "{new_time}, {diff} after {what} at {old_time}")
+ }
+}
+
+impl DirtyReason {
+ fn after(old_time: FileTime, new_time: FileTime, what: &'static str) -> After {
+ After {
+ old_time,
+ new_time,
+ what,
+ }
+ }
+
+ pub fn present_to(&self, s: &mut Shell, unit: &Unit, root: &Path) -> CargoResult<()> {
+ match self {
+ DirtyReason::RustcChanged => s.dirty_because(unit, "the toolchain changed"),
+ DirtyReason::FeaturesChanged { .. } => {
+ s.dirty_because(unit, "the list of features changed")
+ }
+ DirtyReason::TargetConfigurationChanged => {
+ s.dirty_because(unit, "the target configuration changed")
+ }
+ DirtyReason::PathToSourceChanged => {
+ s.dirty_because(unit, "the path to the source changed")
+ }
+ DirtyReason::ProfileConfigurationChanged => {
+ s.dirty_because(unit, "the profile configuration changed")
+ }
+ DirtyReason::RustflagsChanged { .. } => s.dirty_because(unit, "the rustflags changed"),
+ DirtyReason::MetadataChanged => s.dirty_because(unit, "the metadata changed"),
+ DirtyReason::ConfigSettingsChanged => {
+ s.dirty_because(unit, "the config settings changed")
+ }
+ DirtyReason::CompileKindChanged => {
+ s.dirty_because(unit, "the rustc compile kind changed")
+ }
+ DirtyReason::LocalLengthsChanged => {
+ s.dirty_because(unit, "the local lengths changed")?;
+ s.note(
+ "This could happen because of added/removed `cargo:rerun-if` instructions in the build script",
+ )?;
+
+ Ok(())
+ }
+ DirtyReason::PrecalculatedComponentsChanged { .. } => {
+ s.dirty_because(unit, "the precalculated components changed")
+ }
+ DirtyReason::DepInfoOutputChanged { .. } => {
+ s.dirty_because(unit, "the dependency info output changed")
+ }
+ DirtyReason::RerunIfChangedOutputFileChanged { .. } => {
+ s.dirty_because(unit, "rerun-if-changed output file path changed")
+ }
+ DirtyReason::RerunIfChangedOutputPathsChanged { .. } => {
+ s.dirty_because(unit, "the rerun-if-changed instructions changed")
+ }
+ DirtyReason::EnvVarsChanged { .. } => {
+ s.dirty_because(unit, "the environment variables changed")
+ }
+ DirtyReason::EnvVarChanged { name, .. } => {
+ s.dirty_because(unit, format_args!("the env variable {name} changed"))
+ }
+ DirtyReason::LocalFingerprintTypeChanged { .. } => {
+ s.dirty_because(unit, "the local fingerprint type changed")
+ }
+ DirtyReason::NumberOfDependenciesChanged { old, new } => s.dirty_because(
+ unit,
+ format_args!("number of dependencies changed ({old} => {new})",),
+ ),
+ DirtyReason::UnitDependencyNameChanged { old, new } => s.dirty_because(
+ unit,
+ format_args!("name of dependency changed ({old} => {new})"),
+ ),
+ DirtyReason::UnitDependencyInfoChanged { .. } => {
+ s.dirty_because(unit, "dependency info changed")
+ }
+ DirtyReason::FsStatusOutdated(status) => match status {
+ FsStatus::Stale => s.dirty_because(unit, "stale, unknown reason"),
+ FsStatus::StaleItem(item) => match item {
+ StaleItem::MissingFile(missing_file) => {
+ let file = missing_file.strip_prefix(root).unwrap_or(&missing_file);
+ s.dirty_because(
+ unit,
+ format_args!("the file `{}` is missing", file.display()),
+ )
+ }
+ StaleItem::ChangedFile {
+ stale,
+ stale_mtime,
+ reference_mtime,
+ ..
+ } => {
+ let file = stale.strip_prefix(root).unwrap_or(&stale);
+ let after = Self::after(*reference_mtime, *stale_mtime, "last build");
+ s.dirty_because(
+ unit,
+ format_args!("the file `{}` has changed ({after})", file.display()),
+ )
+ }
+ StaleItem::ChangedEnv { var, .. } => s.dirty_because(
+ unit,
+ format_args!("the environment variable {var} changed"),
+ ),
+ },
+ FsStatus::StaleDependency {
+ name,
+ dep_mtime,
+ max_mtime,
+ ..
+ } => {
+ let after = Self::after(*max_mtime, *dep_mtime, "last build");
+ s.dirty_because(
+ unit,
+ format_args!("the dependency {name} was rebuilt ({after})"),
+ )
+ }
+ FsStatus::StaleDepFingerprint { name } => {
+ s.dirty_because(unit, format_args!("the dependency {name} was rebuilt"))
+ }
+ FsStatus::UpToDate { .. } => {
+ unreachable!()
+ }
+ },
+ DirtyReason::NothingObvious => {
+ // See comment in fingerprint compare method.
+ s.dirty_because(unit, "the fingerprint comparison turned up nothing obvious")
+ }
+ DirtyReason::Forced => s.dirty_because(unit, "forced"),
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs b/src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs
new file mode 100644
index 000000000..7401afebc
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs
@@ -0,0 +1,2191 @@
+//! # Fingerprints
+//!
+//! This module implements change-tracking so that Cargo can know whether or
+//! not something needs to be recompiled. A Cargo [`Unit`] can be either "dirty"
+//! (needs to be recompiled) or "fresh" (it does not need to be recompiled).
+//! There are several mechanisms that influence a Unit's freshness:
+//!
+//! - The [`Fingerprint`] is a hash, saved to the filesystem in the
+//! `.fingerprint` directory, that tracks information about the Unit. If the
+//! fingerprint is missing (such as the first time the unit is being
+//! compiled), then the unit is dirty. If any of the fingerprint fields
+//! change (like the name of the source file), then the Unit is considered
+//! dirty.
+//!
+//! The `Fingerprint` also tracks the fingerprints of all its dependencies,
+//! so a change in a dependency will propagate the "dirty" status up.
+//!
+//! - Filesystem mtime tracking is also used to check if a unit is dirty.
+//! See the section below on "Mtime comparison" for more details. There
+//! are essentially two parts to mtime tracking:
+//!
+//! 1. The mtime of a Unit's output files is compared to the mtime of all
+//! its dependencies' output file mtimes (see
+//! [`check_filesystem`]). If any output is missing, or is
+//! older than a dependency's output, then the unit is dirty.
+//! 2. The mtime of a Unit's source files is compared to the mtime of its
+//! dep-info file in the fingerprint directory (see [`find_stale_file`]).
+//! The dep-info file is used as an anchor to know when the last build of
+//! the unit was done. See the "dep-info files" section below for more
+//! details. If any input files are missing, or are newer than the
+//! dep-info, then the unit is dirty.
+//!
+//! Note: Fingerprinting is not a perfect solution. Filesystem mtime tracking
+//! is notoriously imprecise and problematic. Only a small part of the
+//! environment is captured. This is a balance of performance, simplicity, and
+//! completeness. Sandboxing, hashing file contents, tracking every file
+//! access, environment variable, and network operation would ensure more
+//! reliable and reproducible builds at the cost of being complex, slow, and
+//! platform-dependent.
+//!
+//! ## Fingerprints and Metadata
+//!
+//! The [`Metadata`] hash is a hash added to the output filenames to isolate
+//! each unit. See its documentation for more details.
+//! NOTE: Not all output files are isolated via filename hashes (like dylibs).
+//! The fingerprint directory uses a hash, but sometimes units share the same
+//! fingerprint directory (when they don't have Metadata) so care should be
+//! taken to handle this!
+//!
+//! Fingerprints and Metadata are similar, and track some of the same things.
+//! The Metadata contains information that is required to keep Units separate.
+//! The Fingerprint includes additional information that should cause a
+//! recompile, but it is desired to reuse the same filenames. A comparison
+//! of what is tracked:
+//!
+//! Value | Fingerprint | Metadata
+//! -------------------------------------------|-------------|----------
+//! rustc | ✓ | ✓
+//! [`Profile`] | ✓ | ✓
+//! `cargo rustc` extra args | ✓ | ✓
+//! [`CompileMode`] | ✓ | ✓
+//! Target Name | ✓ | ✓
+//! TargetKind (bin/lib/etc.) | ✓ | ✓
+//! Enabled Features | ✓ | ✓
+//! Immediate dependency’s hashes | ✓[^1] | ✓
+//! [`CompileKind`] (host/target) | ✓ | ✓
+//! __CARGO_DEFAULT_LIB_METADATA[^4] | | ✓
+//! package_id | | ✓
+//! authors, description, homepage, repo | ✓ |
+//! Target src path relative to ws | ✓ |
+//! Target flags (test/bench/for_host/edition) | ✓ |
+//! -C incremental=… flag | ✓ |
+//! mtime of sources | ✓[^3] |
+//! RUSTFLAGS/RUSTDOCFLAGS | ✓ |
+//! [`Lto`] flags | ✓ | ✓
+//! config settings[^5] | ✓ |
+//! is_std | | ✓
+//!
+//! [^1]: Build script and bin dependencies are not included.
+//!
+//! [^3]: See below for details on mtime tracking.
+//!
+//! [^4]: `__CARGO_DEFAULT_LIB_METADATA` is set by rustbuild to embed the
+//! release channel (bootstrap/stable/beta/nightly) in libstd.
+//!
+//! [^5]: Config settings that are not otherwise captured anywhere else.
+//! Currently, this is only `doc.extern-map`.
+//!
+//! When deciding what should go in the Metadata vs the Fingerprint, consider
+//! that some files (like dylibs) do not have a hash in their filename. Thus,
+//! if a value changes, only the fingerprint will detect the change (consider,
+//! for example, swapping between different features). Fields that are only in
+//! Metadata generally aren't relevant to the fingerprint because they
+//! fundamentally change the output (like target vs host changes the directory
+//! where it is emitted).
+//!
+//! ## Fingerprint files
+//!
+//! Fingerprint information is stored in the
+//! `target/{debug,release}/.fingerprint/` directory. Each Unit is stored in a
+//! separate directory. Each Unit directory contains:
+//!
+//! - A file with a 16 hex-digit hash. This is the Fingerprint hash, used for
+//! quick loading and comparison.
+//! - A `.json` file that contains details about the Fingerprint. This is only
+//! used to log details about *why* a fingerprint is considered dirty.
+//! `CARGO_LOG=cargo::core::compiler::fingerprint=trace cargo build` can be
+//! used to display this log information.
+//! - A "dep-info" file which is a translation of rustc's `*.d` dep-info files
+//! to a Cargo-specific format that tweaks file names and is optimized for
+//! reading quickly.
+//! - An `invoked.timestamp` file whose filesystem mtime is updated every time
+//! the Unit is built. This is used for capturing the time when the build
+//! starts, to detect if files are changed in the middle of the build. See
+//! below for more details.
+//!
+//! Note that some units are a little different. A Unit for *running* a build
+//! script or for `rustdoc` does not have a dep-info file (it's not
+//! applicable). Build script `invoked.timestamp` files are in the build
+//! output directory.
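+//!
+//! As a rough illustration (the target name and hash below are made up), a
+//! unit's fingerprint directory might look like:
+//!
+//! ```text
+//! target/debug/.fingerprint/foo-0ce0c203a098b565/
+//!     lib-foo                # the 16 hex-digit fingerprint hash
+//!     lib-foo.json           # details used to explain why a unit is dirty
+//!     dep-lib-foo            # the translated dep-info file
+//!     invoked.timestamp      # mtime anchor for when the build started
+//! ```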
+//!
+//! ## Fingerprint calculation
+//!
+//! After the list of Units has been calculated, the Units are added to the
+//! [`JobQueue`]. As each one is added, the fingerprint is calculated, and the
+//! dirty/fresh status is recorded. A closure is used to update the fingerprint
+//! on-disk when the Unit successfully finishes. The closure will recompute the
+//! Fingerprint based on the updated information. If the Unit fails to compile,
+//! the fingerprint is not updated.
+//!
+//! Fingerprints are cached in the [`Context`]. This makes computing
+//! Fingerprints faster, but also is necessary for properly updating
+//! dependency information. Since a Fingerprint includes the Fingerprints of
+//! all dependencies, when it is updated, by using `Arc` clones, it
+//! automatically picks up the updates to its dependencies.
+//!
+//! ### dep-info files
+//!
+//! Cargo has several kinds of "dep info" files:
+//!
+//! * dep-info files generated by `rustc`.
+//! * Fingerprint dep-info files translated from the first one.
+//! * dep-info for external build system integration.
+//! * Unstable `-Zbinary-dep-depinfo`.
+//!
+//! #### `rustc` dep-info files
+//!
+//! Cargo passes the `--emit=dep-info` flag to `rustc` so that `rustc` will
+//! generate a "dep info" file (with the `.d` extension). This is a
+//! Makefile-like syntax that includes all of the source files used to build
+//! the crate. This file is used by Cargo to know which files to check to see
+//! if the crate will need to be rebuilt. Example:
+//!
+//! ```makefile
+//! /path/to/target/debug/deps/cargo-b6219d178925203d: src/bin/main.rs src/bin/cargo/cli.rs # … etc.
+//! ```
+//!
+//! #### Fingerprint dep-info files
+//!
+//! After `rustc` exits successfully, Cargo will read the first kind of dep
+//! info file and translate it into a binary format that is stored in the
+//! fingerprint directory ([`translate_dep_info`]).
+//!
+//! These are used to quickly scan for any changed files. The mtime of the
+//! fingerprint dep-info file itself is used as the reference for comparing the
+//! source files to determine if any of the source files have been modified
+//! (see [below](#mtime-comparison) for more detail).
+//!
+//! Note that Cargo parses the special `# env-var:...` comments in dep-info
+//! files to learn about environment variables that the rustc compile depends on.
+//! Cargo then later uses this to trigger a recompile if a referenced env var
+//! changes (even if the source didn't change).
+//!
+//! #### dep-info files for build system integration
+//!
+//! There is also a third dep-info file. Cargo will extend the file created by
+//! rustc with some additional information and saves this into the output
+//! directory. This is intended for build system integration. See the
+//! [`output_depinfo`] function for more detail.
+//!
+//! #### -Zbinary-dep-depinfo
+//!
+//! `rustc` has an experimental flag `-Zbinary-dep-depinfo`. This causes
+//! `rustc` to include binary files (like rlibs) in the dep-info file. This is
+//! primarily to support rustc development, so that Cargo can check the
+//! implicit dependency to the standard library (which lives in the sysroot).
+//! We want Cargo to recompile whenever the standard library rlib/dylibs
+//! change, and this is a generic mechanism to make that work.
+//!
+//! ### Mtime comparison
+//!
+//! The use of modification timestamps is the most common way a unit will be
+//! determined to be dirty or fresh between builds. There are many subtle
+//! issues and edge cases with mtime comparisons. This gives a high-level
+//! overview, but you'll need to read the code for the gritty details. Mtime
+//! handling is different for different unit kinds. The different styles are
+//! driven by the [`Fingerprint::local`] field, which is set based on the unit
+//! kind.
+//!
+//! The status of whether or not the mtime is "stale" or "up-to-date" is
+//! stored in [`Fingerprint::fs_status`].
+//!
+//! All units will compare the mtime of its newest output file with the mtimes
+//! of the outputs of all its dependencies. If any output file is missing,
+//! then the unit is stale. If any dependency is newer, the unit is stale.
+//!
+//! #### Normal package mtime handling
+//!
+//! [`LocalFingerprint::CheckDepInfo`] is used for checking the mtime of
+//! packages. It compares the mtime of the input files (the source files) to
+//! the mtime of the dep-info file (which is written last after a build is
+//! finished). If the dep-info is missing, the unit is stale (it has never
+//! been built). The list of input files comes from the dep-info file. See the
+//! section above for details on dep-info files.
+//!
+//! Also note that although registry and git packages use [`CheckDepInfo`], none
+//! of their source files are included in the dep-info (see
+//! [`translate_dep_info`]), so for those kinds no mtime checking is done
+//! (unless `-Zbinary-dep-depinfo` is used). Registry and git packages are
+//! static, so there is no need to check anything.
+//!
+//! When a build is complete, the mtime of the dep-info file in the
+//! fingerprint directory is modified to rewind it to the time when the build
+//! started. This is done by creating an `invoked.timestamp` file when the
+//! build starts to capture the start time. The mtime is rewound to the start
+//! to handle the case where the user modifies a source file while a build is
+//! running. Cargo can't know whether or not the file was included in the
+//! build, so it takes a conservative approach of assuming the file was *not*
+//! included, and it should be rebuilt during the next build.
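+//!
+//! A minimal sketch of that rewinding step (not Cargo's actual code; it uses
+//! the `filetime` crate and assumes the `invoked.timestamp` file exists):
+//!
+//! ```ignore
+//! use filetime::{set_file_times, FileTime};
+//! use std::{fs, io, path::Path};
+//!
+//! fn rewind_to_build_start(dep_info: &Path, invoked_timestamp: &Path) -> io::Result<()> {
+//!     // The mtime of `invoked.timestamp` records when the build was started.
+//!     let start = FileTime::from_last_modification_time(&fs::metadata(invoked_timestamp)?);
+//!     // Rewind the reference file so sources edited mid-build still look newer.
+//!     set_file_times(dep_info, start, start)
+//! }
+//! ```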
+//!
+//! #### Rustdoc mtime handling
+//!
+//! Rustdoc does not emit a dep-info file, so Cargo currently has a relatively
+//! simple system for detecting rebuilds. [`LocalFingerprint::Precalculated`] is
+//! used for rustdoc units. For registry packages, this is the package
+//! version. For git packages, it is the git hash. For path packages, it is
+//! a string of the mtime of the newest file in the package.
+//!
+//! There are some known bugs with how this works, so it should be improved at
+//! some point.
+//!
+//! #### Build script mtime handling
+//!
+//! Build script mtime handling runs in different modes. There is the "old
+//! style" where the build script does not emit any `rerun-if` directives. In
+//! this mode, Cargo will use [`LocalFingerprint::Precalculated`]. See the
+//! "rustdoc" section above for how it works.
+//!
+//! In the new-style, each `rerun-if` directive is translated to the
+//! corresponding [`LocalFingerprint`] variant. The [`RerunIfChanged`] variant
+//! compares the mtime of the given filenames against the mtime of the
+//! "output" file.
+//!
+//! Similar to normal units, the build script "output" file mtime is rewound
+//! to the time just before the build script is executed to handle mid-build
+//! modifications.
+//!
+//! ## Considerations for inclusion in a fingerprint
+//!
+//! Over time we've realized a few items which historically were included in
+//! fingerprint hashings should not actually be included. Examples are:
+//!
+//! * Modification time values. We strive to never include a modification time
+//! inside a `Fingerprint` to get hashed into an actual value. While
+//! theoretically fine to do, in practice this causes issues with common
+//! applications like Docker. Docker, after a layer is built, will zero out
+//! the nanosecond part of all filesystem modification times. This means that
+//! the actual modification time is different for all build artifacts, which
+//! if we tracked the actual values of modification times would cause
+//! unnecessary recompiles. To fix this we instead only track paths which are
+//! relevant. These paths are checked dynamically to see if they're up to
+//! date, and the modification time doesn't make its way into the fingerprint
+//! hash.
+//!
+//! * Absolute path names. We strive to maintain a property where if you rename
+//! a project directory Cargo will continue to preserve all build artifacts
+//! and reuse the cache. This means that we can't ever hash an absolute path
+//! name. Instead we always hash relative path names and the "root" is passed
+//! in at runtime dynamically. Some of this is best effort, but the general
+//! idea is that we assume all accesses within a crate stay within that
+//! crate.
+//!
+//! These are pretty tricky to test for unfortunately, but we should have a good
+//! test suite nowadays and lord knows Cargo gets enough testing in the wild!
+//!
+//! ## Build scripts
+//!
+//! The *running* of a build script ([`CompileMode::RunCustomBuild`]) is treated
+//! significantly different than all other Unit kinds. It has its own function
+//! for calculating the Fingerprint ([`calculate_run_custom_build`]) and has some
+//! unique considerations. It does not track the same information as a normal
+//! Unit. The information tracked depends on the `rerun-if-changed` and
+//! `rerun-if-env-changed` statements produced by the build script. If the
+//! script does not emit either of these statements, the Fingerprint runs in
+//! "old style" mode where an mtime change of *any* file in the package will
+//! cause the build script to be re-run. Otherwise, the fingerprint *only*
+//! tracks the individual "rerun-if" items listed by the build script.
+//!
+//! The "rerun-if" statements from a *previous* build are stored in the build
+//! output directory in a file called `output`. Cargo parses this file when
+//! the Unit for that build script is prepared for the [`JobQueue`]. The
+//! Fingerprint code can then use that information to compute the Fingerprint
+//! and compare against the old fingerprint hash.
+//!
+//! Care must be taken with build script Fingerprints because the
+//! [`Fingerprint::local`] value may be changed after the build script runs
+//! (such as if the build script adds or removes "rerun-if" items).
+//!
+//! Another complication is if a build script is overridden. In that case, the
+//! fingerprint is the hash of the output of the override.
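+//!
+//! For reference, a build script opts into the fine-grained "rerun-if" mode
+//! simply by printing those directives (a hypothetical `build.rs`):
+//!
+//! ```ignore
+//! fn main() {
+//!     // Only re-run when these specific inputs change, instead of whenever
+//!     // any file in the package changes ("old style" mode).
+//!     println!("cargo:rerun-if-changed=src/native/ffi.c");
+//!     println!("cargo:rerun-if-env-changed=CC");
+//! }
+//! ```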
+//!
+//! ## Special considerations
+//!
+//! Registry dependencies do not track the mtime of files. This is because
+//! registry dependencies are not expected to change (if a new version is
+//! used, the Package ID will change, causing a rebuild). Cargo currently
+//! partially works with Docker caching. When a Docker image is built, it has
+//! normal mtime information. However, when a step is cached, the nanosecond
+//! portions of all files are zeroed out. Currently this works, but care must
+//! be taken for situations like these.
+//!
+//! HFS on macOS only supports 1 second timestamps. This causes a significant
+//! number of problems, particularly with Cargo's testsuite which does rapid
+//! builds in succession. Other filesystems have various degrees of
+//! resolution.
+//!
+//! Various weird filesystems (such as network filesystems) also can cause
+//! complications. Network filesystems may track the time on the server
+//! (except when the time is set manually such as with
+//! `filetime::set_file_times`). Not all filesystems support modifying the
+//! mtime.
+//!
+//! See the [`A-rebuild-detection`] label on the issue tracker for more.
+//!
+//! [`check_filesystem`]: Fingerprint::check_filesystem
+//! [`Metadata`]: crate::core::compiler::Metadata
+//! [`Profile`]: crate::core::profiles::Profile
+//! [`CompileMode`]: crate::core::compiler::CompileMode
+//! [`Lto`]: crate::core::compiler::Lto
+//! [`CompileKind`]: crate::core::compiler::CompileKind
+//! [`JobQueue`]: super::job_queue::JobQueue
+//! [`output_depinfo`]: super::output_depinfo()
+//! [`CheckDepInfo`]: LocalFingerprint::CheckDepInfo
+//! [`RerunIfChanged`]: LocalFingerprint::RerunIfChanged
+//! [`CompileMode::RunCustomBuild`]: crate::core::compiler::CompileMode::RunCustomBuild
+//! [`A-rebuild-detection`]: https://github.com/rust-lang/cargo/issues?q=is%3Aissue+is%3Aopen+label%3AA-rebuild-detection
+
+mod dirty_reason;
+
+use std::collections::hash_map::{Entry, HashMap};
+
+use std::env;
+use std::hash::{self, Hash, Hasher};
+use std::io;
+use std::path::{Path, PathBuf};
+use std::str;
+use std::sync::{Arc, Mutex};
+use std::time::SystemTime;
+
+use anyhow::{bail, format_err, Context as _};
+use cargo_util::{paths, ProcessBuilder};
+use filetime::FileTime;
+use log::{debug, info};
+use serde::de;
+use serde::ser;
+use serde::{Deserialize, Serialize};
+
+use crate::core::compiler::unit_graph::UnitDep;
+use crate::core::Package;
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+use crate::util::{self, try_canonicalize};
+use crate::util::{internal, path_args, profile, StableHasher};
+use crate::{Config, CARGO_ENV};
+
+use super::custom_build::BuildDeps;
+use super::{BuildContext, Context, FileFlavor, Job, Unit, Work};
+
+pub use dirty_reason::DirtyReason;
+
+/// Determines if a [`Unit`] is up-to-date, and if not prepares necessary work to
+/// update the persisted fingerprint.
+///
+/// This function will inspect `Unit`, calculate a fingerprint for it, and then
+/// return an appropriate [`Job`] to run. The returned `Job` will be a noop if
+/// `unit` is considered "fresh", or if it was previously built and cached.
+/// Otherwise the `Job` returned will write out the true fingerprint to the
+/// filesystem, to be executed after the unit's work has completed.
+///
+/// The `force` flag is a way to force the `Job` to be "dirty", or always
+/// update the fingerprint. **Beware using this flag** because it does not
+/// transitively propagate throughout the dependency graph; it only forces this
+/// one unit, which is very unlikely to be what you want unless you're
+/// exclusively talking about top-level units.
+pub fn prepare_target(cx: &mut Context<'_, '_>, unit: &Unit, force: bool) -> CargoResult<Job> {
+ let _p = profile::start(format!(
+ "fingerprint: {} / {}",
+ unit.pkg.package_id(),
+ unit.target.name()
+ ));
+ let bcx = cx.bcx;
+ let loc = cx.files().fingerprint_file_path(unit, "");
+
+ debug!("fingerprint at: {}", loc.display());
+
+ // Figure out if this unit is up to date. After calculating the fingerprint
+ // compare it to an old version, if any, and attempt to print diagnostic
+ // information about failed comparisons to aid in debugging.
+ let fingerprint = calculate(cx, unit)?;
+ let mtime_on_use = cx.bcx.config.cli_unstable().mtime_on_use;
+ let compare = compare_old_fingerprint(&loc, &*fingerprint, mtime_on_use);
+ log_compare(unit, &compare);
+
+ // If our comparison failed or reported dirty (e.g., we're going to trigger
+ // a rebuild of this crate), then we also ensure the source of the crate
+ // passes all verification checks before we build it.
+ //
+ // The `Source::verify` method is intended to allow sources to execute
+ // pre-build checks to ensure that the relevant source code is all
+ // up-to-date and as expected. This is currently used primarily for
+ // directory sources which will use this hook to perform an integrity check
+ // on all files in the source to ensure they haven't changed. If they have
+ // changed then an error is issued.
+ if compare
+ .as_ref()
+ .map(|dirty| dirty.is_some())
+ .unwrap_or(true)
+ {
+ let source_id = unit.pkg.package_id().source_id();
+ let sources = bcx.packages.sources();
+ let source = sources
+ .get(source_id)
+ .ok_or_else(|| internal("missing package source"))?;
+ source.verify(unit.pkg.package_id())?;
+ }
+
+ let dirty_reason = match compare {
+ Ok(None) => {
+ if force {
+ Some(DirtyReason::Forced)
+ } else {
+ return Ok(Job::new_fresh());
+ }
+ }
+ Ok(reason) => reason,
+ Err(_) => None,
+ };
+
+ // Clear out the old fingerprint file if it exists. This protects when
+ // compilation is interrupted leaving a corrupt file. For example, a
+ // project with a lib.rs and integration test (two units):
+ //
+ // 1. Build the library and integration test.
+ // 2. Make a change to lib.rs (NOT the integration test).
+ // 3. Build the integration test, hit Ctrl-C while linking. With gcc, this
+ // will leave behind an incomplete executable (zero size, or partially
+ // written). NOTE: The library builds successfully, it is the linking
+ // of the integration test that we are interrupting.
+ // 4. Build the integration test again.
+ //
+ // Without the following line, then step 3 will leave a valid fingerprint
+ // on the disk. Then step 4 will think the integration test is "fresh"
+ // because:
+ //
+ // - There is a valid fingerprint hash on disk (written in step 1).
+ // - The mtime of the output file (the corrupt integration executable
+ // written in step 3) is newer than all of its dependencies.
+ // - The mtime of the integration test fingerprint dep-info file (written
+ // in step 1) is newer than the integration test's source files, because
+ // we haven't modified any of its source files.
+ //
+ // But the executable is corrupt and needs to be rebuilt. Clearing the
+ // fingerprint at step 3 ensures that Cargo never mistakes a partially
+ // written output as up-to-date.
+ if loc.exists() {
+ // Truncate instead of delete so that compare_old_fingerprint will
+ // still log the reason for the fingerprint failure instead of just
+ // reporting "failed to read fingerprint" during the next build if
+ // this build fails.
+ paths::write(&loc, b"")?;
+ }
+
+ let write_fingerprint = if unit.mode.is_run_custom_build() {
+ // For build scripts the `local` field of the fingerprint may change
+ // while we're executing it. For example it could be in the legacy
+ // "consider everything a dependency mode" and then we switch to "deps
+ // are explicitly specified" mode.
+ //
+ // To handle this movement we need to regenerate the `local` field of a
+ // build script's fingerprint after it's executed. We do this by
+ // using the `build_script_local_fingerprints` function which returns a
+ // thunk we can invoke on a foreign thread to calculate this.
+ let build_script_outputs = Arc::clone(&cx.build_script_outputs);
+ let metadata = cx.get_run_build_script_metadata(unit);
+ let (gen_local, _overridden) = build_script_local_fingerprints(cx, unit);
+ let output_path = cx.build_explicit_deps[unit].build_script_output.clone();
+ Work::new(move |_| {
+ let outputs = build_script_outputs.lock().unwrap();
+ let output = outputs
+ .get(metadata)
+ .expect("output must exist after running");
+ let deps = BuildDeps::new(&output_path, Some(output));
+
+ // FIXME: it's basically buggy that we pass `None` to `call_box`
+ // here. See documentation on `build_script_local_fingerprints`
+ // below for more information. Despite this just try to proceed and
+ // hobble along if it happens to return `Some`.
+ if let Some(new_local) = (gen_local)(&deps, None)? {
+ *fingerprint.local.lock().unwrap() = new_local;
+ }
+
+ write_fingerprint(&loc, &fingerprint)
+ })
+ } else {
+ Work::new(move |_| write_fingerprint(&loc, &fingerprint))
+ };
+
+ Ok(Job::new_dirty(write_fingerprint, dirty_reason))
+}
+
+/// Dependency edge information for fingerprints. This is generated for each
+/// dependency and is stored in a [`Fingerprint`].
+#[derive(Clone)]
+struct DepFingerprint {
+ /// The hash of the package id that this dependency points to
+ pkg_id: u64,
+ /// The crate name we're using for this dependency; if it changes we'll
+ /// need to recompile!
+ name: InternedString,
+ /// Whether or not this dependency is flagged as a public dependency.
+ public: bool,
+ /// Whether or not this dependency is an rmeta dependency or a "full"
+ /// dependency. In the case of an rmeta dependency our dependency edge only
+ /// actually requires the rmeta from what we depend on, so when checking
+ /// mtime information all files other than the rmeta can be ignored.
+ only_requires_rmeta: bool,
+ /// The dependency's fingerprint we recursively point to, containing all the
+ /// other hash information we'd otherwise need.
+ fingerprint: Arc<Fingerprint>,
+}
+
+/// A fingerprint can be considered to be a "short string" representing the
+/// state of a world for a package.
+///
+/// If a fingerprint ever changes, then the package itself needs to be
+/// recompiled. Inputs to the fingerprint include source code modifications,
+/// compiler flags, compiler version, etc. This structure is not simply a
+/// `String` due to the fact that some fingerprints cannot be calculated lazily.
+///
+/// Path sources, for example, use the mtime of the corresponding dep-info file
+/// as a fingerprint (all source files must be modified *before* this mtime).
+/// This dep-info file is not generated, however, until after the crate is
+/// compiled. As a result, this structure can be thought of as a fingerprint
+/// to-be. The actual value can be calculated via `hash_u64()`, but the operation
+/// may fail as some files may not have been generated.
+///
+/// Note that dependencies are taken into account for fingerprints because rustc
+/// requires that whenever an upstream crate is recompiled that all downstream
+/// dependents are also recompiled. This is typically tracked through
+/// `DependencyQueue`, but it also needs to be retained here because Cargo can
+/// be interrupted while executing, losing the state of the `DependencyQueue`
+/// graph.
+#[derive(Serialize, Deserialize)]
+pub struct Fingerprint {
+ /// Hash of the version of `rustc` used.
+ rustc: u64,
+ /// Sorted list of cfg features enabled.
+ features: String,
+ /// Hash of the `Target` struct, including the target name,
+ /// package-relative source path, edition, etc.
+ target: u64,
+ /// Hash of the [`Profile`], [`CompileMode`], and any extra flags passed via
+ /// `cargo rustc` or `cargo rustdoc`.
+ ///
+ /// [`Profile`]: crate::core::profiles::Profile
+ /// [`CompileMode`]: crate::core::compiler::CompileMode
+ profile: u64,
+ /// Hash of the path to the base source file. This is relative to the
+ /// workspace root for path members, or absolute for other sources.
+ path: u64,
+ /// Fingerprints of dependencies.
+ deps: Vec<DepFingerprint>,
+ /// Information about the inputs that affect this Unit (such as source
+ /// file mtimes or build script environment variables).
+ local: Mutex<Vec<LocalFingerprint>>,
+ /// Cached hash of the [`Fingerprint`] struct. Used to improve performance
+ /// for hashing.
+ #[serde(skip)]
+ memoized_hash: Mutex<Option<u64>>,
+ /// RUSTFLAGS/RUSTDOCFLAGS environment variable value (or config value).
+ rustflags: Vec<String>,
+ /// Hash of some metadata from the manifest, such as "authors", or
+ /// "description", which are exposed as environment variables during
+ /// compilation.
+ metadata: u64,
+ /// Hash of various config settings that change how things are compiled.
+ config: u64,
+ /// The rustc target. This is only relevant for `.json` files, otherwise
+ /// the metadata hash segregates the units.
+ compile_kind: u64,
+ /// Description of whether the filesystem status for this unit is up to date
+ /// or should be considered stale.
+ #[serde(skip)]
+ fs_status: FsStatus,
+ /// Files, relative to `target_root`, that are produced by the step that
+ /// this `Fingerprint` represents. This is used to detect when the whole
+ /// fingerprint is out of date if any of these files are missing, or if a previous
+ /// fingerprint's output files are regenerated and look newer than this one.
+ #[serde(skip)]
+ outputs: Vec<PathBuf>,
+}
+
+/// Indication of the status on the filesystem for a particular unit.
+#[derive(Clone, Default, Debug)]
+pub enum FsStatus {
+ /// This unit is to be considered stale, even if hash information all
+ /// matches.
+ #[default]
+ Stale,
+
+ /// File system inputs have changed (or are missing), or there were
+ /// changes to the environment variables that affect this unit. See
+ /// the variants of [`StaleItem`] for more information.
+ StaleItem(StaleItem),
+
+ /// A dependency was stale.
+ StaleDependency {
+ name: InternedString,
+ dep_mtime: FileTime,
+ max_mtime: FileTime,
+ },
+
+ /// A dependency's fingerprint was stale.
+ StaleDepFingerprint { name: InternedString },
+
+ /// This unit is up-to-date. All outputs and their corresponding mtime are
+ /// listed in the payload here for other dependencies to compare against.
+ UpToDate { mtimes: HashMap<PathBuf, FileTime> },
+}
+
+impl FsStatus {
+ fn up_to_date(&self) -> bool {
+ match self {
+ FsStatus::UpToDate { .. } => true,
+ FsStatus::Stale
+ | FsStatus::StaleItem(_)
+ | FsStatus::StaleDependency { .. }
+ | FsStatus::StaleDepFingerprint { .. } => false,
+ }
+ }
+}
+
+impl Serialize for DepFingerprint {
+ fn serialize<S>(&self, ser: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ (
+ &self.pkg_id,
+ &self.name,
+ &self.public,
+ &self.fingerprint.hash_u64(),
+ )
+ .serialize(ser)
+ }
+}
+
+impl<'de> Deserialize<'de> for DepFingerprint {
+ fn deserialize<D>(d: D) -> Result<DepFingerprint, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ let (pkg_id, name, public, hash) = <(u64, String, bool, u64)>::deserialize(d)?;
+ Ok(DepFingerprint {
+ pkg_id,
+ name: InternedString::new(&name),
+ public,
+ fingerprint: Arc::new(Fingerprint {
+ memoized_hash: Mutex::new(Some(hash)),
+ ..Fingerprint::new()
+ }),
+ // This field is never read since it's only used in
+ // `check_filesystem` which isn't used by fingerprints loaded from
+ // disk.
+ only_requires_rmeta: false,
+ })
+ }
+}
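+
+// As a rough illustration (the values below are hypothetical), a serialized
+// `DepFingerprint` therefore appears in the fingerprint JSON as a plain 4-tuple
+// such as `[1565267240, "serde", false, 7988246233542612698]`: the package-id
+// hash, the extern crate name, the `public` flag, and the dependency's
+// memoized fingerprint hash.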
+
+/// A `LocalFingerprint` represents something that we use to detect direct
+/// changes to a `Fingerprint`.
+///
+/// This is where we track file information, env vars, etc. This
+/// `LocalFingerprint` struct is hashed, and if the hash changes it will force a
+/// recompile of any fingerprint it's included in. Note that the "local"
+/// terminology comes from the fact that it only has to do with one crate, and
+/// `Fingerprint` tracks the transitive propagation of fingerprint changes.
+///
+/// Note that because this is hashed, its contents are carefully managed. As
+/// mentioned in the module docs above, we don't want to hash absolute paths or
+/// mtime information.
+///
+/// Also note that a `LocalFingerprint` is used in `check_filesystem` to detect
+/// when the filesystem contains stale information (based on mtime currently).
+/// The paths here don't change much between compilations but they're used as
+/// inputs when we probe the filesystem looking at information.
+#[derive(Debug, Serialize, Deserialize, Hash)]
+enum LocalFingerprint {
+ /// This is a precalculated fingerprint which has an opaque string we just
+ /// hash as usual. This variant is primarily used for rustdoc where we
+ /// don't have a dep-info file to compare against.
+ ///
+ /// This is also used for build scripts with no `rerun-if-*` statements, but
+ /// that's overall a mistake and causes bugs in Cargo. We shouldn't use this
+ /// for build scripts.
+ Precalculated(String),
+
+ /// This is used for crate compilations. The `dep_info` file is a relative
+ /// path anchored at `target_root(...)` to the dep-info file that Cargo
+ /// generates (which is a custom serialization after parsing rustc's own
+ /// `dep-info` output).
+ ///
+ /// The `dep_info` file, when present, also lists a number of other files
+ /// for us to look at. If any of those files are newer than this file then
+ /// we need to recompile.
+ CheckDepInfo { dep_info: PathBuf },
+
+ /// This represents a nonempty set of `rerun-if-changed` annotations printed
+ /// out by a build script. The `output` file is a relative file anchored at
+ /// `target_root(...)` which is the actual output of the build script. That
+ /// output has already been parsed and the paths printed out via
+ /// `rerun-if-changed` are listed in `paths`. The `paths` field is relative
+ /// to `pkg.root()`
+ ///
+ /// This is considered up-to-date if all of the `paths` are older than
+ /// `output`, otherwise we need to recompile.
+ RerunIfChanged {
+ output: PathBuf,
+ paths: Vec<PathBuf>,
+ },
+
+ /// This represents a single `rerun-if-env-changed` annotation printed by a
+ /// build script. The exact env var and value are hashed here. There's no
+ /// filesystem dependence here, and if the values are changed the hash will
+ /// change forcing a recompile.
+ RerunIfEnvChanged { var: String, val: Option<String> },
+}
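+
+// A sketch of how build-script directives map onto these variants; the paths
+// and values here are hypothetical, not taken from a real build. A build
+// script that prints `cargo:rerun-if-changed=src/native/hook.c` and
+// `cargo:rerun-if-env-changed=CC` would typically be tracked as:
+//
+//     LocalFingerprint::RerunIfChanged {
+//         output: "debug/build/foo-<meta>/output".into(), // relative to target_root
+//         paths: vec!["src/native/hook.c".into()],        // relative to pkg.root()
+//     }
+//
+// plus `LocalFingerprint::RerunIfEnvChanged { var: "CC".into(), val: env::var("CC").ok() }`.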
+
+/// See [`FsStatus::StaleItem`].
+#[derive(Clone, Debug)]
+pub enum StaleItem {
+ MissingFile(PathBuf),
+ ChangedFile {
+ reference: PathBuf,
+ reference_mtime: FileTime,
+ stale: PathBuf,
+ stale_mtime: FileTime,
+ },
+ ChangedEnv {
+ var: String,
+ previous: Option<String>,
+ current: Option<String>,
+ },
+}
+
+impl LocalFingerprint {
+ /// Reads the environment variable for the given `key` and creates a new
+ /// [`LocalFingerprint::RerunIfEnvChanged`] for it.
+ ///
+ // TODO: This is allowed at the moment. Should figure out whether it makes
+ // sense to permit reading the env from the config system instead.
+ #[allow(clippy::disallowed_methods)]
+ fn from_env<K: AsRef<str>>(key: K) -> LocalFingerprint {
+ let key = key.as_ref();
+ let var = key.to_owned();
+ let val = env::var(key).ok();
+ LocalFingerprint::RerunIfEnvChanged { var, val }
+ }
+
+ /// Checks dynamically at runtime if this `LocalFingerprint` has a stale
+ /// item inside of it.
+ ///
+ /// The main purpose of this function is to handle two different ways
+ /// fingerprints can be invalidated:
+ ///
+ /// * One is when a dependency listed in rustc's dep-info files is invalid. Note
+ /// that these can be either env vars or files; we check both here.
+ ///
+ /// * Another is the `rerun-if-changed` directive from build scripts. This
+ /// is where we find out whether the listed files have actually changed.
+ fn find_stale_item(
+ &self,
+ mtime_cache: &mut HashMap<PathBuf, FileTime>,
+ pkg_root: &Path,
+ target_root: &Path,
+ cargo_exe: &Path,
+ config: &Config,
+ ) -> CargoResult<Option<StaleItem>> {
+ match self {
+ // We need to parse `dep_info`, learn about the crate's dependencies.
+ //
+ // For each env var we see if our current process's env var still
+ // matches, and for each file we see if any of them are newer than
+ // the `dep_info` file itself whose mtime represents the start of
+ // rustc.
+ LocalFingerprint::CheckDepInfo { dep_info } => {
+ let dep_info = target_root.join(dep_info);
+ let info = match parse_dep_info(pkg_root, target_root, &dep_info)? {
+ Some(info) => info,
+ None => return Ok(Some(StaleItem::MissingFile(dep_info))),
+ };
+ for (key, previous) in info.env.iter() {
+ let current = if key == CARGO_ENV {
+ Some(
+ cargo_exe
+ .to_str()
+ .ok_or_else(|| {
+ format_err!(
+ "cargo exe path {} must be valid UTF-8",
+ cargo_exe.display()
+ )
+ })?
+ .to_string(),
+ )
+ } else {
+ config.get_env(key).ok()
+ };
+ if current == *previous {
+ continue;
+ }
+ return Ok(Some(StaleItem::ChangedEnv {
+ var: key.clone(),
+ previous: previous.clone(),
+ current,
+ }));
+ }
+ Ok(find_stale_file(mtime_cache, &dep_info, info.files.iter()))
+ }
+
+ // We need to verify that no paths listed in `paths` are newer than
+ // the `output` path itself, or the last time the build script ran.
+ LocalFingerprint::RerunIfChanged { output, paths } => Ok(find_stale_file(
+ mtime_cache,
+ &target_root.join(output),
+ paths.iter().map(|p| pkg_root.join(p)),
+ )),
+
+ // These have no dependencies on the filesystem, and their values
+ // are included natively in the `Fingerprint` hash, so there is nothing
+ // to check for here.
+ LocalFingerprint::RerunIfEnvChanged { .. } => Ok(None),
+ LocalFingerprint::Precalculated(..) => Ok(None),
+ }
+ }
+
+ fn kind(&self) -> &'static str {
+ match self {
+ LocalFingerprint::Precalculated(..) => "precalculated",
+ LocalFingerprint::CheckDepInfo { .. } => "dep-info",
+ LocalFingerprint::RerunIfChanged { .. } => "rerun-if-changed",
+ LocalFingerprint::RerunIfEnvChanged { .. } => "rerun-if-env-changed",
+ }
+ }
+}
+
+impl Fingerprint {
+ fn new() -> Fingerprint {
+ Fingerprint {
+ rustc: 0,
+ target: 0,
+ profile: 0,
+ path: 0,
+ features: String::new(),
+ deps: Vec::new(),
+ local: Mutex::new(Vec::new()),
+ memoized_hash: Mutex::new(None),
+ rustflags: Vec::new(),
+ metadata: 0,
+ config: 0,
+ compile_kind: 0,
+ fs_status: FsStatus::Stale,
+ outputs: Vec::new(),
+ }
+ }
+
+ /// For performance reasons fingerprints will memoize their own hash, but
+ /// there's also internal mutability with its `local` field which can
+ /// change, for example with build scripts, during a build.
+ ///
+ /// This method can be used to bust all memoized hashes just before a build
+ /// to ensure that after a build completes everything is up-to-date.
+ pub fn clear_memoized(&self) {
+ *self.memoized_hash.lock().unwrap() = None;
+ }
+
+ fn hash_u64(&self) -> u64 {
+ if let Some(s) = *self.memoized_hash.lock().unwrap() {
+ return s;
+ }
+ let ret = util::hash_u64(self);
+ *self.memoized_hash.lock().unwrap() = Some(ret);
+ ret
+ }
+
+ /// Compares this fingerprint with an old version which was previously
+ /// serialized to filesystem.
+ ///
+ /// The purpose of this is exclusively to produce a diagnostic message
+ /// [`DirtyReason`], indicating why we're recompiling something.
+ fn compare(&self, old: &Fingerprint) -> DirtyReason {
+ if self.rustc != old.rustc {
+ return DirtyReason::RustcChanged;
+ }
+ if self.features != old.features {
+ return DirtyReason::FeaturesChanged {
+ old: old.features.clone(),
+ new: self.features.clone(),
+ };
+ }
+ if self.target != old.target {
+ return DirtyReason::TargetConfigurationChanged;
+ }
+ if self.path != old.path {
+ return DirtyReason::PathToSourceChanged;
+ }
+ if self.profile != old.profile {
+ return DirtyReason::ProfileConfigurationChanged;
+ }
+ if self.rustflags != old.rustflags {
+ return DirtyReason::RustflagsChanged {
+ old: old.rustflags.clone(),
+ new: self.rustflags.clone(),
+ };
+ }
+ if self.metadata != old.metadata {
+ return DirtyReason::MetadataChanged;
+ }
+ if self.config != old.config {
+ return DirtyReason::ConfigSettingsChanged;
+ }
+ if self.compile_kind != old.compile_kind {
+ return DirtyReason::CompileKindChanged;
+ }
+ let my_local = self.local.lock().unwrap();
+ let old_local = old.local.lock().unwrap();
+ if my_local.len() != old_local.len() {
+ return DirtyReason::LocalLengthsChanged;
+ }
+ for (new, old) in my_local.iter().zip(old_local.iter()) {
+ match (new, old) {
+ (LocalFingerprint::Precalculated(a), LocalFingerprint::Precalculated(b)) => {
+ if a != b {
+ return DirtyReason::PrecalculatedComponentsChanged {
+ old: b.to_string(),
+ new: a.to_string(),
+ };
+ }
+ }
+ (
+ LocalFingerprint::CheckDepInfo { dep_info: adep },
+ LocalFingerprint::CheckDepInfo { dep_info: bdep },
+ ) => {
+ if adep != bdep {
+ return DirtyReason::DepInfoOutputChanged {
+ old: bdep.clone(),
+ new: adep.clone(),
+ };
+ }
+ }
+ (
+ LocalFingerprint::RerunIfChanged {
+ output: aout,
+ paths: apaths,
+ },
+ LocalFingerprint::RerunIfChanged {
+ output: bout,
+ paths: bpaths,
+ },
+ ) => {
+ if aout != bout {
+ return DirtyReason::RerunIfChangedOutputFileChanged {
+ old: bout.clone(),
+ new: aout.clone(),
+ };
+ }
+ if apaths != bpaths {
+ return DirtyReason::RerunIfChangedOutputPathsChanged {
+ old: bpaths.clone(),
+ new: apaths.clone(),
+ };
+ }
+ }
+ (
+ LocalFingerprint::RerunIfEnvChanged {
+ var: akey,
+ val: avalue,
+ },
+ LocalFingerprint::RerunIfEnvChanged {
+ var: bkey,
+ val: bvalue,
+ },
+ ) => {
+ if *akey != *bkey {
+ return DirtyReason::EnvVarsChanged {
+ old: bkey.clone(),
+ new: akey.clone(),
+ };
+ }
+ if *avalue != *bvalue {
+ return DirtyReason::EnvVarChanged {
+ name: akey.clone(),
+ old_value: bvalue.clone(),
+ new_value: avalue.clone(),
+ };
+ }
+ }
+ (a, b) => {
+ return DirtyReason::LocalFingerprintTypeChanged {
+ old: b.kind(),
+ new: a.kind(),
+ }
+ }
+ }
+ }
+
+ if self.deps.len() != old.deps.len() {
+ return DirtyReason::NumberOfDependenciesChanged {
+ old: old.deps.len(),
+ new: self.deps.len(),
+ };
+ }
+ for (a, b) in self.deps.iter().zip(old.deps.iter()) {
+ if a.name != b.name {
+ return DirtyReason::UnitDependencyNameChanged {
+ old: b.name.clone(),
+ new: a.name.clone(),
+ };
+ }
+
+ if a.fingerprint.hash_u64() != b.fingerprint.hash_u64() {
+ return DirtyReason::UnitDependencyInfoChanged {
+ new_name: a.name.clone(),
+ new_fingerprint: a.fingerprint.hash_u64(),
+ old_name: b.name.clone(),
+ old_fingerprint: b.fingerprint.hash_u64(),
+ };
+ }
+ }
+
+ if !self.fs_status.up_to_date() {
+ return DirtyReason::FsStatusOutdated(self.fs_status.clone());
+ }
+
+ // This typically means some filesystem modifications happened or
+ // something transitive was odd. In general we should strive to provide
+ // a better error message than this, so if you see this message a lot it
+ // likely means this method needs to be updated!
+ DirtyReason::NothingObvious
+ }
+
+ /// Dynamically inspect the local filesystem to update the `fs_status` field
+ /// of this `Fingerprint`.
+ ///
+ /// This function is used just after a `Fingerprint` is constructed to check
+ /// the local state of the filesystem and propagate any dirtiness from
+ /// dependencies up to this unit as well. This function assumes that the
+ /// unit starts out as [`FsStatus::Stale`] and then it will optionally switch
+ /// it to `UpToDate` if it can.
+ fn check_filesystem(
+ &mut self,
+ mtime_cache: &mut HashMap<PathBuf, FileTime>,
+ pkg_root: &Path,
+ target_root: &Path,
+ cargo_exe: &Path,
+ config: &Config,
+ ) -> CargoResult<()> {
+ assert!(!self.fs_status.up_to_date());
+
+ let mut mtimes = HashMap::new();
+
+ // Get the `mtime` of all outputs. Optionally update their mtime
+ // afterwards based on the `mtime_on_use` flag. Afterwards we want the
+ // minimum mtime as it's the one we'll be comparing to inputs and
+ // dependencies.
+ for output in self.outputs.iter() {
+ let mtime = match paths::mtime(output) {
+ Ok(mtime) => mtime,
+
+ // This path failed to report its `mtime`. It probably doesn't
+ // exist, so leave ourselves as stale and bail out.
+ Err(e) => {
+ debug!("failed to get mtime of {:?}: {}", output, e);
+ return Ok(());
+ }
+ };
+ assert!(mtimes.insert(output.clone(), mtime).is_none());
+ }
+
+ let opt_max = mtimes.iter().max_by_key(|kv| kv.1);
+ let (max_path, max_mtime) = match opt_max {
+ Some(mtime) => mtime,
+
+ // We had no output files. This means we're an overridden build
+ // script and we're just always up to date because we aren't
+ // watching the filesystem.
+ None => {
+ self.fs_status = FsStatus::UpToDate { mtimes };
+ return Ok(());
+ }
+ };
+ debug!(
+ "max output mtime for {:?} is {:?} {}",
+ pkg_root, max_path, max_mtime
+ );
+
+ for dep in self.deps.iter() {
+ let dep_mtimes = match &dep.fingerprint.fs_status {
+ FsStatus::UpToDate { mtimes } => mtimes,
+ // If our dependency is stale, so are we, so bail out.
+ FsStatus::Stale
+ | FsStatus::StaleItem(_)
+ | FsStatus::StaleDependency { .. }
+ | FsStatus::StaleDepFingerprint { .. } => {
+ self.fs_status = FsStatus::StaleDepFingerprint {
+ name: dep.name.clone(),
+ };
+ return Ok(());
+ }
+ };
+
+ // If our dependency edge only requires the rmeta file to be present
+ // then we only need to look at that one output file, otherwise we
+ // need to consider all output files to see if we're out of date.
+ let (dep_path, dep_mtime) = if dep.only_requires_rmeta {
+ dep_mtimes
+ .iter()
+ .find(|(path, _mtime)| {
+ path.extension().and_then(|s| s.to_str()) == Some("rmeta")
+ })
+ .expect("failed to find rmeta")
+ } else {
+ match dep_mtimes.iter().max_by_key(|kv| kv.1) {
+ Some(dep_mtime) => dep_mtime,
+ // If our dependency is up to date and has no filesystem
+ // interactions, then we can move on to the next dependency.
+ None => continue,
+ }
+ };
+ debug!(
+ "max dep mtime for {:?} is {:?} {}",
+ pkg_root, dep_path, dep_mtime
+ );
+
+ // If the dependency is newer than our own output then it was
+ // recompiled previously. We transitively become stale ourselves in
+ // that case, so bail out.
+ //
+ // Note that this comparison should probably be `>=`, not `>`, but
+ // for a discussion of why it's `>` see the discussion about #5918
+ // below in `find_stale`.
+ if dep_mtime > max_mtime {
+ info!(
+ "dependency on `{}` is newer than we are {} > {} {:?}",
+ dep.name, dep_mtime, max_mtime, pkg_root
+ );
+
+ self.fs_status = FsStatus::StaleDependency {
+ name: dep.name.clone(),
+ dep_mtime: *dep_mtime,
+ max_mtime: *max_mtime,
+ };
+
+ return Ok(());
+ }
+ }
+
+ // If we reached this far then all dependencies are up to date. Check
+ // all our `LocalFingerprint` information to see if we have any stale
+ // files for this package itself. If we do find something log a helpful
+ // message and bail out so we stay stale.
+ for local in self.local.get_mut().unwrap().iter() {
+ if let Some(item) =
+ local.find_stale_item(mtime_cache, pkg_root, target_root, cargo_exe, config)?
+ {
+ item.log();
+ self.fs_status = FsStatus::StaleItem(item);
+ return Ok(());
+ }
+ }
+
+ // Everything was up to date! Record such.
+ self.fs_status = FsStatus::UpToDate { mtimes };
+ debug!("filesystem up-to-date {:?}", pkg_root);
+
+ Ok(())
+ }
+}
+
+impl hash::Hash for Fingerprint {
+ fn hash<H: Hasher>(&self, h: &mut H) {
+ let Fingerprint {
+ rustc,
+ ref features,
+ target,
+ path,
+ profile,
+ ref deps,
+ ref local,
+ metadata,
+ config,
+ compile_kind,
+ ref rustflags,
+ ..
+ } = *self;
+ let local = local.lock().unwrap();
+ (
+ rustc,
+ features,
+ target,
+ path,
+ profile,
+ &*local,
+ metadata,
+ config,
+ compile_kind,
+ rustflags,
+ )
+ .hash(h);
+
+ h.write_usize(deps.len());
+ for DepFingerprint {
+ pkg_id,
+ name,
+ public,
+ fingerprint,
+ only_requires_rmeta: _, // static property, no need to hash
+ } in deps
+ {
+ pkg_id.hash(h);
+ name.hash(h);
+ public.hash(h);
+ // use memoized dep hashes to avoid exponential blowup
+ h.write_u64(fingerprint.hash_u64());
+ }
+ }
+}
+
+impl DepFingerprint {
+ fn new(cx: &mut Context<'_, '_>, parent: &Unit, dep: &UnitDep) -> CargoResult<DepFingerprint> {
+ let fingerprint = calculate(cx, &dep.unit)?;
+ // We need to be careful about what we hash here. We have a goal of
+ // supporting renaming a project directory and not rebuilding
+ // everything. To do that, however, we need to make sure that the cwd
+ // doesn't make its way into any hashes, and one source of that is the
+ // `SourceId` for `path` packages.
+ //
+ // We already have a requirement that `path` packages all have unique
+ // names (sort of for this same reason), so if the package source is a
+ // `path` then we just hash the name, but otherwise we hash the full
+ // id as it won't change when the directory is renamed.
+ let pkg_id = if dep.unit.pkg.package_id().source_id().is_path() {
+ util::hash_u64(dep.unit.pkg.package_id().name())
+ } else {
+ util::hash_u64(dep.unit.pkg.package_id())
+ };
+
+ Ok(DepFingerprint {
+ pkg_id,
+ name: dep.extern_crate_name,
+ public: dep.public,
+ fingerprint,
+ only_requires_rmeta: cx.only_requires_rmeta(parent, &dep.unit),
+ })
+ }
+}
+
+impl StaleItem {
+ /// Use the `log` crate to log a hopefully helpful message in diagnosing
+ /// what file is considered stale and why. This is intended to be used in
+ /// conjunction with `CARGO_LOG` to determine why Cargo is recompiling
+ /// something. Currently there's no user-facing usage of this other than
+ /// that.
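+ ///
+ /// For example, running with something like
+ /// `CARGO_LOG=cargo::core::compiler::fingerprint=info cargo build` is the
+ /// usual way to surface these messages when diagnosing unexpected rebuilds.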
+ fn log(&self) {
+ match self {
+ StaleItem::MissingFile(path) => {
+ info!("stale: missing {:?}", path);
+ }
+ StaleItem::ChangedFile {
+ reference,
+ reference_mtime,
+ stale,
+ stale_mtime,
+ } => {
+ info!("stale: changed {:?}", stale);
+ info!(" (vs) {:?}", reference);
+ info!(" {:?} < {:?}", reference_mtime, stale_mtime);
+ }
+ StaleItem::ChangedEnv {
+ var,
+ previous,
+ current,
+ } => {
+ info!("stale: changed env {:?}", var);
+ info!(" {:?} != {:?}", previous, current);
+ }
+ }
+ }
+}
+
+/// Calculates the fingerprint for a [`Unit`].
+///
+/// This fingerprint is used by Cargo to detect changes such as:
+///
+/// * A non-path package changes (changes version, changes revision, etc).
+/// * Any dependency changes
+/// * The compiler changes
+/// * The set of features a package is built with changes
+/// * The profile a target is compiled with changes (e.g., opt-level changes)
+/// * Any other compiler flags change that will affect the result
+///
+/// Information like file modification time is only calculated for path
+/// dependencies.
+fn calculate(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Arc<Fingerprint>> {
+ // This function is slammed quite a lot, so the result is memoized.
+ if let Some(s) = cx.fingerprints.get(unit) {
+ return Ok(Arc::clone(s));
+ }
+ let mut fingerprint = if unit.mode.is_run_custom_build() {
+ calculate_run_custom_build(cx, unit)?
+ } else if unit.mode.is_doc_test() {
+ panic!("doc tests do not fingerprint");
+ } else {
+ calculate_normal(cx, unit)?
+ };
+
+ // After we built the initial `Fingerprint` be sure to update the
+ // `fs_status` field of it.
+ let target_root = target_root(cx);
+ let cargo_exe = cx.bcx.config.cargo_exe()?;
+ fingerprint.check_filesystem(
+ &mut cx.mtime_cache,
+ unit.pkg.root(),
+ &target_root,
+ cargo_exe,
+ cx.bcx.config,
+ )?;
+
+ let fingerprint = Arc::new(fingerprint);
+ cx.fingerprints
+ .insert(unit.clone(), Arc::clone(&fingerprint));
+ Ok(fingerprint)
+}
+
+/// Calculate a fingerprint for a "normal" unit, or anything that's not a build
+/// script. This is an internal helper of [`calculate`], don't call directly.
+fn calculate_normal(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Fingerprint> {
+ let deps = {
+ // Recursively calculate the fingerprint for all of our dependencies.
+ //
+ // Skip fingerprints of binaries because they don't actually induce a
+ // recompile, they're just dependencies in the sense that they need to be
+ // built. The only exception here is artifact dependencies,
+ // which are actual dependencies that need a recompile.
+ //
+ // Create Vec since mutable cx is needed in closure.
+ let deps = Vec::from(cx.unit_deps(unit));
+ let mut deps = deps
+ .into_iter()
+ .filter(|dep| !dep.unit.target.is_bin() || dep.unit.artifact.is_true())
+ .map(|dep| DepFingerprint::new(cx, unit, &dep))
+ .collect::<CargoResult<Vec<_>>>()?;
+ deps.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
+ deps
+ };
+
+ // Afterwards calculate our own fingerprint information.
+ let target_root = target_root(cx);
+ let local = if unit.mode.is_doc() || unit.mode.is_doc_scrape() {
+ // rustdoc does not have dep-info files.
+ let fingerprint = pkg_fingerprint(cx.bcx, &unit.pkg).with_context(|| {
+ format!(
+ "failed to determine package fingerprint for documenting {}",
+ unit.pkg
+ )
+ })?;
+ vec![LocalFingerprint::Precalculated(fingerprint)]
+ } else {
+ let dep_info = dep_info_loc(cx, unit);
+ let dep_info = dep_info.strip_prefix(&target_root).unwrap().to_path_buf();
+ vec![LocalFingerprint::CheckDepInfo { dep_info }]
+ };
+
+ // Figure out what the outputs of our unit are; we'll be storing them
+ // in the fingerprint as well.
+ let outputs = cx
+ .outputs(unit)?
+ .iter()
+ .filter(|output| !matches!(output.flavor, FileFlavor::DebugInfo | FileFlavor::Auxiliary))
+ .map(|output| output.path.clone())
+ .collect();
+
+ // Fill out a bunch more information that we'll be tracking. It's typically
+ // hashed to take up less space on disk, as we just need to know when things
+ // change.
+ let extra_flags = if unit.mode.is_doc() || unit.mode.is_doc_scrape() {
+ cx.bcx.rustdocflags_args(unit)
+ } else {
+ cx.bcx.rustflags_args(unit)
+ }
+ .to_vec();
+
+ let profile_hash = util::hash_u64((
+ &unit.profile,
+ unit.mode,
+ cx.bcx.extra_args_for(unit),
+ cx.lto[unit],
+ ));
+ // Include metadata since it is exposed as environment variables.
+ let m = unit.pkg.manifest().metadata();
+ let metadata = util::hash_u64((&m.authors, &m.description, &m.homepage, &m.repository));
+ let mut config = StableHasher::new();
+ if let Some(linker) = cx.bcx.linker(unit.kind) {
+ linker.hash(&mut config);
+ }
+ if unit.mode.is_doc() && cx.bcx.config.cli_unstable().rustdoc_map {
+ if let Ok(map) = cx.bcx.config.doc_extern_map() {
+ map.hash(&mut config);
+ }
+ }
+ if let Some(allow_features) = &cx.bcx.config.cli_unstable().allow_features {
+ allow_features.hash(&mut config);
+ }
+ let compile_kind = unit.kind.fingerprint_hash();
+ Ok(Fingerprint {
+ rustc: util::hash_u64(&cx.bcx.rustc().verbose_version),
+ target: util::hash_u64(&unit.target),
+ profile: profile_hash,
+ // Note that .0 is hashed here, not .1 which is the cwd. That doesn't
+ // actually affect the output artifact so there's no need to hash it.
+ path: util::hash_u64(path_args(cx.bcx.ws, unit).0),
+ features: format!("{:?}", unit.features),
+ deps,
+ local: Mutex::new(local),
+ memoized_hash: Mutex::new(None),
+ metadata,
+ config: config.finish(),
+ compile_kind,
+ rustflags: extra_flags,
+ fs_status: FsStatus::Stale,
+ outputs,
+ })
+}
+
+/// Calculate a fingerprint for an "execute a build script" unit. This is an
+/// internal helper of [`calculate`], don't call directly.
+fn calculate_run_custom_build(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Fingerprint> {
+ assert!(unit.mode.is_run_custom_build());
+ // Using the `BuildDeps` information we'll have previously parsed and
+ // inserted into `build_explicit_deps`, build an initial snapshot of the
+ // `LocalFingerprint` list for this build script. If we previously executed
+ // the build script this means we'll be watching files and env vars.
+ // Otherwise if we haven't previously executed it we'll just start watching
+ // the whole crate.
+ let (gen_local, overridden) = build_script_local_fingerprints(cx, unit);
+ let deps = &cx.build_explicit_deps[unit];
+ let local = (gen_local)(
+ deps,
+ Some(&|| {
+ const IO_ERR_MESSAGE: &str = "\
+An I/O error happened. Please make sure you can access the file.
+
+By default, if your project contains a build script, cargo scans all files in
+it to determine whether a rebuild is needed. If you don't expect to access the
+file, specify `rerun-if-changed` in your build script.
+See https://doc.rust-lang.org/cargo/reference/build-scripts.html#rerun-if-changed for more information.";
+ pkg_fingerprint(cx.bcx, &unit.pkg).map_err(|err| {
+ let mut message = format!("failed to determine package fingerprint for build script for {}", unit.pkg);
+ if err.root_cause().is::<io::Error>() {
+ message = format!("{}\n{}", message, IO_ERR_MESSAGE)
+ }
+ err.context(message)
+ })
+ }),
+ )?
+ .unwrap();
+ let output = deps.build_script_output.clone();
+
+ // Include any dependencies of our execution, which is typically just the
+ // compilation of the build script itself (if the build script changes we
+ // should be rerun!). Note though that if we're an overridden build script
+ // we have no dependencies so no need to recurse in that case.
+ let deps = if overridden {
+ // Overridden build scripts don't need to track deps.
+ vec![]
+ } else {
+ // Create Vec since mutable cx is needed in closure.
+ let deps = Vec::from(cx.unit_deps(unit));
+ deps.into_iter()
+ .map(|dep| DepFingerprint::new(cx, unit, &dep))
+ .collect::<CargoResult<Vec<_>>>()?
+ };
+
+ Ok(Fingerprint {
+ local: Mutex::new(local),
+ rustc: util::hash_u64(&cx.bcx.rustc().verbose_version),
+ deps,
+ outputs: if overridden { Vec::new() } else { vec![output] },
+
+ // Most of the other info is blank here as we don't really include it
+ // in the execution of the build script, but... this may be a latent
+ // bug in Cargo.
+ ..Fingerprint::new()
+ })
+}
+
+/// Get ready to compute the [`LocalFingerprint`] values
+/// for a [`RunCustomBuild`] unit.
+///
+/// This function has, what's on the surface, a seriously wonky interface.
+/// You'll call this function and it'll return a closure and a boolean. The
+/// boolean is pretty simple in that it indicates whether the `unit` has been
+/// overridden via `.cargo/config.toml`. The closure is much more complicated.
+///
+/// This closure is intended to capture any local state necessary to compute
+/// the `LocalFingerprint` values for this unit. It is `Send` and `'static` to
+/// be sent to other threads as well (such as when we're executing build
+/// scripts). Reusing the same computation both up front and after a build
+/// script runs is the rationale for the closure, at least.
+///
+/// The arguments to the closure are a bit weirder, though, and I'll apologize
+/// in advance for the weirdness too. The first argument to the closure is a
+/// `&BuildDeps`. This is the parsed version of a build script, and when Cargo
+/// starts up this is cached from previous runs of a build script. After a
+/// build script executes the output file is reparsed and passed in here.
+///
+/// The second argument is the weirdest, it's *optionally* a closure to
+/// call [`pkg_fingerprint`]. The `pkg_fingerprint` requires access to
+/// "source map" located in `Context`. That's very non-`'static` and
+/// non-`Send`, so it can't be used on other threads, such as when we invoke
+/// this after a build script has finished. The `Option` allows us to for sure
+/// calculate it on the main thread at the beginning, and then swallow the bug
+/// for now where a worker thread after a build script has finished doesn't
+/// have access. Ideally there would be no second argument or it would be more
+/// "first class" and not an `Option` but something that can be sent between
+/// threads. In any case, it's a bug for now.
+///
+/// This isn't the greatest of interfaces, and if there's suggestions to
+/// improve please do so!
+///
+/// FIXME(#6779) - see all the words above
+///
+/// [`RunCustomBuild`]: crate::core::compiler::CompileMode::RunCustomBuild
+fn build_script_local_fingerprints(
+ cx: &mut Context<'_, '_>,
+ unit: &Unit,
+) -> (
+ Box<
+ dyn FnOnce(
+ &BuildDeps,
+ Option<&dyn Fn() -> CargoResult<String>>,
+ ) -> CargoResult<Option<Vec<LocalFingerprint>>>
+ + Send,
+ >,
+ bool,
+) {
+ assert!(unit.mode.is_run_custom_build());
+ // First up, if this build script is entirely overridden, then we just
+ // return the hash of what we overrode it with. This is the easy case!
+ if let Some(fingerprint) = build_script_override_fingerprint(cx, unit) {
+ debug!("override local fingerprints deps {}", unit.pkg);
+ return (
+ Box::new(
+ move |_: &BuildDeps, _: Option<&dyn Fn() -> CargoResult<String>>| {
+ Ok(Some(vec![fingerprint]))
+ },
+ ),
+ true, // this is an overridden build script
+ );
+ }
+
+ // ... Otherwise this is a "real" build script and we need to return a real
+ // closure. Our returned closure classifies the build script based on
+ // whether it prints `rerun-if-*`. If it *doesn't* print this, that's where the
+ // magical second argument comes into play, which fingerprints a whole
+ // package. Remember that the fact that this is an `Option` is a bug, but a
+ // longstanding bug, in Cargo. Recent refactorings just made it painfully
+ // obvious.
+ let pkg_root = unit.pkg.root().to_path_buf();
+ let target_dir = target_root(cx);
+ let calculate =
+ move |deps: &BuildDeps, pkg_fingerprint: Option<&dyn Fn() -> CargoResult<String>>| {
+ if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() {
+ match pkg_fingerprint {
+ // FIXME: this is somewhat buggy with respect to docker and
+ // weird filesystems. The `Precalculated` variant
+ // constructed below will, for `path` dependencies, contain
+ // a stringified version of the mtime for the local crate.
+ // This violates one of the things we describe in this
+ // module's doc comment, never hashing mtimes. We should
+ // figure out a better scheme where a package fingerprint
+ // may be a string (like for a registry) or a list of files
+ // (like for a path dependency). Those list of files would
+ // be stored here rather than the mtime of them.
+ Some(f) => {
+ let s = f()?;
+ debug!(
+ "old local fingerprints deps {:?} precalculated={:?}",
+ pkg_root, s
+ );
+ return Ok(Some(vec![LocalFingerprint::Precalculated(s)]));
+ }
+ None => return Ok(None),
+ }
+ }
+
+ // Ok so now we're in "new mode" where we can have files listed as
+ // dependencies as well as env vars listed as dependencies. Process
+ // them all here.
+ Ok(Some(local_fingerprints_deps(deps, &target_dir, &pkg_root)))
+ };
+
+ // Note that `false` == "not overridden"
+ (Box::new(calculate), false)
+}
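+
+// A minimal usage sketch of the returned closure (mirroring the call sites in
+// `calculate_run_custom_build` and the post-build `Work` closure in
+// `prepare_target`); this is illustrative only:
+//
+//     let (gen_local, overridden) = build_script_local_fingerprints(cx, unit);
+//     // Main thread: the source map is reachable, so a real closure is passed.
+//     let local = (gen_local)(deps, Some(&|| pkg_fingerprint(cx.bcx, &unit.pkg)))?.unwrap();
+//     // Worker thread (after the script ran): only `None` can be passed, so the
+//     // result may legitimately come back as `Ok(None)`.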
+
+/// Create a [`LocalFingerprint`] for an overridden build script.
+/// Returns None if it is not overridden.
+fn build_script_override_fingerprint(
+ cx: &mut Context<'_, '_>,
+ unit: &Unit,
+) -> Option<LocalFingerprint> {
+ // Build script output is only populated at this stage when it is
+ // overridden.
+ let build_script_outputs = cx.build_script_outputs.lock().unwrap();
+ let metadata = cx.get_run_build_script_metadata(unit);
+ // Returns None if it is not overridden.
+ let output = build_script_outputs.get(metadata)?;
+ let s = format!(
+ "overridden build state with hash: {}",
+ util::hash_u64(output)
+ );
+ Some(LocalFingerprint::Precalculated(s))
+}
+
+/// Compute the [`LocalFingerprint`] values for a [`RunCustomBuild`] unit for
+/// non-overridden new-style build scripts only. This is only used when `deps`
+/// is already known to have a nonempty `rerun-if-*` somewhere.
+///
+/// [`RunCustomBuild`]: crate::core::compiler::CompileMode::RunCustomBuild
+fn local_fingerprints_deps(
+ deps: &BuildDeps,
+ target_root: &Path,
+ pkg_root: &Path,
+) -> Vec<LocalFingerprint> {
+ debug!("new local fingerprints deps {:?}", pkg_root);
+ let mut local = Vec::new();
+
+ if !deps.rerun_if_changed.is_empty() {
+ // Note that like the module comment above says we are careful to never
+ // store an absolute path in `LocalFingerprint`, so ensure that we strip
+ // absolute prefixes from them.
+ let output = deps
+ .build_script_output
+ .strip_prefix(target_root)
+ .unwrap()
+ .to_path_buf();
+ let paths = deps
+ .rerun_if_changed
+ .iter()
+ .map(|p| p.strip_prefix(pkg_root).unwrap_or(p).to_path_buf())
+ .collect();
+ local.push(LocalFingerprint::RerunIfChanged { output, paths });
+ }
+
+ local.extend(
+ deps.rerun_if_env_changed
+ .iter()
+ .map(LocalFingerprint::from_env),
+ );
+
+ local
+}
+
+/// Writes the short fingerprint hash value to `<loc>`
+/// and writes detailed JSON information to `<loc>.json`.
+fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
+ debug_assert_ne!(fingerprint.rustc, 0);
+ // `Fingerprint::new().rustc == 0`; make sure that default value never makes it to the file system.
+ // This is mostly so outside tools can reliably find out what rustc version this file is for,
+ // as they can use the full hash.
+ let hash = fingerprint.hash_u64();
+ debug!("write fingerprint ({:x}) : {}", hash, loc.display());
+ paths::write(loc, util::to_hex(hash).as_bytes())?;
+
+ let json = serde_json::to_string(fingerprint).unwrap();
+ if cfg!(debug_assertions) {
+ let f: Fingerprint = serde_json::from_str(&json).unwrap();
+ assert_eq!(f.hash_u64(), hash);
+ }
+ paths::write(&loc.with_extension("json"), json.as_bytes())?;
+ Ok(())
+}
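+
+// For orientation, the fingerprint directory for a library unit typically ends
+// up holding a pair of files written by this function (the layout and hash
+// value shown here are illustrative only):
+//
+//     target/debug/.fingerprint/foo-<meta>/lib-foo        contains "1a2b3c4d5e6f7a8b"
+//     target/debug/.fingerprint/foo-<meta>/lib-foo.json   the full serialized `Fingerprint`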
+
+/// Prepare for work when a package starts to build
+pub fn prepare_init(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> {
+ let new1 = cx.files().fingerprint_dir(unit);
+
+ // Doc tests have no output, thus no fingerprint.
+ if !new1.exists() && !unit.mode.is_doc_test() {
+ paths::create_dir_all(&new1)?;
+ }
+
+ Ok(())
+}
+
+/// Returns the location that the dep-info file will show up at
+/// for the [`Unit`] specified.
+pub fn dep_info_loc(cx: &mut Context<'_, '_>, unit: &Unit) -> PathBuf {
+ cx.files().fingerprint_file_path(unit, "dep-")
+}
+
+/// Returns the absolute path to the target directory.
+/// All paths are rewritten to be relative to this.
+fn target_root(cx: &Context<'_, '_>) -> PathBuf {
+ cx.bcx.ws.target_dir().into_path_unlocked()
+}
+
+/// Reads the value from the old fingerprint hash file and compares it with the new fingerprint.
+///
+/// If dirty, it then restores the detailed information
+/// from the fingerprint JSON file and provides a rich dirty reason.
+fn compare_old_fingerprint(
+ old_hash_path: &Path,
+ new_fingerprint: &Fingerprint,
+ mtime_on_use: bool,
+) -> CargoResult<Option<DirtyReason>> {
+ let old_fingerprint_short = paths::read(old_hash_path)?;
+
+ if mtime_on_use {
+ // update the mtime so other cleaners know we used it
+ let t = FileTime::from_system_time(SystemTime::now());
+ debug!("mtime-on-use forcing {:?} to {}", old_hash_path, t);
+ paths::set_file_time_no_err(old_hash_path, t);
+ }
+
+ let new_hash = new_fingerprint.hash_u64();
+
+ if util::to_hex(new_hash) == old_fingerprint_short && new_fingerprint.fs_status.up_to_date() {
+ return Ok(None);
+ }
+
+ let old_fingerprint_json = paths::read(&old_hash_path.with_extension("json"))?;
+ let old_fingerprint: Fingerprint = serde_json::from_str(&old_fingerprint_json)
+ .with_context(|| internal("failed to deserialize json"))?;
+ // Fingerprint can be empty after a failed rebuild (see comment in prepare_target).
+ if !old_fingerprint_short.is_empty() {
+ debug_assert_eq!(
+ util::to_hex(old_fingerprint.hash_u64()),
+ old_fingerprint_short
+ );
+ }
+
+ Ok(Some(new_fingerprint.compare(&old_fingerprint)))
+}
+
+/// Logs the result of fingerprint comparison.
+///
+/// TODO: Obsolete and mostly superseded by [`DirtyReason`]. Could be removed.
+fn log_compare(unit: &Unit, compare: &CargoResult<Option<DirtyReason>>) {
+ match compare {
+ Ok(None) => {}
+ Ok(Some(reason)) => {
+ info!(
+ "fingerprint dirty for {}/{:?}/{:?}",
+ unit.pkg, unit.mode, unit.target,
+ );
+ info!(" dirty: {reason:?}");
+ }
+ Err(e) => {
+ info!(
+ "fingerprint error for {}/{:?}/{:?}",
+ unit.pkg, unit.mode, unit.target,
+ );
+ info!(" err: {e:?}");
+ }
+ }
+}
+
+/// Parses Cargo's internal [`EncodedDepInfo`] structure that was previously
+/// serialized to disk.
+///
+/// Note that this is not rustc's `*.d` files.
+///
+/// Also note that rustc's `*.d` files are translated to Cargo-specific
+/// `EncodedDepInfo` files after compilations have finished in
+/// [`translate_dep_info`].
+///
+/// Returns `None` if the file is corrupt or couldn't be read from disk. This
+/// indicates that the crate should likely be rebuilt.
+pub fn parse_dep_info(
+ pkg_root: &Path,
+ target_root: &Path,
+ dep_info: &Path,
+) -> CargoResult<Option<RustcDepInfo>> {
+ let data = match paths::read_bytes(dep_info) {
+ Ok(data) => data,
+ Err(_) => return Ok(None),
+ };
+ let info = match EncodedDepInfo::parse(&data) {
+ Some(info) => info,
+ None => {
+ log::warn!("failed to parse cargo's dep-info at {:?}", dep_info);
+ return Ok(None);
+ }
+ };
+ let mut ret = RustcDepInfo::default();
+ ret.env = info.env;
+ ret.files.extend(info.files.into_iter().map(|(ty, path)| {
+ match ty {
+ DepInfoPathType::PackageRootRelative => pkg_root.join(path),
+ // N.B. path might be absolute here in which case the join will have no effect
+ DepInfoPathType::TargetRootRelative => target_root.join(path),
+ }
+ }));
+ Ok(Some(ret))
+}
+
+/// Calculates the fingerprint of a unit that contains no dep-info files.
+fn pkg_fingerprint(bcx: &BuildContext<'_, '_>, pkg: &Package) -> CargoResult<String> {
+ let source_id = pkg.package_id().source_id();
+ let sources = bcx.packages.sources();
+
+ let source = sources
+ .get(source_id)
+ .ok_or_else(|| internal("missing package source"))?;
+ source.fingerprint(pkg)
+}
+
+/// The `reference` file is considered as "stale" if any file from `paths` has a newer mtime.
+fn find_stale_file<I>(
+ mtime_cache: &mut HashMap<PathBuf, FileTime>,
+ reference: &Path,
+ paths: I,
+) -> Option<StaleItem>
+where
+ I: IntoIterator,
+ I::Item: AsRef<Path>,
+{
+ let reference_mtime = match paths::mtime(reference) {
+ Ok(mtime) => mtime,
+ Err(..) => return Some(StaleItem::MissingFile(reference.to_path_buf())),
+ };
+
+ for path in paths {
+ let path = path.as_ref();
+
+ // Assuming anything in cargo_home is immutable (see also #9455 about marking it readonly)
+ // which avoids rebuilds when CI caches $CARGO_HOME/registry/{index, cache} and
+ // $CARGO_HOME/git/db across runs, keeping the content the same but changing the mtime.
+ if let Ok(true) = home::cargo_home().map(|home| path.starts_with(home)) {
+ continue;
+ }
+ let path_mtime = match mtime_cache.entry(path.to_path_buf()) {
+ Entry::Occupied(o) => *o.get(),
+ Entry::Vacant(v) => {
+ let mtime = match paths::mtime_recursive(path) {
+ Ok(mtime) => mtime,
+ Err(..) => return Some(StaleItem::MissingFile(path.to_path_buf())),
+ };
+ *v.insert(mtime)
+ }
+ };
+
+ // TODO: fix #5918.
+ // Note that equal mtimes should be considered "stale". For filesystems with
+ // coarse timestamp precision (like 1s) this would be a conservative approximation
+ // to handle the case where a file is modified within the same second after
+ // a build starts. We want to make sure that incremental rebuilds pick that up!
+ //
+ // For filesystems with nanosecond precision it's been seen in the wild that
+ // its "nanosecond precision" isn't really nanosecond-accurate. It turns out that
+ // kernels may cache the current time so files created at different times actually
+ // list the same nanosecond precision. Some digging on #5919 picked up that the
+ // kernel caches the current time between timer ticks, which could mean that if
+ // a file is updated at most 10ms after a build starts then Cargo may not
+ // pick up the build changes.
+ //
+ // All in all, an equality check here would be a conservative assumption that,
+ // if equal, files were changed just after a previous build finished.
+ // Unfortunately this became problematic when (in #6484) cargo switched to more accurately
+ // measuring the start time of builds.
+ if path_mtime <= reference_mtime {
+ continue;
+ }
+
+ return Some(StaleItem::ChangedFile {
+ reference: reference.to_path_buf(),
+ reference_mtime,
+ stale: path.to_path_buf(),
+ stale_mtime: path_mtime,
+ });
+ }
+
+ debug!(
+ "all paths up-to-date relative to {:?} mtime={}",
+ reference, reference_mtime
+ );
+ None
+}
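+
+// Illustration of the comparison above (hypothetical times): with a reference
+// (dep-info) mtime of 10:00:00.000, a source file at 09:59:59.000 is
+// up-to-date, one at 10:00:01.000 is stale, and one at exactly 10:00:00.000 is
+// also treated as up-to-date, which is the #5918 caveat discussed above.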
+
+/// Tells whether the associated path in [`EncodedDepInfo::files`] is relative to the
+/// package root, relative to the target root, or absolute.
+enum DepInfoPathType {
+ /// src/, e.g. src/lib.rs
+ PackageRootRelative,
+ /// target/debug/deps/lib...
+ /// or an absolute path /.../sysroot/...
+ TargetRootRelative,
+}
+
+/// Parses the dep-info file coming out of rustc into a Cargo-specific format.
+///
+/// This function will parse `rustc_dep_info` as a makefile-style dep info to
+/// learn about all the files which a crate depends on. This is then
+/// re-serialized into the `cargo_dep_info` path in a Cargo-specific format.
+///
+/// The `pkg_root` argument here is the absolute path to the directory
+/// containing `Cargo.toml` for this crate that was compiled. The paths listed
+/// in the rustc dep-info file may or may not be absolute but we'll want to
+/// consider all of them relative to the `root` specified.
+///
+/// The `rustc_cwd` argument is the absolute path to the cwd of the compiler
+/// when it was invoked.
+///
+/// If the `allow_package` argument is true, then package-relative paths are
+/// included. If it is false, then package-relative paths are skipped and
+/// ignored (typically used for registry or git dependencies where we assume
+/// the source never changes, and we don't want the cost of running `stat` on
+/// all those files). See the module-level docs for the note about
+/// `-Zbinary-dep-depinfo` for more details on why this is done.
+///
+/// The serialized Cargo format will contain a list of files, all of which are
+/// relative if they're under `root`, or absolute if they're elsewhere.
+pub fn translate_dep_info(
+ rustc_dep_info: &Path,
+ cargo_dep_info: &Path,
+ rustc_cwd: &Path,
+ pkg_root: &Path,
+ target_root: &Path,
+ rustc_cmd: &ProcessBuilder,
+ allow_package: bool,
+) -> CargoResult<()> {
+ let depinfo = parse_rustc_dep_info(rustc_dep_info)?;
+
+ let target_root = try_canonicalize(target_root)?;
+ let pkg_root = try_canonicalize(pkg_root)?;
+ let mut on_disk_info = EncodedDepInfo::default();
+ on_disk_info.env = depinfo.env;
+
+ // This is a bit of a tricky statement, but here we're *removing* the
+ // dependency on environment variables that were defined specifically for
+ // the command itself. Environment variables returned by `get_envs` include
+ // environment variables like:
+ //
+ // * `OUT_DIR` if applicable
+ // * env vars added by a build script, if any
+ //
+ // The general idea here is that the dep info file tells us what, when
+ // changed, should cause us to rebuild the crate. These environment
+ // variables are synthesized by Cargo and/or the build script, and the
+ // intention is that their values are tracked elsewhere for whether the
+ // crate needs to be rebuilt.
+ //
+ // For example a build script says when it needs to be rerun and otherwise
+ // it's assumed to produce the same output, so we're guaranteed that env
+ // vars defined by the build script will always be the same unless the build
+ // script itself reruns, in which case the crate will rerun anyway.
+ //
+ // For things like `OUT_DIR` it's a bit sketchy for now. Most of the time
+ // that's used for code generation but this is technically buggy where if
+ // you write a binary that does `println!("{}", env!("OUT_DIR"))` we won't
+ // recompile that if you move the target directory. Hopefully that's not too
+ // bad of an issue for now...
+ //
+ // This also includes `CARGO` since if the code is explicitly wanting to
+ // know that path, it should be rebuilt if it changes. The CARGO path is
+ // not tracked elsewhere in the fingerprint.
+ on_disk_info
+ .env
+ .retain(|(key, _)| !rustc_cmd.get_envs().contains_key(key) || key == CARGO_ENV);
+
+ for file in depinfo.files {
+ // The path may be absolute or relative, canonical or not. Make sure
+ // it is canonicalized so we are comparing the same kinds of paths.
+ let abs_file = rustc_cwd.join(file);
+ // If canonicalization fails, just use the abs path. There is currently
+ // a bug where --remap-path-prefix is affecting .d files, causing them
+ // to point to non-existent paths.
+ let canon_file = try_canonicalize(&abs_file).unwrap_or_else(|_| abs_file.clone());
+
+ let (ty, path) = if let Ok(stripped) = canon_file.strip_prefix(&target_root) {
+ (DepInfoPathType::TargetRootRelative, stripped)
+ } else if let Ok(stripped) = canon_file.strip_prefix(&pkg_root) {
+ if !allow_package {
+ continue;
+ }
+ (DepInfoPathType::PackageRootRelative, stripped)
+ } else {
+ // It's definitely not target root relative, but this is an absolute path (since it was
+ // joined to rustc_cwd) and as such re-joining it later to the target root will have no
+ // effect.
+ (DepInfoPathType::TargetRootRelative, &*abs_file)
+ };
+ on_disk_info.files.push((ty, path.to_owned()));
+ }
+ paths::write(cargo_dep_info, on_disk_info.serialize()?)?;
+ Ok(())
+}
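+
+// A sketch of the path classification above, with hypothetical paths and
+// `target_root = /proj/target`, `pkg_root = /proj/foo`:
+//
+//     /proj/target/debug/deps/foo.d  ->  (TargetRootRelative, "debug/deps/foo.d")
+//     /proj/foo/src/lib.rs           ->  (PackageRootRelative, "src/lib.rs"), skipped if `!allow_package`
+//     /sysroot/lib/libstd.rlib       ->  (TargetRootRelative, absolute path kept as-is)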
+
+/// The representation of the `.d` dep-info file generated by rustc
+#[derive(Default)]
+pub struct RustcDepInfo {
+ /// The list of files that the main target in the dep-info file depends on.
+ pub files: Vec<PathBuf>,
+ /// The list of environment variables we found that the rustc compilation
+ /// depends on.
+ ///
+ /// The first element of the pair is the name of the env var and the second
+ /// item is the value. `Some` means that the env var was set, and `None`
+ /// means that the env var wasn't actually set and the compilation depends
+ /// on it not being set.
+ pub env: Vec<(String, Option<String>)>,
+}
+
+/// Same as [`RustcDepInfo`] except avoids absolute paths as much as possible to
+/// allow moving around the target directory.
+///
+/// This is also stored in an optimized format to make parsing it fast because
+/// Cargo will read it for crates on all future compilations.
+#[derive(Default)]
+struct EncodedDepInfo {
+ files: Vec<(DepInfoPathType, PathBuf)>,
+ env: Vec<(String, Option<String>)>,
+}
+
+impl EncodedDepInfo {
+ fn parse(mut bytes: &[u8]) -> Option<EncodedDepInfo> {
+ let bytes = &mut bytes;
+ let nfiles = read_usize(bytes)?;
+ let mut files = Vec::with_capacity(nfiles as usize);
+ for _ in 0..nfiles {
+ let ty = match read_u8(bytes)? {
+ 0 => DepInfoPathType::PackageRootRelative,
+ 1 => DepInfoPathType::TargetRootRelative,
+ _ => return None,
+ };
+ let bytes = read_bytes(bytes)?;
+ files.push((ty, paths::bytes2path(bytes).ok()?));
+ }
+
+ let nenv = read_usize(bytes)?;
+ let mut env = Vec::with_capacity(nenv as usize);
+ for _ in 0..nenv {
+ let key = str::from_utf8(read_bytes(bytes)?).ok()?.to_string();
+ let val = match read_u8(bytes)? {
+ 0 => None,
+ 1 => Some(str::from_utf8(read_bytes(bytes)?).ok()?.to_string()),
+ _ => return None,
+ };
+ env.push((key, val));
+ }
+ return Some(EncodedDepInfo { files, env });
+
+ fn read_usize(bytes: &mut &[u8]) -> Option<usize> {
+ let ret = bytes.get(..4)?;
+ *bytes = &bytes[4..];
+ Some(u32::from_le_bytes(ret.try_into().unwrap()) as usize)
+ }
+
+ fn read_u8(bytes: &mut &[u8]) -> Option<u8> {
+ let ret = *bytes.get(0)?;
+ *bytes = &bytes[1..];
+ Some(ret)
+ }
+
+ fn read_bytes<'a>(bytes: &mut &'a [u8]) -> Option<&'a [u8]> {
+ let n = read_usize(bytes)? as usize;
+ let ret = bytes.get(..n)?;
+ *bytes = &bytes[n..];
+ Some(ret)
+ }
+ }
+
+ fn serialize(&self) -> CargoResult<Vec<u8>> {
+ let mut ret = Vec::new();
+ let dst = &mut ret;
+ write_usize(dst, self.files.len());
+ for (ty, file) in self.files.iter() {
+ match ty {
+ DepInfoPathType::PackageRootRelative => dst.push(0),
+ DepInfoPathType::TargetRootRelative => dst.push(1),
+ }
+ write_bytes(dst, paths::path2bytes(file)?);
+ }
+
+ write_usize(dst, self.env.len());
+ for (key, val) in self.env.iter() {
+ write_bytes(dst, key);
+ match val {
+ None => dst.push(0),
+ Some(val) => {
+ dst.push(1);
+ write_bytes(dst, val);
+ }
+ }
+ }
+ return Ok(ret);
+
+ fn write_bytes(dst: &mut Vec<u8>, val: impl AsRef<[u8]>) {
+ let val = val.as_ref();
+ write_usize(dst, val.len());
+ dst.extend_from_slice(val);
+ }
+
+ fn write_usize(dst: &mut Vec<u8>, val: usize) {
+ dst.extend(&u32::to_le_bytes(val as u32));
+ }
+ }
+}
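+
+// Byte-level illustration of the encoding implemented above, assuming a
+// hypothetical file list with one target-root-relative entry `deps/foo.d` and
+// no env vars:
+//
+//     01 00 00 00                      nfiles = 1 (little-endian u32)
+//     01                               DepInfoPathType::TargetRootRelative
+//     0a 00 00 00                      path length = 10
+//     64 65 70 73 2f 66 6f 6f 2e 64    "deps/foo.d"
+//     00 00 00 00                      nenv = 0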
+
+/// Parse the `.d` dep-info file generated by rustc.
+pub fn parse_rustc_dep_info(rustc_dep_info: &Path) -> CargoResult<RustcDepInfo> {
+ let contents = paths::read(rustc_dep_info)?;
+ let mut ret = RustcDepInfo::default();
+ let mut found_deps = false;
+
+ for line in contents.lines() {
+ if let Some(rest) = line.strip_prefix("# env-dep:") {
+ let mut parts = rest.splitn(2, '=');
+ let env_var = match parts.next() {
+ Some(s) => s,
+ None => continue,
+ };
+ let env_val = match parts.next() {
+ Some(s) => Some(unescape_env(s)?),
+ None => None,
+ };
+ ret.env.push((unescape_env(env_var)?, env_val));
+ } else if let Some(pos) = line.find(": ") {
+ if found_deps {
+ continue;
+ }
+ found_deps = true;
+ let mut deps = line[pos + 2..].split_whitespace();
+
+ while let Some(s) = deps.next() {
+ let mut file = s.to_string();
+ while file.ends_with('\\') {
+ file.pop();
+ file.push(' ');
+ file.push_str(deps.next().ok_or_else(|| {
+ internal("malformed dep-info format, trailing \\".to_string())
+ })?);
+ }
+ ret.files.push(file.into());
+ }
+ }
+ }
+ return Ok(ret);
+
+ // rustc tries to fit env var names and values all on a single line, which
+ // means it needs to escape `\r` and `\n`. The escape syntax used is "\n"
+ // which means that `\` also needs to be escaped.
+ fn unescape_env(s: &str) -> CargoResult<String> {
+ let mut ret = String::with_capacity(s.len());
+ let mut chars = s.chars();
+ while let Some(c) = chars.next() {
+ if c != '\\' {
+ ret.push(c);
+ continue;
+ }
+ match chars.next() {
+ Some('\\') => ret.push('\\'),
+ Some('n') => ret.push('\n'),
+ Some('r') => ret.push('\r'),
+ Some(c) => bail!("unknown escape character `{}`", c),
+ None => bail!("unterminated escape character"),
+ }
+ }
+ Ok(ret)
+ }
+}
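+
+// For reference, the makefile-style `.d` input parsed above looks roughly like
+// the following (paths and the env var are illustrative):
+//
+//     /proj/target/debug/deps/foo-1234.rmeta: src/lib.rs src/util.rs
+//
+//     src/lib.rs:
+//     src/util.rs:
+//
+//     # env-dep:CARGO_PKG_NAME=foo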
diff --git a/src/tools/cargo/src/cargo/core/compiler/future_incompat.rs b/src/tools/cargo/src/cargo/core/compiler/future_incompat.rs
new file mode 100644
index 000000000..955dfb8f2
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/future_incompat.rs
@@ -0,0 +1,521 @@
+//! Support for [future-incompatible warning reporting][1].
+//!
+//! Here is an overview of how Cargo handles future-incompatible reports.
+//!
+//! ## Receive reports from the compiler
+//!
+//! When receiving a compiler message during a build, if it is effectively
+//! a [`FutureIncompatReport`], Cargo gathers and forwards it as a
+//! `Message::FutureIncompatReport` to the main thread.
+//!
+//! To get the correct layout of structures for deserializing a report
+//! emitted by the compiler, most of the structure definitions, for example
+//! [`FutureIncompatReport`], are copied either partially or entirely from
+//! [compiler/rustc_errors/src/json.rs][2] in the rust-lang/rust repository.
+//!
+//! ## Persist reports on disk
+//!
+//! When a build comes to an end, Cargo saves the report on disk by calling
+//! [`save_and_display_report`], and displays it directly if requested via
+//! the command line or configuration. The name of the on-disk file is
+//! defined by [`FUTURE_INCOMPAT_FILE`].
+//!
+//! While persisting the report, Cargo will attempt to query the source of
+//! each package emitting it, in order to suggest available upgrades as a
+//! way to fix the incompatibility.
+//!
+//! ## Display reports to users
+//!
+//! Users can run `cargo report future-incompat` to retrieve a report. This is
+//! done by [`OnDiskReports::load`]. Cargo simply prints reports to the
+//! standard output.
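+//!
+//! A typical invocation looks like the following (the ID and package spec
+//! here are hypothetical examples):
+//!
+//! ```text
+//! cargo report future-incompatibilities --id 1 --package foo@0.1.0
+//! ```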
+//!
+//! [1]: https://doc.rust-lang.org/nightly/cargo/reference/future-incompat-report.html
+//! [2]: https://github.com/rust-lang/rust/blob/9bb6e60d1f1360234aae90c97964c0fa5524f141/compiler/rustc_errors/src/json.rs#L312-L315
+
+use crate::core::compiler::BuildContext;
+use crate::core::{Dependency, PackageId, QueryKind, Workspace};
+use crate::sources::SourceConfigMap;
+use crate::util::{iter_join, CargoResult, Config};
+use anyhow::{bail, format_err, Context};
+use serde::{Deserialize, Serialize};
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::fmt::Write as _;
+use std::io::{Read, Write};
+use std::task::Poll;
+
+pub const REPORT_PREAMBLE: &str = "\
+The following warnings were discovered during the build. These warnings are an
+indication that the packages contain code that will become an error in a
+future release of Rust. These warnings typically cover changes to close
+soundness problems, unintended or undocumented behavior, or critical problems
+that cannot be fixed in a backwards-compatible fashion, and are not expected
+to be in wide use.
+
+Each warning should contain a link for more information on what the warning
+means and how to resolve it.
+";
+
+/// Current version of the on-disk format.
+const ON_DISK_VERSION: u32 = 0;
+
+/// The future incompatibility report, emitted by the compiler as a JSON message.
+#[derive(serde::Deserialize)]
+pub struct FutureIncompatReport {
+ pub future_incompat_report: Vec<FutureBreakageItem>,
+}
+
+/// Structure used for collecting reports in-memory.
+pub struct FutureIncompatReportPackage {
+ pub package_id: PackageId,
+ pub items: Vec<FutureBreakageItem>,
+}
+
+/// A single future-incompatible warning emitted by rustc.
+#[derive(Serialize, Deserialize)]
+pub struct FutureBreakageItem {
+ /// The date at which this lint will become an error.
+ /// Currently unused
+ pub future_breakage_date: Option<String>,
+ /// The original diagnostic emitted by the compiler
+ pub diagnostic: Diagnostic,
+}
+
+/// A diagnostic emitted by the compiler as a JSON message.
+/// We only care about the 'rendered' field
+#[derive(Serialize, Deserialize)]
+pub struct Diagnostic {
+ pub rendered: String,
+ pub level: String,
+}
+
+/// The filename in the top-level `target` directory where we store
+/// the report
+const FUTURE_INCOMPAT_FILE: &str = ".future-incompat-report.json";
+/// Max number of reports to save on disk.
+const MAX_REPORTS: usize = 5;
+
+/// The structure saved to disk containing the reports.
+#[derive(Serialize, Deserialize)]
+pub struct OnDiskReports {
+ /// A schema version number, to keep older versions of Cargo from trying
+ /// to read something that they don't understand.
+ version: u32,
+ /// The report ID to use for the next report to save.
+ next_id: u32,
+ /// Available reports.
+ reports: Vec<OnDiskReport>,
+}
+
+/// A single report for a given compilation session.
+#[derive(Serialize, Deserialize)]
+struct OnDiskReport {
+ /// Unique reference to the report for the `--id` CLI flag.
+ id: u32,
+ /// A message describing suggestions for fixing the
+ /// reported issues
+ suggestion_message: String,
+ /// Report, suitable for printing to the console.
+ /// Maps package names to the corresponding report
+ /// We use a `BTreeMap` so that the iteration order
+ /// is stable across multiple runs of `cargo`
+ per_package: BTreeMap<String, String>,
+}
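+
+// With the derived `Serialize` impls above, the JSON written to
+// `FUTURE_INCOMPAT_FILE` looks roughly like this (the IDs, package names,
+// and report text are made-up examples):
+//
+// {
+//   "version": 0,
+//   "next_id": 2,
+//   "reports": [
+//     {
+//       "id": 1,
+//       "suggestion_message": "...",
+//       "per_package": { "foo@0.1.0": "> warning: ..." }
+//     }
+//   ]
+// }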
+
+impl Default for OnDiskReports {
+ fn default() -> OnDiskReports {
+ OnDiskReports {
+ version: ON_DISK_VERSION,
+ next_id: 1,
+ reports: Vec::new(),
+ }
+ }
+}
+
+impl OnDiskReports {
+ /// Saves a new report, returning its ID.
+ pub fn save_report(
+ mut self,
+ ws: &Workspace<'_>,
+ suggestion_message: String,
+ per_package_reports: &[FutureIncompatReportPackage],
+ ) -> u32 {
+ let per_package = render_report(per_package_reports);
+
+ if let Some(existing_report) = self
+ .reports
+ .iter()
+ .find(|existing| existing.per_package == per_package)
+ {
+ return existing_report.id;
+ }
+
+ let report = OnDiskReport {
+ id: self.next_id,
+ suggestion_message,
+ per_package,
+ };
+
+ let saved_id = report.id;
+ self.next_id += 1;
+ self.reports.push(report);
+ if self.reports.len() > MAX_REPORTS {
+ self.reports.remove(0);
+ }
+ let on_disk = serde_json::to_vec(&self).unwrap();
+ if let Err(e) = ws
+ .target_dir()
+ .open_rw(
+ FUTURE_INCOMPAT_FILE,
+ ws.config(),
+ "Future incompatibility report",
+ )
+ .and_then(|file| {
+ let mut file = file.file();
+ file.set_len(0)?;
+ file.write_all(&on_disk)?;
+ Ok(())
+ })
+ {
+ crate::display_warning_with_error(
+ "failed to write on-disk future incompatible report",
+ &e,
+ &mut ws.config().shell(),
+ );
+ }
+
+ saved_id
+ }
+
+ /// Loads the on-disk reports.
+ pub fn load(ws: &Workspace<'_>) -> CargoResult<OnDiskReports> {
+ let report_file = match ws.target_dir().open_ro(
+ FUTURE_INCOMPAT_FILE,
+ ws.config(),
+ "Future incompatible report",
+ ) {
+ Ok(r) => r,
+ Err(e) => {
+ if let Some(io_err) = e.downcast_ref::<std::io::Error>() {
+ if io_err.kind() == std::io::ErrorKind::NotFound {
+ bail!("no reports are currently available");
+ }
+ }
+ return Err(e);
+ }
+ };
+
+ let mut file_contents = String::new();
+ report_file
+ .file()
+ .read_to_string(&mut file_contents)
+ .with_context(|| "failed to read report")?;
+ let on_disk_reports: OnDiskReports =
+ serde_json::from_str(&file_contents).with_context(|| "failed to load report")?;
+ if on_disk_reports.version != ON_DISK_VERSION {
+ bail!("unable to read reports; reports were saved from a future version of Cargo");
+ }
+ Ok(on_disk_reports)
+ }
+
+ /// Returns the most recent report ID.
+ pub fn last_id(&self) -> u32 {
+ self.reports.last().map(|r| r.id).unwrap()
+ }
+
+ pub fn get_report(
+ &self,
+ id: u32,
+ config: &Config,
+ package: Option<&str>,
+ ) -> CargoResult<String> {
+ let report = self.reports.iter().find(|r| r.id == id).ok_or_else(|| {
+ let available = iter_join(self.reports.iter().map(|r| r.id.to_string()), ", ");
+ format_err!(
+ "could not find report with ID {}\n\
+ Available IDs are: {}",
+ id,
+ available
+ )
+ })?;
+
+ let mut to_display = report.suggestion_message.clone();
+ to_display += "\n";
+
+ let package_report = if let Some(package) = package {
+ report
+ .per_package
+ .get(package)
+ .ok_or_else(|| {
+ format_err!(
+ "could not find package with ID `{}`\n
+ Available packages are: {}\n
+ Omit the `--package` flag to display a report for all packages",
+ package,
+ iter_join(report.per_package.keys(), ", ")
+ )
+ })?
+ .to_string()
+ } else {
+ report
+ .per_package
+ .values()
+ .cloned()
+ .collect::<Vec<_>>()
+ .join("\n")
+ };
+ to_display += &package_report;
+
+ let shell = config.shell();
+
+ let to_display = if shell.err_supports_color() && shell.out_supports_color() {
+ to_display
+ } else {
+ strip_ansi_escapes::strip(&to_display)
+ .map(|v| String::from_utf8(v).expect("utf8"))
+ .expect("strip should never fail")
+ };
+ Ok(to_display)
+ }
+}
+
+fn render_report(per_package_reports: &[FutureIncompatReportPackage]) -> BTreeMap<String, String> {
+ let mut report: BTreeMap<String, String> = BTreeMap::new();
+ for per_package in per_package_reports {
+ let package_spec = format!(
+ "{}@{}",
+ per_package.package_id.name(),
+ per_package.package_id.version()
+ );
+ let rendered = report.entry(package_spec).or_default();
+ rendered.push_str(&format!(
+ "The package `{}` currently triggers the following future incompatibility lints:\n",
+ per_package.package_id
+ ));
+ for item in &per_package.items {
+ rendered.extend(
+ item.diagnostic
+ .rendered
+ .lines()
+ .map(|l| format!("> {}\n", l)),
+ );
+ }
+ }
+ report
+}
+
+/// Returns a user-readable message explaining which of
+/// the packages in `package_ids` have updates available.
+/// This is best-effort - if an error occurs, `None` will be returned.
+fn get_updates(ws: &Workspace<'_>, package_ids: &BTreeSet<PackageId>) -> Option<String> {
+ // This in general ignores all errors since this is opportunistic.
+ let _lock = ws.config().acquire_package_cache_lock().ok()?;
+ // Create a set of updated registry sources.
+ let map = SourceConfigMap::new(ws.config()).ok()?;
+ let mut package_ids: BTreeSet<_> = package_ids
+ .iter()
+ .filter(|pkg_id| pkg_id.source_id().is_registry())
+ .collect();
+ let source_ids: HashSet<_> = package_ids
+ .iter()
+ .map(|pkg_id| pkg_id.source_id())
+ .collect();
+ let mut sources: HashMap<_, _> = source_ids
+ .into_iter()
+ .filter_map(|sid| {
+ let source = map.load(sid, &HashSet::new()).ok()?;
+ Some((sid, source))
+ })
+ .collect();
+
+ // Query the sources for new versions, mapping `package_ids` into `summaries`.
+ let mut summaries = Vec::new();
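+ // Poll every source; `retain` keeps only the package IDs whose query is
+ // still `Pending`, and `block_until_ready` drives the pending sources
+ // forward before the next pass of the loop.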
+ while !package_ids.is_empty() {
+ package_ids.retain(|&pkg_id| {
+ let source = match sources.get_mut(&pkg_id.source_id()) {
+ Some(s) => s,
+ None => return false,
+ };
+ let dep = match Dependency::parse(pkg_id.name(), None, pkg_id.source_id()) {
+ Ok(dep) => dep,
+ Err(_) => return false,
+ };
+ match source.query_vec(&dep, QueryKind::Exact) {
+ Poll::Ready(Ok(sum)) => {
+ summaries.push((pkg_id, sum));
+ false
+ }
+ Poll::Ready(Err(_)) => false,
+ Poll::Pending => true,
+ }
+ });
+ for (_, source) in sources.iter_mut() {
+ source.block_until_ready().ok()?;
+ }
+ }
+
+ let mut updates = String::new();
+ for (pkg_id, summaries) in summaries {
+ let mut updated_versions: Vec<_> = summaries
+ .iter()
+ .map(|summary| summary.version())
+ .filter(|version| *version > pkg_id.version())
+ .collect();
+ updated_versions.sort();
+
+ let updated_versions = iter_join(
+ updated_versions
+ .into_iter()
+ .map(|version| version.to_string()),
+ ", ",
+ );
+
+ if !updated_versions.is_empty() {
+ writeln!(
+ updates,
+ "{} has the following newer versions available: {}",
+ pkg_id, updated_versions
+ )
+ .unwrap();
+ }
+ }
+ Some(updates)
+}
+
+/// Writes a future-incompat report to disk, using the per-package
+/// reports gathered during the build. If requested by the user,
+/// a message is also displayed in the build output.
+pub fn save_and_display_report(
+ bcx: &BuildContext<'_, '_>,
+ per_package_future_incompat_reports: &[FutureIncompatReportPackage],
+) {
+ let should_display_message = match bcx.config.future_incompat_config() {
+ Ok(config) => config.should_display_message(),
+ Err(e) => {
+ crate::display_warning_with_error(
+ "failed to read future-incompat config from disk",
+ &e,
+ &mut bcx.config.shell(),
+ );
+ true
+ }
+ };
+
+ if per_package_future_incompat_reports.is_empty() {
+ // Explicitly passing a command-line flag overrides
+ // `should_display_message` from the config file
+ if bcx.build_config.future_incompat_report {
+ drop(
+ bcx.config
+ .shell()
+ .note("0 dependencies had future-incompatible warnings"),
+ );
+ }
+ return;
+ }
+
+ let current_reports = match OnDiskReports::load(bcx.ws) {
+ Ok(r) => r,
+ Err(e) => {
+ log::debug!(
+ "saving future-incompatible reports failed to load current reports: {:?}",
+ e
+ );
+ OnDiskReports::default()
+ }
+ };
+ let report_id = current_reports.next_id;
+
+ // Get a list of unique and sorted package name/versions.
+ let package_ids: BTreeSet<_> = per_package_future_incompat_reports
+ .iter()
+ .map(|r| r.package_id)
+ .collect();
+ let package_vers: Vec<_> = package_ids.iter().map(|pid| pid.to_string()).collect();
+
+ if should_display_message || bcx.build_config.future_incompat_report {
+ drop(bcx.config.shell().warn(&format!(
+ "the following packages contain code that will be rejected by a future \
+ version of Rust: {}",
+ package_vers.join(", ")
+ )));
+ }
+
+ let updated_versions = get_updates(bcx.ws, &package_ids).unwrap_or(String::new());
+
+ let update_message = if !updated_versions.is_empty() {
+ format!(
+ "
+- Some affected dependencies have newer versions available.
+You may want to consider updating them to a newer version to see if the issue has been fixed.
+
+{updated_versions}\n",
+ updated_versions = updated_versions
+ )
+ } else {
+ String::new()
+ };
+
+ let upstream_info = package_ids
+ .iter()
+ .map(|package_id| {
+ let manifest = bcx.packages.get_one(*package_id).unwrap().manifest();
+ format!(
+ "
+ - {package_spec}
+ - Repository: {url}
+ - Detailed warning command: `cargo report future-incompatibilities --id {id} --package {package_spec}`",
+ package_spec = format!("{}@{}", package_id.name(), package_id.version()),
+ url = manifest
+ .metadata()
+ .repository
+ .as_deref()
+ .unwrap_or("<not found>"),
+ id = report_id,
+ )
+ })
+ .collect::<Vec<_>>()
+ .join("\n");
+
+ let suggestion_message = format!(
+ "
+To solve this problem, you can try the following approaches:
+
+{update_message}
+- If the issue is not solved by updating the dependencies, a fix has to be
+implemented by those dependencies. You can help with that by notifying the
+maintainers of this problem (e.g. by creating a bug report) or by proposing a
+fix to the maintainers (e.g. by creating a pull request):
+{upstream_info}
+
+- If waiting for an upstream fix is not an option, you can use the `[patch]`
+section in `Cargo.toml` to use your own version of the dependency. For more
+information, see:
+https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html#the-patch-section
+ ",
+ upstream_info = upstream_info,
+ update_message = update_message,
+ );
+
+ let saved_report_id = current_reports.save_report(
+ bcx.ws,
+ suggestion_message.clone(),
+ per_package_future_incompat_reports,
+ );
+
+ if bcx.build_config.future_incompat_report {
+ drop(bcx.config.shell().note(&suggestion_message));
+ drop(bcx.config.shell().note(&format!(
+ "this report can be shown with `cargo report \
+ future-incompatibilities --id {}`",
+ saved_report_id
+ )));
+ } else if should_display_message {
+ drop(bcx.config.shell().note(&format!(
+ "to see what the problems were, use the option \
+ `--future-incompat-report`, or run `cargo report \
+ future-incompatibilities --id {}`",
+ saved_report_id
+ )));
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/job_queue/job.rs b/src/tools/cargo/src/cargo/core/compiler/job_queue/job.rs
new file mode 100644
index 000000000..ae802d6a0
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/job_queue/job.rs
@@ -0,0 +1,114 @@
+//! See [`Job`] and [`Work`].
+
+use std::fmt;
+use std::mem;
+
+use super::JobState;
+use crate::core::compiler::fingerprint::DirtyReason;
+use crate::util::CargoResult;
+
+/// Represents a unit of [`Work`] with a [`Freshness`] for caller
+/// to determine whether to re-execute or not.
+pub struct Job {
+ work: Work,
+ fresh: Freshness,
+}
+
+/// The basic unit of work.
+///
+/// Each proc should send its description before starting.
+/// It should send either once or close immediately.
+pub struct Work {
+ inner: Box<dyn FnOnce(&JobState<'_, '_>) -> CargoResult<()> + Send>,
+}
+
+impl Work {
+ /// Creates a unit of work.
+ pub fn new<F>(f: F) -> Work
+ where
+ F: FnOnce(&JobState<'_, '_>) -> CargoResult<()> + Send + 'static,
+ {
+ Work { inner: Box::new(f) }
+ }
+
+ /// Creates a unit of work that does nothing.
+ pub fn noop() -> Work {
+ Work::new(|_| Ok(()))
+ }
+
+ /// Consumes this work by running it.
+ pub fn call(self, tx: &JobState<'_, '_>) -> CargoResult<()> {
+ (self.inner)(tx)
+ }
+
+ /// Creates a new unit of work that chains `next` after ourself.
+ pub fn then(self, next: Work) -> Work {
+ Work::new(move |state| {
+ self.call(state)?;
+ next.call(state)
+ })
+ }
+}
+
+impl Job {
+ /// Creates a new job that does nothing.
+ pub fn new_fresh() -> Job {
+ Job {
+ work: Work::noop(),
+ fresh: Freshness::Fresh,
+ }
+ }
+
+ /// Creates a new job representing a unit of work.
+ pub fn new_dirty(work: Work, dirty_reason: Option<DirtyReason>) -> Job {
+ Job {
+ work,
+ fresh: Freshness::Dirty(dirty_reason),
+ }
+ }
+
+ /// Consumes this job by running it, returning the result of the
+ /// computation.
+ pub fn run(self, state: &JobState<'_, '_>) -> CargoResult<()> {
+ self.work.call(state)
+ }
+
+ /// Returns whether this job was fresh/dirty, where "fresh" means we're
+ /// likely to perform just some small bookkeeping where "dirty" means we'll
+ /// probably do something slow like invoke rustc.
+ pub fn freshness(&self) -> &Freshness {
+ &self.fresh
+ }
+
+ /// Chains the given work by putting it in front of our own unit of work.
+ pub fn before(&mut self, next: Work) {
+ let prev = mem::replace(&mut self.work, Work::noop());
+ self.work = next.then(prev);
+ }
+}
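+
+// A minimal sketch of how `Work` and `Job` compose (the closures here are
+// hypothetical no-ops):
+//
+//     let setup = Work::new(|_state| Ok(()));
+//     let mut job = Job::new_dirty(Work::new(|_state| Ok(())), None);
+//     job.before(setup); // `setup` now runs before the job's original work.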
+
+impl fmt::Debug for Job {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Job {{ ... }}")
+ }
+}
+
+/// Indication of the freshness of a package.
+///
+/// A fresh package does not necessarily need to be rebuilt (unless a dependency
+/// was also rebuilt), and a dirty package must always be rebuilt.
+#[derive(Debug, Clone)]
+pub enum Freshness {
+ Fresh,
+ Dirty(Option<DirtyReason>),
+}
+
+impl Freshness {
+ pub fn is_dirty(&self) -> bool {
+ matches!(self, Freshness::Dirty(_))
+ }
+
+ pub fn is_fresh(&self) -> bool {
+ matches!(self, Freshness::Fresh)
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/job_queue/job_state.rs b/src/tools/cargo/src/cargo/core/compiler/job_queue/job_state.rs
new file mode 100644
index 000000000..a513d3b89
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/job_queue/job_state.rs
@@ -0,0 +1,197 @@
+//! See [`JobState`].
+
+use std::{cell::Cell, marker, sync::Arc};
+
+use cargo_util::ProcessBuilder;
+
+use crate::core::compiler::context::OutputFile;
+use crate::core::compiler::future_incompat::FutureBreakageItem;
+use crate::util::Queue;
+use crate::CargoResult;
+
+use super::{Artifact, DiagDedupe, Job, JobId, Message};
+
+/// A `JobState` is constructed by `JobQueue::run` and passed to `Job::run`. It includes everything
+/// necessary to communicate between the main thread and the execution of the job.
+///
+/// The job may execute on either a dedicated thread or the main thread. If the job executes on the
+/// main thread, the `output` field must be set to prevent a deadlock.
+pub struct JobState<'a, 'cfg> {
+ /// Channel back to the main thread to coordinate messages and such.
+ ///
+ /// When the `output` field is `Some`, care must be taken to avoid calling `push_bounded` on
+ /// the message queue to prevent a deadlock.
+ messages: Arc<Queue<Message>>,
+
+ /// Normally output is sent to the job queue with backpressure. When the job is fresh,
+ /// however, we need to display the output immediately to prevent a deadlock, as the
+ /// output messages are processed on the same thread they are sent from. `output`
+ /// defines where to output in this case.
+ ///
+ /// Currently the `Shell` inside `Config` is wrapped in a `RefCell` and thus can't be passed
+ /// between threads. This means that it isn't possible for multiple output messages to be
+ /// interleaved. In the future, it may be wrapped in a `Mutex` instead. In this case
+ /// interleaving is still prevented as the lock would be held for the whole printing of an
+ /// output message.
+ output: Option<&'a DiagDedupe<'cfg>>,
+
+ /// The job id that this state is associated with, used when sending
+ /// messages back to the main thread.
+ id: JobId,
+
+ /// Whether or not we're expected to have a call to `rmeta_produced`. Once
+ /// that method is called this is dynamically set to `false` to prevent
+ /// sending a double message later on.
+ rmeta_required: Cell<bool>,
+
+ // Historical versions of Cargo made use of the `'a` argument here, so to
+ // leave the door open to future refactorings keep it here.
+ _marker: marker::PhantomData<&'a ()>,
+}
+
+impl<'a, 'cfg> JobState<'a, 'cfg> {
+ pub(super) fn new(
+ id: JobId,
+ messages: Arc<Queue<Message>>,
+ output: Option<&'a DiagDedupe<'cfg>>,
+ rmeta_required: bool,
+ ) -> Self {
+ Self {
+ id,
+ messages,
+ output,
+ rmeta_required: Cell::new(rmeta_required),
+ _marker: marker::PhantomData,
+ }
+ }
+
+ pub fn running(&self, cmd: &ProcessBuilder) {
+ self.messages.push(Message::Run(self.id, cmd.to_string()));
+ }
+
+ pub fn build_plan(
+ &self,
+ module_name: String,
+ cmd: ProcessBuilder,
+ filenames: Arc<Vec<OutputFile>>,
+ ) {
+ self.messages
+ .push(Message::BuildPlanMsg(module_name, cmd, filenames));
+ }
+
+ pub fn stdout(&self, stdout: String) -> CargoResult<()> {
+ if let Some(dedupe) = self.output {
+ writeln!(dedupe.config.shell().out(), "{}", stdout)?;
+ } else {
+ self.messages.push_bounded(Message::Stdout(stdout));
+ }
+ Ok(())
+ }
+
+ pub fn stderr(&self, stderr: String) -> CargoResult<()> {
+ if let Some(dedupe) = self.output {
+ let mut shell = dedupe.config.shell();
+ shell.print_ansi_stderr(stderr.as_bytes())?;
+ shell.err().write_all(b"\n")?;
+ } else {
+ self.messages.push_bounded(Message::Stderr(stderr));
+ }
+ Ok(())
+ }
+
+ /// See [`Message::Diagnostic`] and [`Message::WarningCount`].
+ pub fn emit_diag(&self, level: String, diag: String, fixable: bool) -> CargoResult<()> {
+ if let Some(dedupe) = self.output {
+ let emitted = dedupe.emit_diag(&diag)?;
+ if level == "warning" {
+ self.messages.push(Message::WarningCount {
+ id: self.id,
+ emitted,
+ fixable,
+ });
+ }
+ } else {
+ self.messages.push_bounded(Message::Diagnostic {
+ id: self.id,
+ level,
+ diag,
+ fixable,
+ });
+ }
+ Ok(())
+ }
+
+ /// See [`Message::Warning`].
+ pub fn warning(&self, warning: String) -> CargoResult<()> {
+ self.messages.push_bounded(Message::Warning {
+ id: self.id,
+ warning,
+ });
+ Ok(())
+ }
+
+ /// A method used to signal to the coordinator thread that the rmeta file
+ /// for an rlib has been produced. This is only called for some rmeta
+ /// builds when required, and can be called at any time before a job ends.
+ /// This should only be called once because a metadata file can only be
+ /// produced once!
+ pub fn rmeta_produced(&self) {
+ self.rmeta_required.set(false);
+ self.messages
+ .push(Message::Finish(self.id, Artifact::Metadata, Ok(())));
+ }
+
+ /// Drives a [`Job`] to finish. This ensures that a [`Message::Finish`] is
+ /// sent even if our job panics.
+ pub(super) fn run_to_finish(self, job: Job) {
+ let mut sender = FinishOnDrop {
+ messages: &self.messages,
+ id: self.id,
+ result: None,
+ };
+ sender.result = Some(job.run(&self));
+
+ // If the `rmeta_required` wasn't consumed but it was set
+ // previously, then we either have:
+ //
+ // 1. The `job` didn't do anything because it was "fresh".
+ // 2. The `job` returned an error and didn't reach the point where
+ // it called `rmeta_produced`.
+ // 3. We forgot to call `rmeta_produced` and there's a bug in Cargo.
+ //
+ // Ruling out the third, the other two are pretty common. For 2
+ // we'll just naturally abort the compilation operation, but for 1
+ // we need to make sure that the metadata is flagged as produced, so
+ // we send a synthetic message here.
+ if self.rmeta_required.get() && sender.result.as_ref().unwrap().is_ok() {
+ self.messages
+ .push(Message::Finish(self.id, Artifact::Metadata, Ok(())));
+ }
+
+ // Use a helper struct with a `Drop` implementation to guarantee
+ // that a `Finish` message is sent even if our job panics. We
+ // shouldn't panic unless there's a bug in Cargo, so we just need
+ // to make sure nothing hangs by accident.
+ struct FinishOnDrop<'a> {
+ messages: &'a Queue<Message>,
+ id: JobId,
+ result: Option<CargoResult<()>>,
+ }
+
+ impl Drop for FinishOnDrop<'_> {
+ fn drop(&mut self) {
+ let result = self
+ .result
+ .take()
+ .unwrap_or_else(|| Err(anyhow::format_err!("worker panicked")));
+ self.messages
+ .push(Message::Finish(self.id, Artifact::All, result));
+ }
+ }
+ }
+
+ pub fn future_incompat_report(&self, report: Vec<FutureBreakageItem>) {
+ self.messages
+ .push(Message::FutureIncompatReport(self.id, report));
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs b/src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs
new file mode 100644
index 000000000..38ab0fe49
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs
@@ -0,0 +1,1178 @@
+//! Management of the interaction between the main `cargo` and all spawned jobs.
+//!
+//! ## Overview
+//!
+//! This module implements a job queue. A job here represents a unit of work,
+//! which is roughly a rustc invocation, a build script run, or just a no-op.
+//! The job queue primarily handles the following things:
+//!
+//! * Spawns concurrent jobs. Depending on its [`Freshness`], a job is
+//! either executed on a spawned thread or run on the same thread to avoid
+//! the threading overhead.
+//! * Controls the degree of concurrency. It allocates and manages [`jobserver`]
+//! tokens for each spawned rustc process and build script.
+//! * Manages the communication between the main `cargo` process and its
+//! spawned jobs. Those [`Message`]s are sent over a [`Queue`] shared
+//! across threads.
+//! * Schedules the execution order of each [`Job`]. Priorities are determined
+//! when calling [`JobQueue::enqueue`] to enqueue a job. The scheduling is
+//! relatively rudimentary and could likely be improved.
+//!
+//! A rough outline of building a queue and executing jobs is:
+//!
+//! 1. [`JobQueue::new`] to simply create one queue.
+//! 2. [`JobQueue::enqueue`] to add new jobs onto the queue.
+//! 3. [`JobQueue::execute`] to consume the queue and execute all jobs,
+//! as shown in the sketch below.
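+//!
+//! A minimal sketch of that flow, in pseudo-Rust (the `bcx`, `cx`, `unit`,
+//! `job`, and `plan` values are assumed to come from the surrounding build):
+//!
+//! ```ignore
+//! let mut queue = JobQueue::new(bcx);
+//! queue.enqueue(cx, &unit, job)?;
+//! queue.execute(cx, plan)?;
+//! ```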
+//!
+//! The primary loop happens inside [`JobQueue::execute`], which is effectively
+//! [`DrainState::drain_the_queue`]. [`DrainState`] is, as its name suggests,
+//! the running state of the job queue while it is being drained.
+//!
+//! ## Jobserver
+//!
+//! As of Feb. 2023, Cargo and rustc have a relatively simple jobserver
+//! relationship with each other. They share a single jobserver amongst what
+//! is potentially hundreds of threads of work on many-cored systems.
+//! The jobserver could come from either the environment (e.g., from a `make`
+//! invocation), or from Cargo creating its own jobserver if there is no
+//! existing one to inherit from.
+//!
+//! Cargo wants to complete the build as quickly as possible, fully saturating
+//! all cores (as constrained by the `-j N` parameter). Cargo also must not
+//! spawn more than N threads of work: the total number of tokens we have
+//! floating around must always be limited to N.
+//!
+//! It is not really possible to optimally choose which crate should build
+//! first or last; nor is it possible to decide whether to give an additional
+//! token to rustc first or rather spawn a new crate of work. The algorithm in
+//! Cargo prioritizes spawning as many crates (i.e., rustc processes) as
+//! possible. In short, the jobserver relationship among Cargo and rustc
+//! processes is **1 `cargo` to N `rustc`**. Cargo knows nothing beyond rustc
+//! processes in terms of parallelism[^parallel-rustc].
+//!
+//! We integrate with the [jobserver] crate, originating from GNU make
+//! [POSIX jobserver], to make sure that build scripts which use make to
+//! build C code can cooperate with us on the number of used tokens and
+//! avoid overfilling the system we're on.
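+//!
+//! As a rough sketch of that integration (the values here are hypothetical;
+//! in this module the real client is `cx.jobserver` and tokens are forwarded
+//! in [`JobQueue::execute`]), the [jobserver] crate is used like this:
+//!
+//! ```ignore
+//! // Create a client limited to 4 tokens (or inherit one from e.g. `make`).
+//! let client = jobserver::Client::new(4)?;
+//! // Spawn a helper thread that forwards each acquired token to the queue.
+//! let helper = client.into_helper_thread(move |token| {
+//!     messages.push(Message::Token(token));
+//! })?;
+//! // Ask for one more token whenever there is extra pending work.
+//! helper.request_token();
+//! ```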
+//!
+//! ## Scheduling
+//!
+//! The current scheduling algorithm is not really polished. It is simply based
+//! on a dependency graph, [`DependencyQueue`]. We keep adding nodes onto the
+//! graph until we finalize it. When the graph gets finalized, it computes, for
+//! each node, the sum of the costs of its dependencies, including transitive
+//! ones. That summed dependency cost becomes the cost of the node itself.
+//!
+//! For the time being, the cost is just passed as a fixed placeholder in
+//! [`JobQueue::enqueue`]. In the future, we could explore more possibilities
+//! here. For instance, we could start persisting timing information for each
+//! build somewhere; for a subsequent build, we could look at that historical
+//! data and perform a PGO-like optimization to prioritize jobs, making builds
+//! fully pipelined.
+//!
+//! ## Message queue
+//!
+//! Each spawned thread running a process uses the message queue [`Queue`] to
+//! send messages back to the main thread (the one running `cargo`).
+//! The main thread coordinates everything, and handles printing output.
+//!
+//! It is important to be careful which messages use [`push`] vs [`push_bounded`].
+//! `push` is for priority messages (like tokens, or "finished") where the
+//! sender shouldn't block. We want to handle those so real work can proceed
+//! ASAP.
+//!
+//! `push_bounded` is only for messages being printed to stdout/stderr. Being
+//! bounded prevents a flood of messages causing a large amount of memory
+//! being used.
+//!
+//! `push` also avoids blocking which helps avoid deadlocks. For example, when
+//! the diagnostic server thread is dropped, it waits for the thread to exit.
+//! But if the thread is blocked on a full queue, and there is a critical
+//! error, the drop will deadlock. This should be fixed at some point in the
+//! future. The jobserver thread has a similar problem, though it will time
+//! out after 1 second.
+//!
+//! To access the message queue, each running `Job` is given its own [`JobState`],
+//! containing everything it needs to communicate with the main thread.
+//!
+//! See [`Message`] for all available message kinds.
+//!
+//! [^parallel-rustc]: In fact, the `jobserver` that Cargo uses also manages the
+//! allocation of tokens to rustc beyond the implicit token each rustc owns
+//! (i.e., the ones used for parallel LLVM work and parallel rustc threads).
+//! See also ["Rust Compiler Development Guide: Parallel Compilation"]
+//! and [this comment][rustc-codegen] in rust-lang/rust.
+//!
+//! ["Rust Compiler Development Guide: Parallel Compilation"]: https://rustc-dev-guide.rust-lang.org/parallel-rustc.html
+//! [rustc-codegen]: https://github.com/rust-lang/rust/blob/5423745db8b434fcde54888b35f518f00cce00e4/compiler/rustc_codegen_ssa/src/back/write.rs#L1204-L1217
+//! [jobserver]: https://docs.rs/jobserver
+//! [POSIX jobserver]: https://www.gnu.org/software/make/manual/html_node/POSIX-Jobserver.html
+//! [`push`]: Queue::push
+//! [`push_bounded`]: Queue::push_bounded
+
+mod job;
+mod job_state;
+
+use std::cell::RefCell;
+use std::collections::{HashMap, HashSet};
+use std::fmt::Write as _;
+use std::io;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+use std::thread::{self, Scope};
+use std::time::Duration;
+
+use anyhow::{format_err, Context as _};
+use cargo_util::ProcessBuilder;
+use jobserver::{Acquired, HelperThread};
+use log::{debug, trace};
+use semver::Version;
+
+pub use self::job::Freshness::{self, Dirty, Fresh};
+pub use self::job::{Job, Work};
+pub use self::job_state::JobState;
+use super::context::OutputFile;
+use super::timings::Timings;
+use super::{BuildContext, BuildPlan, CompileMode, Context, Unit};
+use crate::core::compiler::descriptive_pkg_name;
+use crate::core::compiler::future_incompat::{
+ self, FutureBreakageItem, FutureIncompatReportPackage,
+};
+use crate::core::resolver::ResolveBehavior;
+use crate::core::{PackageId, Shell, TargetKind};
+use crate::util::diagnostic_server::{self, DiagnosticPrinter};
+use crate::util::errors::AlreadyPrintedError;
+use crate::util::machine_message::{self, Message as _};
+use crate::util::CargoResult;
+use crate::util::{self, internal, profile};
+use crate::util::{Config, DependencyQueue, Progress, ProgressStyle, Queue};
+
+/// This structure is backed by the `DependencyQueue` type and manages the
+/// queueing of compilation steps for each package. Packages enqueue units of
+/// work and then later on the entire graph is converted to a [`DrainState`]
+/// and executed.
+pub struct JobQueue<'cfg> {
+ queue: DependencyQueue<Unit, Artifact, Job>,
+ counts: HashMap<PackageId, usize>,
+ timings: Timings<'cfg>,
+}
+
+/// This structure is backed by the `DependencyQueue` type and manages the
+/// actual compilation step of each package. Packages enqueue units of work and
+/// then later on the entire graph is processed and compiled.
+///
+/// It is created from a [`JobQueue`] when we have fully assembled the crate
+/// graph (i.e., all package dependencies are known).
+struct DrainState<'cfg> {
+ // This is the length of the DependencyQueue when starting out
+ total_units: usize,
+
+ queue: DependencyQueue<Unit, Artifact, Job>,
+ messages: Arc<Queue<Message>>,
+ /// Diagnostic deduplication support.
+ diag_dedupe: DiagDedupe<'cfg>,
+ /// Count of warnings, used to print a summary after the job succeeds
+ warning_count: HashMap<JobId, WarningCount>,
+ active: HashMap<JobId, Unit>,
+ compiled: HashSet<PackageId>,
+ documented: HashSet<PackageId>,
+ scraped: HashSet<PackageId>,
+ counts: HashMap<PackageId, usize>,
+ progress: Progress<'cfg>,
+ next_id: u32,
+ timings: Timings<'cfg>,
+
+ /// Tokens that are currently owned by this Cargo, and may be "associated"
+ /// with a rustc process. They may also be unused, though if so will be
+ /// dropped on the next loop iteration.
+ ///
+ /// Note that the length of this may be zero, but we will still spawn work,
+ /// as we share the implicit token given to this Cargo process with a
+ /// single rustc process.
+ tokens: Vec<Acquired>,
+
+ /// The list of jobs that we have not yet started executing, but have
+ /// retrieved from the `queue`. We eagerly pull jobs off the main queue to
+ /// allow us to request jobserver tokens pretty early.
+ pending_queue: Vec<(Unit, Job, usize)>,
+ print: DiagnosticPrinter<'cfg>,
+
+ /// How many jobs we've finished
+ finished: usize,
+ per_package_future_incompat_reports: Vec<FutureIncompatReportPackage>,
+}
+
+/// Count of warnings, used to print a summary after the job succeeds
+#[derive(Default)]
+pub struct WarningCount {
+ /// total number of warnings
+ pub total: usize,
+ /// number of warnings that were suppressed because they
+ /// were duplicates of a previous warning
+ pub duplicates: usize,
+ /// number of fixable warnings, set to `NotAllowed`
+ /// if any errors have been seen for the current
+ /// target
+ pub fixable: FixableWarnings,
+}
+
+impl WarningCount {
+ /// If an error is seen this should be called
+ /// to set `fixable` to `NotAllowed`
+ fn disallow_fixable(&mut self) {
+ self.fixable = FixableWarnings::NotAllowed;
+ }
+
+ /// Checks whether fixable warnings are allowed.
+ /// Fixable warnings are allowed if no errors have
+ /// been seen for the current target. If an error
+ /// was seen, `fixable` will be `NotAllowed`.
+ fn fixable_allowed(&self) -> bool {
+ match &self.fixable {
+ FixableWarnings::NotAllowed => false,
+ _ => true,
+ }
+ }
+}
+
+/// Used to keep track of how many fixable warnings there are
+/// and if fixable warnings are allowed
+#[derive(Default)]
+pub enum FixableWarnings {
+ NotAllowed,
+ #[default]
+ Zero,
+ Positive(usize),
+}
+
+pub struct ErrorsDuringDrain {
+ pub count: usize,
+}
+
+struct ErrorToHandle {
+ error: anyhow::Error,
+
+ /// This field is true for "interesting" errors and false for "mundane"
+ /// errors. If false, we print the above error only if it's the first one
+ /// encountered so far while draining the job queue.
+ ///
+ /// In most places where an error is propagated, we set this to false to
+ /// avoid scenarios where Cargo might end up spewing tons of redundant error
+ /// messages. For example if an i/o stream got closed somewhere, we don't
+ /// care about individually reporting every thread that it broke; just the
+ /// first is enough.
+ ///
+ /// The exception where print_always is true is that we do report every
+ /// instance of a rustc invocation that failed with diagnostics. This
+ /// corresponds to errors from Message::Finish.
+ print_always: bool,
+}
+
+impl<E> From<E> for ErrorToHandle
+where
+ anyhow::Error: From<E>,
+{
+ fn from(error: E) -> Self {
+ ErrorToHandle {
+ error: anyhow::Error::from(error),
+ print_always: false,
+ }
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct JobId(pub u32);
+
+impl std::fmt::Display for JobId {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.0)
+ }
+}
+
+/// Handler for deduplicating diagnostics.
+struct DiagDedupe<'cfg> {
+ seen: RefCell<HashSet<u64>>,
+ config: &'cfg Config,
+}
+
+impl<'cfg> DiagDedupe<'cfg> {
+ fn new(config: &'cfg Config) -> Self {
+ DiagDedupe {
+ seen: RefCell::new(HashSet::new()),
+ config,
+ }
+ }
+
+ /// Emits a diagnostic message.
+ ///
+ /// Returns `true` if the message was emitted, or `false` if it was
+ /// suppressed for being a duplicate.
+ fn emit_diag(&self, diag: &str) -> CargoResult<bool> {
+ let h = util::hash_u64(diag);
+ if !self.seen.borrow_mut().insert(h) {
+ return Ok(false);
+ }
+ let mut shell = self.config.shell();
+ shell.print_ansi_stderr(diag.as_bytes())?;
+ shell.err().write_all(b"\n")?;
+ Ok(true)
+ }
+}
+
+/// Possible artifacts that can be produced by compilations, used as edge values
+/// in the dependency graph.
+///
+/// As edge values we can have multiple kinds of edges depending on one node,
+/// for example some units may only depend on the metadata for an rlib while
+/// others depend on the full rlib. This `Artifact` enum is used to distinguish
+/// this case and track the progress of compilations as they proceed.
+#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
+enum Artifact {
+ /// A generic placeholder for "depends on everything run by a step" and
+ /// means that we can't start the next compilation until the previous has
+ /// finished entirely.
+ All,
+
+ /// A node indicating that we only depend on the metadata of a compilation,
+ /// but the compilation is typically also producing an rlib. We can start
+ /// our step, however, before the full rlib is available.
+ Metadata,
+}
+
+enum Message {
+ Run(JobId, String),
+ BuildPlanMsg(String, ProcessBuilder, Arc<Vec<OutputFile>>),
+ Stdout(String),
+ Stderr(String),
+
+ // This is for general stderr output from subprocesses
+ Diagnostic {
+ id: JobId,
+ level: String,
+ diag: String,
+ fixable: bool,
+ },
+ // This handles duplicate output that is suppressed, for showing
+ // only a count of duplicate messages instead
+ WarningCount {
+ id: JobId,
+ emitted: bool,
+ fixable: bool,
+ },
+ // This is for warnings generated by Cargo's interpretation of the
+ // subprocess output, e.g. scrape-examples prints a warning if a
+ // unit fails to be scraped
+ Warning {
+ id: JobId,
+ warning: String,
+ },
+
+ FixDiagnostic(diagnostic_server::Message),
+ Token(io::Result<Acquired>),
+ Finish(JobId, Artifact, CargoResult<()>),
+ FutureIncompatReport(JobId, Vec<FutureBreakageItem>),
+}
+
+impl<'cfg> JobQueue<'cfg> {
+ pub fn new(bcx: &BuildContext<'_, 'cfg>) -> JobQueue<'cfg> {
+ JobQueue {
+ queue: DependencyQueue::new(),
+ counts: HashMap::new(),
+ timings: Timings::new(bcx, &bcx.roots),
+ }
+ }
+
+ pub fn enqueue(&mut self, cx: &Context<'_, 'cfg>, unit: &Unit, job: Job) -> CargoResult<()> {
+ let dependencies = cx.unit_deps(unit);
+ let mut queue_deps = dependencies
+ .iter()
+ .filter(|dep| {
+ // Binaries aren't actually needed to *compile* tests, just to run
+ // them, so we don't include this dependency edge in the job graph.
+ // But we shouldn't filter out dependencies being scraped for Rustdoc.
+ (!dep.unit.target.is_test() && !dep.unit.target.is_bin())
+ || dep.unit.artifact.is_true()
+ || dep.unit.mode.is_doc_scrape()
+ })
+ .map(|dep| {
+ // Handle the case here where our `unit -> dep` dependency may
+ // only require the metadata, not the full compilation to
+ // finish. Use the tables in `cx` to figure out what kind
+ // of artifact is associated with this dependency.
+ let artifact = if cx.only_requires_rmeta(unit, &dep.unit) {
+ Artifact::Metadata
+ } else {
+ Artifact::All
+ };
+ (dep.unit.clone(), artifact)
+ })
+ .collect::<HashMap<_, _>>();
+
+ // This is somewhat tricky, but we may need to synthesize some
+ // dependencies for this target if it requires full upstream
+ // compilations to have completed. Because of pipelining, some
+ // dependency edges may be `Metadata` due to the above clause (as
+ // opposed to everything being `All`). For example consider:
+ //
+ // a (binary)
+ // └ b (lib)
+ // └ c (lib)
+ //
+ // Here the dependency edge from B to C will be `Metadata`, and the
+ // dependency edge from A to B will be `All`. For A to be compiled,
+ // however, it currently actually needs the full rlib of C. This means
+ // that we need to synthesize a dependency edge for the dependency graph
+ // from A to C. That's done here.
+ //
+ // This will walk all dependencies of the current target, and if any of
+ // *their* dependencies are `Metadata` then we depend on the `All` of
+ // the target as well. This should ensure that edges changed to
+ // `Metadata` propagate upwards `All` dependencies to anything that
+ // transitively contains the `Metadata` edge.
+ if unit.requires_upstream_objects() {
+ for dep in dependencies {
+ depend_on_deps_of_deps(cx, &mut queue_deps, dep.unit.clone());
+ }
+
+ fn depend_on_deps_of_deps(
+ cx: &Context<'_, '_>,
+ deps: &mut HashMap<Unit, Artifact>,
+ unit: Unit,
+ ) {
+ for dep in cx.unit_deps(&unit) {
+ if deps.insert(dep.unit.clone(), Artifact::All).is_none() {
+ depend_on_deps_of_deps(cx, deps, dep.unit.clone());
+ }
+ }
+ }
+ }
+
+ // For now we use a fixed placeholder value for the cost of each unit, but
+ // in the future this could be used to allow users to provide hints about
+ // relative expected costs of units, or this could be automatically set in
+ // a smarter way using timing data from a previous compilation.
+ self.queue.queue(unit.clone(), job, queue_deps, 100);
+ *self.counts.entry(unit.pkg.package_id()).or_insert(0) += 1;
+ Ok(())
+ }
+
+ /// Executes all jobs necessary to build the dependency graph.
+ ///
+ /// This function will spawn off `config.jobs()` workers to build all of the
+ /// necessary dependencies, in order. Freshness is propagated as far as
+ /// possible along each dependency chain.
+ pub fn execute(mut self, cx: &mut Context<'_, '_>, plan: &mut BuildPlan) -> CargoResult<()> {
+ let _p = profile::start("executing the job graph");
+ self.queue.queue_finished();
+
+ let progress = Progress::with_style("Building", ProgressStyle::Ratio, cx.bcx.config);
+ let state = DrainState {
+ total_units: self.queue.len(),
+ queue: self.queue,
+ // 100 here is somewhat arbitrary. It is a few screenfulls of
+ // output, and hopefully at most a few megabytes of memory for
+ // typical messages. If you change this, please update the test
+ // caching_large_output, too.
+ messages: Arc::new(Queue::new(100)),
+ diag_dedupe: DiagDedupe::new(cx.bcx.config),
+ warning_count: HashMap::new(),
+ active: HashMap::new(),
+ compiled: HashSet::new(),
+ documented: HashSet::new(),
+ scraped: HashSet::new(),
+ counts: self.counts,
+ progress,
+ next_id: 0,
+ timings: self.timings,
+ tokens: Vec::new(),
+ pending_queue: Vec::new(),
+ print: DiagnosticPrinter::new(cx.bcx.config),
+ finished: 0,
+ per_package_future_incompat_reports: Vec::new(),
+ };
+
+ // Create a helper thread for acquiring jobserver tokens
+ let messages = state.messages.clone();
+ let helper = cx
+ .jobserver
+ .clone()
+ .into_helper_thread(move |token| {
+ messages.push(Message::Token(token));
+ })
+ .with_context(|| "failed to create helper thread for jobserver management")?;
+
+ // Create a helper thread to manage the diagnostics for rustfix if
+ // necessary.
+ let messages = state.messages.clone();
+ // It is important that this uses `push` instead of `push_bounded` for
+ // now. If someone wants to fix this to be bounded, the `drop`
+ // implementation needs to be changed to avoid possible deadlocks.
+ let _diagnostic_server = cx
+ .bcx
+ .build_config
+ .rustfix_diagnostic_server
+ .borrow_mut()
+ .take()
+ .map(move |srv| srv.start(move |msg| messages.push(Message::FixDiagnostic(msg))));
+
+ thread::scope(
+ move |scope| match state.drain_the_queue(cx, plan, scope, &helper) {
+ Some(err) => Err(err),
+ None => Ok(()),
+ },
+ )
+ }
+}
+
+impl<'cfg> DrainState<'cfg> {
+ fn spawn_work_if_possible<'s>(
+ &mut self,
+ cx: &mut Context<'_, '_>,
+ jobserver_helper: &HelperThread,
+ scope: &'s Scope<'s, '_>,
+ ) -> CargoResult<()> {
+ // Dequeue as much work as we can, learning about everything
+ // possible that can run. Note that this is also the point where we
+ // start requesting job tokens. Each job after the first needs to
+ // request a token.
+ while let Some((unit, job, priority)) = self.queue.dequeue() {
+ // We want to keep the pieces of work in the `pending_queue` sorted
+ // by their priorities, and insert the current job at its correctly
+ // sorted position: following the lower priority jobs, and the ones
+ // with the same priority (since they were dequeued before the
+ // current one, we also keep that relation).
+ let idx = self
+ .pending_queue
+ .partition_point(|&(_, _, p)| p <= priority);
+ self.pending_queue.insert(idx, (unit, job, priority));
+ if self.active.len() + self.pending_queue.len() > 1 {
+ jobserver_helper.request_token();
+ }
+ }
+
+ // Now that we've learned of all possible work that we can execute
+ // try to spawn it so long as we've got a jobserver token which says
+ // we're able to perform some parallel work.
+ // The `pending_queue` is sorted in ascending priority order, and we
+ // remove items from its end to schedule the highest priority items
+ // sooner.
+ while self.has_extra_tokens() && !self.pending_queue.is_empty() {
+ let (unit, job, _) = self.pending_queue.pop().unwrap();
+ *self.counts.get_mut(&unit.pkg.package_id()).unwrap() -= 1;
+ if !cx.bcx.build_config.build_plan {
+ // Print out some nice progress information.
+ // NOTE: An error here will drop the job without starting it.
+ // That should be OK, since we want to exit as soon as
+ // possible during an error.
+ self.note_working_on(cx.bcx.config, cx.bcx.ws.root(), &unit, job.freshness())?;
+ }
+ self.run(&unit, job, cx, scope);
+ }
+
+ Ok(())
+ }
+
+ fn has_extra_tokens(&self) -> bool {
+ self.active.len() < self.tokens.len() + 1
+ }
+
+ fn handle_event(
+ &mut self,
+ cx: &mut Context<'_, '_>,
+ plan: &mut BuildPlan,
+ event: Message,
+ ) -> Result<(), ErrorToHandle> {
+ match event {
+ Message::Run(id, cmd) => {
+ cx.bcx
+ .config
+ .shell()
+ .verbose(|c| c.status("Running", &cmd))?;
+ self.timings.unit_start(id, self.active[&id].clone());
+ }
+ Message::BuildPlanMsg(module_name, cmd, filenames) => {
+ plan.update(&module_name, &cmd, &filenames)?;
+ }
+ Message::Stdout(out) => {
+ writeln!(cx.bcx.config.shell().out(), "{}", out)?;
+ }
+ Message::Stderr(err) => {
+ let mut shell = cx.bcx.config.shell();
+ shell.print_ansi_stderr(err.as_bytes())?;
+ shell.err().write_all(b"\n")?;
+ }
+ Message::Diagnostic {
+ id,
+ level,
+ diag,
+ fixable,
+ } => {
+ let emitted = self.diag_dedupe.emit_diag(&diag)?;
+ if level == "warning" {
+ self.bump_warning_count(id, emitted, fixable);
+ }
+ if level == "error" {
+ let cnts = self.warning_count.entry(id).or_default();
+ // If there is an error, the `cargo fix` message should not show
+ cnts.disallow_fixable();
+ }
+ }
+ Message::Warning { id, warning } => {
+ cx.bcx.config.shell().warn(warning)?;
+ self.bump_warning_count(id, true, false);
+ }
+ Message::WarningCount {
+ id,
+ emitted,
+ fixable,
+ } => {
+ self.bump_warning_count(id, emitted, fixable);
+ }
+ Message::FixDiagnostic(msg) => {
+ self.print.print(&msg)?;
+ }
+ Message::Finish(id, artifact, result) => {
+ let unit = match artifact {
+ // If `id` has completely finished we remove it
+ // from the `active` map ...
+ Artifact::All => {
+ trace!("end: {:?}", id);
+ self.finished += 1;
+ self.report_warning_count(
+ cx.bcx.config,
+ id,
+ &cx.bcx.rustc().workspace_wrapper,
+ );
+ self.active.remove(&id).unwrap()
+ }
+ // ... otherwise if it hasn't finished we leave it
+ // in there as we'll get another `Finish` later on.
+ Artifact::Metadata => {
+ trace!("end (meta): {:?}", id);
+ self.active[&id].clone()
+ }
+ };
+ debug!("end ({:?}): {:?}", unit, result);
+ match result {
+ Ok(()) => self.finish(id, &unit, artifact, cx)?,
+ Err(_) if cx.bcx.unit_can_fail_for_docscraping(&unit) => {
+ cx.failed_scrape_units
+ .lock()
+ .unwrap()
+ .insert(cx.files().metadata(&unit));
+ self.queue.finish(&unit, &artifact);
+ }
+ Err(error) => {
+ let msg = "The following warnings were emitted during compilation:";
+ self.emit_warnings(Some(msg), &unit, cx)?;
+ self.back_compat_notice(cx, &unit)?;
+ return Err(ErrorToHandle {
+ error,
+ print_always: true,
+ });
+ }
+ }
+ }
+ Message::FutureIncompatReport(id, items) => {
+ let package_id = self.active[&id].pkg.package_id();
+ self.per_package_future_incompat_reports
+ .push(FutureIncompatReportPackage { package_id, items });
+ }
+ Message::Token(acquired_token) => {
+ let token = acquired_token.with_context(|| "failed to acquire jobserver token")?;
+ self.tokens.push(token);
+ }
+ }
+
+ Ok(())
+ }
+
+ // This will also tick the progress bar as appropriate
+ fn wait_for_events(&mut self) -> Vec<Message> {
+ // Drain all events at once to avoid displaying the progress bar
+ // unnecessarily. If there are no events we actually block waiting for
+ // an event, but we keep a "heartbeat" going to allow `record_cpu`
+ // to run above to calculate CPU usage over time. To do this we
+ // listen for a message with a timeout, and on timeout we run the
+ // previous parts of the loop again.
+ let mut events = self.messages.try_pop_all();
+ if events.is_empty() {
+ loop {
+ self.tick_progress();
+ self.tokens.truncate(self.active.len() - 1);
+ match self.messages.pop(Duration::from_millis(500)) {
+ Some(message) => {
+ events.push(message);
+ break;
+ }
+ None => continue,
+ }
+ }
+ }
+ events
+ }
+
+ /// This is the "main" loop, where Cargo does all work to run the
+ /// compiler.
+ ///
+ /// This returns an Option to prevent the use of `?` on `Result` types
+ /// because it is important for the loop to carefully handle errors.
+ fn drain_the_queue<'s>(
+ mut self,
+ cx: &mut Context<'_, '_>,
+ plan: &mut BuildPlan,
+ scope: &'s Scope<'s, '_>,
+ jobserver_helper: &HelperThread,
+ ) -> Option<anyhow::Error> {
+ trace!("queue: {:#?}", self.queue);
+
+ // Iteratively execute the entire dependency graph. Each turn of the
+ // loop starts out by scheduling as much work as possible (up to the
+ // maximum number of parallel jobs we have tokens for). A local queue
+ // is maintained separately from the main dependency queue as one
+ // dequeue may actually dequeue quite a bit of work (e.g., 10 binaries
+ // in one package).
+ //
+ // After a job has finished we update our internal state if it was
+ // successful and otherwise wait for pending work to finish if it failed
+ // and then immediately return (or keep going, if requested by the build
+ // config).
+ let mut errors = ErrorsDuringDrain { count: 0 };
+ // CAUTION! Do not use `?` or break out of the loop early. Every error
+ // must be handled in such a way that the loop is still allowed to
+ // drain event messages.
+ loop {
+ if errors.count == 0 || cx.bcx.build_config.keep_going {
+ if let Err(e) = self.spawn_work_if_possible(cx, jobserver_helper, scope) {
+ self.handle_error(&mut cx.bcx.config.shell(), &mut errors, e);
+ }
+ }
+
+ // If after all that we're not actually running anything then we're
+ // done!
+ if self.active.is_empty() {
+ break;
+ }
+
+ // And finally, before we block waiting for the next event, drop any
+ // excess tokens we may have accidentally acquired. Due to how our
+ // jobserver interface is architected we may acquire a token that we
+ // don't actually use, and if this happens just relinquish it back
+ // to the jobserver itself.
+ for event in self.wait_for_events() {
+ if let Err(event_err) = self.handle_event(cx, plan, event) {
+ self.handle_error(&mut cx.bcx.config.shell(), &mut errors, event_err);
+ }
+ }
+ }
+ self.progress.clear();
+
+ let profile_name = cx.bcx.build_config.requested_profile;
+ // NOTE: this may be a bit inaccurate, since this may not display the
+ // profile for what was actually built. Profile overrides can change
+ // these settings, and in some cases different targets are built with
+ // different profiles. To be accurate, it would need to collect a
+ // list of Units built, and maybe display a list of the different
+ // profiles used. However, to keep it simple and compatible with old
+ // behavior, we just display what the base profile is.
+ let profile = cx.bcx.profiles.base_profile();
+ let mut opt_type = String::from(if profile.opt_level.as_str() == "0" {
+ "unoptimized"
+ } else {
+ "optimized"
+ });
+ if profile.debuginfo.is_turned_on() {
+ opt_type += " + debuginfo";
+ }
+
+ let time_elapsed = util::elapsed(cx.bcx.config.creation_time().elapsed());
+ if let Err(e) = self.timings.finished(cx, &errors.to_error()) {
+ self.handle_error(&mut cx.bcx.config.shell(), &mut errors, e);
+ }
+ if cx.bcx.build_config.emit_json() {
+ let mut shell = cx.bcx.config.shell();
+ let msg = machine_message::BuildFinished {
+ success: errors.count == 0,
+ }
+ .to_json_string();
+ if let Err(e) = writeln!(shell.out(), "{}", msg) {
+ self.handle_error(&mut shell, &mut errors, e);
+ }
+ }
+
+ if let Some(error) = errors.to_error() {
+ // Any errors up to this point have already been printed via the
+ // `display_error` inside `handle_error`.
+ Some(anyhow::Error::new(AlreadyPrintedError::new(error)))
+ } else if self.queue.is_empty() && self.pending_queue.is_empty() {
+ let message = format!(
+ "{} [{}] target(s) in {}",
+ profile_name, opt_type, time_elapsed
+ );
+ if !cx.bcx.build_config.build_plan {
+ // It doesn't really matter if this fails.
+ drop(cx.bcx.config.shell().status("Finished", message));
+ future_incompat::save_and_display_report(
+ cx.bcx,
+ &self.per_package_future_incompat_reports,
+ );
+ }
+
+ None
+ } else {
+ debug!("queue: {:#?}", self.queue);
+ Some(internal("finished with jobs still left in the queue"))
+ }
+ }
+
+ fn handle_error(
+ &self,
+ shell: &mut Shell,
+ err_state: &mut ErrorsDuringDrain,
+ new_err: impl Into<ErrorToHandle>,
+ ) {
+ let new_err = new_err.into();
+ if new_err.print_always || err_state.count == 0 {
+ crate::display_error(&new_err.error, shell);
+ if err_state.count == 0 && !self.active.is_empty() {
+ drop(shell.warn("build failed, waiting for other jobs to finish..."));
+ }
+ err_state.count += 1;
+ } else {
+ log::warn!("{:?}", new_err.error);
+ }
+ }
+
+ // This also records CPU usage and marks concurrency; we roughly want to do
+ // this as often as we spin on the events receiver (at least every 500ms or
+ // so).
+ fn tick_progress(&mut self) {
+ // Record some timing information if `--timings` is enabled, and
+ // this'll end up being a noop if we're not recording this
+ // information.
+ self.timings.mark_concurrency(
+ self.active.len(),
+ self.pending_queue.len(),
+ self.queue.len(),
+ );
+ self.timings.record_cpu();
+
+ let active_names = self
+ .active
+ .values()
+ .map(|u| self.name_for_progress(u))
+ .collect::<Vec<_>>();
+ drop(self.progress.tick_now(
+ self.finished,
+ self.total_units,
+ &format!(": {}", active_names.join(", ")),
+ ));
+ }
+
+ fn name_for_progress(&self, unit: &Unit) -> String {
+ let pkg_name = unit.pkg.name();
+ let target_name = unit.target.name();
+ match unit.mode {
+ CompileMode::Doc { .. } => format!("{}(doc)", pkg_name),
+ CompileMode::RunCustomBuild => format!("{}(build)", pkg_name),
+ CompileMode::Test | CompileMode::Check { test: true } => match unit.target.kind() {
+ TargetKind::Lib(_) => format!("{}(test)", target_name),
+ TargetKind::CustomBuild => panic!("cannot test build script"),
+ TargetKind::Bin => format!("{}(bin test)", target_name),
+ TargetKind::Test => format!("{}(test)", target_name),
+ TargetKind::Bench => format!("{}(bench)", target_name),
+ TargetKind::ExampleBin | TargetKind::ExampleLib(_) => {
+ format!("{}(example test)", target_name)
+ }
+ },
+ _ => match unit.target.kind() {
+ TargetKind::Lib(_) => pkg_name.to_string(),
+ TargetKind::CustomBuild => format!("{}(build.rs)", pkg_name),
+ TargetKind::Bin => format!("{}(bin)", target_name),
+ TargetKind::Test => format!("{}(test)", target_name),
+ TargetKind::Bench => format!("{}(bench)", target_name),
+ TargetKind::ExampleBin | TargetKind::ExampleLib(_) => {
+ format!("{}(example)", target_name)
+ }
+ },
+ }
+ }
+
+ /// Executes a job.
+ ///
+ /// Fresh jobs block until finished (which should be very fast!), while
+ /// dirty jobs spawn a thread in the background and return immediately.
+ fn run<'s>(&mut self, unit: &Unit, job: Job, cx: &Context<'_, '_>, scope: &'s Scope<'s, '_>) {
+ let id = JobId(self.next_id);
+ self.next_id = self.next_id.checked_add(1).unwrap();
+
+ debug!("start {}: {:?}", id, unit);
+
+ assert!(self.active.insert(id, unit.clone()).is_none());
+
+ let messages = self.messages.clone();
+ let is_fresh = job.freshness().is_fresh();
+ let rmeta_required = cx.rmeta_required(unit);
+
+ let doit = move |diag_dedupe| {
+ let state = JobState::new(id, messages, diag_dedupe, rmeta_required);
+ state.run_to_finish(job);
+ };
+
+ match is_fresh {
+ true => {
+ self.timings.add_fresh();
+ // Running a fresh job on the same thread is often much faster than spawning a new
+ // thread to run the job.
+ doit(Some(&self.diag_dedupe));
+ }
+ false => {
+ self.timings.add_dirty();
+ scope.spawn(move || doit(None));
+ }
+ }
+ }
+
+ fn emit_warnings(
+ &mut self,
+ msg: Option<&str>,
+ unit: &Unit,
+ cx: &mut Context<'_, '_>,
+ ) -> CargoResult<()> {
+ let outputs = cx.build_script_outputs.lock().unwrap();
+ let metadata = match cx.find_build_script_metadata(unit) {
+ Some(metadata) => metadata,
+ None => return Ok(()),
+ };
+ let bcx = &mut cx.bcx;
+ if let Some(output) = outputs.get(metadata) {
+ if !output.warnings.is_empty() {
+ if let Some(msg) = msg {
+ writeln!(bcx.config.shell().err(), "{}\n", msg)?;
+ }
+
+ for warning in output.warnings.iter() {
+ bcx.config.shell().warn(warning)?;
+ }
+
+ if msg.is_some() {
+ // Output an empty line.
+ writeln!(bcx.config.shell().err())?;
+ }
+ }
+ }
+
+ Ok(())
+ }
+
+ fn bump_warning_count(&mut self, id: JobId, emitted: bool, fixable: bool) {
+ let cnts = self.warning_count.entry(id).or_default();
+ cnts.total += 1;
+ if !emitted {
+ cnts.duplicates += 1;
+ // Don't add to fixable if it's already been emitted
+ } else if fixable {
+ // Do not add anything to the fixable warning count if it
+ // is `NotAllowed`, since that indicates there was an
+ // error while building this `Unit`.
+ if cnts.fixable_allowed() {
+ cnts.fixable = match cnts.fixable {
+ FixableWarnings::NotAllowed => FixableWarnings::NotAllowed,
+ FixableWarnings::Zero => FixableWarnings::Positive(1),
+ FixableWarnings::Positive(fixable) => FixableWarnings::Positive(fixable + 1),
+ };
+ }
+ }
+ }
+
+ /// Displays a final report of the warnings emitted by a particular job.
+ fn report_warning_count(
+ &mut self,
+ config: &Config,
+ id: JobId,
+ rustc_workspace_wrapper: &Option<PathBuf>,
+ ) {
+ let count = match self.warning_count.remove(&id) {
+ // An error could add an entry for a `Unit`
+ // with 0 warnings but with fixable
+ // warnings disallowed.
+ Some(count) if count.total > 0 => count,
+ None | Some(_) => return,
+ };
+ let unit = &self.active[&id];
+ let mut message = descriptive_pkg_name(&unit.pkg.name(), &unit.target, &unit.mode);
+ message.push_str(" generated ");
+ match count.total {
+ 1 => message.push_str("1 warning"),
+ n => drop(write!(message, "{} warnings", n)),
+ };
+ match count.duplicates {
+ 0 => {}
+ 1 => message.push_str(" (1 duplicate)"),
+ n => drop(write!(message, " ({} duplicates)", n)),
+ }
+ // Only show the `cargo fix` message if it's a local `Unit`
+ if unit.is_local() {
+ // Do not show this if there are any errors or no fixable warnings
+ if let FixableWarnings::Positive(fixable) = count.fixable {
+ // `cargo fix` doesn't have an option for custom builds
+ if !unit.target.is_custom_build() {
+ // To make sure the correct command is shown for `clippy` we
+ // check if `RUSTC_WORKSPACE_WRAPPER` is set and pointing towards
+ // `clippy-driver`.
+ let clippy = std::ffi::OsStr::new("clippy-driver");
+ let command = match rustc_workspace_wrapper.as_ref().and_then(|x| x.file_stem())
+ {
+ Some(wrapper) if wrapper == clippy => "cargo clippy --fix",
+ _ => "cargo fix",
+ };
+ let mut args = {
+ let named = unit.target.description_named();
+ // if it's a lib, we need to add the package to fix
+ if unit.target.is_lib() {
+ format!("{} -p {}", named, unit.pkg.name())
+ } else {
+ named
+ }
+ };
+ if unit.mode.is_rustc_test()
+ && !(unit.target.is_test() || unit.target.is_bench())
+ {
+ args.push_str(" --tests");
+ }
+ let mut suggestions = format!("{} suggestion", fixable);
+ if fixable > 1 {
+ suggestions.push_str("s")
+ }
+ drop(write!(
+ message,
+ " (run `{command} --{args}` to apply {suggestions})"
+ ))
+ }
+ }
+ }
+ // Errors are ignored here because it is tricky to handle them
+ // correctly, and they aren't important.
+ drop(config.shell().warn(message));
+ }
+
+ fn finish(
+ &mut self,
+ id: JobId,
+ unit: &Unit,
+ artifact: Artifact,
+ cx: &mut Context<'_, '_>,
+ ) -> CargoResult<()> {
+ if unit.mode.is_run_custom_build() && unit.show_warnings(cx.bcx.config) {
+ self.emit_warnings(None, unit, cx)?;
+ }
+ let unlocked = self.queue.finish(unit, &artifact);
+ match artifact {
+ Artifact::All => self.timings.unit_finished(id, unlocked),
+ Artifact::Metadata => self.timings.unit_rmeta_finished(id, unlocked),
+ }
+ Ok(())
+ }
+
+ // This isn't super trivial because we don't want to print loads and
+ // loads of information to the console, but we also want to produce a
+ // faithful representation of what's happening. This is somewhat nuanced
+ // as a package can start compiling *very* early on because of custom
+ // build commands and such.
+ //
+ // In general, we try to print "Compiling" for the first nontrivial task
+ // run for a package, regardless of when that is. We then don't print
+ // out any more information for a package after we've printed it once.
+ fn note_working_on(
+ &mut self,
+ config: &Config,
+ ws_root: &Path,
+ unit: &Unit,
+ fresh: &Freshness,
+ ) -> CargoResult<()> {
+ if (self.compiled.contains(&unit.pkg.package_id())
+ && !unit.mode.is_doc()
+ && !unit.mode.is_doc_scrape())
+ || (self.documented.contains(&unit.pkg.package_id()) && unit.mode.is_doc())
+ || (self.scraped.contains(&unit.pkg.package_id()) && unit.mode.is_doc_scrape())
+ {
+ return Ok(());
+ }
+
+ match fresh {
+ // Any dirty stage which runs at least one command gets printed as
+ // being a compiled package.
+ Dirty(dirty_reason) => {
+ if let Some(reason) = dirty_reason {
+ config
+ .shell()
+ .verbose(|shell| reason.present_to(shell, unit, ws_root))?;
+ }
+
+ if unit.mode.is_doc() {
+ self.documented.insert(unit.pkg.package_id());
+ config.shell().status("Documenting", &unit.pkg)?;
+ } else if unit.mode.is_doc_test() {
+ // Skip doc test.
+ } else if unit.mode.is_doc_scrape() {
+ self.scraped.insert(unit.pkg.package_id());
+ config.shell().status("Scraping", &unit.pkg)?;
+ } else {
+ self.compiled.insert(unit.pkg.package_id());
+ if unit.mode.is_check() {
+ config.shell().status("Checking", &unit.pkg)?;
+ } else {
+ config.shell().status("Compiling", &unit.pkg)?;
+ }
+ }
+ }
+ Fresh => {
+ // If doc tests are last, only print "Fresh" if nothing has been printed.
+ if self.counts[&unit.pkg.package_id()] == 0
+ && !(unit.mode.is_doc_test() && self.compiled.contains(&unit.pkg.package_id()))
+ {
+ self.compiled.insert(unit.pkg.package_id());
+ config.shell().verbose(|c| c.status("Fresh", &unit.pkg))?;
+ }
+ }
+ }
+ Ok(())
+ }
+
+ fn back_compat_notice(&self, cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<()> {
+ if unit.pkg.name() != "diesel"
+ || unit.pkg.version() >= &Version::new(1, 4, 8)
+ || cx.bcx.ws.resolve_behavior() == ResolveBehavior::V1
+ || !unit.pkg.package_id().source_id().is_registry()
+ || !unit.features.is_empty()
+ {
+ return Ok(());
+ }
+ if !cx
+ .bcx
+ .unit_graph
+ .keys()
+ .any(|unit| unit.pkg.name() == "diesel" && !unit.features.is_empty())
+ {
+ return Ok(());
+ }
+ cx.bcx.config.shell().note(
+ "\
+This error may be due to an interaction between diesel and Cargo's new
+feature resolver. Try updating to diesel 1.4.8 to fix this error.
+",
+ )?;
+ Ok(())
+ }
+}
+
+impl ErrorsDuringDrain {
+ fn to_error(&self) -> Option<anyhow::Error> {
+ match self.count {
+ 0 => None,
+ 1 => Some(format_err!("1 job failed")),
+ n => Some(format_err!("{} jobs failed", n)),
+ }
+ }
+}
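The warning bookkeeping above (`bump_warning_count` together with the `FixableWarnings` state) is easiest to follow in isolation. Below is a minimal, standalone sketch of just those counting rules; the types are re-declared here purely for illustration and are not the ones added by this patch.

```
// Simplified stand-ins for Cargo's internal warning counters (illustrative only).
#[derive(Clone, Copy, PartialEq, Debug)]
enum FixableWarnings {
    NotAllowed,
    Zero,
    Positive(usize),
}

struct WarningCount {
    total: usize,
    duplicates: usize,
    fixable: FixableWarnings,
}

impl WarningCount {
    // Mirrors `bump_warning_count`: duplicates never count as fixable, and once
    // fixable warnings are `NotAllowed` (an error occurred) the count is frozen.
    fn bump(&mut self, emitted: bool, fixable: bool) {
        self.total += 1;
        if !emitted {
            self.duplicates += 1;
        } else if fixable && self.fixable != FixableWarnings::NotAllowed {
            self.fixable = match self.fixable {
                FixableWarnings::Zero => FixableWarnings::Positive(1),
                FixableWarnings::Positive(n) => FixableWarnings::Positive(n + 1),
                FixableWarnings::NotAllowed => unreachable!(),
            };
        }
    }
}

fn main() {
    let mut count = WarningCount {
        total: 0,
        duplicates: 0,
        fixable: FixableWarnings::Zero,
    };
    count.bump(true, true); // a fixable warning is emitted
    count.bump(false, true); // the same warning replayed as a duplicate
    assert_eq!(count.total, 2);
    assert_eq!(count.duplicates, 1);
    assert_eq!(count.fixable, FixableWarnings::Positive(1));
}
```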
diff --git a/src/tools/cargo/src/cargo/core/compiler/layout.rs b/src/tools/cargo/src/cargo/core/compiler/layout.rs
new file mode 100644
index 000000000..d92adffeb
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/layout.rs
@@ -0,0 +1,242 @@
+//! Management of the directory layout of a build
+//!
+//! The directory layout is a little tricky at times, hence a separate file to
+//! house this logic. The current layout looks like this:
+//!
+//! ```text
+//! # This is the root directory for all output, the top-level package
+//! # places all of its output here.
+//! target/
+//!
+//! # Cache of `rustc -Vv` output for performance.
+//! .rustc-info.json
+//!
+//! # All final artifacts are linked into this directory from `deps`.
+//! # Note that named profiles will soon be included as separate directories
+//! # here. They have a restricted format, similar to Rust identifiers, so
+//! # Cargo-specific directories added in the future should use some prefix
+//! # like `.` to avoid name collisions.
+//! debug/ # or release/
+//!
+//! # File used to lock the directory to prevent multiple cargo processes
+//! # from using it at the same time.
+//! .cargo-lock
+//!
+//! # Hidden directory that holds all of the fingerprint files for all
+//! # packages
+//! .fingerprint/
+//! # Each package is in a separate directory.
+//! # Note that different target kinds have different filename prefixes.
+//! $pkgname-$META/
+//! # Set of source filenames for this package.
+//! dep-lib-$targetname
+//! # Timestamp when this package was last built.
+//! invoked.timestamp
+//! # The fingerprint hash.
+//! lib-$targetname
+//! # Detailed information used for logging the reason why
+//! # something is being recompiled.
+//! lib-$targetname.json
+//! # The console output from the compiler. This is cached
+//! # so that warnings can be redisplayed for "fresh" units.
+//! output-lib-$targetname
+//!
+//! # This is the root directory for all rustc artifacts except build
+//! # scripts, examples, and test and bench executables. Almost every
+//! # artifact should have a metadata hash added to its filename to
+//! # prevent collisions. One notable exception is dynamic libraries.
+//! deps/
+//!
+//! # Each artifact dependency gets its own directory.
+//! /artifact/$pkgname-$META/$kind
+//!
+//! # Root directory for all compiled examples.
+//! examples/
+//!
+//! # Directory used to store incremental data for the compiler (when
+//! # incremental compilation is enabled).
+//! incremental/
+//!
+//! # This is the location at which the output of all custom build
+//! # commands is rooted.
+//! build/
+//!
+//! # Each package gets its own directory where its build script and
+//! # script output are placed
+//! $pkgname-$META/ # For the build script itself.
+//! # The build script executable (name may be changed by user).
+//! build-script-build-$META
+//! # Hard link to build-script-build-$META.
+//! build-script-build
+//! # Dependency information generated by rustc.
+//! build-script-build-$META.d
+//! # Debug information, depending on platform and profile
+//! # settings.
+//! <debug symbols>
+//!
+//! # The package shows up twice with two different metadata hashes.
+//! $pkgname-$META/ # For the output of the build script.
+//! # Timestamp when the build script was last executed.
+//! invoked.timestamp
+//! # Directory where script can output files ($OUT_DIR).
+//! out/
+//! # Output from the build script.
+//! output
+//! # Path to `out`, used to help when the target directory is
+//! # moved.
+//! root-output
+//! # Stderr output from the build script.
+//! stderr
+//!
+//! # Output from rustdoc
+//! doc/
+//!
+//! # Used by `cargo package` and `cargo publish` to build a `.crate` file.
+//! package/
+//!
+//! # Experimental feature for generated build scripts.
+//! .metabuild/
+//! ```
+//!
+//! When cross-compiling, the layout is the same, except it appears in
+//! `target/$TRIPLE`.
+
+use crate::core::compiler::CompileTarget;
+use crate::core::Workspace;
+use crate::util::{CargoResult, FileLock};
+use cargo_util::paths;
+use std::path::{Path, PathBuf};
+
+/// Contains the paths of all target output locations.
+///
+/// See module docs for more information.
+pub struct Layout {
+ /// The root directory: `/path/to/target`.
+ /// If cross compiling: `/path/to/target/$TRIPLE`.
+ root: PathBuf,
+ /// The final artifact destination: `$root/debug` (or `release`).
+ dest: PathBuf,
+ /// The directory with rustc artifacts: `$dest/deps`
+ deps: PathBuf,
+ /// The directory for build scripts: `$dest/build`
+ build: PathBuf,
+ /// The directory for artifacts, i.e. binaries, cdylibs, staticlibs: `$dest/deps/artifact`
+ artifact: PathBuf,
+ /// The directory for incremental files: `$dest/incremental`
+ incremental: PathBuf,
+ /// The directory for fingerprints: `$dest/.fingerprint`
+ fingerprint: PathBuf,
+ /// The directory for examples: `$dest/examples`
+ examples: PathBuf,
+ /// The directory for rustdoc output: `$root/doc`
+ doc: PathBuf,
+ /// The directory for temporary data of integration tests and benches: `$dest/tmp`
+ tmp: PathBuf,
+ /// The lockfile for a build (`.cargo-lock`). Will be unlocked when this
+ /// struct is `drop`ped.
+ _lock: FileLock,
+}
+
+impl Layout {
+ /// Calculate the paths for build output, lock the build directory, and return as a Layout.
+ ///
+ /// This function will block if the directory is already locked.
+ ///
+ /// `dest` should be the final artifact directory name. Currently either
+ /// "debug" or "release".
+ pub fn new(
+ ws: &Workspace<'_>,
+ target: Option<CompileTarget>,
+ dest: &str,
+ ) -> CargoResult<Layout> {
+ let mut root = ws.target_dir();
+ if let Some(target) = target {
+ root.push(target.short_name());
+ }
+ let dest = root.join(dest);
+ // If the root directory doesn't already exist, go ahead and create it
+ // here. Use this opportunity to exclude it from backups as well, if the
+ // system supports it, since this is a freshly created folder.
+ //
+ paths::create_dir_all_excluded_from_backups_atomic(root.as_path_unlocked())?;
+ // Now that the excluded from backups target root is created we can create the
+ // actual destination (sub)subdirectory.
+ paths::create_dir_all(dest.as_path_unlocked())?;
+
+ // For now we don't do any more finer-grained locking on the artifact
+ // directory, so just lock the entire thing for the duration of this
+ // compile.
+ let lock = dest.open_rw(".cargo-lock", ws.config(), "build directory")?;
+ let root = root.into_path_unlocked();
+ let dest = dest.into_path_unlocked();
+ let deps = dest.join("deps");
+ let artifact = deps.join("artifact");
+
+ Ok(Layout {
+ deps,
+ build: dest.join("build"),
+ artifact,
+ incremental: dest.join("incremental"),
+ fingerprint: dest.join(".fingerprint"),
+ examples: dest.join("examples"),
+ doc: root.join("doc"),
+ tmp: root.join("tmp"),
+ root,
+ dest,
+ _lock: lock,
+ })
+ }
+
+ /// Makes sure all directories stored in the Layout exist on the filesystem.
+ pub fn prepare(&mut self) -> CargoResult<()> {
+ paths::create_dir_all(&self.deps)?;
+ paths::create_dir_all(&self.incremental)?;
+ paths::create_dir_all(&self.fingerprint)?;
+ paths::create_dir_all(&self.examples)?;
+ paths::create_dir_all(&self.build)?;
+
+ Ok(())
+ }
+
+ /// Fetch the destination path for final artifacts (`/…/target/debug`).
+ pub fn dest(&self) -> &Path {
+ &self.dest
+ }
+ /// Fetch the deps path.
+ pub fn deps(&self) -> &Path {
+ &self.deps
+ }
+ /// Fetch the examples path.
+ pub fn examples(&self) -> &Path {
+ &self.examples
+ }
+ /// Fetch the doc path.
+ pub fn doc(&self) -> &Path {
+ &self.doc
+ }
+ /// Fetch the root path (`/…/target`).
+ pub fn root(&self) -> &Path {
+ &self.root
+ }
+ /// Fetch the incremental path.
+ pub fn incremental(&self) -> &Path {
+ &self.incremental
+ }
+ /// Fetch the fingerprint path.
+ pub fn fingerprint(&self) -> &Path {
+ &self.fingerprint
+ }
+ /// Fetch the build script path.
+ pub fn build(&self) -> &Path {
+ &self.build
+ }
+ /// Fetch the artifact path.
+ pub fn artifact(&self) -> &Path {
+ &self.artifact
+ }
+ /// Create and return the tmp path.
+ pub fn prepare_tmp(&self) -> CargoResult<&Path> {
+ paths::create_dir_all(&self.tmp)?;
+ Ok(&self.tmp)
+ }
+}
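Since the layout is just a family of paths hanging off the target root, the composition can be shown with a small standalone sketch. This is illustration only: it skips the `.cargo-lock` file, directory creation, and backup exclusion, and `SimpleLayout`/`simple_layout` are hypothetical names rather than anything this patch adds.

```
use std::path::PathBuf;

// Illustrative stand-in for the `Layout` paths described in the module docs.
struct SimpleLayout {
    root: PathBuf,
    dest: PathBuf,
    deps: PathBuf,
    build: PathBuf,
    incremental: PathBuf,
    fingerprint: PathBuf,
    examples: PathBuf,
    doc: PathBuf,
}

fn simple_layout(target_dir: PathBuf, triple: Option<&str>, dest_name: &str) -> SimpleLayout {
    let mut root = target_dir;
    if let Some(triple) = triple {
        // Cross-compilation nests everything under `target/$TRIPLE`.
        root.push(triple);
    }
    // `dest_name` is "debug" or "release".
    let dest = root.join(dest_name);
    SimpleLayout {
        deps: dest.join("deps"),
        build: dest.join("build"),
        incremental: dest.join("incremental"),
        fingerprint: dest.join(".fingerprint"),
        examples: dest.join("examples"),
        doc: root.join("doc"),
        root,
        dest,
    }
}

fn main() {
    let layout = simple_layout(PathBuf::from("target"), None, "debug");
    assert_eq!(layout.deps, PathBuf::from("target/debug/deps"));
    assert_eq!(layout.doc, PathBuf::from("target/doc"));
}
```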
diff --git a/src/tools/cargo/src/cargo/core/compiler/links.rs b/src/tools/cargo/src/cargo/core/compiler/links.rs
new file mode 100644
index 000000000..4cef2eb39
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/links.rs
@@ -0,0 +1,61 @@
+use super::unit_graph::UnitGraph;
+use crate::core::resolver::errors::describe_path;
+use crate::core::{PackageId, Resolve};
+use crate::util::errors::CargoResult;
+use std::collections::{HashMap, HashSet};
+
+/// Validates [`package.links`] field in the manifest file does not conflict
+/// between packages.
+///
+/// NOTE: This is the *old* links validator. Links are usually validated in the
+/// resolver. However, the `links` field was only added to the index in early
+/// 2018 (see [rust-lang/cargo#4978]), while `links` itself has been around
+/// since 2014, so there are still many crates in the index that don't have
+/// `links` properly set (over 600 at the time of this writing in 2019).
+/// This can probably be removed at some point in the future, though it might
+/// be worth considering fixing the index.
+///
+/// [rust-lang/cargo#4978]: https://github.com/rust-lang/cargo/pull/4978
+/// [`package.links`]: https://doc.rust-lang.org/nightly/cargo/reference/build-scripts.html#the-links-manifest-key
+pub fn validate_links(resolve: &Resolve, unit_graph: &UnitGraph) -> CargoResult<()> {
+ let mut validated: HashSet<PackageId> = HashSet::new();
+ let mut links: HashMap<String, PackageId> = HashMap::new();
+ let mut units: Vec<_> = unit_graph.keys().collect();
+ // Sort primarily to make testing easier.
+ units.sort_unstable();
+ for unit in units {
+ if !validated.insert(unit.pkg.package_id()) {
+ continue;
+ }
+ let lib = match unit.pkg.manifest().links() {
+ Some(lib) => lib,
+ None => continue,
+ };
+ if let Some(&prev) = links.get(lib) {
+ let prev_path = resolve
+ .path_to_top(&prev)
+ .into_iter()
+ .map(|(p, d)| (p, d.and_then(|d| d.iter().next())));
+ let pkg = unit.pkg.package_id();
+ let path = resolve
+ .path_to_top(&pkg)
+ .into_iter()
+ .map(|(p, d)| (p, d.and_then(|d| d.iter().next())));
+ anyhow::bail!(
+ "multiple packages link to native library `{}`, \
+ but a native library can be linked only once\n\
+ \n\
+ {}\nlinks to native library `{}`\n\
+ \n\
+ {}\nalso links to native library `{}`",
+ lib,
+ describe_path(prev_path),
+ lib,
+ describe_path(path),
+ lib
+ )
+ }
+ links.insert(lib.to_string(), unit.pkg.package_id());
+ }
+ Ok(())
+}
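The check above reduces to a first-claim-wins map from native library name to the package that claimed it. Here is a standalone sketch of that idea, with package identity reduced to a plain string for illustration (the names below are made up and not from this patch):

```
use std::collections::HashMap;

// Each entry is (package name, optional `links` value from its manifest).
fn validate_links(packages: &[(&str, Option<&str>)]) -> Result<(), String> {
    let mut links: HashMap<&str, &str> = HashMap::new();
    for &(pkg, lib) in packages {
        let Some(lib) = lib else { continue };
        if let Some(prev) = links.get(lib) {
            // A native library may be linked only once across the whole graph.
            return Err(format!(
                "multiple packages link to native library `{lib}`: `{prev}` and `{pkg}`"
            ));
        }
        links.insert(lib, pkg);
    }
    Ok(())
}

fn main() {
    let ok = [("foo-sys", Some("z")), ("bar", None)];
    assert!(validate_links(&ok).is_ok());

    let conflict = [("foo-sys", Some("z")), ("other-z-sys", Some("z"))];
    assert!(validate_links(&conflict).is_err());
}
```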
diff --git a/src/tools/cargo/src/cargo/core/compiler/lto.rs b/src/tools/cargo/src/cargo/core/compiler/lto.rs
new file mode 100644
index 000000000..e934244c9
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/lto.rs
@@ -0,0 +1,194 @@
+use crate::core::compiler::{BuildContext, CompileMode, CrateType, Unit};
+use crate::core::profiles;
+use crate::util::interning::InternedString;
+
+use crate::util::errors::CargoResult;
+use std::collections::hash_map::{Entry, HashMap};
+
+/// Possible ways to run rustc and request various parts of [LTO].
+///
+/// Variant | Flag | Object Code | Bitcode
+/// -------------------|------------------------|-------------|--------
+/// `Run` | `-C lto=foo` | n/a | n/a
+/// `Off` | `-C lto=off` | n/a | n/a
+/// `OnlyBitcode` | `-C linker-plugin-lto` | | ✓
+/// `ObjectAndBitcode` | | ✓ | ✓
+/// `OnlyObject` | `-C embed-bitcode=no` | ✓ |
+///
+/// [LTO]: https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#lto
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum Lto {
+ /// LTO is run for this rustc, and it's `-Clto=foo`. If the given value is
+ /// None, that corresponds to `-Clto` with no argument, which means do
+ /// "fat" LTO.
+ Run(Option<InternedString>),
+
+ /// LTO has been explicitly listed as "off". This means no thin-local-LTO,
+ /// no LTO anywhere, I really mean it!
+ Off,
+
+ /// This rustc invocation only needs to produce bitcode (it is *only* used
+ /// for LTO), there's no need to produce object files, so we can pass
+ /// `-Clinker-plugin-lto`
+ OnlyBitcode,
+
+ /// This rustc invocation needs to embed bitcode in object files. This means
+ /// that object files may be used for a normal link, and the crate may be
+ /// loaded for LTO later, so both are required.
+ ObjectAndBitcode,
+
+ /// This should not include bitcode. This is primarily to reduce disk
+ /// space usage.
+ OnlyObject,
+}
+
+pub fn generate(bcx: &BuildContext<'_, '_>) -> CargoResult<HashMap<Unit, Lto>> {
+ let mut map = HashMap::new();
+ for unit in bcx.roots.iter() {
+ let root_lto = match unit.profile.lto {
+ // LTO not requested, no need for bitcode.
+ profiles::Lto::Bool(false) => Lto::OnlyObject,
+ profiles::Lto::Off => Lto::Off,
+ _ => {
+ let crate_types = unit.target.rustc_crate_types();
+ if unit.target.for_host() {
+ Lto::OnlyObject
+ } else if needs_object(&crate_types) {
+ lto_when_needs_object(&crate_types)
+ } else {
+ // This may or may not participate in LTO, let's start
+ // with the minimum requirements. This may be expanded in
+ // `calculate` below if necessary.
+ Lto::OnlyBitcode
+ }
+ }
+ };
+ calculate(bcx, &mut map, unit, root_lto)?;
+ }
+ Ok(map)
+}
+
+/// Whether or not any of these crate types need object code.
+fn needs_object(crate_types: &[CrateType]) -> bool {
+ crate_types.iter().any(|k| k.can_lto() || k.is_dynamic())
+}
+
+/// Lto setting to use when this unit needs object code.
+fn lto_when_needs_object(crate_types: &[CrateType]) -> Lto {
+ if crate_types.iter().all(|ct| *ct == CrateType::Dylib) {
+ // A dylib whose parent is running LTO. rustc currently
+ // doesn't support LTO with dylibs, so bitcode is not
+ // needed.
+ Lto::OnlyObject
+ } else {
+ // Mixed rlib with a dylib or cdylib whose parent is running LTO. This
+ // needs both: bitcode for the rlib (for LTO) and object code for the
+ // dylib.
+ Lto::ObjectAndBitcode
+ }
+}
+
+fn calculate(
+ bcx: &BuildContext<'_, '_>,
+ map: &mut HashMap<Unit, Lto>,
+ unit: &Unit,
+ parent_lto: Lto,
+) -> CargoResult<()> {
+ let crate_types = match unit.mode {
+ // Note: Doctest ignores LTO, but for now we'll compute it as-if it is
+ // a Bin, in case it is ever supported in the future.
+ CompileMode::Test | CompileMode::Bench | CompileMode::Doctest => vec![CrateType::Bin],
+ // Notes on other modes:
+ // - Check: Treat as the underlying type, it doesn't really matter.
+ // - Doc: LTO is N/A for the Doc unit itself since rustdoc does not
+ // support codegen flags. We still compute the dependencies, which
+ // are mostly `Check`.
+ // - RunCustomBuild is ignored because it is always "for_host".
+ _ => unit.target.rustc_crate_types(),
+ };
+ // LTO can only be performed if *all* of the crate types support it.
+ // For example, a cdylib/rlib combination won't allow LTO.
+ let all_lto_types = crate_types.iter().all(CrateType::can_lto);
+ // Compute the LTO based on the profile, and what our parent requires.
+ let lto = if unit.target.for_host() {
+ // Disable LTO for host builds since we only really want to perform LTO
+ // for the final binary, and LTO on plugins/build scripts/proc macros is
+ // largely not desired.
+ Lto::OnlyObject
+ } else if all_lto_types {
+ // Note that this ignores the `parent_lto` because this isn't a
+ // linkable crate type; this unit is not being embedded in the parent.
+ match unit.profile.lto {
+ profiles::Lto::Named(s) => Lto::Run(Some(s)),
+ profiles::Lto::Off => Lto::Off,
+ profiles::Lto::Bool(true) => Lto::Run(None),
+ profiles::Lto::Bool(false) => Lto::OnlyObject,
+ }
+ } else {
+ match (parent_lto, needs_object(&crate_types)) {
+ // An rlib whose parent is running LTO, we only need bitcode.
+ (Lto::Run(_), false) => Lto::OnlyBitcode,
+ // LTO when something needs object code.
+ (Lto::Run(_), true) | (Lto::OnlyBitcode, true) => lto_when_needs_object(&crate_types),
+ // LTO is disabled, continue to disable it.
+ (Lto::Off, _) => Lto::Off,
+ // If this doesn't have any requirements, or the requirements are
+ // already satisfied, then stay with our parent.
+ (_, false) | (Lto::OnlyObject, true) | (Lto::ObjectAndBitcode, true) => parent_lto,
+ }
+ };
+
+ // Merge the computed LTO. If this unit appears multiple times in the
+ // graph, the merge may expand the requirements.
+ let merged_lto = match map.entry(unit.clone()) {
+ // If we haven't seen this unit before then insert our value and keep
+ // going.
+ Entry::Vacant(v) => *v.insert(lto),
+
+ Entry::Occupied(mut v) => {
+ let result = match (lto, v.get()) {
+ // No change in requirements.
+ (Lto::OnlyBitcode, Lto::OnlyBitcode) => Lto::OnlyBitcode,
+ (Lto::OnlyObject, Lto::OnlyObject) => Lto::OnlyObject,
+
+ // Once we're running LTO we keep running LTO. We should always
+ // calculate the same thing here each iteration because if we
+ // see this twice then it means, for example, two unit tests
+ // depend on a binary, which is normal.
+ (Lto::Run(s), _) | (_, &Lto::Run(s)) => Lto::Run(s),
+
+ // Off means off! This has the same reasoning as `Lto::Run`.
+ (Lto::Off, _) | (_, Lto::Off) => Lto::Off,
+
+ // Once a target has requested both, that's the maximal amount
+ // of work that can be done, so we just keep doing that work.
+ (Lto::ObjectAndBitcode, _) | (_, Lto::ObjectAndBitcode) => Lto::ObjectAndBitcode,
+
+ // Upgrade so that both requirements can be met.
+ //
+ // This is where the trickiness happens. This unit needs
+ // bitcode and the previously calculated value for this unit
+ // says it didn't need bitcode (or vice versa). This means that
+ // we're a shared dependency between some targets which require
+ // LTO and some which don't. This means that instead of being
+ // either only-objects or only-bitcode we have to embed both in
+ // rlibs (used for different compilations), so we switch to
+ // including both.
+ (Lto::OnlyObject, Lto::OnlyBitcode) | (Lto::OnlyBitcode, Lto::OnlyObject) => {
+ Lto::ObjectAndBitcode
+ }
+ };
+ // No need to recurse if we calculated the same value as before.
+ if result == *v.get() {
+ return Ok(());
+ }
+ v.insert(result);
+ result
+ }
+ };
+
+ for dep in &bcx.unit_graph[unit] {
+ calculate(bcx, map, &dep.unit, merged_lto)?;
+ }
+ Ok(())
+}
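The merge in `calculate` behaves like a small lattice: each time a unit is reached through another parent, its requirement can only stay the same or widen. Below is a standalone sketch of those merge rules, omitting the `Run` payload for brevity (among the remaining variants `Off` dominates, matching the arms above); the `Need` type is hypothetical and only for illustration.

```
#[derive(Clone, Copy, PartialEq, Debug)]
enum Need {
    Off,
    OnlyObject,
    OnlyBitcode,
    ObjectAndBitcode,
}

// Merge a newly computed requirement with the one previously stored for a unit.
fn merge(new: Need, prev: Need) -> Need {
    use Need::*;
    match (new, prev) {
        // No change in requirements.
        (a, b) if a == b => a,
        // Off means off, mirroring the `Lto::Off` arm above.
        (Off, _) | (_, Off) => Off,
        // Both object code and bitcode already requested: keep doing the
        // maximal amount of work.
        (ObjectAndBitcode, _) | (_, ObjectAndBitcode) => ObjectAndBitcode,
        // Object-only and bitcode-only meet by embedding both in the rlib.
        (OnlyObject, OnlyBitcode) | (OnlyBitcode, OnlyObject) => ObjectAndBitcode,
        // All remaining combinations are covered by the arms above.
        _ => unreachable!(),
    }
}

fn main() {
    assert_eq!(merge(Need::OnlyBitcode, Need::OnlyObject), Need::ObjectAndBitcode);
    assert_eq!(merge(Need::Off, Need::ObjectAndBitcode), Need::Off);
    assert_eq!(merge(Need::OnlyBitcode, Need::OnlyBitcode), Need::OnlyBitcode);
}
```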
diff --git a/src/tools/cargo/src/cargo/core/compiler/mod.rs b/src/tools/cargo/src/cargo/core/compiler/mod.rs
new file mode 100644
index 000000000..7b43fd27d
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/mod.rs
@@ -0,0 +1,1795 @@
+//! # Interact with the compiler
+//!
+//! If you consider [`ops::cargo_compile::compile`] as a `rustc` driver, but on
+//! the Cargo side, then this module is roughly the `rustc_interface` counterpart.
+//! It contains all the interaction between Cargo and the rustc compiler,
+//! from preparing the context for the entire build process, to scheduling
+//! and executing each unit of work (e.g. running `rustc`), to managing and
+//! caching the output artifact of a build.
+//!
+//! However, it hasn't yet exposed a clear definition of each phase or session,
+//! as rustc has done[^1], and it is unclear whether Cargo really needs that.
+//! To be pragmatic, here is a handful of items you may want to learn about:
+//!
+//! * [`BuildContext`] is a static context containing all information you need
+//! before a build gets started.
+//! * [`Context`] is the center of the world, coordinating a running build and
+//! collecting information from it.
+//! * [`custom_build`] is the home of build script executions and output parsing.
+//! * [`fingerprint`] not only defines but also executes a set of rules to
+//! determine if a re-compile is needed.
+//! * [`job_queue`] is where the parallelism, job scheduling, and communication
+//! machinery happen between Cargo and the compiler.
+//! * [`layout`] defines and manages output artifacts of a build in the filesystem.
+//! * [`unit_dependencies`] is for building a dependency graph for compilation
+//! from the result of dependency resolution.
+//! * [`Unit`] contains sufficient information to build something, usually
+//! turning into a compiler invocation in a later phase.
+//!
+//! [^1]: Maybe [`-Zbuild-plan`](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-plan)
+//! was designed to serve that purpose but still [in flux](https://github.com/rust-lang/cargo/issues/7614).
+//!
+//! [`ops::cargo_compile::compile`]: crate::ops::compile
+
+pub mod artifact;
+mod build_config;
+pub(crate) mod build_context;
+mod build_plan;
+mod compilation;
+mod compile_kind;
+pub(crate) mod context;
+mod crate_type;
+mod custom_build;
+pub(crate) mod fingerprint;
+pub mod future_incompat;
+pub(crate) mod job_queue;
+pub(crate) mod layout;
+mod links;
+mod lto;
+mod output_depinfo;
+pub mod rustdoc;
+pub mod standard_lib;
+mod timings;
+mod unit;
+pub mod unit_dependencies;
+pub mod unit_graph;
+
+use std::collections::{HashMap, HashSet};
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fmt::Display;
+use std::fs::{self, File};
+use std::io::{BufRead, Write};
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+
+use anyhow::{Context as _, Error};
+use lazycell::LazyCell;
+use log::{debug, trace};
+
+pub use self::build_config::{BuildConfig, CompileMode, MessageFormat, TimingOutput};
+pub use self::build_context::{
+ BuildContext, FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo,
+};
+use self::build_plan::BuildPlan;
+pub use self::compilation::{Compilation, Doctest, UnitOutput};
+pub use self::compile_kind::{CompileKind, CompileTarget};
+pub use self::context::{Context, Metadata};
+pub use self::crate_type::CrateType;
+pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts};
+pub(crate) use self::fingerprint::DirtyReason;
+pub use self::job_queue::Freshness;
+use self::job_queue::{Job, JobQueue, JobState, Work};
+pub(crate) use self::layout::Layout;
+pub use self::lto::Lto;
+use self::output_depinfo::output_depinfo;
+use self::unit_graph::UnitDep;
+use crate::core::compiler::future_incompat::FutureIncompatReport;
+pub use crate::core::compiler::unit::{Unit, UnitInterner};
+use crate::core::manifest::TargetSourcePath;
+use crate::core::profiles::{PanicStrategy, Profile, Strip};
+use crate::core::{Feature, PackageId, Target, Verbosity};
+use crate::util::errors::{CargoResult, VerboseError};
+use crate::util::interning::InternedString;
+use crate::util::machine_message::{self, Message};
+use crate::util::{add_path_args, internal, iter_join_onto, profile};
+use cargo_util::{paths, ProcessBuilder, ProcessError};
+use rustfix::diagnostics::Applicability;
+
+const RUSTDOC_CRATE_VERSION_FLAG: &str = "--crate-version";
+
+// TODO: Rename this to `ExtraLinkArgFor` or something else, and move to compiler/custom_build.rs?
+/// Represents one of the instructions from the `cargo:rustc-link-arg-*` build
+/// script instruction family.
+///
+/// In other words, indicates the targets that custom linker arguments apply to.
+#[derive(Clone, Hash, Debug, PartialEq, Eq)]
+pub enum LinkType {
+ /// Represents `cargo:rustc-link-arg=FLAG`.
+ All,
+ /// Represents `cargo:rustc-cdylib-link-arg=FLAG`.
+ Cdylib,
+ /// Represents `cargo:rustc-link-arg-bins=FLAG`.
+ Bin,
+ /// Represents `cargo:rustc-link-arg-bin=BIN=FLAG`.
+ SingleBin(String),
+ /// Represents `cargo:rustc-link-arg-tests=FLAG`.
+ Test,
+ /// Represents `cargo:rustc-link-arg-benches=FLAG`.
+ Bench,
+ /// Represents `cargo:rustc-link-arg-examples=FLAG`.
+ Example,
+}
+
+impl LinkType {
+ /// Checks if this link type applies to a given [`Target`].
+ pub fn applies_to(&self, target: &Target) -> bool {
+ match self {
+ LinkType::All => true,
+ LinkType::Cdylib => target.is_cdylib(),
+ LinkType::Bin => target.is_bin(),
+ LinkType::SingleBin(name) => target.is_bin() && target.name() == name,
+ LinkType::Test => target.is_test(),
+ LinkType::Bench => target.is_bench(),
+ LinkType::Example => target.is_exe_example(),
+ }
+ }
+}
+
+/// A glorified callback for executing calls to rustc. Rather than calling rustc
+/// directly, we'll use an `Executor`, giving clients an opportunity to intercept
+/// the build calls.
+pub trait Executor: Send + Sync + 'static {
+ /// Called after a rustc process invocation is prepared up-front for a given
+ /// unit of work (may still be modified for runtime-known dependencies, when
+ /// the work is actually executed).
+ fn init(&self, _cx: &Context<'_, '_>, _unit: &Unit) {}
+
+ /// In case of an `Err`, Cargo will not continue with the build process for
+ /// this package.
+ fn exec(
+ &self,
+ cmd: &ProcessBuilder,
+ id: PackageId,
+ target: &Target,
+ mode: CompileMode,
+ on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>,
+ on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>,
+ ) -> CargoResult<()>;
+
+ /// Queried when queuing each unit of work. If it returns true, then the
+ /// unit will always be rebuilt, independent of whether it needs to be.
+ fn force_rebuild(&self, _unit: &Unit) -> bool {
+ false
+ }
+}
+
+/// A `DefaultExecutor` calls rustc without doing anything else. It is Cargo's
+/// default behaviour.
+#[derive(Copy, Clone)]
+pub struct DefaultExecutor;
+
+impl Executor for DefaultExecutor {
+ fn exec(
+ &self,
+ cmd: &ProcessBuilder,
+ _id: PackageId,
+ _target: &Target,
+ _mode: CompileMode,
+ on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>,
+ on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>,
+ ) -> CargoResult<()> {
+ cmd.exec_with_streaming(on_stdout_line, on_stderr_line, false)
+ .map(drop)
+ }
+}
+
+/// Builds up and enqueues a list of pending jobs onto the `jobs` queue.
+///
+/// Starting from the `unit`, this function recursively calls itself to build
+/// all jobs for dependencies of the `unit`. Each of these jobs represents
+/// compiling a particular package.
+///
+/// Note that **no actual work is executed as part of this**; that's all done
+/// later by the [`JobQueue::execute`] function, which runs everything
+/// in order with proper parallelism.
+fn compile<'cfg>(
+ cx: &mut Context<'_, 'cfg>,
+ jobs: &mut JobQueue<'cfg>,
+ plan: &mut BuildPlan,
+ unit: &Unit,
+ exec: &Arc<dyn Executor>,
+ force_rebuild: bool,
+) -> CargoResult<()> {
+ let bcx = cx.bcx;
+ let build_plan = bcx.build_config.build_plan;
+ if !cx.compiled.insert(unit.clone()) {
+ return Ok(());
+ }
+
+ // Build up the work to be done to compile this unit, enqueuing it once
+ // we've got everything constructed.
+ let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name()));
+ fingerprint::prepare_init(cx, unit)?;
+
+ let job = if unit.mode.is_run_custom_build() {
+ custom_build::prepare(cx, unit)?
+ } else if unit.mode.is_doc_test() {
+ // We run these targets later, so this is just a no-op for now.
+ Job::new_fresh()
+ } else if build_plan {
+ Job::new_dirty(rustc(cx, unit, &exec.clone())?, None)
+ } else {
+ let force = exec.force_rebuild(unit) || force_rebuild;
+ let mut job = fingerprint::prepare_target(cx, unit, force)?;
+ job.before(if job.freshness().is_dirty() {
+ let work = if unit.mode.is_doc() || unit.mode.is_doc_scrape() {
+ rustdoc(cx, unit)?
+ } else {
+ rustc(cx, unit, exec)?
+ };
+ work.then(link_targets(cx, unit, false)?)
+ } else {
+ // We always replay the output cache,
+ // since it might contain future-incompat-report messages
+ let work = replay_output_cache(
+ unit.pkg.package_id(),
+ PathBuf::from(unit.pkg.manifest_path()),
+ &unit.target,
+ cx.files().message_cache_path(unit),
+ cx.bcx.build_config.message_format,
+ cx.bcx.config.shell().err_supports_color(),
+ unit.show_warnings(bcx.config),
+ );
+ // Need to link targets on both the dirty and fresh paths.
+ work.then(link_targets(cx, unit, true)?)
+ });
+
+ job
+ };
+ jobs.enqueue(cx, unit, job)?;
+ drop(p);
+
+ // Be sure to compile all dependencies of this target as well.
+ let deps = Vec::from(cx.unit_deps(unit)); // Create vec due to mutable borrow.
+ for dep in deps {
+ compile(cx, jobs, plan, &dep.unit, exec, false)?;
+ }
+ if build_plan {
+ plan.add(cx, unit)?;
+ }
+
+ Ok(())
+}
+
+/// Generates the warning message used when fallible doc-scrape units fail,
+/// either for rustdoc or rustc.
+fn make_failed_scrape_diagnostic(
+ cx: &Context<'_, '_>,
+ unit: &Unit,
+ top_line: impl Display,
+) -> String {
+ let manifest_path = unit.pkg.manifest_path();
+ let relative_manifest_path = manifest_path
+ .strip_prefix(cx.bcx.ws.root())
+ .unwrap_or(&manifest_path);
+
+ format!(
+ "\
+{top_line}
+ Try running with `--verbose` to see the error message.
+ If an example should not be scanned, then consider adding `doc-scrape-examples = false` to its `[[example]]` definition in {}",
+ relative_manifest_path.display()
+ )
+}
+
+/// Creates a unit of work invoking `rustc` for building the `unit`.
+fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc<dyn Executor>) -> CargoResult<Work> {
+ let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?;
+ let build_plan = cx.bcx.build_config.build_plan;
+
+ let name = unit.pkg.name().to_string();
+ let buildkey = unit.buildkey();
+
+ add_cap_lints(cx.bcx, unit, &mut rustc);
+
+ let outputs = cx.outputs(unit)?;
+ let root = cx.files().out_dir(unit);
+
+ // Prepare the native lib state (extra `-L` and `-l` flags).
+ let build_script_outputs = Arc::clone(&cx.build_script_outputs);
+ let current_id = unit.pkg.package_id();
+ let manifest_path = PathBuf::from(unit.pkg.manifest_path());
+ let build_scripts = cx.build_scripts.get(unit).cloned();
+
+ // If we are a binary and the package also contains a library, then we
+ // don't pass the `-l` flags.
+ let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib());
+
+ let dep_info_name = if cx.files().use_extra_filename(unit) {
+ format!(
+ "{}-{}.d",
+ unit.target.crate_name(),
+ cx.files().metadata(unit)
+ )
+ } else {
+ format!("{}.d", unit.target.crate_name())
+ };
+ let rustc_dep_info_loc = root.join(dep_info_name);
+ let dep_info_loc = fingerprint::dep_info_loc(cx, unit);
+
+ rustc.args(cx.bcx.rustflags_args(unit));
+ if cx.bcx.config.cli_unstable().binary_dep_depinfo {
+ rustc.arg("-Z").arg("binary-dep-depinfo");
+ }
+ let mut output_options = OutputOptions::new(cx, unit);
+ let package_id = unit.pkg.package_id();
+ let target = Target::clone(&unit.target);
+ let mode = unit.mode;
+
+ exec.init(cx, unit);
+ let exec = exec.clone();
+
+ let root_output = cx.files().host_dest().to_path_buf();
+ let target_dir = cx.bcx.ws.target_dir().into_path_unlocked();
+ let pkg_root = unit.pkg.root().to_path_buf();
+ let cwd = rustc
+ .get_cwd()
+ .unwrap_or_else(|| cx.bcx.config.cwd())
+ .to_path_buf();
+ let fingerprint_dir = cx.files().fingerprint_dir(unit);
+ let script_metadata = cx.find_build_script_metadata(unit);
+ let is_local = unit.is_local();
+ let artifact = unit.artifact;
+
+ let hide_diagnostics_for_scrape_unit = cx.bcx.unit_can_fail_for_docscraping(unit)
+ && !matches!(cx.bcx.config.shell().verbosity(), Verbosity::Verbose);
+ let failed_scrape_diagnostic = hide_diagnostics_for_scrape_unit.then(|| {
+ // If this unit is needed for doc-scraping, then we generate a diagnostic that
+ // describes the set of reverse-dependencies that cause the unit to be needed.
+ let target_desc = unit.target.description_named();
+ let mut for_scrape_units = cx
+ .bcx
+ .scrape_units_have_dep_on(unit)
+ .into_iter()
+ .map(|unit| unit.target.description_named())
+ .collect::<Vec<_>>();
+ for_scrape_units.sort();
+ let for_scrape_units = for_scrape_units.join(", ");
+ make_failed_scrape_diagnostic(cx, unit, format_args!("failed to check {target_desc} in package `{name}` as a prerequisite for scraping examples from: {for_scrape_units}"))
+ });
+ if hide_diagnostics_for_scrape_unit {
+ output_options.show_diagnostics = false;
+ }
+
+ return Ok(Work::new(move |state| {
+ // Artifacts are in a different location than typical units,
+ // hence we must ensure that the crate- and target-dependent
+ // directory is present.
+ if artifact.is_true() {
+ paths::create_dir_all(&root)?;
+ }
+
+ // Only at runtime have we discovered what the extra -L and -l
+ // arguments are for native libraries, so we process those here. We
+ // also need to be sure to add any -L paths for our plugins to the
+ // dynamic library load path as a plugin's dynamic library may be
+ // located somewhere in there.
+ // Finally, if custom environment variables have been produced by
+ // previous build scripts, we include them in the rustc invocation.
+ if let Some(build_scripts) = build_scripts {
+ let script_outputs = build_script_outputs.lock().unwrap();
+ if !build_plan {
+ add_native_deps(
+ &mut rustc,
+ &script_outputs,
+ &build_scripts,
+ pass_l_flag,
+ &target,
+ current_id,
+ )?;
+ add_plugin_deps(&mut rustc, &script_outputs, &build_scripts, &root_output)?;
+ }
+ add_custom_flags(&mut rustc, &script_outputs, script_metadata)?;
+ }
+
+ for output in outputs.iter() {
+ // If there is both an rmeta and rlib, rustc will prefer to use the
+ // rlib, even if it is older. Therefore, we must delete the rlib to
+ // force using the new rmeta.
+ if output.path.extension() == Some(OsStr::new("rmeta")) {
+ let dst = root.join(&output.path).with_extension("rlib");
+ if dst.exists() {
+ paths::remove_file(&dst)?;
+ }
+ }
+
+ // Some linkers do not remove the executable, but truncate and modify it.
+ // That results in the old hard link being modified even after it has been renamed.
+ // We delete the old artifact here to prevent this behavior from confusing users.
+ // See rust-lang/cargo#8348.
+ if output.hardlink.is_some() && output.path.exists() {
+ _ = paths::remove_file(&output.path).map_err(|e| {
+ log::debug!(
+ "failed to delete previous output file `{:?}`: {e:?}",
+ output.path
+ );
+ });
+ }
+ }
+
+ fn verbose_if_simple_exit_code(err: Error) -> Error {
+ // If a signal on unix (`code == None`) or an abnormal termination
+ // on Windows (codes like `0xC0000409`), don't hide the error details.
+ match err
+ .downcast_ref::<ProcessError>()
+ .as_ref()
+ .and_then(|perr| perr.code)
+ {
+ Some(n) if cargo_util::is_simple_exit_code(n) => VerboseError::new(err).into(),
+ _ => err,
+ }
+ }
+
+ state.running(&rustc);
+ let timestamp = paths::set_invocation_time(&fingerprint_dir)?;
+ if build_plan {
+ state.build_plan(buildkey, rustc.clone(), outputs.clone());
+ } else {
+ let result = exec
+ .exec(
+ &rustc,
+ package_id,
+ &target,
+ mode,
+ &mut |line| on_stdout_line(state, line, package_id, &target),
+ &mut |line| {
+ on_stderr_line(
+ state,
+ line,
+ package_id,
+ &manifest_path,
+ &target,
+ &mut output_options,
+ )
+ },
+ )
+ .map_err(verbose_if_simple_exit_code)
+ .with_context(|| {
+ // adapted from rustc_errors/src/lib.rs
+ let warnings = match output_options.warnings_seen {
+ 0 => String::new(),
+ 1 => "; 1 warning emitted".to_string(),
+ count => format!("; {} warnings emitted", count),
+ };
+ let errors = match output_options.errors_seen {
+ 0 => String::new(),
+ 1 => " due to previous error".to_string(),
+ count => format!(" due to {} previous errors", count),
+ };
+ let name = descriptive_pkg_name(&name, &target, &mode);
+ format!("could not compile {name}{errors}{warnings}")
+ });
+
+ if let Err(e) = result {
+ if let Some(diagnostic) = failed_scrape_diagnostic {
+ state.warning(diagnostic)?;
+ }
+
+ return Err(e);
+ }
+
+ // Exec should never return with success *and* generate an error.
+ debug_assert_eq!(output_options.errors_seen, 0);
+ }
+
+ if rustc_dep_info_loc.exists() {
+ fingerprint::translate_dep_info(
+ &rustc_dep_info_loc,
+ &dep_info_loc,
+ &cwd,
+ &pkg_root,
+ &target_dir,
+ &rustc,
+ // Do not track source files in the fingerprint for registry dependencies.
+ is_local,
+ )
+ .with_context(|| {
+ internal(format!(
+ "could not parse/generate dep info at: {}",
+ rustc_dep_info_loc.display()
+ ))
+ })?;
+ // This mtime shift allows Cargo to detect if a source file was
+ // modified in the middle of the build.
+ paths::set_file_time_no_err(dep_info_loc, timestamp);
+ }
+
+ Ok(())
+ }));
+
+ // Add all relevant `-L` and `-l` flags from dependencies (now calculated and
+ // present in `state`) to the command provided.
+ fn add_native_deps(
+ rustc: &mut ProcessBuilder,
+ build_script_outputs: &BuildScriptOutputs,
+ build_scripts: &BuildScripts,
+ pass_l_flag: bool,
+ target: &Target,
+ current_id: PackageId,
+ ) -> CargoResult<()> {
+ for key in build_scripts.to_link.iter() {
+ let output = build_script_outputs.get(key.1).ok_or_else(|| {
+ internal(format!(
+ "couldn't find build script output for {}/{}",
+ key.0, key.1
+ ))
+ })?;
+ for path in output.library_paths.iter() {
+ rustc.arg("-L").arg(path);
+ }
+
+ if key.0 == current_id {
+ if pass_l_flag {
+ for name in output.library_links.iter() {
+ rustc.arg("-l").arg(name);
+ }
+ }
+ }
+
+ for (lt, arg) in &output.linker_args {
+ // There was an unintentional change where cdylibs were
+ // allowed to be passed via transitive dependencies. This
+ // clause should have been kept in the `if` block above. For
+ // now, continue allowing it for cdylib only.
+ // See https://github.com/rust-lang/cargo/issues/9562
+ if lt.applies_to(target) && (key.0 == current_id || *lt == LinkType::Cdylib) {
+ rustc.arg("-C").arg(format!("link-arg={}", arg));
+ }
+ }
+ }
+ Ok(())
+ }
+}
+
+/// Link the compiled target (often of the form `foo-{metadata_hash}`) to the
+/// final target. This must happen during both "Fresh" and "Compile".
+fn link_targets(cx: &mut Context<'_, '_>, unit: &Unit, fresh: bool) -> CargoResult<Work> {
+ let bcx = cx.bcx;
+ let outputs = cx.outputs(unit)?;
+ let export_dir = cx.files().export_dir();
+ let package_id = unit.pkg.package_id();
+ let manifest_path = PathBuf::from(unit.pkg.manifest_path());
+ let profile = unit.profile.clone();
+ let unit_mode = unit.mode;
+ let features = unit.features.iter().map(|s| s.to_string()).collect();
+ let json_messages = bcx.build_config.emit_json();
+ let executable = cx.get_executable(unit)?;
+ let mut target = Target::clone(&unit.target);
+ if let TargetSourcePath::Metabuild = target.src_path() {
+ // Give it something to serialize.
+ let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir());
+ target.set_src_path(TargetSourcePath::Path(path));
+ }
+
+ Ok(Work::new(move |state| {
+ // If we're a "root crate", i.e., the target of this compilation, then we
+ // hard link our outputs out of the `deps` directory into the directory
+ // above. This means that `cargo build` will produce binaries in
+ // `target/debug`, which is what one probably expects.
+ let mut destinations = vec![];
+ for output in outputs.iter() {
+ let src = &output.path;
+ // This may have been a `cargo rustc` command which changes the
+ // output, so the source may not actually exist.
+ if !src.exists() {
+ continue;
+ }
+ let dst = match output.hardlink.as_ref() {
+ Some(dst) => dst,
+ None => {
+ destinations.push(src.clone());
+ continue;
+ }
+ };
+ destinations.push(dst.clone());
+ paths::link_or_copy(src, dst)?;
+ if let Some(ref path) = output.export_path {
+ let export_dir = export_dir.as_ref().unwrap();
+ paths::create_dir_all(export_dir)?;
+
+ paths::link_or_copy(src, path)?;
+ }
+ }
+
+ if json_messages {
+ let art_profile = machine_message::ArtifactProfile {
+ opt_level: profile.opt_level.as_str(),
+ debuginfo: profile.debuginfo.to_option(),
+ debug_assertions: profile.debug_assertions,
+ overflow_checks: profile.overflow_checks,
+ test: unit_mode.is_any_test(),
+ };
+
+ let msg = machine_message::Artifact {
+ package_id,
+ manifest_path,
+ target: &target,
+ profile: art_profile,
+ features,
+ filenames: destinations,
+ executable,
+ fresh,
+ }
+ .to_json_string();
+ state.stdout(msg)?;
+ }
+ Ok(())
+ }))
+}
+
+// For all plugin dependencies, add their -L paths (now calculated and present
+// in `build_script_outputs`) to the dynamic library load path for the command
+// to execute.
+fn add_plugin_deps(
+ rustc: &mut ProcessBuilder,
+ build_script_outputs: &BuildScriptOutputs,
+ build_scripts: &BuildScripts,
+ root_output: &Path,
+) -> CargoResult<()> {
+ let var = paths::dylib_path_envvar();
+ let search_path = rustc.get_env(var).unwrap_or_default();
+ let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
+ for (pkg_id, metadata) in &build_scripts.plugins {
+ let output = build_script_outputs
+ .get(*metadata)
+ .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", pkg_id)))?;
+ search_path.append(&mut filter_dynamic_search_path(
+ output.library_paths.iter(),
+ root_output,
+ ));
+ }
+ let search_path = paths::join_paths(&search_path, var)?;
+ rustc.env(var, &search_path);
+ Ok(())
+}
+
+// Determine paths to add to the dynamic search path from -L entries
+//
+// Strip off prefixes like "native=" or "framework=" and filter out directories
+// **not** inside our output directory since they are likely spurious and can cause
+// clashes with system shared libraries (issue #3366).
+fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &Path) -> Vec<PathBuf>
+where
+ I: Iterator<Item = &'a PathBuf>,
+{
+ let mut search_path = vec![];
+ for dir in paths {
+ let dir = match dir.to_str() {
+ Some(s) => {
+ let mut parts = s.splitn(2, '=');
+ match (parts.next(), parts.next()) {
+ (Some("native"), Some(path))
+ | (Some("crate"), Some(path))
+ | (Some("dependency"), Some(path))
+ | (Some("framework"), Some(path))
+ | (Some("all"), Some(path)) => path.into(),
+ _ => dir.clone(),
+ }
+ }
+ None => dir.clone(),
+ };
+ if dir.starts_with(&root_output) {
+ search_path.push(dir);
+ } else {
+ debug!(
+ "Not including path {} in runtime library search path because it is \
+ outside target root {}",
+ dir.display(),
+ root_output.display()
+ );
+ }
+ }
+ search_path
+}
+
+// TODO: do we really need this as a separate function?
+// Maybe we should reorganize `rustc` fn to make it more traceable and readable.
+fn prepare_rustc(
+ cx: &mut Context<'_, '_>,
+ crate_types: &[CrateType],
+ unit: &Unit,
+) -> CargoResult<ProcessBuilder> {
+ let is_primary = cx.is_primary_package(unit);
+ let is_workspace = cx.bcx.ws.is_member(&unit.pkg);
+
+ let mut base = cx
+ .compilation
+ .rustc_process(unit, is_primary, is_workspace)?;
+
+ if is_primary {
+ base.env("CARGO_PRIMARY_PACKAGE", "1");
+ }
+
+ if unit.target.is_test() || unit.target.is_bench() {
+ let tmp = cx.files().layout(unit.kind).prepare_tmp()?;
+ base.env("CARGO_TARGET_TMPDIR", tmp.display().to_string());
+ }
+
+ base.inherit_jobserver(&cx.jobserver);
+ build_base_args(cx, &mut base, unit, crate_types)?;
+ build_deps_args(&mut base, cx, unit)?;
+ Ok(base)
+}
+
+/// Creates a unit of work invoking `rustdoc` for documenting the `unit`.
+fn rustdoc(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Work> {
+ let bcx = cx.bcx;
+ // script_metadata is not needed here; it is only used for tests.
+ let mut rustdoc = cx.compilation.rustdoc_process(unit, None)?;
+ rustdoc.inherit_jobserver(&cx.jobserver);
+ let crate_name = unit.target.crate_name();
+ rustdoc.arg("--crate-name").arg(&crate_name);
+ add_path_args(bcx.ws, unit, &mut rustdoc);
+ add_cap_lints(bcx, unit, &mut rustdoc);
+
+ if let CompileKind::Target(target) = unit.kind {
+ rustdoc.arg("--target").arg(target.rustc_target());
+ }
+ let doc_dir = cx.files().out_dir(unit);
+
+ // Create the documentation directory ahead of time as rustdoc currently has
+ // a bug where concurrent invocations will race to create this directory if
+ // it doesn't already exist.
+ paths::create_dir_all(&doc_dir)?;
+
+ rustdoc.arg("-o").arg(&doc_dir);
+ rustdoc.args(&features_args(unit));
+ rustdoc.args(&check_cfg_args(cx, unit));
+
+ add_error_format_and_color(cx, &mut rustdoc);
+ add_allow_features(cx, &mut rustdoc);
+
+ if let Some(args) = cx.bcx.extra_args_for(unit) {
+ rustdoc.args(args);
+ }
+
+ let metadata = cx.metadata_for_doc_units[unit];
+ rustdoc.arg("-C").arg(format!("metadata={}", metadata));
+
+ let scrape_output_path = |unit: &Unit| -> CargoResult<PathBuf> {
+ cx.outputs(unit).map(|outputs| outputs[0].path.clone())
+ };
+
+ if unit.mode.is_doc_scrape() {
+ debug_assert!(cx.bcx.scrape_units.contains(unit));
+
+ if unit.target.is_test() {
+ rustdoc.arg("--scrape-tests");
+ }
+
+ rustdoc.arg("-Zunstable-options");
+
+ rustdoc
+ .arg("--scrape-examples-output-path")
+ .arg(scrape_output_path(unit)?);
+
+ // Only scrape examples for items from crates in the workspace, to reduce the generated file size
+ for pkg in cx.bcx.ws.members() {
+ let names = pkg
+ .targets()
+ .iter()
+ .map(|target| target.crate_name())
+ .collect::<HashSet<_>>();
+ for name in names {
+ rustdoc.arg("--scrape-examples-target-crate").arg(name);
+ }
+ }
+ }
+
+ let should_include_scrape_units = unit.mode.is_doc()
+ && cx.bcx.scrape_units.len() > 0
+ && cx.bcx.ws.unit_needs_doc_scrape(unit);
+ let scrape_outputs = if should_include_scrape_units {
+ rustdoc.arg("-Zunstable-options");
+ Some(
+ cx.bcx
+ .scrape_units
+ .iter()
+ .map(|unit| Ok((cx.files().metadata(unit), scrape_output_path(unit)?)))
+ .collect::<CargoResult<HashMap<_, _>>>()?,
+ )
+ } else {
+ None
+ };
+
+ build_deps_args(&mut rustdoc, cx, unit)?;
+ rustdoc::add_root_urls(cx, unit, &mut rustdoc)?;
+
+ rustdoc.args(bcx.rustdocflags_args(unit));
+
+ if !crate_version_flag_already_present(&rustdoc) {
+ append_crate_version_flag(unit, &mut rustdoc);
+ }
+
+ let target_desc = unit.target.description_named();
+ let name = unit.pkg.name().to_string();
+ let build_script_outputs = Arc::clone(&cx.build_script_outputs);
+ let package_id = unit.pkg.package_id();
+ let manifest_path = PathBuf::from(unit.pkg.manifest_path());
+ let target = Target::clone(&unit.target);
+ let mut output_options = OutputOptions::new(cx, unit);
+ let script_metadata = cx.find_build_script_metadata(unit);
+
+ let failed_scrape_units = Arc::clone(&cx.failed_scrape_units);
+ let hide_diagnostics_for_scrape_unit = cx.bcx.unit_can_fail_for_docscraping(unit)
+ && !matches!(cx.bcx.config.shell().verbosity(), Verbosity::Verbose);
+ let failed_scrape_diagnostic = hide_diagnostics_for_scrape_unit.then(|| {
+ make_failed_scrape_diagnostic(
+ cx,
+ unit,
+ format_args!("failed to scan {target_desc} in package `{name}` for example code usage"),
+ )
+ });
+ if hide_diagnostics_for_scrape_unit {
+ output_options.show_diagnostics = false;
+ }
+
+ Ok(Work::new(move |state| {
+ add_custom_flags(
+ &mut rustdoc,
+ &build_script_outputs.lock().unwrap(),
+ script_metadata,
+ )?;
+
+ // Add the output of scraped examples to the rustdoc command.
+ // This action must happen after the unit's dependencies have finished,
+ // because some of those deps may be Docscrape units which have failed.
+ // So we dynamically determine which `--with-examples` flags to pass here.
+ if let Some(scrape_outputs) = scrape_outputs {
+ let failed_scrape_units = failed_scrape_units.lock().unwrap();
+ for (metadata, output_path) in &scrape_outputs {
+ if !failed_scrape_units.contains(metadata) {
+ rustdoc.arg("--with-examples").arg(output_path);
+ }
+ }
+ }
+
+ let crate_dir = doc_dir.join(&crate_name);
+ if crate_dir.exists() {
+ // Remove output from a previous build. This ensures that stale
+ // files for removed items are removed.
+ debug!("removing pre-existing doc directory {:?}", crate_dir);
+ paths::remove_dir_all(crate_dir)?;
+ }
+ state.running(&rustdoc);
+
+ let result = rustdoc
+ .exec_with_streaming(
+ &mut |line| on_stdout_line(state, line, package_id, &target),
+ &mut |line| {
+ on_stderr_line(
+ state,
+ line,
+ package_id,
+ &manifest_path,
+ &target,
+ &mut output_options,
+ )
+ },
+ false,
+ )
+ .with_context(|| format!("could not document `{}`", name));
+
+ if let Err(e) = result {
+ if let Some(diagnostic) = failed_scrape_diagnostic {
+ state.warning(diagnostic)?;
+ }
+
+ return Err(e);
+ }
+
+ Ok(())
+ }))
+}
+
+// The --crate-version flag could have already been passed in RUSTDOCFLAGS
+// or as an extra compiler argument for rustdoc
+fn crate_version_flag_already_present(rustdoc: &ProcessBuilder) -> bool {
+ rustdoc.get_args().any(|flag| {
+ flag.to_str()
+ .map_or(false, |flag| flag.starts_with(RUSTDOC_CRATE_VERSION_FLAG))
+ })
+}
+
+fn append_crate_version_flag(unit: &Unit, rustdoc: &mut ProcessBuilder) {
+ rustdoc
+ .arg(RUSTDOC_CRATE_VERSION_FLAG)
+ .arg(unit.pkg.version().to_string());
+}
+
+/// Adds [`--cap-lints`] to the command to execute.
+///
+/// [`--cap-lints`]: https://doc.rust-lang.org/nightly/rustc/lints/levels.html#capping-lints
+fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuilder) {
+ // If this is an upstream dep we don't want warnings from, turn off all
+ // lints.
+ if !unit.show_warnings(bcx.config) {
+ cmd.arg("--cap-lints").arg("allow");
+
+ // If this is an upstream dep but we *do* want warnings, make sure that they
+ // don't fail compilation.
+ } else if !unit.is_local() {
+ cmd.arg("--cap-lints").arg("warn");
+ }
+}
+
+/// Forwards [`-Zallow-features`] if it is set for cargo.
+///
+/// [`-Zallow-features`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#allow-features
+fn add_allow_features(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) {
+ if let Some(allow) = &cx.bcx.config.cli_unstable().allow_features {
+ let mut arg = String::from("-Zallow-features=");
+ let _ = iter_join_onto(&mut arg, allow, ",");
+ cmd.arg(&arg);
+ }
+}
+
+/// Adds [`--error-format`] to the command to execute.
+///
+/// Cargo always uses JSON output. This has several benefits, such as being
+/// easier to parse, handles changing formats (for replaying cached messages),
+/// ensures atomic output (so messages aren't interleaved), allows for
+/// intercepting messages like rmeta artifacts, etc. rustc includes a
+/// "rendered" field in the JSON message with the message properly formatted,
+/// which Cargo will extract and display to the user.
+///
+/// [`--error-format`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#--error-format-control-how-errors-are-produced
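+///
+/// Concretely, the invocation gains `--error-format=json` and a `--json=`
+/// list such as `diagnostic-rendered-ansi,artifacts,future-incompat`
+/// (plus `diagnostic-short` for the short message formats).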
+fn add_error_format_and_color(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) {
+ cmd.arg("--error-format=json");
+ let mut json = String::from("--json=diagnostic-rendered-ansi,artifacts,future-incompat");
+
+ match cx.bcx.build_config.message_format {
+ MessageFormat::Short | MessageFormat::Json { short: true, .. } => {
+ json.push_str(",diagnostic-short");
+ }
+ _ => {}
+ }
+ cmd.arg(json);
+
+ let config = cx.bcx.config;
+ if let Some(width) = config.shell().err_width().diagnostic_terminal_width() {
+ cmd.arg(format!("--diagnostic-width={width}"));
+ }
+}
+
+/// Adds essential rustc flags and environment variables to the command to execute.
+fn build_base_args(
+ cx: &mut Context<'_, '_>,
+ cmd: &mut ProcessBuilder,
+ unit: &Unit,
+ crate_types: &[CrateType],
+) -> CargoResult<()> {
+ assert!(!unit.mode.is_run_custom_build());
+
+ let bcx = cx.bcx;
+ let Profile {
+ ref opt_level,
+ codegen_backend,
+ codegen_units,
+ debuginfo,
+ debug_assertions,
+ split_debuginfo,
+ overflow_checks,
+ rpath,
+ ref panic,
+ incremental,
+ strip,
+ rustflags,
+ ..
+ } = unit.profile.clone();
+ let test = unit.mode.is_any_test();
+
+ cmd.arg("--crate-name").arg(&unit.target.crate_name());
+
+ let edition = unit.target.edition();
+ edition.cmd_edition_arg(cmd);
+
+ add_path_args(bcx.ws, unit, cmd);
+ add_error_format_and_color(cx, cmd);
+ add_allow_features(cx, cmd);
+
+ let mut contains_dy_lib = false;
+ if !test {
+ for crate_type in crate_types {
+ cmd.arg("--crate-type").arg(crate_type.as_str());
+ contains_dy_lib |= crate_type == &CrateType::Dylib;
+ }
+ }
+
+ if unit.mode.is_check() {
+ cmd.arg("--emit=dep-info,metadata");
+ } else if !unit.requires_upstream_objects() {
+ // Always produce metadata files for rlib outputs. Metadata may be used
+ // in this session for a pipelined compilation, or it may be used in a
+ // future Cargo session as part of a pipelined compile.
+ cmd.arg("--emit=dep-info,metadata,link");
+ } else {
+ cmd.arg("--emit=dep-info,link");
+ }
+
+ let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build())
+ || (contains_dy_lib && !cx.is_primary_package(unit));
+ if prefer_dynamic {
+ cmd.arg("-C").arg("prefer-dynamic");
+ }
+
+ if opt_level.as_str() != "0" {
+ cmd.arg("-C").arg(&format!("opt-level={}", opt_level));
+ }
+
+ if !rustflags.is_empty() {
+ cmd.args(&rustflags);
+ }
+
+ if *panic != PanicStrategy::Unwind {
+ cmd.arg("-C").arg(format!("panic={}", panic));
+ }
+
+ cmd.args(&lto_args(cx, unit));
+
+ // This is generally just an optimization on build time so if we don't pass
+ // it then it's ok. The values for the flag (off, packed, unpacked) may be supported
+ // or not depending on the platform, so availability is checked per-value.
+ // For example, at the time of writing this code, on Windows the only stable valid
+ // value for split-debuginfo is "packed", while on Linux "unpacked" is also stable.
+ if let Some(split) = split_debuginfo {
+ if cx
+ .bcx
+ .target_data
+ .info(unit.kind)
+ .supports_debuginfo_split(split)
+ {
+ cmd.arg("-C").arg(format!("split-debuginfo={}", split));
+ }
+ }
+
+ if let Some(backend) = codegen_backend {
+ cmd.arg("-Z").arg(&format!("codegen-backend={}", backend));
+ }
+
+ if let Some(n) = codegen_units {
+ cmd.arg("-C").arg(&format!("codegen-units={}", n));
+ }
+
+ if let Some(debuginfo) = debuginfo.to_option() {
+ cmd.arg("-C").arg(format!("debuginfo={}", debuginfo));
+ }
+
+ if let Some(args) = cx.bcx.extra_args_for(unit) {
+ cmd.args(args);
+ }
+
+ // `-C overflow-checks` is implied by the setting of `-C debug-assertions`,
+ // so we only need to provide `-C overflow-checks` if it differs from
+ // the value of `-C debug-assertions` we would provide.
+ if opt_level.as_str() != "0" {
+ if debug_assertions {
+ cmd.args(&["-C", "debug-assertions=on"]);
+ if !overflow_checks {
+ cmd.args(&["-C", "overflow-checks=off"]);
+ }
+ } else if overflow_checks {
+ cmd.args(&["-C", "overflow-checks=on"]);
+ }
+ } else if !debug_assertions {
+ cmd.args(&["-C", "debug-assertions=off"]);
+ if overflow_checks {
+ cmd.args(&["-C", "overflow-checks=on"]);
+ }
+ } else if !overflow_checks {
+ cmd.args(&["-C", "overflow-checks=off"]);
+ }
+
+ if test && unit.target.harness() {
+ cmd.arg("--test");
+
+        // Cargo has historically never compiled `--test` binaries with
+        // `panic=abort` because the `test` crate itself didn't support it.
+        // Support now exists upstream, but it requires an unstable flag to be
+        // passed when compiling the test, so register that flag with rustc
+        // here. Eventually this flag will simply not be needed once the
+        // behavior is stabilized in the Rust compiler itself.
+ if *panic == PanicStrategy::Abort {
+ cmd.arg("-Z").arg("panic-abort-tests");
+ }
+ } else if test {
+ cmd.arg("--cfg").arg("test");
+ }
+
+ cmd.args(&features_args(unit));
+ cmd.args(&check_cfg_args(cx, unit));
+
+ let meta = cx.files().metadata(unit);
+ cmd.arg("-C").arg(&format!("metadata={}", meta));
+ if cx.files().use_extra_filename(unit) {
+ cmd.arg("-C").arg(&format!("extra-filename=-{}", meta));
+ }
+
+ if rpath {
+ cmd.arg("-C").arg("rpath");
+ }
+
+ cmd.arg("--out-dir").arg(&cx.files().out_dir(unit));
+
+ fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) {
+ if let Some(val) = val {
+ let mut joined = OsString::from(prefix);
+ joined.push(val);
+ cmd.arg(key).arg(joined);
+ }
+ }
+
+ if let CompileKind::Target(n) = unit.kind {
+ cmd.arg("--target").arg(n.rustc_target());
+ }
+
+ opt(
+ cmd,
+ "-C",
+ "linker=",
+ bcx.linker(unit.kind).as_ref().map(|s| s.as_ref()),
+ );
+ if incremental {
+ let dir = cx.files().layout(unit.kind).incremental().as_os_str();
+ opt(cmd, "-C", "incremental=", Some(dir));
+ }
+
+ if strip != Strip::None {
+ cmd.arg("-C").arg(format!("strip={}", strip));
+ }
+
+ if unit.is_std {
+ // -Zforce-unstable-if-unmarked prevents the accidental use of
+ // unstable crates within the sysroot (such as "extern crate libc" or
+ // any non-public crate in the sysroot).
+ //
+ // RUSTC_BOOTSTRAP allows unstable features on stable.
+ cmd.arg("-Z")
+ .arg("force-unstable-if-unmarked")
+ .env("RUSTC_BOOTSTRAP", "1");
+ }
+
+ // Add `CARGO_BIN_EXE_` environment variables for building tests.
+ if unit.target.is_test() || unit.target.is_bench() {
+ for bin_target in unit
+ .pkg
+ .manifest()
+ .targets()
+ .iter()
+ .filter(|target| target.is_bin())
+ {
+ let exe_path = cx
+ .files()
+ .bin_link_for_target(bin_target, unit.kind, cx.bcx)?;
+ let name = bin_target
+ .binary_filename()
+ .unwrap_or(bin_target.name().to_string());
+ let key = format!("CARGO_BIN_EXE_{}", name);
+ cmd.env(&key, exe_path);
+ }
+ }
+ Ok(())
+}
+
+/// All active features for the unit, each passed as `--cfg feature="<feature-name>"`.
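+///
+/// For example (illustrative feature names), a unit with features `serde` and
+/// `std` gets `--cfg feature="serde" --cfg feature="std"`.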
+fn features_args(unit: &Unit) -> Vec<OsString> {
+ let mut args = Vec::with_capacity(unit.features.len() * 2);
+
+ for feat in &unit.features {
+ args.push(OsString::from("--cfg"));
+ args.push(OsString::from(format!("feature=\"{}\"", feat)));
+ }
+
+ args
+}
+
+/// Generates the `--check-cfg` arguments for the `unit`.
+/// See unstable feature [`check-cfg`].
+///
+/// [`check-cfg`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg
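+///
+/// For instance (illustrative), with `-Zcheck-cfg=features` and features `foo`
+/// and `bar` declared, this produces `-Zunstable-options` followed by
+/// `--check-cfg` with a value like `values(feature, "foo", "bar")`.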
+fn check_cfg_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec<OsString> {
+ if let Some((features, well_known_names, well_known_values, _output)) =
+ cx.bcx.config.cli_unstable().check_cfg
+ {
+ let mut args = Vec::with_capacity(unit.pkg.summary().features().len() * 2 + 4);
+ args.push(OsString::from("-Zunstable-options"));
+
+ if features {
+            // This generates something like this:
+ // - values(feature)
+ // - values(feature, "foo", "bar")
+ let mut arg = OsString::from("values(feature");
+ for (&feat, _) in unit.pkg.summary().features() {
+ arg.push(", \"");
+ arg.push(&feat);
+ arg.push("\"");
+ }
+ arg.push(")");
+
+ args.push(OsString::from("--check-cfg"));
+ args.push(arg);
+ }
+
+ if well_known_names {
+ args.push(OsString::from("--check-cfg"));
+ args.push(OsString::from("names()"));
+ }
+
+ if well_known_values {
+ args.push(OsString::from("--check-cfg"));
+ args.push(OsString::from("values()"));
+ }
+
+ args
+ } else {
+ Vec::new()
+ }
+}
+
+/// Adds LTO related codegen flags.
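+///
+/// For example, `Lto::Run(Some("thin"))` becomes `-C lto=thin`, while
+/// `Lto::Off` becomes `-C lto=off -C embed-bitcode=no`.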
+fn lto_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec<OsString> {
+ let mut result = Vec::new();
+ let mut push = |arg: &str| {
+ result.push(OsString::from("-C"));
+ result.push(OsString::from(arg));
+ };
+ match cx.lto[unit] {
+ lto::Lto::Run(None) => push("lto"),
+ lto::Lto::Run(Some(s)) => push(&format!("lto={}", s)),
+ lto::Lto::Off => {
+ push("lto=off");
+ push("embed-bitcode=no");
+ }
+ lto::Lto::ObjectAndBitcode => {} // this is rustc's default
+ lto::Lto::OnlyBitcode => push("linker-plugin-lto"),
+ lto::Lto::OnlyObject => push("embed-bitcode=no"),
+ }
+ result
+}
+
+/// Adds dependency-relevant rustc flags and environment variables
+/// to the command to execute, such as [`-L`] and [`--extern`].
+///
+/// [`-L`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#-l-add-a-directory-to-the-library-search-path
+/// [`--extern`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#--extern-specify-where-an-external-library-is-located
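+///
+/// For example (illustrative paths), this adds flags such as
+/// `-L dependency=target/debug/deps` and, via [`extern_args`],
+/// `--extern foo=target/debug/deps/libfoo-<metadata>.rmeta` (or `.rlib`).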
+fn build_deps_args(
+ cmd: &mut ProcessBuilder,
+ cx: &mut Context<'_, '_>,
+ unit: &Unit,
+) -> CargoResult<()> {
+ let bcx = cx.bcx;
+ cmd.arg("-L").arg(&{
+ let mut deps = OsString::from("dependency=");
+ deps.push(cx.files().deps_dir(unit));
+ deps
+ });
+
+    // Be sure that the host deps path is also listed. This ensures that
+    // proc-macro dependencies are correctly found (for re-exported macros).
+ if !unit.kind.is_host() {
+ cmd.arg("-L").arg(&{
+ let mut deps = OsString::from("dependency=");
+ deps.push(cx.files().host_deps());
+ deps
+ });
+ }
+
+ let deps = cx.unit_deps(unit);
+
+    // If there is no linkable target but there should be one, rustc fails
+    // later on if there is an `extern crate` for it. This may turn into a
+    // hard error in the future (see PR #4797).
+ if !deps
+ .iter()
+ .any(|dep| !dep.unit.mode.is_doc() && dep.unit.target.is_linkable())
+ {
+ if let Some(dep) = deps.iter().find(|dep| {
+ !dep.unit.mode.is_doc() && dep.unit.target.is_lib() && !dep.unit.artifact.is_true()
+ }) {
+ bcx.config.shell().warn(format!(
+ "The package `{}` \
+ provides no linkable target. The compiler might raise an error while compiling \
+ `{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
+ Cargo.toml. This warning might turn into a hard error in the future.",
+ dep.unit.target.crate_name(),
+ unit.target.crate_name(),
+ dep.unit.target.crate_name()
+ ))?;
+ }
+ }
+
+ let mut unstable_opts = false;
+
+ for dep in deps {
+ if dep.unit.mode.is_run_custom_build() {
+ cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep.unit));
+ }
+ }
+
+ for arg in extern_args(cx, unit, &mut unstable_opts)? {
+ cmd.arg(arg);
+ }
+
+ for (var, env) in artifact::get_env(cx, deps)? {
+ cmd.env(&var, env);
+ }
+
+    // This will only be set if we're already using a feature
+    // requiring nightly Rust.
+ if unstable_opts {
+ cmd.arg("-Z").arg("unstable-options");
+ }
+
+ Ok(())
+}
+
+/// Adds extra rustc flags and environment variables collected from the output
+/// of a build script to the command to execute, including custom environment
+/// variables and `cfg` flags.
+fn add_custom_flags(
+ cmd: &mut ProcessBuilder,
+ build_script_outputs: &BuildScriptOutputs,
+ metadata: Option<Metadata>,
+) -> CargoResult<()> {
+ if let Some(metadata) = metadata {
+ if let Some(output) = build_script_outputs.get(metadata) {
+ for cfg in output.cfgs.iter() {
+ cmd.arg("--cfg").arg(cfg);
+ }
+ if !output.check_cfgs.is_empty() {
+ cmd.arg("-Zunstable-options");
+ for check_cfg in &output.check_cfgs {
+ cmd.arg("--check-cfg").arg(check_cfg);
+ }
+ }
+ for &(ref name, ref value) in output.env.iter() {
+ cmd.env(name, value);
+ }
+ }
+ }
+
+ Ok(())
+}
+
+/// Generates a list of `--extern` arguments.
+pub fn extern_args(
+ cx: &Context<'_, '_>,
+ unit: &Unit,
+ unstable_opts: &mut bool,
+) -> CargoResult<Vec<OsString>> {
+ let mut result = Vec::new();
+ let deps = cx.unit_deps(unit);
+
+ // Closure to add one dependency to `result`.
+ let mut link_to =
+ |dep: &UnitDep, extern_crate_name: InternedString, noprelude: bool| -> CargoResult<()> {
+ let mut value = OsString::new();
+ let mut opts = Vec::new();
+ if unit
+ .pkg
+ .manifest()
+ .unstable_features()
+ .require(Feature::public_dependency())
+ .is_ok()
+ && !dep.public
+ {
+ opts.push("priv");
+ *unstable_opts = true;
+ }
+ if noprelude {
+ opts.push("noprelude");
+ *unstable_opts = true;
+ }
+ if !opts.is_empty() {
+ value.push(opts.join(","));
+ value.push(":");
+ }
+ value.push(extern_crate_name.as_str());
+ value.push("=");
+
+ let mut pass = |file| {
+ let mut value = value.clone();
+ value.push(file);
+ result.push(OsString::from("--extern"));
+ result.push(value);
+ };
+
+ let outputs = cx.outputs(&dep.unit)?;
+
+ if cx.only_requires_rmeta(unit, &dep.unit) || dep.unit.mode.is_check() {
+ // Example: rlib dependency for an rlib, rmeta is all that is required.
+ let output = outputs
+ .iter()
+ .find(|output| output.flavor == FileFlavor::Rmeta)
+ .expect("failed to find rmeta dep for pipelined dep");
+ pass(&output.path);
+ } else {
+ // Example: a bin needs `rlib` for dependencies, it cannot use rmeta.
+ for output in outputs.iter() {
+ if output.flavor == FileFlavor::Linkable {
+ pass(&output.path);
+ }
+ }
+ }
+ Ok(())
+ };
+
+ for dep in deps {
+ if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() {
+ link_to(dep, dep.extern_crate_name, dep.noprelude)?;
+ }
+ }
+ if unit.target.proc_macro() {
+ // Automatically import `proc_macro`.
+ result.push(OsString::from("--extern"));
+ result.push(OsString::from("proc_macro"));
+ }
+
+ Ok(result)
+}
+
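+/// Uppercases a string and replaces `-` with `_` so it is suitable as an
+/// environment variable name; e.g. (illustrative) `envify("my-crate")`
+/// yields `MY_CRATE`.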
+fn envify(s: &str) -> String {
+ s.chars()
+ .flat_map(|c| c.to_uppercase())
+ .map(|c| if c == '-' { '_' } else { c })
+ .collect()
+}
+
+/// Configuration of the display of messages emitted by the compiler,
+/// e.g. diagnostics, warnings, errors, and message caching.
+struct OutputOptions {
+ /// What format we're emitting from Cargo itself.
+ format: MessageFormat,
+ /// Whether or not to display messages in color.
+ color: bool,
+ /// Where to write the JSON messages to support playback later if the unit
+ /// is fresh. The file is created lazily so that in the normal case, lots
+ /// of empty files are not created. If this is None, the output will not
+ /// be cached (such as when replaying cached messages).
+ cache_cell: Option<(PathBuf, LazyCell<File>)>,
+ /// If `true`, display any diagnostics.
+ /// Other types of JSON messages are processed regardless
+ /// of the value of this flag.
+ ///
+ /// This is used primarily for cache replay. If you build with `-vv`, the
+ /// cache will be filled with diagnostics from dependencies. When the
+ /// cache is replayed without `-vv`, we don't want to show them.
+ show_diagnostics: bool,
+ /// Tracks the number of warnings we've seen so far.
+ warnings_seen: usize,
+ /// Tracks the number of errors we've seen so far.
+ errors_seen: usize,
+}
+
+impl OutputOptions {
+ fn new(cx: &Context<'_, '_>, unit: &Unit) -> OutputOptions {
+ let color = cx.bcx.config.shell().err_supports_color();
+ let path = cx.files().message_cache_path(unit);
+ // Remove old cache, ignore ENOENT, which is the common case.
+ drop(fs::remove_file(&path));
+ let cache_cell = Some((path, LazyCell::new()));
+ OutputOptions {
+ format: cx.bcx.build_config.message_format,
+ color,
+ cache_cell,
+ show_diagnostics: true,
+ warnings_seen: 0,
+ errors_seen: 0,
+ }
+ }
+}
+
+fn on_stdout_line(
+ state: &JobState<'_, '_>,
+ line: &str,
+ _package_id: PackageId,
+ _target: &Target,
+) -> CargoResult<()> {
+ state.stdout(line.to_string())?;
+ Ok(())
+}
+
+fn on_stderr_line(
+ state: &JobState<'_, '_>,
+ line: &str,
+ package_id: PackageId,
+ manifest_path: &std::path::Path,
+ target: &Target,
+ options: &mut OutputOptions,
+) -> CargoResult<()> {
+ if on_stderr_line_inner(state, line, package_id, manifest_path, target, options)? {
+ // Check if caching is enabled.
+ if let Some((path, cell)) = &mut options.cache_cell {
+ // Cache the output, which will be replayed later when Fresh.
+ let f = cell.try_borrow_mut_with(|| paths::create(path))?;
+ debug_assert!(!line.contains('\n'));
+ f.write_all(line.as_bytes())?;
+ f.write_all(&[b'\n'])?;
+ }
+ }
+ Ok(())
+}
+
+/// Returns true if the line should be cached.
+fn on_stderr_line_inner(
+ state: &JobState<'_, '_>,
+ line: &str,
+ package_id: PackageId,
+ manifest_path: &std::path::Path,
+ target: &Target,
+ options: &mut OutputOptions,
+) -> CargoResult<bool> {
+ // We primarily want to use this function to process JSON messages from
+ // rustc. The compiler should always print one JSON message per line, and
+ // otherwise it may have other output intermingled (think RUST_LOG or
+ // something like that), so skip over everything that doesn't look like a
+ // JSON message.
+ if !line.starts_with('{') {
+ state.stderr(line.to_string())?;
+ return Ok(true);
+ }
+
+ let mut compiler_message: Box<serde_json::value::RawValue> = match serde_json::from_str(line) {
+ Ok(msg) => msg,
+
+ // If the compiler produced a line that started with `{` but it wasn't
+ // valid JSON, maybe it wasn't JSON in the first place! Forward it along
+ // to stderr.
+ Err(e) => {
+ debug!("failed to parse json: {:?}", e);
+ state.stderr(line.to_string())?;
+ return Ok(true);
+ }
+ };
+
+ let count_diagnostic = |level, options: &mut OutputOptions| {
+ if level == "warning" {
+ options.warnings_seen += 1;
+ } else if level == "error" {
+ options.errors_seen += 1;
+ }
+ };
+
+ if let Ok(report) = serde_json::from_str::<FutureIncompatReport>(compiler_message.get()) {
+ for item in &report.future_incompat_report {
+ count_diagnostic(&*item.diagnostic.level, options);
+ }
+ state.future_incompat_report(report.future_incompat_report);
+ return Ok(true);
+ }
+
+ // Depending on what we're emitting from Cargo itself, we figure out what to
+ // do with this JSON message.
+ match options.format {
+ // In the "human" output formats (human/short) or if diagnostic messages
+ // from rustc aren't being included in the output of Cargo's JSON
+ // messages then we extract the diagnostic (if present) here and handle
+ // it ourselves.
+ MessageFormat::Human
+ | MessageFormat::Short
+ | MessageFormat::Json {
+ render_diagnostics: true,
+ ..
+ } => {
+ #[derive(serde::Deserialize)]
+ struct CompilerMessage {
+ rendered: String,
+ message: String,
+ level: String,
+ children: Vec<PartialDiagnostic>,
+ }
+
+ // A partial rustfix::diagnostics::Diagnostic. We deserialize only a
+ // subset of the fields because rustc's output can be extremely
+ // deeply nested JSON in pathological cases involving macro
+ // expansion. Rustfix's Diagnostic struct is recursive containing a
+ // field `children: Vec<Self>`, and it can cause deserialization to
+ // hit serde_json's default recursion limit, or overflow the stack
+ // if we turn that off. Cargo only cares about the 1 field listed
+ // here.
+ #[derive(serde::Deserialize)]
+ struct PartialDiagnostic {
+ spans: Vec<PartialDiagnosticSpan>,
+ }
+
+ // A partial rustfix::diagnostics::DiagnosticSpan.
+ #[derive(serde::Deserialize)]
+ struct PartialDiagnosticSpan {
+ suggestion_applicability: Option<Applicability>,
+ }
+
+ if let Ok(mut msg) = serde_json::from_str::<CompilerMessage>(compiler_message.get()) {
+ if msg.message.starts_with("aborting due to")
+ || msg.message.ends_with("warning emitted")
+ || msg.message.ends_with("warnings emitted")
+ {
+ // Skip this line; we'll print our own summary at the end.
+ return Ok(true);
+ }
+ // state.stderr will add a newline
+ if msg.rendered.ends_with('\n') {
+ msg.rendered.pop();
+ }
+ let rendered = if options.color {
+ msg.rendered
+ } else {
+                    // Stripping only fails if the underlying Writer fails;
+                    // here that is a Cursor over a Vec, which never fails.
+ strip_ansi_escapes::strip(&msg.rendered)
+ .map(|v| String::from_utf8(v).expect("utf8"))
+ .expect("strip should never fail")
+ };
+ if options.show_diagnostics {
+ let machine_applicable: bool = msg
+ .children
+ .iter()
+ .map(|child| {
+ child
+ .spans
+ .iter()
+ .filter_map(|span| span.suggestion_applicability)
+ .any(|app| app == Applicability::MachineApplicable)
+ })
+ .any(|b| b);
+ count_diagnostic(&msg.level, options);
+ state.emit_diag(msg.level, rendered, machine_applicable)?;
+ }
+ return Ok(true);
+ }
+ }
+
+ // Remove color information from the rendered string if color is not
+ // enabled. Cargo always asks for ANSI colors from rustc. This allows
+ // cached replay to enable/disable colors without re-invoking rustc.
+ MessageFormat::Json { ansi: false, .. } => {
+ #[derive(serde::Deserialize, serde::Serialize)]
+ struct CompilerMessage {
+ rendered: String,
+ #[serde(flatten)]
+ other: std::collections::BTreeMap<String, serde_json::Value>,
+ }
+ if let Ok(mut error) = serde_json::from_str::<CompilerMessage>(compiler_message.get()) {
+ error.rendered = strip_ansi_escapes::strip(&error.rendered)
+ .map(|v| String::from_utf8(v).expect("utf8"))
+ .unwrap_or(error.rendered);
+ let new_line = serde_json::to_string(&error)?;
+ let new_msg: Box<serde_json::value::RawValue> = serde_json::from_str(&new_line)?;
+ compiler_message = new_msg;
+ }
+ }
+
+ // If ansi colors are desired then we should be good to go! We can just
+ // pass through this message as-is.
+ MessageFormat::Json { ansi: true, .. } => {}
+ }
+
+ // We always tell rustc to emit messages about artifacts being produced.
+ // These messages feed into pipelined compilation, as well as timing
+ // information.
+ //
+ // Look for a matching directive and inform Cargo internally that a
+ // metadata file has been produced.
+ #[derive(serde::Deserialize)]
+ struct ArtifactNotification {
+ artifact: String,
+ }
+
+ if let Ok(artifact) = serde_json::from_str::<ArtifactNotification>(compiler_message.get()) {
+ trace!("found directive from rustc: `{}`", artifact.artifact);
+ if artifact.artifact.ends_with(".rmeta") {
+ debug!("looks like metadata finished early!");
+ state.rmeta_produced();
+ }
+ return Ok(false);
+ }
+
+ // And failing all that above we should have a legitimate JSON diagnostic
+ // from the compiler, so wrap it in an external Cargo JSON message
+ // indicating which package it came from and then emit it.
+
+ if !options.show_diagnostics {
+ return Ok(true);
+ }
+
+ #[derive(serde::Deserialize)]
+ struct CompilerMessage {
+ level: String,
+ }
+ if let Ok(message) = serde_json::from_str::<CompilerMessage>(compiler_message.get()) {
+ count_diagnostic(&message.level, options);
+ }
+
+ let msg = machine_message::FromCompiler {
+ package_id,
+ manifest_path,
+ target,
+ message: compiler_message,
+ }
+ .to_json_string();
+
+ // Switch json lines from rustc/rustdoc that appear on stderr to stdout
+ // instead. We want the stdout of Cargo to always be machine parseable as
+ // stderr has our colorized human-readable messages.
+ state.stdout(msg)?;
+ Ok(true)
+}
+
+/// Creates a unit of work that replays the cached compiler message.
+///
+/// Usually used when a job is fresh and doesn't need to recompile.
+fn replay_output_cache(
+ package_id: PackageId,
+ manifest_path: PathBuf,
+ target: &Target,
+ path: PathBuf,
+ format: MessageFormat,
+ color: bool,
+ show_diagnostics: bool,
+) -> Work {
+ let target = target.clone();
+ let mut options = OutputOptions {
+ format,
+ color,
+ cache_cell: None,
+ show_diagnostics,
+ warnings_seen: 0,
+ errors_seen: 0,
+ };
+ Work::new(move |state| {
+ if !path.exists() {
+ // No cached output, probably didn't emit anything.
+ return Ok(());
+ }
+ // We sometimes have gigabytes of output from the compiler, so avoid
+ // loading it all into memory at once, as that can cause OOM where
+ // otherwise there would be none.
+ let file = paths::open(&path)?;
+ let mut reader = std::io::BufReader::new(file);
+ let mut line = String::new();
+ loop {
+ let length = reader.read_line(&mut line)?;
+ if length == 0 {
+ break;
+ }
+ let trimmed = line.trim_end_matches(&['\n', '\r'][..]);
+ on_stderr_line(
+ state,
+ trimmed,
+ package_id,
+ &manifest_path,
+ &target,
+ &mut options,
+ )?;
+ line.clear();
+ }
+ Ok(())
+ })
+}
+
+/// Provides a package name with descriptive target information,
+/// e.g., '`foo` (bin "bar" test)', '`foo` (lib doctest)'.
+fn descriptive_pkg_name(name: &str, target: &Target, mode: &CompileMode) -> String {
+ let desc_name = target.description_named();
+ let mode = if mode.is_rustc_test() && !(target.is_test() || target.is_bench()) {
+ " test"
+ } else if mode.is_doc_test() {
+ " doctest"
+ } else if mode.is_doc() {
+ " doc"
+ } else {
+ ""
+ };
+ format!("`{name}` ({desc_name}{mode})")
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/output_depinfo.rs b/src/tools/cargo/src/cargo/core/compiler/output_depinfo.rs
new file mode 100644
index 000000000..d659d620c
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/output_depinfo.rs
@@ -0,0 +1,172 @@
+//! dep-info files for external build system integration.
+//! See [`output_depinfo`] for more.
+
+use cargo_util::paths::normalize_path;
+use std::collections::{BTreeSet, HashSet};
+use std::io::{BufWriter, Write};
+use std::path::{Path, PathBuf};
+
+use super::{fingerprint, Context, FileFlavor, Unit};
+use crate::util::{internal, CargoResult};
+use cargo_util::paths;
+use log::debug;
+
+/// Basically just normalizes a given path and converts it to a string.
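+///
+/// Spaces are escaped with a backslash, so an illustrative path like
+/// `/my dir/lib.rs` is rendered as `/my\ dir/lib.rs`.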
+fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResult<String> {
+ fn wrap_path(path: &Path) -> CargoResult<String> {
+ path.to_str()
+ .ok_or_else(|| internal(format!("path `{:?}` not utf-8", path)))
+ .map(|f| f.replace(" ", "\\ "))
+ }
+
+ let path = path.as_ref();
+ if let Some(basedir) = basedir {
+ let norm_path = normalize_path(path);
+ let norm_basedir = normalize_path(basedir.as_ref());
+ match norm_path.strip_prefix(norm_basedir) {
+ Ok(relpath) => wrap_path(relpath),
+ _ => wrap_path(path),
+ }
+ } else {
+ wrap_path(path)
+ }
+}
+
+/// Collects all dependencies of the `unit` for the output dep info file.
+///
+/// Dependencies will be stored in `deps`, including:
+///
+/// * dependencies from [fingerprint dep-info]
+/// * paths from `rerun-if-changed` build script instruction
+/// * ...and traverse transitive dependencies recursively
+///
+/// [fingerprint dep-info]: super::fingerprint#fingerprint-dep-info-files
+fn add_deps_for_unit(
+ deps: &mut BTreeSet<PathBuf>,
+ cx: &mut Context<'_, '_>,
+ unit: &Unit,
+ visited: &mut HashSet<Unit>,
+) -> CargoResult<()> {
+ if !visited.insert(unit.clone()) {
+ return Ok(());
+ }
+
+ // units representing the execution of a build script don't actually
+ // generate a dep info file, so we just keep on going below
+ if !unit.mode.is_run_custom_build() {
+ // Add dependencies from rustc dep-info output (stored in fingerprint directory)
+ let dep_info_loc = fingerprint::dep_info_loc(cx, unit);
+ if let Some(paths) =
+ fingerprint::parse_dep_info(unit.pkg.root(), cx.files().host_root(), &dep_info_loc)?
+ {
+ for path in paths.files {
+ deps.insert(path);
+ }
+ } else {
+ debug!(
+ "can't find dep_info for {:?} {}",
+ unit.pkg.package_id(),
+ unit.target
+ );
+ return Err(internal("dep_info missing"));
+ }
+ }
+
+ // Add rerun-if-changed dependencies
+ if let Some(metadata) = cx.find_build_script_metadata(unit) {
+ if let Some(output) = cx.build_script_outputs.lock().unwrap().get(metadata) {
+ for path in &output.rerun_if_changed {
+                // The paths saved by the build script may be absolute or relative;
+                // relative paths are interpreted relative to the crate root of the dependency.
+ let path = unit.pkg.root().join(path);
+ deps.insert(path);
+ }
+ }
+ }
+
+ // Recursively traverse all transitive dependencies
+ let unit_deps = Vec::from(cx.unit_deps(unit)); // Create vec due to mutable borrow.
+ for dep in unit_deps {
+ if dep.unit.is_local() {
+ add_deps_for_unit(deps, cx, &dep.unit, visited)?;
+ }
+ }
+ Ok(())
+}
+
+/// Save a `.d` dep-info file for the given unit. This is the third kind of
+/// dep-info mentioned in [`fingerprint`] module.
+///
+/// Argument `unit` is expected to be the root unit, which will be uplifted.
+///
+/// Cargo emits its own dep-info files in the output directory. This is
+/// only done for every "uplifted" artifact. These are intended to be used
+/// with external build systems so that they can detect if Cargo needs to be
+/// re-executed.
+///
+/// It includes all the entries from the `rustc` dep-info file, and extends it
+/// with any `rerun-if-changed` entries from build scripts. It also includes
+/// sources from any path dependencies. Registry dependencies are not included
+/// under the assumption that changes to them can be detected via changes to
+/// `Cargo.lock`.
+///
+/// [`fingerprint`]: super::fingerprint#dep-info-files
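+///
+/// As a rough illustration, the emitted `<artifact>.d` file is Makefile-shaped,
+/// e.g. `target/debug/foo: src/lib.rs src/main.rs build.rs`.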
+pub fn output_depinfo(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> {
+ let bcx = cx.bcx;
+ let mut deps = BTreeSet::new();
+ let mut visited = HashSet::new();
+ let success = add_deps_for_unit(&mut deps, cx, unit, &mut visited).is_ok();
+ let basedir_string;
+ let basedir = match bcx.config.build_config()?.dep_info_basedir.clone() {
+ Some(value) => {
+ basedir_string = value
+ .resolve_path(bcx.config)
+ .as_os_str()
+ .to_str()
+ .ok_or_else(|| anyhow::format_err!("build.dep-info-basedir path not utf-8"))?
+ .to_string();
+ Some(basedir_string.as_str())
+ }
+ None => None,
+ };
+ let deps = deps
+ .iter()
+ .map(|f| render_filename(f, basedir))
+ .collect::<CargoResult<Vec<_>>>()?;
+
+ for output in cx
+ .outputs(unit)?
+ .iter()
+ .filter(|o| !matches!(o.flavor, FileFlavor::DebugInfo | FileFlavor::Auxiliary))
+ {
+ if let Some(ref link_dst) = output.hardlink {
+ let output_path = link_dst.with_extension("d");
+ if success {
+ let target_fn = render_filename(link_dst, basedir)?;
+
+ // If nothing changed don't recreate the file which could alter
+ // its mtime
+ if let Ok(previous) = fingerprint::parse_rustc_dep_info(&output_path) {
+ if previous.files.iter().eq(deps.iter().map(Path::new)) {
+ continue;
+ }
+ }
+
+ // Otherwise write it all out
+ let mut outfile = BufWriter::new(paths::create(output_path)?);
+ write!(outfile, "{}:", target_fn)?;
+ for dep in &deps {
+ write!(outfile, " {}", dep)?;
+ }
+ writeln!(outfile)?;
+
+                // dep-info generation failed, so delete the output file. This
+                // will usually cause the build system to always rerun the build
+                // rule, which is correct, if inefficient.
+ } else if output_path.exists() {
+ paths::remove_file(output_path)?;
+ }
+ }
+ }
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/rustdoc.rs b/src/tools/cargo/src/cargo/core/compiler/rustdoc.rs
new file mode 100644
index 000000000..f6fdd005a
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/rustdoc.rs
@@ -0,0 +1,272 @@
+//! Utilities for building with rustdoc.
+
+use crate::core::compiler::context::Context;
+use crate::core::compiler::unit::Unit;
+use crate::core::compiler::{BuildContext, CompileKind};
+use crate::sources::CRATES_IO_REGISTRY;
+use crate::util::errors::{internal, CargoResult};
+use cargo_util::ProcessBuilder;
+use std::collections::HashMap;
+use std::fmt;
+use std::hash;
+use url::Url;
+
+const DOCS_RS_URL: &str = "https://docs.rs/";
+
+/// Mode used for `std`. This is for unstable feature [`-Zrustdoc-map`][1].
+///
+/// [1]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#rustdoc-map
+#[derive(Debug, Hash)]
+pub enum RustdocExternMode {
+ /// Use a local `file://` URL.
+ Local,
+ /// Use a remote URL to <https://doc.rust-lang.org/> (default).
+ Remote,
+ /// An arbitrary URL.
+ Url(String),
+}
+
+impl From<String> for RustdocExternMode {
+ fn from(s: String) -> RustdocExternMode {
+ match s.as_ref() {
+ "local" => RustdocExternMode::Local,
+ "remote" => RustdocExternMode::Remote,
+ _ => RustdocExternMode::Url(s),
+ }
+ }
+}
+
+impl fmt::Display for RustdocExternMode {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ RustdocExternMode::Local => "local".fmt(f),
+ RustdocExternMode::Remote => "remote".fmt(f),
+ RustdocExternMode::Url(s) => s.fmt(f),
+ }
+ }
+}
+
+impl<'de> serde::de::Deserialize<'de> for RustdocExternMode {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::de::Deserializer<'de>,
+ {
+ let s = String::deserialize(deserializer)?;
+ Ok(s.into())
+ }
+}
+
+/// A map of registry names to URLs where documentation is hosted.
+/// This is for unstable feature [`-Zrustdoc-map`][1].
+///
+/// [1]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#rustdoc-map
+#[derive(serde::Deserialize, Debug)]
+#[serde(default)]
+pub struct RustdocExternMap {
+ #[serde(deserialize_with = "default_crates_io_to_docs_rs")]
+ /// * Key is the registry name in the configuration `[registries.<name>]`.
+ /// * Value is the URL where the documentation is hosted.
+ registries: HashMap<String, String>,
+ std: Option<RustdocExternMode>,
+}
+
+impl Default for RustdocExternMap {
+ fn default() -> Self {
+ Self {
+ registries: HashMap::from([(CRATES_IO_REGISTRY.into(), DOCS_RS_URL.into())]),
+ std: None,
+ }
+ }
+}
+
+fn default_crates_io_to_docs_rs<'de, D: serde::Deserializer<'de>>(
+ de: D,
+) -> Result<HashMap<String, String>, D::Error> {
+ use serde::Deserialize;
+ let mut registries = HashMap::deserialize(de)?;
+ if !registries.contains_key(CRATES_IO_REGISTRY) {
+ registries.insert(CRATES_IO_REGISTRY.into(), DOCS_RS_URL.into());
+ }
+ Ok(registries)
+}
+
+impl hash::Hash for RustdocExternMap {
+ fn hash<H: hash::Hasher>(&self, into: &mut H) {
+ self.std.hash(into);
+ for (key, value) in &self.registries {
+ key.hash(into);
+ value.hash(into);
+ }
+ }
+}
+
+/// Adds unstable flag [`--extern-html-root-url`][1] to the given `rustdoc`
+/// invocation. This is for unstable feature [`-Zrustdoc-map`][2].
+///
+/// [1]: https://doc.rust-lang.org/nightly/rustdoc/unstable-features.html#--extern-html-root-url-control-how-rustdoc-links-to-non-local-crates
+/// [2]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#rustdoc-map
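+///
+/// As a sketch: with the default extern map, a crates.io dependency such as
+/// `serde` at version `1.0.0` (illustrative) gets
+/// `--extern-html-root-url serde=https://docs.rs/serde/1.0.0/`.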
+pub fn add_root_urls(
+ cx: &Context<'_, '_>,
+ unit: &Unit,
+ rustdoc: &mut ProcessBuilder,
+) -> CargoResult<()> {
+ let config = cx.bcx.config;
+ if !config.cli_unstable().rustdoc_map {
+ log::debug!("`doc.extern-map` ignored, requires -Zrustdoc-map flag");
+ return Ok(());
+ }
+ let map = config.doc_extern_map()?;
+ let mut unstable_opts = false;
+ // Collect mapping of registry name -> index url.
+ let name2url: HashMap<&String, Url> = map
+ .registries
+ .keys()
+ .filter_map(|name| {
+ if let Ok(index_url) = config.get_registry_index(name) {
+ Some((name, index_url))
+ } else {
+ log::warn!(
+ "`doc.extern-map.{}` specifies a registry that is not defined",
+ name
+ );
+ None
+ }
+ })
+ .collect();
+ for dep in cx.unit_deps(unit) {
+ if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() {
+ for (registry, location) in &map.registries {
+ let sid = dep.unit.pkg.package_id().source_id();
+ let matches_registry = || -> bool {
+ if !sid.is_registry() {
+ return false;
+ }
+ if sid.is_crates_io() {
+ return registry == CRATES_IO_REGISTRY;
+ }
+ if let Some(index_url) = name2url.get(registry) {
+ return index_url == sid.url();
+ }
+ false
+ };
+ if matches_registry() {
+ let mut url = location.clone();
+ if !url.contains("{pkg_name}") && !url.contains("{version}") {
+ if !url.ends_with('/') {
+ url.push('/');
+ }
+ url.push_str("{pkg_name}/{version}/");
+ }
+ let url = url
+ .replace("{pkg_name}", &dep.unit.pkg.name())
+ .replace("{version}", &dep.unit.pkg.version().to_string());
+ rustdoc.arg("--extern-html-root-url");
+ rustdoc.arg(format!("{}={}", dep.unit.target.crate_name(), url));
+ unstable_opts = true;
+ }
+ }
+ }
+ }
+ let std_url = match &map.std {
+ None | Some(RustdocExternMode::Remote) => None,
+ Some(RustdocExternMode::Local) => {
+ let sysroot = &cx.bcx.target_data.info(CompileKind::Host).sysroot;
+ let html_root = sysroot.join("share").join("doc").join("rust").join("html");
+ if html_root.exists() {
+ let url = Url::from_file_path(&html_root).map_err(|()| {
+ internal(format!(
+ "`{}` failed to convert to URL",
+ html_root.display()
+ ))
+ })?;
+ Some(url.to_string())
+ } else {
+ log::warn!(
+ "`doc.extern-map.std` is \"local\", but local docs don't appear to exist at {}",
+ html_root.display()
+ );
+ None
+ }
+ }
+ Some(RustdocExternMode::Url(s)) => Some(s.to_string()),
+ };
+ if let Some(url) = std_url {
+ for name in &["std", "core", "alloc", "proc_macro"] {
+ rustdoc.arg("--extern-html-root-url");
+ rustdoc.arg(format!("{}={}", name, url));
+ unstable_opts = true;
+ }
+ }
+
+ if unstable_opts {
+ rustdoc.arg("-Zunstable-options");
+ }
+ Ok(())
+}
+
+/// Indicates whether a target should have examples scraped from it by rustdoc.
+/// Configured within Cargo.toml and only for unstable feature
+/// [`-Zrustdoc-scrape-examples`][1].
+///
+/// [1]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#scrape-examples
+#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Debug, Copy)]
+pub enum RustdocScrapeExamples {
+ Enabled,
+ Disabled,
+ Unset,
+}
+
+impl RustdocScrapeExamples {
+ pub fn is_enabled(&self) -> bool {
+ matches!(self, RustdocScrapeExamples::Enabled)
+ }
+
+ pub fn is_unset(&self) -> bool {
+ matches!(self, RustdocScrapeExamples::Unset)
+ }
+}
+
+impl BuildContext<'_, '_> {
+ /// Returns the set of [`Docscrape`] units that have a direct dependency on `unit`.
+ ///
+ /// [`RunCustomBuild`] units are excluded because we allow failures
+ /// from type checks but not build script executions.
+    /// A plain old `cargo doc` would just die if a build script execution fails,
+    /// so there is no reason for `-Zrustdoc-scrape-examples` to keep going.
+ ///
+ /// [`Docscrape`]: crate::core::compiler::CompileMode::Docscrape
+    /// [`RunCustomBuild`]: crate::core::compiler::CompileMode::RunCustomBuild
+ pub fn scrape_units_have_dep_on<'a>(&'a self, unit: &'a Unit) -> Vec<&'a Unit> {
+ self.scrape_units
+ .iter()
+ .filter(|scrape_unit| {
+ self.unit_graph[scrape_unit]
+ .iter()
+ .any(|dep| &dep.unit == unit && !dep.unit.mode.is_run_custom_build())
+ })
+ .collect()
+ }
+
+ /// Returns true if this unit is needed for doing doc-scraping and is also
+ /// allowed to fail without killing the build.
+ pub fn unit_can_fail_for_docscraping(&self, unit: &Unit) -> bool {
+ // If the unit is not a Docscrape unit, e.g. a Lib target that is
+ // checked to scrape an Example target, then we need to get the doc-scrape-examples
+ // configuration for the reverse-dependent Example target.
+ let for_scrape_units = if unit.mode.is_doc_scrape() {
+ vec![unit]
+ } else {
+ self.scrape_units_have_dep_on(unit)
+ };
+
+ if for_scrape_units.is_empty() {
+ false
+ } else {
+ // All Docscrape units must have doc-scrape-examples unset. If any are true,
+ // then the unit is not allowed to fail.
+ for_scrape_units
+ .iter()
+ .all(|unit| unit.target.doc_scrape_examples().is_unset())
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/standard_lib.rs b/src/tools/cargo/src/cargo/core/compiler/standard_lib.rs
new file mode 100644
index 000000000..b5ac0c997
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/standard_lib.rs
@@ -0,0 +1,253 @@
+//! Code for building the standard library.
+
+use crate::core::compiler::unit_dependencies::IsArtifact;
+use crate::core::compiler::UnitInterner;
+use crate::core::compiler::{CompileKind, CompileMode, RustcTargetData, Unit};
+use crate::core::profiles::{Profiles, UnitFor};
+use crate::core::resolver::features::{CliFeatures, FeaturesFor, ResolvedFeatures};
+use crate::core::resolver::HasDevUnits;
+use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace};
+use crate::ops::{self, Packages};
+use crate::util::errors::CargoResult;
+use crate::Config;
+use std::collections::{HashMap, HashSet};
+use std::path::PathBuf;
+
+use super::BuildConfig;
+
+/// Parse the `-Zbuild-std` flag.
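+///
+/// For example, a bare `-Zbuild-std` expands to `std`, `core`, `alloc`,
+/// `proc_macro`, `panic_unwind`, and `compiler_builtins`, while
+/// `-Zbuild-std=core` expands to `core` and `compiler_builtins`.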
+pub fn parse_unstable_flag(value: Option<&str>) -> Vec<String> {
+ // This is a temporary hack until there is a more principled way to
+ // declare dependencies in Cargo.toml.
+ let value = value.unwrap_or("std");
+ let mut crates: HashSet<&str> = value.split(',').collect();
+ if crates.contains("std") {
+ crates.insert("core");
+ crates.insert("alloc");
+ crates.insert("proc_macro");
+ crates.insert("panic_unwind");
+ crates.insert("compiler_builtins");
+ } else if crates.contains("core") {
+ crates.insert("compiler_builtins");
+ }
+ crates.into_iter().map(|s| s.to_string()).collect()
+}
+
+pub(crate) fn std_crates(config: &Config, units: Option<&[Unit]>) -> Option<Vec<String>> {
+ let crates = config.cli_unstable().build_std.as_ref()?.clone();
+
+ // Only build libtest if it looks like it is needed.
+    let mut crates = crates;
+ // If we know what units we're building, we can filter for libtest depending on the jobs.
+ if let Some(units) = units {
+ if units
+ .iter()
+ .any(|unit| unit.mode.is_rustc_test() && unit.target.harness())
+ {
+ // Only build libtest when libstd is built (libtest depends on libstd)
+ if crates.iter().any(|c| c == "std") && !crates.iter().any(|c| c == "test") {
+ crates.push("test".to_string());
+ }
+ }
+ } else {
+ // We don't know what jobs are going to be run, so download libtest just in case.
+ if !crates.iter().any(|c| c == "test") {
+ crates.push("test".to_string())
+ }
+ }
+
+ Some(crates)
+}
+
+/// Resolve the standard library dependencies.
+pub fn resolve_std<'cfg>(
+ ws: &Workspace<'cfg>,
+ target_data: &RustcTargetData<'cfg>,
+ build_config: &BuildConfig,
+ crates: &[String],
+) -> CargoResult<(PackageSet<'cfg>, Resolve, ResolvedFeatures)> {
+ if build_config.build_plan {
+ ws.config()
+ .shell()
+ .warn("-Zbuild-std does not currently fully support --build-plan")?;
+ }
+
+ let src_path = detect_sysroot_src_path(target_data)?;
+ let to_patch = [
+ "rustc-std-workspace-core",
+ "rustc-std-workspace-alloc",
+ "rustc-std-workspace-std",
+ ];
+ let patches = to_patch
+ .iter()
+ .map(|&name| {
+ let source_path = SourceId::for_path(&src_path.join("library").join(name))?;
+ let dep = Dependency::parse(name, None, source_path)?;
+ Ok(dep)
+ })
+ .collect::<CargoResult<Vec<_>>>()?;
+ let crates_io_url = crate::sources::CRATES_IO_INDEX.parse().unwrap();
+ let patch = HashMap::from([(crates_io_url, patches)]);
+ let members = vec![
+ String::from("library/std"),
+ String::from("library/core"),
+ String::from("library/alloc"),
+ String::from("library/test"),
+ ];
+ let ws_config = crate::core::WorkspaceConfig::Root(crate::core::WorkspaceRootConfig::new(
+ &src_path,
+ &Some(members),
+ /*default_members*/ &None,
+ /*exclude*/ &None,
+ /*inheritable*/ &None,
+ /*custom_metadata*/ &None,
+ ));
+ let virtual_manifest = crate::core::VirtualManifest::new(
+ /*replace*/ Vec::new(),
+ patch,
+ ws_config,
+ /*profiles*/ None,
+ crate::core::Features::default(),
+ None,
+ );
+
+ let config = ws.config();
+ // This is a delicate hack. In order for features to resolve correctly,
+ // the resolver needs to run a specific "current" member of the workspace.
+ // Thus, in order to set the features for `std`, we need to set `libtest`
+ // to be the "current" member. `libtest` is the root, and all other
+ // standard library crates are dependencies from there. Since none of the
+ // other crates need to alter their features, this should be fine, for
+ // now. Perhaps in the future features will be decoupled from the resolver
+ // and it will be easier to control feature selection.
+ let current_manifest = src_path.join("library/test/Cargo.toml");
+ // TODO: Consider doing something to enforce --locked? Or to prevent the
+ // lock file from being written, such as setting ephemeral.
+ let mut std_ws = Workspace::new_virtual(src_path, current_manifest, virtual_manifest, config)?;
+ // Don't require optional dependencies in this workspace, aka std's own
+ // `[dev-dependencies]`. No need for us to generate a `Resolve` which has
+ // those included because we'll never use them anyway.
+ std_ws.set_require_optional_deps(false);
+ // `test` is not in the default set because it is optional, but it needs
+ // to be part of the resolve in case we do need it.
+ let mut spec_pkgs = Vec::from(crates);
+ spec_pkgs.push("test".to_string());
+ let spec = Packages::Packages(spec_pkgs);
+ let specs = spec.to_package_id_specs(&std_ws)?;
+ let features = match &config.cli_unstable().build_std_features {
+ Some(list) => list.clone(),
+ None => vec![
+ "panic-unwind".to_string(),
+ "backtrace".to_string(),
+ "default".to_string(),
+ ],
+ };
+ let cli_features = CliFeatures::from_command_line(
+ &features, /*all_features*/ false, /*uses_default_features*/ false,
+ )?;
+ let resolve = ops::resolve_ws_with_opts(
+ &std_ws,
+ target_data,
+ &build_config.requested_kinds,
+ &cli_features,
+ &specs,
+ HasDevUnits::No,
+ crate::core::resolver::features::ForceAllTargets::No,
+ )?;
+ Ok((
+ resolve.pkg_set,
+ resolve.targeted_resolve,
+ resolve.resolved_features,
+ ))
+}
+
+/// Generate a list of root `Unit`s for the standard library.
+///
+/// The given slice of crate names is the root set.
+pub fn generate_std_roots(
+ crates: &[String],
+ std_resolve: &Resolve,
+ std_features: &ResolvedFeatures,
+ kinds: &[CompileKind],
+ package_set: &PackageSet<'_>,
+ interner: &UnitInterner,
+ profiles: &Profiles,
+) -> CargoResult<HashMap<CompileKind, Vec<Unit>>> {
+ // Generate the root Units for the standard library.
+ let std_ids = crates
+ .iter()
+ .map(|crate_name| std_resolve.query(crate_name))
+ .collect::<CargoResult<Vec<PackageId>>>()?;
+ // Convert PackageId to Package.
+ let std_pkgs = package_set.get_many(std_ids)?;
+ // Generate a map of Units for each kind requested.
+ let mut ret = HashMap::new();
+ for pkg in std_pkgs {
+ let lib = pkg
+ .targets()
+ .iter()
+ .find(|t| t.is_lib())
+ .expect("std has a lib");
+        // I don't think we need to bother with Check here; the difference
+        // in time is minimal, and the difference in caching is
+        // significant.
+ let mode = CompileMode::Build;
+ let features = std_features.activated_features(pkg.package_id(), FeaturesFor::NormalOrDev);
+ for kind in kinds {
+ let list = ret.entry(*kind).or_insert_with(Vec::new);
+ let unit_for = UnitFor::new_normal(*kind);
+ let profile = profiles.get_profile(
+ pkg.package_id(),
+ /*is_member*/ false,
+ /*is_local*/ false,
+ unit_for,
+ *kind,
+ );
+ list.push(interner.intern(
+ pkg,
+ lib,
+ profile,
+ *kind,
+ mode,
+ features.clone(),
+ /*is_std*/ true,
+ /*dep_hash*/ 0,
+ IsArtifact::No,
+ None,
+ ));
+ }
+ }
+ Ok(ret)
+}
+
+fn detect_sysroot_src_path(target_data: &RustcTargetData<'_>) -> CargoResult<PathBuf> {
+ if let Some(s) = target_data.config.get_env_os("__CARGO_TESTS_ONLY_SRC_ROOT") {
+ return Ok(s.into());
+ }
+
+ // NOTE: This is temporary until we figure out how to acquire the source.
+ let src_path = target_data
+ .info(CompileKind::Host)
+ .sysroot
+ .join("lib")
+ .join("rustlib")
+ .join("src")
+ .join("rust");
+ let lock = src_path.join("Cargo.lock");
+ if !lock.exists() {
+ let msg = format!(
+ "{:?} does not exist, unable to build with the standard \
+ library, try:\n rustup component add rust-src",
+ lock
+ );
+ match target_data.config.get_env("RUSTUP_TOOLCHAIN") {
+ Ok(rustup_toolchain) => {
+ anyhow::bail!("{} --toolchain {}", msg, rustup_toolchain);
+ }
+ Err(_) => {
+ anyhow::bail!(msg);
+ }
+ }
+ }
+ Ok(src_path)
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/timings.js b/src/tools/cargo/src/cargo/core/compiler/timings.js
new file mode 100644
index 000000000..986070ab0
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/timings.js
@@ -0,0 +1,470 @@
+// Position of the vertical axis.
+const X_LINE = 50;
+// General-use margin size.
+const MARGIN = 5;
+// Position of the horizontal axis, relative to the bottom.
+const Y_LINE = 35;
+// Minimum distance between time tick labels.
+const MIN_TICK_DIST = 50;
+// Radius for rounded rectangle corners.
+const RADIUS = 3;
+// Height of unit boxes.
+const BOX_HEIGHT = 25;
+// Distance between Y tick marks on the unit graph.
+const Y_TICK_DIST = BOX_HEIGHT + 2;
+// Rects used for mouseover detection.
+// Objects of {x, y, x2, y2, i} where `i` is the index into UNIT_DATA.
+let HIT_BOXES = [];
+// Index into UNIT_DATA of the last unit hovered over by mouse.
+let LAST_HOVER = null;
+// Key is unit index, value is {x, y, width, rmeta_x} of the box.
+let UNIT_COORDS = {};
+// Map of unit index to the index it was unlocked by.
+let REVERSE_UNIT_DEPS = {};
+let REVERSE_UNIT_RMETA_DEPS = {};
+for (let n=0; n<UNIT_DATA.length; n++) {
+ let unit = UNIT_DATA[n];
+ for (let unlocked of unit.unlocked_units) {
+ REVERSE_UNIT_DEPS[unlocked] = n;
+ }
+ for (let unlocked of unit.unlocked_rmeta_units) {
+ REVERSE_UNIT_RMETA_DEPS[unlocked] = n;
+ }
+}
+
+function render_pipeline_graph() {
+ if (UNIT_DATA.length == 0) {
+ return;
+ }
+ let g = document.getElementById('pipeline-graph');
+ HIT_BOXES.length = 0;
+ g.onmousemove = pipeline_mousemove;
+ const min_time = document.getElementById('min-unit-time').valueAsNumber;
+
+ const units = UNIT_DATA.filter(unit => unit.duration >= min_time);
+
+ const graph_height = Y_TICK_DIST * units.length;
+ const {ctx, graph_width, canvas_width, canvas_height, px_per_sec} = draw_graph_axes('pipeline-graph', graph_height);
+ const container = document.getElementById('pipeline-container');
+ container.style.width = canvas_width;
+ container.style.height = canvas_height;
+
+ // Canvas for hover highlights. This is a separate layer to improve performance.
+ const linectx = setup_canvas('pipeline-graph-lines', canvas_width, canvas_height);
+ linectx.clearRect(0, 0, canvas_width, canvas_height);
+
+ // Draw Y tick marks.
+ for (let n=1; n<units.length; n++) {
+ const y = MARGIN + Y_TICK_DIST * n;
+ ctx.beginPath();
+ ctx.moveTo(X_LINE, y);
+ ctx.lineTo(X_LINE-5, y);
+ ctx.stroke();
+ }
+
+ // Draw Y labels.
+ ctx.textAlign = 'end';
+ ctx.textBaseline = 'middle';
+ for (let n=0; n<units.length; n++) {
+ let y = MARGIN + Y_TICK_DIST * n + Y_TICK_DIST / 2;
+ ctx.fillText(n+1, X_LINE-4, y);
+ }
+
+ // Draw the graph.
+ ctx.save();
+ ctx.translate(X_LINE, MARGIN);
+
+ // Compute x,y coordinate of each block.
+ UNIT_COORDS = {};
+  for (let i=0; i<units.length; i++) {
+ let unit = units[i];
+ let y = i * Y_TICK_DIST + 1;
+ let x = px_per_sec * unit.start;
+ let rmeta_x = null;
+ if (unit.rmeta_time != null) {
+ rmeta_x = x + px_per_sec * unit.rmeta_time;
+ }
+ let width = Math.max(px_per_sec * unit.duration, 1.0);
+ UNIT_COORDS[unit.i] = {x, y, width, rmeta_x};
+ }
+
+ // Draw the blocks.
+  for (let i=0; i<units.length; i++) {
+ let unit = units[i];
+ let {x, y, width, rmeta_x} = UNIT_COORDS[unit.i];
+
+ HIT_BOXES.push({x: X_LINE+x, y:MARGIN+y, x2: X_LINE+x+width, y2: MARGIN+y+BOX_HEIGHT, i: unit.i});
+
+ ctx.beginPath();
+ ctx.fillStyle = unit.mode == 'run-custom-build' ? '#f0b165' : '#95cce8';
+ roundedRect(ctx, x, y, width, BOX_HEIGHT, RADIUS);
+ ctx.fill();
+
+ if (unit.rmeta_time != null) {
+ ctx.beginPath();
+ ctx.fillStyle = '#aa95e8';
+ let ctime = unit.duration - unit.rmeta_time;
+ roundedRect(ctx, rmeta_x, y, px_per_sec * ctime, BOX_HEIGHT, RADIUS);
+ ctx.fill();
+ }
+ ctx.fillStyle = "#000";
+ ctx.textAlign = 'start';
+ ctx.textBaseline = 'middle';
+ ctx.font = '14px sans-serif';
+ const label = `${unit.name}${unit.target} ${unit.duration}s`;
+ const text_info = ctx.measureText(label);
+ const label_x = Math.min(x + 5.0, canvas_width - text_info.width - X_LINE);
+ ctx.fillText(label, label_x, y + BOX_HEIGHT / 2);
+ draw_dep_lines(ctx, unit.i, false);
+ }
+ ctx.restore();
+}
+
+// Draws lines from the given unit to the units it unlocks.
+function draw_dep_lines(ctx, unit_idx, highlighted) {
+ const unit = UNIT_DATA[unit_idx];
+ const {x, y, rmeta_x} = UNIT_COORDS[unit_idx];
+ ctx.save();
+ for (const unlocked of unit.unlocked_units) {
+ draw_one_dep_line(ctx, x, y, unlocked, highlighted);
+ }
+ for (const unlocked of unit.unlocked_rmeta_units) {
+ draw_one_dep_line(ctx, rmeta_x, y, unlocked, highlighted);
+ }
+ ctx.restore();
+}
+
+function draw_one_dep_line(ctx, from_x, from_y, to_unit, highlighted) {
+ if (to_unit in UNIT_COORDS) {
+ let {x: u_x, y: u_y} = UNIT_COORDS[to_unit];
+ ctx.strokeStyle = highlighted ? '#000' : '#ddd';
+ ctx.setLineDash([2]);
+ ctx.beginPath();
+ ctx.moveTo(from_x, from_y+BOX_HEIGHT/2);
+ ctx.lineTo(from_x-5, from_y+BOX_HEIGHT/2);
+ ctx.lineTo(from_x-5, u_y+BOX_HEIGHT/2);
+ ctx.lineTo(u_x, u_y+BOX_HEIGHT/2);
+ ctx.stroke();
+ }
+}
+
+function render_timing_graph() {
+ if (CONCURRENCY_DATA.length == 0) {
+ return;
+ }
+ const HEIGHT = 400;
+ const AXIS_HEIGHT = HEIGHT - MARGIN - Y_LINE;
+ const TOP_MARGIN = 10;
+ const GRAPH_HEIGHT = AXIS_HEIGHT - TOP_MARGIN;
+
+ const {canvas_width, graph_width, ctx} = draw_graph_axes('timing-graph', AXIS_HEIGHT);
+
+ // Draw Y tick marks and labels.
+ let max_v = 0;
+  for (const c of CONCURRENCY_DATA) {
+ max_v = Math.max(max_v, c.active, c.waiting, c.inactive);
+ }
+ const px_per_v = GRAPH_HEIGHT / max_v;
+ const {step, tick_dist, num_ticks} = split_ticks(max_v, px_per_v, GRAPH_HEIGHT);
+ ctx.textAlign = 'end';
+  for (let n=0; n<num_ticks; n++) {
+ let y = HEIGHT - Y_LINE - ((n + 1) * tick_dist);
+ ctx.beginPath();
+ ctx.moveTo(X_LINE, y);
+ ctx.lineTo(X_LINE-5, y);
+ ctx.stroke();
+ ctx.fillText((n+1) * step, X_LINE-10, y+5);
+ }
+
+ // Label the Y axis.
+ let label_y = (HEIGHT - Y_LINE) / 2;
+ ctx.save();
+ ctx.translate(15, label_y);
+ ctx.rotate(3*Math.PI/2);
+ ctx.textAlign = 'center';
+ ctx.fillText('# Units', 0, 0);
+ ctx.restore();
+
+ // Draw the graph.
+ ctx.save();
+ ctx.translate(X_LINE, MARGIN);
+
+ function coord(t, v) {
+ return {
+ x: graph_width * (t/DURATION),
+ y: TOP_MARGIN + GRAPH_HEIGHT * (1.0 - (v / max_v))
+ };
+ }
+
+ const cpuFillStyle = 'rgba(250, 119, 0, 0.2)';
+ if (CPU_USAGE.length > 1) {
+ ctx.beginPath();
+ ctx.fillStyle = cpuFillStyle;
+ let bottomLeft = coord(CPU_USAGE[0][0], 0);
+ ctx.moveTo(bottomLeft.x, bottomLeft.y);
+ for (let i=0; i < CPU_USAGE.length; i++) {
+ let [time, usage] = CPU_USAGE[i];
+ let {x, y} = coord(time, usage / 100.0 * max_v);
+ ctx.lineTo(x, y);
+ }
+ let bottomRight = coord(CPU_USAGE[CPU_USAGE.length - 1][0], 0);
+ ctx.lineTo(bottomRight.x, bottomRight.y);
+ ctx.fill();
+ }
+
+ function draw_line(style, key) {
+ let first = CONCURRENCY_DATA[0];
+ let last = coord(first.t, key(first));
+ ctx.strokeStyle = style;
+ ctx.beginPath();
+ ctx.moveTo(last.x, last.y);
+ for (let i=1; i<CONCURRENCY_DATA.length; i++) {
+ let c = CONCURRENCY_DATA[i];
+ let {x, y} = coord(c.t, key(c));
+ ctx.lineTo(x, last.y);
+ ctx.lineTo(x, y);
+ last = {x, y};
+ }
+ ctx.stroke();
+ }
+
+ draw_line('blue', function(c) {return c.inactive;});
+ draw_line('red', function(c) {return c.waiting;});
+ draw_line('green', function(c) {return c.active;});
+
+ // Draw a legend.
+ ctx.restore();
+ ctx.save();
+ ctx.translate(canvas_width-200, MARGIN);
+ // background
+ ctx.fillStyle = '#fff';
+ ctx.strokeStyle = '#000';
+ ctx.lineWidth = 1;
+  ctx.textBaseline = 'middle';
+ ctx.textAlign = 'start';
+ ctx.beginPath();
+ ctx.rect(0, 0, 150, 82);
+ ctx.stroke();
+ ctx.fill();
+
+  ctx.fillStyle = '#000';
+ ctx.beginPath();
+ ctx.lineWidth = 2;
+ ctx.strokeStyle = 'red';
+ ctx.moveTo(5, 10);
+ ctx.lineTo(50, 10);
+ ctx.stroke();
+ ctx.fillText('Waiting', 54, 11);
+
+ ctx.beginPath();
+ ctx.strokeStyle = 'blue';
+ ctx.moveTo(5, 30);
+ ctx.lineTo(50, 30);
+ ctx.stroke();
+ ctx.fillText('Inactive', 54, 31);
+
+ ctx.beginPath();
+ ctx.strokeStyle = 'green';
+ ctx.moveTo(5, 50);
+ ctx.lineTo(50, 50);
+ ctx.stroke();
+ ctx.fillText('Active', 54, 51);
+
+ ctx.beginPath();
+ ctx.fillStyle = cpuFillStyle;
+ ctx.fillRect(15, 60, 30, 15);
+ ctx.fill();
+ ctx.fillStyle = 'black';
+ ctx.fillText('CPU Usage', 54, 71);
+
+ ctx.restore();
+}
+
+function setup_canvas(id, width, height) {
+ let g = document.getElementById(id);
+ let dpr = window.devicePixelRatio || 1;
+ g.width = width * dpr;
+ g.height = height * dpr;
+ g.style.width = width + 'px';
+ g.style.height = height + 'px';
+ let ctx = g.getContext('2d');
+ ctx.scale(dpr, dpr);
+ return ctx;
+}
+
+function draw_graph_axes(id, graph_height) {
+ const scale = document.getElementById('scale').valueAsNumber;
+ // Cap the size of the graph. It is hard to view if it is too large, and
+ // browsers may not render a large graph because it takes too much memory.
+ // 4096 is still ridiculously large, and probably won't render on mobile
+ // browsers, but should be ok for many desktop environments.
+ const graph_width = Math.min(scale * DURATION, 4096);
+ const px_per_sec = graph_width / DURATION;
+ const canvas_width = Math.max(graph_width + X_LINE + 30, X_LINE + 250);
+ const canvas_height = graph_height + MARGIN + Y_LINE;
+ let ctx = setup_canvas(id, canvas_width, canvas_height);
+ ctx.fillStyle = '#f7f7f7';
+ ctx.fillRect(0, 0, canvas_width, canvas_height);
+
+ ctx.lineWidth = 2;
+ ctx.font = '16px sans-serif';
+ ctx.textAlign = 'center';
+
+ // Draw main axes.
+ ctx.beginPath();
+ ctx.moveTo(X_LINE, MARGIN);
+ ctx.lineTo(X_LINE, graph_height + MARGIN);
+ ctx.lineTo(X_LINE+graph_width+20, graph_height + MARGIN);
+ ctx.stroke();
+
+ // Draw X tick marks.
+ const {step, tick_dist, num_ticks} = split_ticks(DURATION, px_per_sec, graph_width);
+ ctx.fillStyle = '#303030';
+ for (let n=0; n<num_ticks; n++) {
+ const x = X_LINE + ((n + 1) * tick_dist);
+ ctx.beginPath();
+ ctx.moveTo(x, canvas_height-Y_LINE);
+ ctx.lineTo(x, canvas_height-Y_LINE+5);
+ ctx.stroke();
+
+ ctx.fillText(`${(n+1) * step}s`, x, canvas_height - Y_LINE + 20);
+ }
+
+ // Draw vertical lines.
+ ctx.strokeStyle = '#e6e6e6';
+ ctx.setLineDash([2, 4]);
+ for (let n=0; n<num_ticks; n++) {
+ const x = X_LINE + ((n + 1) * tick_dist);
+ ctx.beginPath();
+ ctx.moveTo(x, MARGIN);
+ ctx.lineTo(x, MARGIN+graph_height);
+ ctx.stroke();
+ }
+ ctx.strokeStyle = '#000';
+ ctx.setLineDash([]);
+ return {canvas_width, canvas_height, graph_width, graph_height, ctx, px_per_sec};
+}
+
+// Determine the spacing and number of ticks along an axis.
+function split_ticks(max_value, px_per_v, max_px) {
+ const max_ticks = Math.floor(max_px / MIN_TICK_DIST);
+ if (max_ticks <= 1) {
+ // Graph is too small for even 1 tick.
+ return {step: max_value, tick_dist: max_px, num_ticks: 1};
+ }
+ let step;
+ if (max_value <= max_ticks) {
+ step = 1;
+ } else if (max_value <= max_ticks * 2) {
+ step = 2;
+ } else if (max_value <= max_ticks * 4) {
+ step = 4;
+ } else if (max_value <= max_ticks * 5) {
+ step = 5;
+ } else {
+ step = 10;
+ let count = 0;
+ while (true) {
+ if (count > 100) {
+ throw Error("tick loop too long");
+ }
+ count += 1;
+ if (max_value <= max_ticks * step) {
+ break;
+ }
+ step += 10;
+ }
+ }
+ const tick_dist = px_per_v * step;
+ const num_ticks = Math.floor(max_value / step);
+ return {step, tick_dist, num_ticks};
+}
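+
+// A quick illustrative check of the above (not part of the original source,
+// and assuming MIN_TICK_DIST is 50): split_ticks(37, 10, 370) computes
+// max_ticks = floor(370 / 50) = 7; since 37 exceeds 7, 14, 28 and 35, the
+// final branch settles on step = 10, giving tick_dist = 100px and
+// num_ticks = floor(37 / 10) = 3, i.e. ticks at 10, 20 and 30.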
+
+function codegen_time(unit) {
+ if (unit.rmeta_time == null) {
+ return null;
+ }
+ let ctime = unit.duration - unit.rmeta_time;
+ return [unit.rmeta_time, ctime];
+}
+
+function roundedRect(ctx, x, y, width, height, r) {
+ r = Math.min(r, width, height);
+ ctx.beginPath();
+ ctx.moveTo(x+r, y);
+ ctx.lineTo(x+width-r, y);
+ ctx.arc(x+width-r, y+r, r, 3*Math.PI/2, 0);
+ ctx.lineTo(x+width, y+height-r);
+ ctx.arc(x+width-r, y+height-r, r, 0, Math.PI/2);
+ ctx.lineTo(x+r, y+height);
+ ctx.arc(x+r, y+height-r, r, Math.PI/2, Math.PI);
+ ctx.lineTo(x, y+r);
+ ctx.arc(x+r, y+r, r, Math.PI, 3*Math.PI/2);
+ ctx.closePath();
+}
+
+function pipeline_mouse_hit(event) {
+ // This brute-force method can be optimized if needed.
+ for (let box of HIT_BOXES) {
+ if (event.offsetX >= box.x && event.offsetX <= box.x2 &&
+ event.offsetY >= box.y && event.offsetY <= box.y2) {
+ return box;
+ }
+ }
+}
+
+function pipeline_mousemove(event) {
+ // Highlight dependency lines on mouse hover.
+ let box = pipeline_mouse_hit(event);
+ if (box) {
+ if (box.i != LAST_HOVER) {
+ LAST_HOVER = box.i;
+ let g = document.getElementById('pipeline-graph-lines');
+ let ctx = g.getContext('2d');
+ ctx.clearRect(0, 0, g.width, g.height);
+ ctx.save();
+ ctx.translate(X_LINE, MARGIN);
+ ctx.lineWidth = 2;
+ draw_dep_lines(ctx, box.i, true);
+
+ if (box.i in REVERSE_UNIT_DEPS) {
+ const dep_unit = REVERSE_UNIT_DEPS[box.i];
+ if (dep_unit in UNIT_COORDS) {
+ const {x, y, rmeta_x} = UNIT_COORDS[dep_unit];
+ draw_one_dep_line(ctx, x, y, box.i, true);
+ }
+ }
+ if (box.i in REVERSE_UNIT_RMETA_DEPS) {
+ const dep_unit = REVERSE_UNIT_RMETA_DEPS[box.i];
+ if (dep_unit in UNIT_COORDS) {
+ const {x, y, rmeta_x} = UNIT_COORDS[dep_unit];
+ draw_one_dep_line(ctx, rmeta_x, y, box.i, true);
+ }
+ }
+ ctx.restore();
+ }
+ }
+}
+
+render_pipeline_graph();
+render_timing_graph();
+
+// Set up and handle controls.
+{
+ const range = document.getElementById('min-unit-time');
+ const time_output = document.getElementById('min-unit-time-output');
+ time_output.innerHTML = `${range.value}s`;
+ range.oninput = event => {
+ time_output.innerHTML = `${range.value}s`;
+ render_pipeline_graph();
+ };
+
+ const scale = document.getElementById('scale');
+ const scale_output = document.getElementById('scale-output');
+ scale_output.innerHTML = `${scale.value}`;
+ scale.oninput = event => {
+ scale_output.innerHTML = `${scale.value}`;
+ render_pipeline_graph();
+ render_timing_graph();
+ };
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/timings.rs b/src/tools/cargo/src/cargo/core/compiler/timings.rs
new file mode 100644
index 000000000..0e0dc03ee
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/timings.rs
@@ -0,0 +1,750 @@
+//! Timing tracking.
+//!
+//! This module implements some simple tracking of how long it takes for
+//! different units to compile.
+
+use super::{CompileMode, Unit};
+use crate::core::compiler::job_queue::JobId;
+use crate::core::compiler::{BuildContext, Context, TimingOutput};
+use crate::core::PackageId;
+use crate::util::cpu::State;
+use crate::util::machine_message::{self, Message};
+use crate::util::{CargoResult, Config};
+use anyhow::Context as _;
+use cargo_util::paths;
+use std::collections::HashMap;
+use std::io::{BufWriter, Write};
+use std::thread::available_parallelism;
+use std::time::{Duration, Instant, SystemTime};
+
+/// Tracking information for the entire build.
+///
+/// Methods on this structure are generally called from the main thread of a
+/// running [`JobQueue`] instance (specifically `DrainState`) when the queue
+/// receives messages from spawned off threads.
+///
+/// [`JobQueue`]: super::JobQueue
+pub struct Timings<'cfg> {
+ config: &'cfg Config,
+ /// Whether or not timings should be captured.
+ enabled: bool,
+ /// If true, saves an HTML report to disk.
+ report_html: bool,
+ /// If true, emits machine-readable JSON with timing information.
+ report_json: bool,
+ /// When Cargo started.
+ start: Instant,
+ /// A rendered string of when compilation started.
+ start_str: String,
+ /// A summary of the root units.
+ ///
+ /// Tuples of `(package_description, target_descriptions)`.
+ root_targets: Vec<(String, Vec<String>)>,
+ /// The build profile.
+ profile: String,
+ /// Total number of fresh units.
+ total_fresh: u32,
+ /// Total number of dirty units.
+ total_dirty: u32,
+ /// Time tracking for each individual unit.
+ unit_times: Vec<UnitTime>,
+ /// Units that are in the process of being built.
+ /// When they finish, they are moved to `unit_times`.
+ active: HashMap<JobId, UnitTime>,
+ /// Concurrency-tracking information. This is periodically updated while
+ /// compilation progresses.
+ concurrency: Vec<Concurrency>,
+ /// Last recorded state of the system's CPUs, and when it was recorded.
+ last_cpu_state: Option<State>,
+ last_cpu_recording: Instant,
+ /// Recorded CPU states, stored as tuples. First element is when the
+ /// recording was taken and second element is percentage usage of the
+ /// system.
+ cpu_usage: Vec<(f64, f64)>,
+}
+
+/// Tracking information for an individual unit.
+struct UnitTime {
+ unit: Unit,
+ /// A string describing the cargo target.
+ target: String,
+ /// The time when this unit started as an offset in seconds from `Timings::start`.
+ start: f64,
+ /// Total time to build this unit in seconds.
+ duration: f64,
+ /// The time when the `.rmeta` file was generated, an offset in seconds
+ /// from `start`.
+ rmeta_time: Option<f64>,
+ /// Reverse deps that are freed to run after this unit finished.
+ unlocked_units: Vec<Unit>,
+ /// Same as `unlocked_units`, but unlocked by rmeta.
+ unlocked_rmeta_units: Vec<Unit>,
+}
+
+/// Periodic concurrency tracking information.
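+///
+/// One of these is serialized to JSON for `timings.js` roughly as
+/// `{"t": 1.25, "active": 4, "waiting": 2, "inactive": 7}` (illustrative
+/// values only).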
+#[derive(serde::Serialize)]
+struct Concurrency {
+ /// Time as an offset in seconds from `Timings::start`.
+ t: f64,
+ /// Number of units currently running.
+ active: usize,
+ /// Number of units that could run, but are waiting for a jobserver token.
+ waiting: usize,
+ /// Number of units that are not yet ready, because they are waiting for
+ /// dependencies to finish.
+ inactive: usize,
+}
+
+impl<'cfg> Timings<'cfg> {
+ pub fn new(bcx: &BuildContext<'_, 'cfg>, root_units: &[Unit]) -> Timings<'cfg> {
+ let has_report = |what| bcx.build_config.timing_outputs.contains(&what);
+ let report_html = has_report(TimingOutput::Html);
+ let report_json = has_report(TimingOutput::Json);
+ let enabled = report_html | report_json;
+
+ let mut root_map: HashMap<PackageId, Vec<String>> = HashMap::new();
+ for unit in root_units {
+ let target_desc = unit.target.description_named();
+ root_map
+ .entry(unit.pkg.package_id())
+ .or_default()
+ .push(target_desc);
+ }
+ let root_targets = root_map
+ .into_iter()
+ .map(|(pkg_id, targets)| {
+ let pkg_desc = format!("{} {}", pkg_id.name(), pkg_id.version());
+ (pkg_desc, targets)
+ })
+ .collect();
+ let start_str = humantime::format_rfc3339_seconds(SystemTime::now()).to_string();
+ let profile = bcx.build_config.requested_profile.to_string();
+ let last_cpu_state = if enabled {
+ match State::current() {
+ Ok(state) => Some(state),
+ Err(e) => {
+ log::info!("failed to get CPU state, CPU tracking disabled: {:?}", e);
+ None
+ }
+ }
+ } else {
+ None
+ };
+
+ Timings {
+ config: bcx.config,
+ enabled,
+ report_html,
+ report_json,
+ start: bcx.config.creation_time(),
+ start_str,
+ root_targets,
+ profile,
+ total_fresh: 0,
+ total_dirty: 0,
+ unit_times: Vec::new(),
+ active: HashMap::new(),
+ concurrency: Vec::new(),
+ last_cpu_state,
+ last_cpu_recording: Instant::now(),
+ cpu_usage: Vec::new(),
+ }
+ }
+
+ /// Mark that a unit has started running.
+ pub fn unit_start(&mut self, id: JobId, unit: Unit) {
+ if !self.enabled {
+ return;
+ }
+ let mut target = if unit.target.is_lib() && unit.mode == CompileMode::Build {
+ // Special case for brevity, since most dependencies hit
+ // this path.
+ "".to_string()
+ } else {
+ format!(" {}", unit.target.description_named())
+ };
+ match unit.mode {
+ CompileMode::Test => target.push_str(" (test)"),
+ CompileMode::Build => {}
+ CompileMode::Check { test: true } => target.push_str(" (check-test)"),
+ CompileMode::Check { test: false } => target.push_str(" (check)"),
+ CompileMode::Bench => target.push_str(" (bench)"),
+ CompileMode::Doc { .. } => target.push_str(" (doc)"),
+ CompileMode::Doctest => target.push_str(" (doc test)"),
+ CompileMode::Docscrape => target.push_str(" (doc scrape)"),
+ CompileMode::RunCustomBuild => target.push_str(" (run)"),
+ }
+ let unit_time = UnitTime {
+ unit,
+ target,
+ start: self.start.elapsed().as_secs_f64(),
+ duration: 0.0,
+ rmeta_time: None,
+ unlocked_units: Vec::new(),
+ unlocked_rmeta_units: Vec::new(),
+ };
+ assert!(self.active.insert(id, unit_time).is_none());
+ }
+
+ /// Mark that the `.rmeta` file was generated.
+ pub fn unit_rmeta_finished(&mut self, id: JobId, unlocked: Vec<&Unit>) {
+ if !self.enabled {
+ return;
+ }
+ // `id` may not always be active. "fresh" units unconditionally
+ // generate `Message::Finish`, but this active map only tracks dirty
+ // units.
+ let unit_time = match self.active.get_mut(&id) {
+ Some(ut) => ut,
+ None => return,
+ };
+ let t = self.start.elapsed().as_secs_f64();
+ unit_time.rmeta_time = Some(t - unit_time.start);
+ assert!(unit_time.unlocked_rmeta_units.is_empty());
+ unit_time
+ .unlocked_rmeta_units
+ .extend(unlocked.iter().cloned().cloned());
+ }
+
+ /// Mark that a unit has finished running.
+ pub fn unit_finished(&mut self, id: JobId, unlocked: Vec<&Unit>) {
+ if !self.enabled {
+ return;
+ }
+ // See note above in `unit_rmeta_finished`, this may not always be active.
+ let mut unit_time = match self.active.remove(&id) {
+ Some(ut) => ut,
+ None => return,
+ };
+ let t = self.start.elapsed().as_secs_f64();
+ unit_time.duration = t - unit_time.start;
+ assert!(unit_time.unlocked_units.is_empty());
+ unit_time
+ .unlocked_units
+ .extend(unlocked.iter().cloned().cloned());
+ if self.report_json {
+ let msg = machine_message::TimingInfo {
+ package_id: unit_time.unit.pkg.package_id(),
+ target: &unit_time.unit.target,
+ mode: unit_time.unit.mode,
+ duration: unit_time.duration,
+ rmeta_time: unit_time.rmeta_time,
+ }
+ .to_json_string();
+ crate::drop_println!(self.config, "{}", msg);
+ }
+ self.unit_times.push(unit_time);
+ }
+
+ /// This is called periodically to mark the concurrency of internal structures.
+ pub fn mark_concurrency(&mut self, active: usize, waiting: usize, inactive: usize) {
+ if !self.enabled {
+ return;
+ }
+ let c = Concurrency {
+ t: self.start.elapsed().as_secs_f64(),
+ active,
+ waiting,
+ inactive,
+ };
+ self.concurrency.push(c);
+ }
+
+ /// Mark that a fresh unit was encountered. (No re-compile needed)
+ pub fn add_fresh(&mut self) {
+ self.total_fresh += 1;
+ }
+
+ /// Mark that a dirty unit was encountered. (Re-compile needed)
+ pub fn add_dirty(&mut self) {
+ self.total_dirty += 1;
+ }
+
+ /// Take a sample of CPU usage.
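+ ///
+ /// For example (illustrative numbers only): if 35% of CPU time since the
+ /// previous sample was idle, this pushes `(seconds_since_start, 65.0)` onto
+ /// `cpu_usage`.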
+ pub fn record_cpu(&mut self) {
+ if !self.enabled {
+ return;
+ }
+ let prev = match &mut self.last_cpu_state {
+ Some(state) => state,
+ None => return,
+ };
+ // Don't take samples too frequently, even if requested.
+ let now = Instant::now();
+ if self.last_cpu_recording.elapsed() < Duration::from_millis(100) {
+ return;
+ }
+ let current = match State::current() {
+ Ok(s) => s,
+ Err(e) => {
+ log::info!("failed to get CPU state: {:?}", e);
+ return;
+ }
+ };
+ let pct_idle = current.idle_since(prev);
+ *prev = current;
+ self.last_cpu_recording = now;
+ let dur = now.duration_since(self.start).as_secs_f64();
+ self.cpu_usage.push((dur, 100.0 - pct_idle));
+ }
+
+ /// Call this when all units are finished.
+ pub fn finished(
+ &mut self,
+ cx: &Context<'_, '_>,
+ error: &Option<anyhow::Error>,
+ ) -> CargoResult<()> {
+ if !self.enabled {
+ return Ok(());
+ }
+ self.mark_concurrency(0, 0, 0);
+ self.unit_times
+ .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap());
+ if self.report_html {
+ self.report_html(cx, error)
+ .with_context(|| "failed to save timing report")?;
+ }
+ Ok(())
+ }
+
+ /// Save HTML report to disk.
+ fn report_html(&self, cx: &Context<'_, '_>, error: &Option<anyhow::Error>) -> CargoResult<()> {
+ let duration = self.start.elapsed().as_secs_f64();
+ let timestamp = self.start_str.replace(&['-', ':'][..], "");
+ let timings_path = cx.files().host_root().join("cargo-timings");
+ paths::create_dir_all(&timings_path)?;
+ let filename = timings_path.join(format!("cargo-timing-{}.html", timestamp));
+ let mut f = BufWriter::new(paths::create(&filename)?);
+ let roots: Vec<&str> = self
+ .root_targets
+ .iter()
+ .map(|(name, _targets)| name.as_str())
+ .collect();
+ f.write_all(HTML_TMPL.replace("{ROOTS}", &roots.join(", ")).as_bytes())?;
+ self.write_summary_table(&mut f, duration, cx.bcx, error)?;
+ f.write_all(HTML_CANVAS.as_bytes())?;
+ self.write_unit_table(&mut f)?;
+ // It helps with pixel alignment to use whole numbers.
+ writeln!(
+ f,
+ "<script>\n\
+ DURATION = {};",
+ f64::ceil(duration) as u32
+ )?;
+ self.write_js_data(&mut f)?;
+ write!(
+ f,
+ "{}\n\
+ </script>\n\
+ </body>\n\
+ </html>\n\
+ ",
+ include_str!("timings.js")
+ )?;
+ drop(f);
+ let msg = format!(
+ "report saved to {}",
+ std::env::current_dir()
+ .unwrap_or_default()
+ .join(&filename)
+ .display()
+ );
+ let unstamped_filename = timings_path.join("cargo-timing.html");
+ paths::link_or_copy(&filename, &unstamped_filename)?;
+ self.config
+ .shell()
+ .status_with_color("Timing", msg, termcolor::Color::Cyan)?;
+ Ok(())
+ }
+
+ /// Render the summary table.
+ fn write_summary_table(
+ &self,
+ f: &mut impl Write,
+ duration: f64,
+ bcx: &BuildContext<'_, '_>,
+ error: &Option<anyhow::Error>,
+ ) -> CargoResult<()> {
+ let targets: Vec<String> = self
+ .root_targets
+ .iter()
+ .map(|(name, targets)| format!("{} ({})", name, targets.join(", ")))
+ .collect();
+ let targets = targets.join("<br>");
+ let time_human = if duration > 60.0 {
+ format!(" ({}m {:.1}s)", duration as u32 / 60, duration % 60.0)
+ } else {
+ "".to_string()
+ };
+ let total_time = format!("{:.1}s{}", duration, time_human);
+ let max_concurrency = self.concurrency.iter().map(|c| c.active).max().unwrap();
+ let num_cpus = available_parallelism()
+ .map(|x| x.get().to_string())
+ .unwrap_or_else(|_| "n/a".into());
+ let rustc_info = render_rustc_info(bcx);
+ let error_msg = match error {
+ Some(e) => format!(
+ r#"\
+ <tr>
+ <td class="error-text">Error:</td><td>{}</td>
+ </tr>
+"#,
+ e
+ ),
+ None => "".to_string(),
+ };
+ write!(
+ f,
+ r#"
+<table class="my-table summary-table">
+ <tr>
+ <td>Targets:</td><td>{}</td>
+ </tr>
+ <tr>
+ <td>Profile:</td><td>{}</td>
+ </tr>
+ <tr>
+ <td>Fresh units:</td><td>{}</td>
+ </tr>
+ <tr>
+ <td>Dirty units:</td><td>{}</td>
+ </tr>
+ <tr>
+ <td>Total units:</td><td>{}</td>
+ </tr>
+ <tr>
+ <td>Max concurrency:</td><td>{} (jobs={} ncpu={})</td>
+ </tr>
+ <tr>
+ <td>Build start:</td><td>{}</td>
+ </tr>
+ <tr>
+ <td>Total time:</td><td>{}</td>
+ </tr>
+ <tr>
+ <td>rustc:</td><td>{}</td>
+ </tr>
+{}
+</table>
+"#,
+ targets,
+ self.profile,
+ self.total_fresh,
+ self.total_dirty,
+ self.total_fresh + self.total_dirty,
+ max_concurrency,
+ bcx.jobs(),
+ num_cpus,
+ self.start_str,
+ total_time,
+ rustc_info,
+ error_msg,
+ )?;
+ Ok(())
+ }
+
+ /// Write timing data in JavaScript. Primarily for `timings.js` to put data
+ /// in a `<script>` HTML element to draw graphs.
+ fn write_js_data(&self, f: &mut impl Write) -> CargoResult<()> {
+ // Create a map to link indices of unlocked units.
+ let unit_map: HashMap<Unit, usize> = self
+ .unit_times
+ .iter()
+ .enumerate()
+ .map(|(i, ut)| (ut.unit.clone(), i))
+ .collect();
+ #[derive(serde::Serialize)]
+ struct UnitData {
+ i: usize,
+ name: String,
+ version: String,
+ mode: String,
+ target: String,
+ start: f64,
+ duration: f64,
+ rmeta_time: Option<f64>,
+ unlocked_units: Vec<usize>,
+ unlocked_rmeta_units: Vec<usize>,
+ }
+ let round = |x: f64| (x * 100.0).round() / 100.0;
+ let unit_data: Vec<UnitData> = self
+ .unit_times
+ .iter()
+ .enumerate()
+ .map(|(i, ut)| {
+ let mode = if ut.unit.mode.is_run_custom_build() {
+ "run-custom-build"
+ } else {
+ "todo"
+ }
+ .to_string();
+
+ // These filter on the unlocked units because not all unlocked
+ // units are actually "built". For example, Doctest mode units
+ // don't actually generate artifacts.
+ let unlocked_units: Vec<usize> = ut
+ .unlocked_units
+ .iter()
+ .filter_map(|unit| unit_map.get(unit).copied())
+ .collect();
+ let unlocked_rmeta_units: Vec<usize> = ut
+ .unlocked_rmeta_units
+ .iter()
+ .filter_map(|unit| unit_map.get(unit).copied())
+ .collect();
+ UnitData {
+ i,
+ name: ut.unit.pkg.name().to_string(),
+ version: ut.unit.pkg.version().to_string(),
+ mode,
+ target: ut.target.clone(),
+ start: round(ut.start),
+ duration: round(ut.duration),
+ rmeta_time: ut.rmeta_time.map(round),
+ unlocked_units,
+ unlocked_rmeta_units,
+ }
+ })
+ .collect();
+ writeln!(
+ f,
+ "const UNIT_DATA = {};",
+ serde_json::to_string_pretty(&unit_data)?
+ )?;
+ writeln!(
+ f,
+ "const CONCURRENCY_DATA = {};",
+ serde_json::to_string_pretty(&self.concurrency)?
+ )?;
+ writeln!(
+ f,
+ "const CPU_USAGE = {};",
+ serde_json::to_string_pretty(&self.cpu_usage)?
+ )?;
+ Ok(())
+ }
+
+ /// Render the table of all units.
+ fn write_unit_table(&self, f: &mut impl Write) -> CargoResult<()> {
+ write!(
+ f,
+ r#"
+<table class="my-table">
+ <thead>
+ <tr>
+ <th></th>
+ <th>Unit</th>
+ <th>Total</th>
+ <th>Codegen</th>
+ <th>Features</th>
+ </tr>
+ </thead>
+ <tbody>
+"#
+ )?;
+ let mut units: Vec<&UnitTime> = self.unit_times.iter().collect();
+ units.sort_unstable_by(|a, b| b.duration.partial_cmp(&a.duration).unwrap());
+ for (i, unit) in units.iter().enumerate() {
+ let codegen = match unit.codegen_time() {
+ None => "".to_string(),
+ Some((_rt, ctime, cent)) => format!("{:.1}s ({:.0}%)", ctime, cent),
+ };
+ let features = unit.unit.features.join(", ");
+ write!(
+ f,
+ r#"
+<tr>
+ <td>{}.</td>
+ <td>{}{}</td>
+ <td>{:.1}s</td>
+ <td>{}</td>
+ <td>{}</td>
+</tr>
+"#,
+ i + 1,
+ unit.name_ver(),
+ unit.target,
+ unit.duration,
+ codegen,
+ features,
+ )?;
+ }
+ write!(f, "</tbody>\n</table>\n")?;
+ Ok(())
+ }
+}
+
+impl UnitTime {
+ /// Returns the codegen time as (rmeta_time, codegen_time, percent of total)
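+ ///
+ /// For example (illustrative numbers only): a unit with a `duration` of
+ /// 10.0s whose rmeta was ready after 4.0s yields `(4.0, 6.0, 60.0)`.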
+ fn codegen_time(&self) -> Option<(f64, f64, f64)> {
+ self.rmeta_time.map(|rmeta_time| {
+ let ctime = self.duration - rmeta_time;
+ let cent = (ctime / self.duration) * 100.0;
+ (rmeta_time, ctime, cent)
+ })
+ }
+
+ fn name_ver(&self) -> String {
+ format!("{} v{}", self.unit.pkg.name(), self.unit.pkg.version())
+ }
+}
+
+fn render_rustc_info(bcx: &BuildContext<'_, '_>) -> String {
+ let version = bcx
+ .rustc()
+ .verbose_version
+ .lines()
+ .next()
+ .expect("rustc version");
+ let requested_target = bcx
+ .build_config
+ .requested_kinds
+ .iter()
+ .map(|kind| bcx.target_data.short_name(kind))
+ .collect::<Vec<_>>()
+ .join(", ");
+ format!(
+ "{}<br>Host: {}<br>Target: {}",
+ version,
+ bcx.rustc().host,
+ requested_target
+ )
+}
+
+static HTML_TMPL: &str = r#"
+<html>
+<head>
+ <title>Cargo Build Timings — {ROOTS}</title>
+ <meta charset="utf-8">
+<style type="text/css">
+html {
+ font-family: sans-serif;
+}
+
+.canvas-container {
+ position: relative;
+ margin-top: 5px;
+ margin-bottom: 5px;
+}
+
+h1 {
+ border-bottom: 1px solid #c0c0c0;
+}
+
+.graph {
+ display: block;
+}
+
+.my-table {
+ margin-top: 20px;
+ margin-bottom: 20px;
+ border-collapse: collapse;
+ box-shadow: 0 5px 10px rgba(0, 0, 0, 0.1);
+}
+
+.my-table th {
+ color: #d5dde5;
+ background: #1b1e24;
+ border-bottom: 4px solid #9ea7af;
+ border-right: 1px solid #343a45;
+ font-size: 18px;
+ font-weight: 100;
+ padding: 12px;
+ text-align: left;
+ vertical-align: middle;
+}
+
+.my-table th:first-child {
+ border-top-left-radius: 3px;
+}
+
+.my-table th:last-child {
+ border-top-right-radius: 3px;
+ border-right:none;
+}
+
+.my-table tr {
+ border-top: 1px solid #c1c3d1;
+ border-bottom: 1px solid #c1c3d1;
+ font-size: 16px;
+ font-weight: normal;
+}
+
+.my-table tr:first-child {
+ border-top:none;
+}
+
+.my-table tr:last-child {
+ border-bottom:none;
+}
+
+.my-table tr:nth-child(odd) td {
+ background: #ebebeb;
+}
+
+.my-table tr:last-child td:first-child {
+ border-bottom-left-radius:3px;
+}
+
+.my-table tr:last-child td:last-child {
+ border-bottom-right-radius:3px;
+}
+
+.my-table td {
+ background: #ffffff;
+ padding: 10px;
+ text-align: left;
+ vertical-align: middle;
+ font-weight: 300;
+ font-size: 14px;
+ border-right: 1px solid #C1C3D1;
+}
+
+.my-table td:last-child {
+ border-right: 0px;
+}
+
+.summary-table td:first-child {
+ vertical-align: top;
+ text-align: right;
+}
+
+.input-table td {
+ text-align: center;
+}
+
+.error-text {
+ color: #e80000;
+}
+
+</style>
+</head>
+<body>
+
+<h1>Cargo Build Timings</h1>
+See <a href="https://doc.rust-lang.org/nightly/cargo/reference/timings.html">Documentation</a>
+"#;
+
+static HTML_CANVAS: &str = r#"
+<table class="input-table">
+ <tr>
+ <td><label for="min-unit-time">Min unit time:</label></td>
+ <td><label for="scale">Scale:</label></td>
+ </tr>
+ <tr>
+ <td><input type="range" min="0" max="30" step="0.1" value="0" id="min-unit-time"></td>
+ <td><input type="range" min="1" max="50" value="20" id="scale"></td>
+ </tr>
+ <tr>
+ <td><output for="min-unit-time" id="min-unit-time-output"></output></td>
+ <td><output for="scale" id="scale-output"></output></td>
+ </tr>
+</table>
+
+<div id="pipeline-container" class="canvas-container">
+ <canvas id="pipeline-graph" class="graph" style="position: absolute; left: 0; top: 0; z-index: 0;"></canvas>
+ <canvas id="pipeline-graph-lines" style="position: absolute; left: 0; top: 0; z-index: 1; pointer-events:none;"></canvas>
+</div>
+<div class="canvas-container">
+ <canvas id="timing-graph" class="graph"></canvas>
+</div>
+"#;
diff --git a/src/tools/cargo/src/cargo/core/compiler/unit.rs b/src/tools/cargo/src/cargo/core/compiler/unit.rs
new file mode 100644
index 000000000..335564bcd
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/unit.rs
@@ -0,0 +1,251 @@
+//! Types and impls for [`Unit`].
+
+use crate::core::compiler::unit_dependencies::IsArtifact;
+use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, CrateType};
+use crate::core::manifest::{Target, TargetKind};
+use crate::core::profiles::Profile;
+use crate::core::Package;
+use crate::util::hex::short_hash;
+use crate::util::interning::InternedString;
+use crate::util::Config;
+use std::cell::RefCell;
+use std::collections::HashSet;
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::ops::Deref;
+use std::rc::Rc;
+
+/// All information needed to define a unit.
+///
+/// A unit is an object that has enough information so that cargo knows how to build it.
+/// For example, if your package has dependencies, then every dependency will be built as a library
+/// unit. If your package is a library, then it will be built as a library unit as well, or if it
+/// is a binary with `main.rs`, then a binary will be output. There are also separate unit types
+/// for `test`ing and `check`ing, amongst others.
+///
+/// The unit also holds information about all possible metadata about the package in `pkg`.
+///
+/// A unit needs to know extra information in addition to the type and root source file. For
+/// example, it needs to know the target architecture (OS, chip arch etc.) and it needs to know
+/// whether you want a debug or release build. There is enough information in this struct to figure
+/// all that out.
+#[derive(Clone, PartialOrd, Ord)]
+pub struct Unit {
+ inner: Rc<UnitInner>,
+}
+
+/// Internal fields of `Unit` which `Unit` will dereference to.
+#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub struct UnitInner {
+ /// Information about available targets, which files to include/exclude, etc. Basically stuff in
+ /// `Cargo.toml`.
+ pub pkg: Package,
+ /// Information about the specific target to build, out of the possible targets in `pkg`. Not
+ /// to be confused with the *target triple* (or *target architecture*), i.e. the architecture
+ /// the build is for.
+ pub target: Target,
+ /// The profile contains information about *how* the build should be run, including debug
+ /// level, etc.
+ pub profile: Profile,
+ /// Whether this compilation unit is for the host or target architecture.
+ ///
+ /// For example, when
+ /// cross compiling and using a custom build script, the build script needs to be compiled for
+ /// the host architecture so the host rustc can use it (when compiling to the target
+ /// architecture).
+ pub kind: CompileKind,
+ /// The "mode" this unit is being compiled for. See [`CompileMode`] for more details.
+ pub mode: CompileMode,
+ /// The `cfg` features to enable for this unit.
+ /// This must be sorted.
+ pub features: Vec<InternedString>,
+ /// If `IsArtifact::Yes`, this unit builds an artifact dependency, requiring special handling
+ /// when calculating output directories, linkage and environment variables provided to builds.
+ pub artifact: IsArtifact,
+ /// Whether this is a standard library unit.
+ pub is_std: bool,
+ /// A hash of all dependencies of this unit.
+ ///
+ /// This is used to keep the `Unit` unique in the situation where two
+ /// otherwise identical units need to link to different dependencies. This
+ /// can happen, for example, when there are shared dependencies that need
+ /// to be built with different features between normal and build
+ /// dependencies. See `rebuild_unit_graph_shared` for more on why this is
+ /// done.
+ ///
+ /// This value initially starts as 0, and then is filled in via a
+ /// second-pass after all the unit dependencies have been computed.
+ pub dep_hash: u64,
+
+ /// This is used for target-dependent feature resolution and is copied from
+ /// [`FeaturesFor::ArtifactDep`], if the enum matches the variant.
+ ///
+ /// [`FeaturesFor::ArtifactDep`]: crate::core::resolver::features::FeaturesFor::ArtifactDep
+ pub artifact_target_for_features: Option<CompileTarget>,
+}
+
+impl UnitInner {
+ /// Returns whether compilation of this unit requires all upstream artifacts
+ /// to be available.
+ ///
+ /// If the return value is `true`, this unit is effectively a synchronization
+ /// point: all previously pipelined units need to finish in their entirety
+ /// before this one is started.
+ pub fn requires_upstream_objects(&self) -> bool {
+ self.mode.is_any_test() || self.target.kind().requires_upstream_objects()
+ }
+
+ /// Returns whether or not this is a "local" package.
+ ///
+ /// A "local" package is one that the user can likely edit, or otherwise
+ /// wants warnings, etc.
+ pub fn is_local(&self) -> bool {
+ self.pkg.package_id().source_id().is_path() && !self.is_std
+ }
+
+ /// Returns whether or not warnings should be displayed for this unit.
+ pub fn show_warnings(&self, config: &Config) -> bool {
+ self.is_local() || config.extra_verbose()
+ }
+}
+
+impl Unit {
+ /// Gets the unique key for [`-Zbuild-plan`].
+ ///
+ /// [`-Zbuild-plan`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-plan
+ pub fn buildkey(&self) -> String {
+ format!("{}-{}", self.pkg.name(), short_hash(self))
+ }
+}
+
+// Just hash the pointer for fast hashing
+impl Hash for Unit {
+ fn hash<H: Hasher>(&self, hasher: &mut H) {
+ std::ptr::hash(&*self.inner, hasher)
+ }
+}
+
+// Just equate the pointer since these are interned
+impl PartialEq for Unit {
+ fn eq(&self, other: &Unit) -> bool {
+ std::ptr::eq(&*self.inner, &*other.inner)
+ }
+}
+
+impl Eq for Unit {}
+
+impl Deref for Unit {
+ type Target = UnitInner;
+
+ fn deref(&self) -> &UnitInner {
+ &*self.inner
+ }
+}
+
+impl fmt::Debug for Unit {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Unit")
+ .field("pkg", &self.pkg)
+ .field("target", &self.target)
+ .field("profile", &self.profile)
+ .field("kind", &self.kind)
+ .field("mode", &self.mode)
+ .field("features", &self.features)
+ .field("artifact", &self.artifact.is_true())
+ .field(
+ "artifact_target_for_features",
+ &self.artifact_target_for_features,
+ )
+ .field("is_std", &self.is_std)
+ .field("dep_hash", &self.dep_hash)
+ .finish()
+ }
+}
+
+/// A small structure used to "intern" `Unit` values.
+///
+/// A `Unit` is just a thin pointer to an internal `UnitInner`. This is done to
+/// ensure that `Unit` itself is quite small as well as enabling a very
+/// efficient hash/equality implementation for `Unit`. All units are
+/// manufactured through an interner which guarantees that each equivalent value
+/// is only produced once.
+pub struct UnitInterner {
+ state: RefCell<InternerState>,
+}
+
+struct InternerState {
+ cache: HashSet<Rc<UnitInner>>,
+}
+
+impl UnitInterner {
+ /// Creates a new blank interner
+ pub fn new() -> UnitInterner {
+ UnitInterner {
+ state: RefCell::new(InternerState {
+ cache: HashSet::new(),
+ }),
+ }
+ }
+
+ /// Creates a new `unit` from its components. The returned `Unit`'s fields
+ /// will all be equivalent to the provided arguments, although they may not
+ /// be the exact same instance.
+ pub fn intern(
+ &self,
+ pkg: &Package,
+ target: &Target,
+ profile: Profile,
+ kind: CompileKind,
+ mode: CompileMode,
+ features: Vec<InternedString>,
+ is_std: bool,
+ dep_hash: u64,
+ artifact: IsArtifact,
+ artifact_target_for_features: Option<CompileTarget>,
+ ) -> Unit {
+ let target = match (is_std, target.kind()) {
+ // This is a horrible hack to support build-std. `libstd` declares
+ // itself with both rlib and dylib. We don't want the dylib for a
+ // few reasons:
+ //
+ // - dylibs don't have a hash in the filename. If you do something
+ // (like switch rustc versions), it will stomp on the dylib
+ // file, invalidating the entire cache (because std is a dep of
+ // everything).
+ // - We don't want to publicize the presence of dylib for the
+ // standard library.
+ //
+ // At some point in the future, it would be nice to have a
+ // first-class way of overriding or specifying crate-types.
+ (true, TargetKind::Lib(crate_types)) if crate_types.contains(&CrateType::Dylib) => {
+ let mut new_target = Target::clone(target);
+ new_target.set_kind(TargetKind::Lib(vec![CrateType::Rlib]));
+ new_target
+ }
+ _ => target.clone(),
+ };
+ let inner = self.intern_inner(&UnitInner {
+ pkg: pkg.clone(),
+ target,
+ profile,
+ kind,
+ mode,
+ features,
+ is_std,
+ dep_hash,
+ artifact,
+ artifact_target_for_features,
+ });
+ Unit { inner }
+ }
+
+ fn intern_inner(&self, item: &UnitInner) -> Rc<UnitInner> {
+ let mut me = self.state.borrow_mut();
+ if let Some(item) = me.cache.get(item) {
+ return item.clone();
+ }
+ let item = Rc::new(item.clone());
+ me.cache.insert(item.clone());
+ item
+ }
+}
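+
+// Illustrative note (not part of the original source): because `intern_inner`
+// hands back the cached `Rc<UnitInner>` for an equivalent value, two calls to
+// `UnitInterner::intern` with equal components produce `Unit`s that compare
+// equal under the pointer-based `PartialEq`/`Hash` impls above, e.g.
+//
+//     let a = interner.intern(&pkg, &target, profile.clone(), kind, mode,
+//                             features.clone(), false, 0, IsArtifact::No, None);
+//     let b = interner.intern(&pkg, &target, profile, kind, mode,
+//                             features, false, 0, IsArtifact::No, None);
+//     assert!(a == b);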
diff --git a/src/tools/cargo/src/cargo/core/compiler/unit_dependencies.rs b/src/tools/cargo/src/cargo/core/compiler/unit_dependencies.rs
new file mode 100644
index 000000000..68fc1e519
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/unit_dependencies.rs
@@ -0,0 +1,1096 @@
+//! # Constructs the dependency graph for compilation
+//!
+//! Rust code is typically organized as a set of Cargo packages. The
+//! dependencies between the packages themselves are stored in the
+//! [`Resolve`] struct. However, we can't use that information as is for
+//! compilation! A package typically contains several targets, or crates,
+//! and these targets has inter-dependencies. For example, you need to
+//! compile the `lib` target before the `bin` one, and you need to compile
+//! `build.rs` before either of those.
+//!
+//! So, we need to lower the `Resolve`, which specifies dependencies between
+//! *packages*, to a graph of dependencies between their *targets*, and this
+//! is exactly what this module is doing! Well, almost exactly: another
+//! complication is that we might want to compile the same target several times
+//! (for example, with and without tests), so we actually build a dependency
+//! graph of [`Unit`]s, which capture these properties.
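+//!
+//! As a rough, illustrative example (not an exhaustive graph): a package `foo`
+//! with a `build.rs`, a `lib`, and a `bin`, depending on a package `bar`,
+//! lowers to unit dependencies along the lines of
+//!
+//! ```text
+//! foo bin              -> foo lib, foo run-custom-build
+//! foo lib              -> bar lib, foo run-custom-build
+//! foo run-custom-build -> foo build-script (compile)
+//! ```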
+
+use std::collections::{HashMap, HashSet};
+
+use log::trace;
+
+use crate::core::compiler::artifact::match_artifacts_kind_with_targets;
+use crate::core::compiler::unit_graph::{UnitDep, UnitGraph};
+use crate::core::compiler::{
+ CompileKind, CompileMode, CrateType, RustcTargetData, Unit, UnitInterner,
+};
+use crate::core::dependency::{Artifact, ArtifactTarget, DepKind};
+use crate::core::profiles::{Profile, Profiles, UnitFor};
+use crate::core::resolver::features::{FeaturesFor, ResolvedFeatures};
+use crate::core::resolver::Resolve;
+use crate::core::{Dependency, Package, PackageId, PackageSet, Target, TargetKind, Workspace};
+use crate::ops::resolve_all_features;
+use crate::util::interning::InternedString;
+use crate::util::Config;
+use crate::CargoResult;
+
+const IS_NO_ARTIFACT_DEP: Option<&'static Artifact> = None;
+
+/// Collection of stuff used while creating the [`UnitGraph`].
+struct State<'a, 'cfg> {
+ ws: &'a Workspace<'cfg>,
+ config: &'cfg Config,
+ /// Stores the result of building the [`UnitGraph`].
+ unit_dependencies: UnitGraph,
+ package_set: &'a PackageSet<'cfg>,
+ usr_resolve: &'a Resolve,
+ usr_features: &'a ResolvedFeatures,
+ /// Like `usr_resolve` but for building standard library (`-Zbuild-std`).
+ std_resolve: Option<&'a Resolve>,
+ /// Like `usr_features` but for building standard library (`-Zbuild-std`).
+ std_features: Option<&'a ResolvedFeatures>,
+ /// `true` while generating the dependencies for the standard library.
+ is_std: bool,
+ /// The mode we are compiling in. Used to prevent building the lib thrice.
+ global_mode: CompileMode,
+ target_data: &'a RustcTargetData<'cfg>,
+ profiles: &'a Profiles,
+ interner: &'a UnitInterner,
+ // Units for `-Zrustdoc-scrape-examples`.
+ scrape_units: &'a [Unit],
+
+ /// A set of edges in `unit_dependencies` where (a, b) means that the
+ /// dependency from a to b was added purely because it was a dev-dependency.
+ /// This is used during `connect_run_custom_build_deps`.
+ dev_dependency_edges: HashSet<(Unit, Unit)>,
+}
+
+/// A boolean-like to indicate if a `Unit` is an artifact or not.
+#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub enum IsArtifact {
+ Yes,
+ No,
+}
+
+impl IsArtifact {
+ pub fn is_true(&self) -> bool {
+ matches!(self, IsArtifact::Yes)
+ }
+}
+
+/// The entry point for building a dependency graph of compilation units.
+///
+/// See the documentation of [`State`] for more information about the arguments.
+pub fn build_unit_dependencies<'a, 'cfg>(
+ ws: &'a Workspace<'cfg>,
+ package_set: &'a PackageSet<'cfg>,
+ resolve: &'a Resolve,
+ features: &'a ResolvedFeatures,
+ std_resolve: Option<&'a (Resolve, ResolvedFeatures)>,
+ roots: &[Unit],
+ scrape_units: &[Unit],
+ std_roots: &HashMap<CompileKind, Vec<Unit>>,
+ global_mode: CompileMode,
+ target_data: &'a RustcTargetData<'cfg>,
+ profiles: &'a Profiles,
+ interner: &'a UnitInterner,
+) -> CargoResult<UnitGraph> {
+ if roots.is_empty() {
+ // If -Zbuild-std, don't attach units if there is nothing to build.
+ // Otherwise, other parts of the code may be confused by seeing units
+ // in the dep graph without a root.
+ return Ok(HashMap::new());
+ }
+ let (std_resolve, std_features) = match std_resolve {
+ Some((r, f)) => (Some(r), Some(f)),
+ None => (None, None),
+ };
+ let mut state = State {
+ ws,
+ config: ws.config(),
+ unit_dependencies: HashMap::new(),
+ package_set,
+ usr_resolve: resolve,
+ usr_features: features,
+ std_resolve,
+ std_features,
+ is_std: false,
+ global_mode,
+ target_data,
+ profiles,
+ interner,
+ scrape_units,
+ dev_dependency_edges: HashSet::new(),
+ };
+
+ let std_unit_deps = calc_deps_of_std(&mut state, std_roots)?;
+
+ deps_of_roots(roots, &mut state)?;
+ super::links::validate_links(state.resolve(), &state.unit_dependencies)?;
+ // Hopefully there aren't any links conflicts with the standard library?
+
+ if let Some(std_unit_deps) = std_unit_deps {
+ attach_std_deps(&mut state, std_roots, std_unit_deps);
+ }
+
+ connect_run_custom_build_deps(&mut state);
+
+ // Dependencies are used in tons of places throughout the backend, many of
+ // which affect the determinism of the build itself. As a result be sure
+ // that dependency lists are always sorted to ensure we've always got a
+ // deterministic output.
+ for list in state.unit_dependencies.values_mut() {
+ list.sort();
+ }
+ trace!("ALL UNIT DEPENDENCIES {:#?}", state.unit_dependencies);
+
+ Ok(state.unit_dependencies)
+}
+
+/// Compute all the dependencies for the standard library.
+fn calc_deps_of_std(
+ mut state: &mut State<'_, '_>,
+ std_roots: &HashMap<CompileKind, Vec<Unit>>,
+) -> CargoResult<Option<UnitGraph>> {
+ if std_roots.is_empty() {
+ return Ok(None);
+ }
+ // Compute dependencies for the standard library.
+ state.is_std = true;
+ for roots in std_roots.values() {
+ deps_of_roots(roots, state)?;
+ }
+ state.is_std = false;
+ Ok(Some(std::mem::take(&mut state.unit_dependencies)))
+}
+
+/// Add the standard library units to the `unit_dependencies`.
+fn attach_std_deps(
+ state: &mut State<'_, '_>,
+ std_roots: &HashMap<CompileKind, Vec<Unit>>,
+ std_unit_deps: UnitGraph,
+) {
+ // Attach the standard library as a dependency of every target unit.
+ let mut found = false;
+ for (unit, deps) in state.unit_dependencies.iter_mut() {
+ if !unit.kind.is_host() && !unit.mode.is_run_custom_build() {
+ deps.extend(std_roots[&unit.kind].iter().map(|unit| UnitDep {
+ unit: unit.clone(),
+ unit_for: UnitFor::new_normal(unit.kind),
+ extern_crate_name: unit.pkg.name(),
+ dep_name: None,
+ // TODO: Does this `public` make sense?
+ public: true,
+ noprelude: true,
+ }));
+ found = true;
+ }
+ }
+ // And also include the dependencies of the standard library itself. Don't
+ // include these if no units actually needed the standard library.
+ if found {
+ for (unit, deps) in std_unit_deps.into_iter() {
+ if let Some(other_unit) = state.unit_dependencies.insert(unit, deps) {
+ panic!("std unit collision with existing unit: {:?}", other_unit);
+ }
+ }
+ }
+}
+
+/// Compute all the dependencies of the given root units.
+/// The result is stored in state.unit_dependencies.
+fn deps_of_roots(roots: &[Unit], state: &mut State<'_, '_>) -> CargoResult<()> {
+ for unit in roots.iter() {
+ // Dependencies of tests/benches should not have `panic` set.
+ // We check the global test mode to see if we are running in `cargo
+ // test` in which case we ensure all dependencies have `panic`
+ // cleared, and avoid building the lib thrice (once with `panic`, once
+ // without, once for `--test`). In particular, the lib included for
+ // Doc tests and examples are `Build` mode here.
+ let root_compile_kind = unit.kind;
+ let unit_for = if unit.mode.is_any_test() || state.global_mode.is_rustc_test() {
+ if unit.target.proc_macro() {
+ // Special-case for proc-macros, which are forced to for-host
+ // since they need to link with the proc_macro crate.
+ UnitFor::new_host_test(state.config, root_compile_kind)
+ } else {
+ UnitFor::new_test(state.config, root_compile_kind)
+ }
+ } else if unit.target.is_custom_build() {
+ // This normally doesn't happen, except `clean` aggressively
+ // generates all units.
+ UnitFor::new_host(false, root_compile_kind)
+ } else if unit.target.proc_macro() {
+ UnitFor::new_host(true, root_compile_kind)
+ } else if unit.target.for_host() {
+ // Plugin should never have panic set.
+ UnitFor::new_compiler(root_compile_kind)
+ } else {
+ UnitFor::new_normal(root_compile_kind)
+ };
+ deps_of(unit, state, unit_for)?;
+ }
+
+ Ok(())
+}
+
+/// Compute the dependencies of a single unit, recursively computing all
+/// transitive dependencies.
+///
+/// The result is stored in `state.unit_dependencies`.
+fn deps_of(unit: &Unit, state: &mut State<'_, '_>, unit_for: UnitFor) -> CargoResult<()> {
+ // Currently the `unit_dependencies` map does not include `unit_for`. This should
+ // be safe for now. `TestDependency` only exists to clear the `panic`
+ // flag, and you'll never ask for a `unit` with `panic` set as a
+ // `TestDependency`. `CustomBuild` should also be fine since if the
+ // requested unit's settings are the same as `Any`, `CustomBuild` can't
+ // affect anything else in the hierarchy.
+ if !state.unit_dependencies.contains_key(unit) {
+ let unit_deps = compute_deps(unit, state, unit_for)?;
+ state
+ .unit_dependencies
+ .insert(unit.clone(), unit_deps.clone());
+ for unit_dep in unit_deps {
+ deps_of(&unit_dep.unit, state, unit_dep.unit_for)?;
+ }
+ }
+ Ok(())
+}
+
+/// Returns the direct unit dependencies for the given `Unit`.
+fn compute_deps(
+ unit: &Unit,
+ state: &mut State<'_, '_>,
+ unit_for: UnitFor,
+) -> CargoResult<Vec<UnitDep>> {
+ if unit.mode.is_run_custom_build() {
+ return compute_deps_custom_build(unit, unit_for, state);
+ } else if unit.mode.is_doc() {
+ // Note: this does not include doc test.
+ return compute_deps_doc(unit, state, unit_for);
+ }
+
+ let mut ret = Vec::new();
+ let mut dev_deps = Vec::new();
+ for (dep_pkg_id, deps) in state.deps(unit, unit_for) {
+ let dep_lib = match calc_artifact_deps(unit, unit_for, dep_pkg_id, &deps, state, &mut ret)?
+ {
+ Some(lib) => lib,
+ None => continue,
+ };
+ let dep_pkg = state.get(dep_pkg_id);
+ let mode = check_or_build_mode(unit.mode, dep_lib);
+ let dep_unit_for = unit_for.with_dependency(unit, dep_lib, unit_for.root_compile_kind());
+
+ let start = ret.len();
+ if state.config.cli_unstable().dual_proc_macros
+ && dep_lib.proc_macro()
+ && !unit.kind.is_host()
+ {
+ let unit_dep = new_unit_dep(
+ state,
+ unit,
+ dep_pkg,
+ dep_lib,
+ dep_unit_for,
+ unit.kind,
+ mode,
+ IS_NO_ARTIFACT_DEP,
+ )?;
+ ret.push(unit_dep);
+ let unit_dep = new_unit_dep(
+ state,
+ unit,
+ dep_pkg,
+ dep_lib,
+ dep_unit_for,
+ CompileKind::Host,
+ mode,
+ IS_NO_ARTIFACT_DEP,
+ )?;
+ ret.push(unit_dep);
+ } else {
+ let unit_dep = new_unit_dep(
+ state,
+ unit,
+ dep_pkg,
+ dep_lib,
+ dep_unit_for,
+ unit.kind.for_target(dep_lib),
+ mode,
+ IS_NO_ARTIFACT_DEP,
+ )?;
+ ret.push(unit_dep);
+ }
+
+ // If the unit added was a dev-dependency unit, then record that in the
+ // dev-dependencies array. We'll add this to
+ // `state.dev_dependency_edges` at the end and process it later in
+ // `connect_run_custom_build_deps`.
+ if deps.iter().all(|d| !d.is_transitive()) {
+ for dep in ret[start..].iter() {
+ dev_deps.push((unit.clone(), dep.unit.clone()));
+ }
+ }
+ }
+ state.dev_dependency_edges.extend(dev_deps);
+
+ // If this target is a build script, then what we've collected so far is
+ // all we need. If this isn't a build script, then it depends on the
+ // build script if there is one.
+ if unit.target.is_custom_build() {
+ return Ok(ret);
+ }
+ ret.extend(dep_build_script(unit, unit_for, state)?);
+
+ // If this target is a binary, test, example, etc, then it depends on
+ // the library of the same package. The call to `resolve.deps` above
+ // didn't include `pkg` in the return values, so we need to special case
+ // it here and see if we need to push `(pkg, pkg_lib_target)`.
+ if unit.target.is_lib() && unit.mode != CompileMode::Doctest {
+ return Ok(ret);
+ }
+ ret.extend(maybe_lib(unit, state, unit_for)?);
+
+ // If any integration tests/benches are being run, make sure that
+ // binaries are built as well.
+ if !unit.mode.is_check()
+ && unit.mode.is_any_test()
+ && (unit.target.is_test() || unit.target.is_bench())
+ {
+ let id = unit.pkg.package_id();
+ ret.extend(
+ unit.pkg
+ .targets()
+ .iter()
+ .filter(|t| {
+ // Skip binaries with required features that have not been selected.
+ match t.required_features() {
+ Some(rf) if t.is_bin() => {
+ let features = resolve_all_features(
+ state.resolve(),
+ state.features(),
+ state.package_set,
+ id,
+ );
+ rf.iter().all(|f| features.contains(f))
+ }
+ None if t.is_bin() => true,
+ _ => false,
+ }
+ })
+ .map(|t| {
+ new_unit_dep(
+ state,
+ unit,
+ &unit.pkg,
+ t,
+ UnitFor::new_normal(unit_for.root_compile_kind()),
+ unit.kind.for_target(t),
+ CompileMode::Build,
+ IS_NO_ARTIFACT_DEP,
+ )
+ })
+ .collect::<CargoResult<Vec<UnitDep>>>()?,
+ );
+ }
+
+ Ok(ret)
+}
+
+/// Find artifacts for all `deps` of `unit` and add units that build these artifacts
+/// to `ret`.
+fn calc_artifact_deps<'a>(
+ unit: &Unit,
+ unit_for: UnitFor,
+ dep_id: PackageId,
+ deps: &[&Dependency],
+ state: &State<'a, '_>,
+ ret: &mut Vec<UnitDep>,
+) -> CargoResult<Option<&'a Target>> {
+ let mut has_artifact_lib = false;
+ let mut maybe_non_artifact_lib = false;
+ let artifact_pkg = state.get(dep_id);
+ for dep in deps {
+ let artifact = match dep.artifact() {
+ Some(a) => a,
+ None => {
+ maybe_non_artifact_lib = true;
+ continue;
+ }
+ };
+ has_artifact_lib |= artifact.is_lib();
+ // Custom build scripts (build/compile) never get artifact dependencies,
+ // but the run-build-script step does (where it is handled).
+ if !unit.target.is_custom_build() {
+ debug_assert!(
+ !unit.mode.is_run_custom_build(),
+ "BUG: This should be handled in a separate branch"
+ );
+ ret.extend(artifact_targets_to_unit_deps(
+ unit,
+ unit_for.with_artifact_features(artifact),
+ state,
+ artifact
+ .target()
+ .and_then(|t| match t {
+ ArtifactTarget::BuildDependencyAssumeTarget => None,
+ ArtifactTarget::Force(kind) => Some(CompileKind::Target(kind)),
+ })
+ .unwrap_or(unit.kind),
+ artifact_pkg,
+ dep,
+ )?);
+ }
+ }
+ if has_artifact_lib || maybe_non_artifact_lib {
+ Ok(artifact_pkg.targets().iter().find(|t| t.is_lib()))
+ } else {
+ Ok(None)
+ }
+}
+
+/// Returns the dependencies needed to run a build script.
+///
+/// The `unit` provided must represent an execution of a build script, and
+/// the returned set of units must all be run before `unit` is run.
+fn compute_deps_custom_build(
+ unit: &Unit,
+ unit_for: UnitFor,
+ state: &State<'_, '_>,
+) -> CargoResult<Vec<UnitDep>> {
+ if let Some(links) = unit.pkg.manifest().links() {
+ if state
+ .target_data
+ .script_override(links, unit.kind)
+ .is_some()
+ {
+ // Overridden build scripts don't have any dependencies.
+ return Ok(Vec::new());
+ }
+ }
+ // All dependencies of this unit should use profiles for custom builds.
+ // If this is a build script of a proc macro, make sure it uses host
+ // features.
+ let script_unit_for = unit_for.for_custom_build();
+ // When not overridden, then the dependencies to run a build script are:
+ //
+ // 1. Compiling the build script itself.
+ // 2. For each immediate dependency of our package which has a `links`
+ // key, the execution of that build script.
+ //
+ // We don't have a great way of handling (2) here right now so this is
+ // deferred until after the graph of all unit dependencies has been
+ // constructed.
+ let compile_script_unit = new_unit_dep(
+ state,
+ unit,
+ &unit.pkg,
+ &unit.target,
+ script_unit_for,
+ // Build scripts always compiled for the host.
+ CompileKind::Host,
+ CompileMode::Build,
+ IS_NO_ARTIFACT_DEP,
+ )?;
+
+ let mut result = vec![compile_script_unit];
+
+ // Include any artifact dependencies.
+ //
+ // This is essentially the same as `calc_artifact_deps`, but there are some
+ // subtle differences that require this to be implemented differently.
+ //
+ // Produce units that build all required artifact kinds (like binaries,
+ // static libraries, etc) with the correct compile target.
+ //
+ // Computing the compile target for artifact units is more involved as it has to handle
+ // various target configurations specific to artifacts, like `target = "target"` and
+ // `target = "<triple>"`, which makes knowing the root units compile target
+ // `root_unit_compile_target` necessary.
+ let root_unit_compile_target = unit_for.root_compile_kind();
+ let unit_for = UnitFor::new_host(/*host_features*/ true, root_unit_compile_target);
+ for (dep_pkg_id, deps) in state.deps(unit, script_unit_for) {
+ for dep in deps {
+ if dep.kind() != DepKind::Build || dep.artifact().is_none() {
+ continue;
+ }
+ let artifact_pkg = state.get(dep_pkg_id);
+ let artifact = dep.artifact().expect("artifact dep");
+ let resolved_artifact_compile_kind = artifact
+ .target()
+ .map(|target| target.to_resolved_compile_kind(root_unit_compile_target));
+
+ result.extend(artifact_targets_to_unit_deps(
+ unit,
+ unit_for.with_artifact_features_from_resolved_compile_kind(
+ resolved_artifact_compile_kind,
+ ),
+ state,
+ resolved_artifact_compile_kind.unwrap_or(CompileKind::Host),
+ artifact_pkg,
+ dep,
+ )?);
+ }
+ }
+
+ Ok(result)
+}
+
+/// Given a `parent` unit containing a dependency `dep` whose package is `artifact_pkg`,
+/// find all targets in `artifact_pkg` which refer to the `dep`'s artifact declaration
+/// and turn them into units.
+/// Due to the nature of artifact dependencies, a single dependency in a manifest can
+/// cause one or more targets to be built, for instance with
+/// `artifact = ["bin:a", "bin:b", "staticlib"]`, which is very different from normal
+/// dependencies which cause only a single unit to be created.
+///
+/// `compile_kind` is the computed kind for the future artifact unit
+/// dependency; only the caller can pick the correct one.
+fn artifact_targets_to_unit_deps(
+ parent: &Unit,
+ parent_unit_for: UnitFor,
+ state: &State<'_, '_>,
+ compile_kind: CompileKind,
+ artifact_pkg: &Package,
+ dep: &Dependency,
+) -> CargoResult<Vec<UnitDep>> {
+ let ret =
+ match_artifacts_kind_with_targets(dep, artifact_pkg.targets(), parent.pkg.name().as_str())?
+ .into_iter()
+ .map(|(_artifact_kind, target)| target)
+ .flat_map(|target| {
+ // We split target libraries into individual units, even though rustc is able
+ // to produce multiple kinds in a single invocation for the sole reason that
+ // each artifact kind has its own output directory, something we can't easily
+ // teach rustc for now.
+ match target.kind() {
+ TargetKind::Lib(kinds) => Box::new(
+ kinds
+ .iter()
+ .filter(|tk| matches!(tk, CrateType::Cdylib | CrateType::Staticlib))
+ .map(|target_kind| {
+ new_unit_dep(
+ state,
+ parent,
+ artifact_pkg,
+ target
+ .clone()
+ .set_kind(TargetKind::Lib(vec![target_kind.clone()])),
+ parent_unit_for,
+ compile_kind,
+ CompileMode::Build,
+ dep.artifact(),
+ )
+ }),
+ ) as Box<dyn Iterator<Item = _>>,
+ _ => Box::new(std::iter::once(new_unit_dep(
+ state,
+ parent,
+ artifact_pkg,
+ target,
+ parent_unit_for,
+ compile_kind,
+ CompileMode::Build,
+ dep.artifact(),
+ ))),
+ }
+ })
+ .collect::<Result<Vec<_>, _>>()?;
+ Ok(ret)
+}
+
+/// Returns the dependencies necessary to document a package.
+fn compute_deps_doc(
+ unit: &Unit,
+ state: &mut State<'_, '_>,
+ unit_for: UnitFor,
+) -> CargoResult<Vec<UnitDep>> {
+ // To document a library, we depend on dependencies actually being
+ // built. If we're documenting *all* libraries, then we also depend on
+ // the documentation of the library being built.
+ let mut ret = Vec::new();
+ for (id, deps) in state.deps(unit, unit_for) {
+ let dep_lib = match calc_artifact_deps(unit, unit_for, id, &deps, state, &mut ret)? {
+ Some(lib) => lib,
+ None => continue,
+ };
+ let dep_pkg = state.get(id);
+ // Rustdoc only needs rmeta files for regular dependencies.
+ // However, for plugins/proc macros, deps should be built like normal.
+ let mode = check_or_build_mode(unit.mode, dep_lib);
+ let dep_unit_for = unit_for.with_dependency(unit, dep_lib, unit_for.root_compile_kind());
+ let lib_unit_dep = new_unit_dep(
+ state,
+ unit,
+ dep_pkg,
+ dep_lib,
+ dep_unit_for,
+ unit.kind.for_target(dep_lib),
+ mode,
+ IS_NO_ARTIFACT_DEP,
+ )?;
+ ret.push(lib_unit_dep);
+ if dep_lib.documented() {
+ if let CompileMode::Doc { deps: true } = unit.mode {
+ // Document this lib as well.
+ let doc_unit_dep = new_unit_dep(
+ state,
+ unit,
+ dep_pkg,
+ dep_lib,
+ dep_unit_for,
+ unit.kind.for_target(dep_lib),
+ unit.mode,
+ IS_NO_ARTIFACT_DEP,
+ )?;
+ ret.push(doc_unit_dep);
+ }
+ }
+ }
+
+ // Be sure to build/run the build script for documented libraries.
+ ret.extend(dep_build_script(unit, unit_for, state)?);
+
+ // If we document a binary/example, we need the library available.
+ if unit.target.is_bin() || unit.target.is_example() {
+ // build the lib
+ ret.extend(maybe_lib(unit, state, unit_for)?);
+ // and also the lib docs for intra-doc links
+ if let Some(lib) = unit
+ .pkg
+ .targets()
+ .iter()
+ .find(|t| t.is_linkable() && t.documented())
+ {
+ let dep_unit_for = unit_for.with_dependency(unit, lib, unit_for.root_compile_kind());
+ let lib_doc_unit = new_unit_dep(
+ state,
+ unit,
+ &unit.pkg,
+ lib,
+ dep_unit_for,
+ unit.kind.for_target(lib),
+ unit.mode,
+ IS_NO_ARTIFACT_DEP,
+ )?;
+ ret.push(lib_doc_unit);
+ }
+ }
+
+ // Add all units being scraped for examples as a dependency of top-level Doc units.
+ if state.ws.unit_needs_doc_scrape(unit) {
+ for scrape_unit in state.scrape_units.iter() {
+ let scrape_unit_for = UnitFor::new_normal(scrape_unit.kind);
+ deps_of(scrape_unit, state, scrape_unit_for)?;
+ ret.push(new_unit_dep(
+ state,
+ scrape_unit,
+ &scrape_unit.pkg,
+ &scrape_unit.target,
+ scrape_unit_for,
+ scrape_unit.kind,
+ scrape_unit.mode,
+ IS_NO_ARTIFACT_DEP,
+ )?);
+ }
+ }
+
+ Ok(ret)
+}
+
+fn maybe_lib(
+ unit: &Unit,
+ state: &mut State<'_, '_>,
+ unit_for: UnitFor,
+) -> CargoResult<Option<UnitDep>> {
+ unit.pkg
+ .targets()
+ .iter()
+ .find(|t| t.is_linkable())
+ .map(|t| {
+ let mode = check_or_build_mode(unit.mode, t);
+ let dep_unit_for = unit_for.with_dependency(unit, t, unit_for.root_compile_kind());
+ new_unit_dep(
+ state,
+ unit,
+ &unit.pkg,
+ t,
+ dep_unit_for,
+ unit.kind.for_target(t),
+ mode,
+ IS_NO_ARTIFACT_DEP,
+ )
+ })
+ .transpose()
+}
+
+/// If a build script is scheduled to be run for the package specified by
+/// `unit`, this function will return the unit to run that build script.
+///
+/// Overriding a build script simply means that the running of the build
+/// script itself doesn't have any dependencies, so even in that case a unit
+/// of work is still returned. `None` is only returned if the package has no
+/// build script.
+fn dep_build_script(
+ unit: &Unit,
+ unit_for: UnitFor,
+ state: &State<'_, '_>,
+) -> CargoResult<Option<UnitDep>> {
+ unit.pkg
+ .targets()
+ .iter()
+ .find(|t| t.is_custom_build())
+ .map(|t| {
+ // The profile stored in the Unit is the profile for the thing
+ // the custom build script is running for.
+ let profile = state.profiles.get_profile_run_custom_build(&unit.profile);
+ // UnitFor::for_custom_build is used because we want the `host` flag set
+ // for all of our build dependencies (so they all get
+ // build-override profiles), including compiling the build.rs
+ // script itself.
+ //
+ // If `is_for_host_features` here is `false`, that means we are a
+ // build.rs script for a normal dependency and we want to set the
+ // CARGO_FEATURE_* environment variables to the features as a
+ // normal dep.
+ //
+ // If `is_for_host_features` here is `true`, that means that this
+ // package is being used as a build dependency or proc-macro, and
+ // so we only want to set CARGO_FEATURE_* variables for the host
+ // side of the graph.
+ //
+ // Keep in mind that the RunCustomBuild unit and the Compile
+ // build.rs unit use the same features. This is because some
+ // people use `cfg!` and `#[cfg]` expressions to check for enabled
+ // features instead of just checking `CARGO_FEATURE_*` at runtime.
+            // With the new feature resolver (decoupled host deps), if a
+            // shared dependency has different features enabled for normal vs.
+            // build, then the build.rs script will get compiled twice. I
+            // believe it is not feasible to only build it
+ // once because it would break a large number of scripts (they
+ // would think they have the wrong set of features enabled).
+ let script_unit_for = unit_for.for_custom_build();
+ new_unit_dep_with_profile(
+ state,
+ unit,
+ &unit.pkg,
+ t,
+ script_unit_for,
+ unit.kind,
+ CompileMode::RunCustomBuild,
+ profile,
+ IS_NO_ARTIFACT_DEP,
+ )
+ })
+ .transpose()
+}
+
+/// Choose the correct mode for dependencies.
+fn check_or_build_mode(mode: CompileMode, target: &Target) -> CompileMode {
+ match mode {
+ CompileMode::Check { .. } | CompileMode::Doc { .. } | CompileMode::Docscrape => {
+ if target.for_host() {
+ // Plugin and proc macro targets should be compiled like
+ // normal.
+ CompileMode::Build
+ } else {
+ // Regular dependencies should not be checked with --test.
+ // Regular dependencies of doc targets should emit rmeta only.
+ CompileMode::Check { test: false }
+ }
+ }
+ _ => CompileMode::Build,
+ }
+}
+
+/// Create a new Unit for a dependency from `parent` to `pkg` and `target`.
+fn new_unit_dep(
+ state: &State<'_, '_>,
+ parent: &Unit,
+ pkg: &Package,
+ target: &Target,
+ unit_for: UnitFor,
+ kind: CompileKind,
+ mode: CompileMode,
+ artifact: Option<&Artifact>,
+) -> CargoResult<UnitDep> {
+ let is_local = pkg.package_id().source_id().is_path() && !state.is_std;
+ let profile = state.profiles.get_profile(
+ pkg.package_id(),
+ state.ws.is_member(pkg),
+ is_local,
+ unit_for,
+ kind,
+ );
+ new_unit_dep_with_profile(
+ state, parent, pkg, target, unit_for, kind, mode, profile, artifact,
+ )
+}
+
+fn new_unit_dep_with_profile(
+ state: &State<'_, '_>,
+ parent: &Unit,
+ pkg: &Package,
+ target: &Target,
+ unit_for: UnitFor,
+ kind: CompileKind,
+ mode: CompileMode,
+ profile: Profile,
+ artifact: Option<&Artifact>,
+) -> CargoResult<UnitDep> {
+ let (extern_crate_name, dep_name) = state.resolve().extern_crate_name_and_dep_name(
+ parent.pkg.package_id(),
+ pkg.package_id(),
+ target,
+ )?;
+ let public = state
+ .resolve()
+ .is_public_dep(parent.pkg.package_id(), pkg.package_id());
+ let features_for = unit_for.map_to_features_for(artifact);
+ let artifact_target = match features_for {
+ FeaturesFor::ArtifactDep(target) => Some(target),
+ _ => None,
+ };
+ let features = state.activated_features(pkg.package_id(), features_for);
+ let unit = state.interner.intern(
+ pkg,
+ target,
+ profile,
+ kind,
+ mode,
+ features,
+ state.is_std,
+ /*dep_hash*/ 0,
+ artifact.map_or(IsArtifact::No, |_| IsArtifact::Yes),
+ artifact_target,
+ );
+ Ok(UnitDep {
+ unit,
+ unit_for,
+ extern_crate_name,
+ dep_name,
+ public,
+ noprelude: false,
+ })
+}
+
+/// Fill in missing dependencies for units in `RunCustomBuild` mode.
+///
+/// As mentioned above in `compute_deps_custom_build`, each build script
+/// execution has two dependencies. The first is compiling the build script
+/// itself (already added). The second is the build script execution of every
+/// crate that the package depends on which has a `links` key (a bit
+/// confusing, eh?).
+///
+/// Here we take the entire `deps` map and add more dependencies from execution
+/// of one build script to execution of another build script.
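+///
+/// For example (illustrative): if a package's build script depends on
+/// `libz-sys`, whose manifest declares
+///
+/// ```toml
+/// [package]
+/// name = "libz-sys"
+/// links = "z"
+/// ```
+///
+/// then that build script's execution gains a dependency on `libz-sys`'s build
+/// script execution, so it can read the metadata (the `DEP_Z_*` environment
+/// variables) that script emits.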
+fn connect_run_custom_build_deps(state: &mut State<'_, '_>) {
+ let mut new_deps = Vec::new();
+
+ {
+ let state = &*state;
+        // First up, build a reverse dependency map. This is a mapping of all
+        // known `RunCustomBuild` steps to the units which depend on them. For
+        // example, a library might depend on a build script, so this map will
+        // have the build script as the key and the library would be in the
+        // value's set.
+ let mut reverse_deps_map = HashMap::new();
+ for (unit, deps) in state.unit_dependencies.iter() {
+ for dep in deps {
+ if dep.unit.mode == CompileMode::RunCustomBuild {
+ reverse_deps_map
+ .entry(dep.unit.clone())
+ .or_insert_with(HashSet::new)
+ .insert(unit);
+ }
+ }
+ }
+
+        // Next, we take a look at all build script executions listed in the
+ // dependency map. Our job here is to take everything that depends on
+ // this build script (from our reverse map above) and look at the other
+ // package dependencies of these parents.
+ //
+        // If we depend on a linkable target whose package specifies a `links`
+        // key, then we depend on that package's build script! Here we use
+        // `dep_build_script` to manufacture an appropriate build script unit to
+        // depend on.
+ for unit in state
+ .unit_dependencies
+ .keys()
+ .filter(|k| k.mode == CompileMode::RunCustomBuild)
+ {
+            // Every unit in this set depends on `unit`, an execution of
+            // the build script.
+ let reverse_deps = match reverse_deps_map.get(unit) {
+ Some(set) => set,
+ None => continue,
+ };
+
+ let to_add = reverse_deps
+ .iter()
+ // Get all sibling dependencies of `unit`
+ .flat_map(|reverse_dep| {
+ state.unit_dependencies[reverse_dep]
+ .iter()
+ .map(move |a| (reverse_dep, a))
+ })
+ // Only deps with `links`.
+ .filter(|(_parent, other)| {
+ other.unit.pkg != unit.pkg
+ && other.unit.target.is_linkable()
+ && other.unit.pkg.manifest().links().is_some()
+ })
+ // Avoid cycles when using the doc --scrape-examples feature:
+ // Say a workspace has crates A and B where A has a build-dependency on B.
+ // The Doc units for A and B will have a dependency on the Docscrape for both A and B.
+ // So this would add a dependency from B-build to A-build, causing a cycle:
+ // B (build) -> A (build) -> B(build)
+ // See the test scrape_examples_avoid_build_script_cycle for a concrete example.
+ // To avoid this cycle, we filter out the B -> A (docscrape) dependency.
+ .filter(|(_parent, other)| !other.unit.mode.is_doc_scrape())
+ // Skip dependencies induced via dev-dependencies since
+ // connections between `links` and build scripts only happens
+ // via normal dependencies. Otherwise since dev-dependencies can
+ // be cyclic we could have cyclic build-script executions.
+ .filter_map(move |(parent, other)| {
+ if state
+ .dev_dependency_edges
+ .contains(&((*parent).clone(), other.unit.clone()))
+ {
+ None
+ } else {
+ Some(other)
+ }
+ })
+ // Get the RunCustomBuild for other lib.
+ .filter_map(|other| {
+ state.unit_dependencies[&other.unit]
+ .iter()
+ .find(|other_dep| other_dep.unit.mode == CompileMode::RunCustomBuild)
+ .cloned()
+ })
+ .collect::<HashSet<_>>();
+
+ if !to_add.is_empty() {
+ // (RunCustomBuild, set(other RunCustomBuild))
+ new_deps.push((unit.clone(), to_add));
+ }
+ }
+ }
+
+ // And finally, add in all the missing dependencies!
+ for (unit, new_deps) in new_deps {
+ state
+ .unit_dependencies
+ .get_mut(&unit)
+ .unwrap()
+ .extend(new_deps);
+ }
+}
+
+impl<'a, 'cfg> State<'a, 'cfg> {
+ /// Gets `std_resolve` during building std, otherwise `usr_resolve`.
+ fn resolve(&self) -> &'a Resolve {
+ if self.is_std {
+ self.std_resolve.unwrap()
+ } else {
+ self.usr_resolve
+ }
+ }
+
+ /// Gets `std_features` during building std, otherwise `usr_features`.
+ fn features(&self) -> &'a ResolvedFeatures {
+ if self.is_std {
+ self.std_features.unwrap()
+ } else {
+ self.usr_features
+ }
+ }
+
+ fn activated_features(
+ &self,
+ pkg_id: PackageId,
+ features_for: FeaturesFor,
+ ) -> Vec<InternedString> {
+ let features = self.features();
+ features.activated_features(pkg_id, features_for)
+ }
+
+ fn is_dep_activated(
+ &self,
+ pkg_id: PackageId,
+ features_for: FeaturesFor,
+ dep_name: InternedString,
+ ) -> bool {
+ self.features()
+ .is_dep_activated(pkg_id, features_for, dep_name)
+ }
+
+ fn get(&self, id: PackageId) -> &'a Package {
+ self.package_set
+ .get_one(id)
+ .unwrap_or_else(|_| panic!("expected {} to be downloaded", id))
+ }
+
+ /// Returns a filtered set of dependencies for the given unit.
+ fn deps(&self, unit: &Unit, unit_for: UnitFor) -> Vec<(PackageId, Vec<&Dependency>)> {
+ let pkg_id = unit.pkg.package_id();
+ let kind = unit.kind;
+ self.resolve()
+ .deps(pkg_id)
+ .filter_map(|(id, deps)| {
+ assert!(!deps.is_empty());
+ let deps: Vec<_> = deps
+ .iter()
+ .filter(|dep| {
+ // If this target is a build command, then we only want build
+ // dependencies, otherwise we want everything *other than* build
+ // dependencies.
+ if unit.target.is_custom_build() != dep.is_build() {
+ return false;
+ }
+
+ // If this dependency is **not** a transitive dependency, then it
+ // only applies to test/example targets.
+ if !dep.is_transitive()
+ && !unit.target.is_test()
+ && !unit.target.is_example()
+ && !unit.mode.is_any_test()
+ {
+ return false;
+ }
+
+ // If this dependency is only available for certain platforms,
+ // make sure we're only enabling it for that platform.
+ if !self.target_data.dep_platform_activated(dep, kind) {
+ return false;
+ }
+
+ // If this is an optional dependency, and the new feature resolver
+ // did not enable it, don't include it.
+ if dep.is_optional() {
+                        // This `unit_for` comes from the parent dependency and *SHOULD*
+                        // already contain its own artifact dep information inside
+                        // `artifact_target_for_features`. So, no need to map any
+                        // artifact info from an incorrect `dep.artifact()`.
+ let features_for = unit_for.map_to_features_for(IS_NO_ARTIFACT_DEP);
+ if !self.is_dep_activated(pkg_id, features_for, dep.name_in_toml()) {
+ return false;
+ }
+ }
+
+ // If we've gotten past all that, then this dependency is
+ // actually used!
+ true
+ })
+ .collect();
+ if deps.is_empty() {
+ None
+ } else {
+ Some((id, deps))
+ }
+ })
+ .collect()
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/compiler/unit_graph.rs b/src/tools/cargo/src/cargo/core/compiler/unit_graph.rs
new file mode 100644
index 000000000..a8c56de3b
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/compiler/unit_graph.rs
@@ -0,0 +1,135 @@
+//! Serialization of [`UnitGraph`] for unstable option [`--unit-graph`].
+//!
+//! [`--unit-graph`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#unit-graph
+
+use crate::core::compiler::Unit;
+use crate::core::compiler::{CompileKind, CompileMode};
+use crate::core::profiles::{Profile, UnitFor};
+use crate::core::{PackageId, Target};
+use crate::util::interning::InternedString;
+use crate::util::CargoResult;
+use crate::Config;
+use std::collections::HashMap;
+use std::io::Write;
+
+/// The dependency graph of Units.
+pub type UnitGraph = HashMap<Unit, Vec<UnitDep>>;
+
+/// A unit dependency.
+#[derive(Debug, Clone, Hash, Eq, PartialEq, PartialOrd, Ord)]
+pub struct UnitDep {
+ /// The dependency unit.
+ pub unit: Unit,
+ /// The purpose of this dependency (a dependency for a test, or a build
+ /// script, etc.). Do not use this after the unit graph has been built.
+ pub unit_for: UnitFor,
+ /// The name the parent uses to refer to this dependency.
+ pub extern_crate_name: InternedString,
+    /// If `Some`, the name of the dependency as renamed in the manifest.
+    /// This is particularly relevant for artifact dependencies, which rely on it
+    /// for naming their environment variables (e.g. `CARGO_BIN_FILE_<DEP_NAME>`).
+    /// Note that `extern_crate_name` cannot be used for this, as it may also be
+    /// the build target itself, which isn't always the renamed dependency name.
+ pub dep_name: Option<InternedString>,
+ /// Whether or not this is a public dependency.
+ pub public: bool,
+ /// If `true`, the dependency should not be added to Rust's prelude.
+ pub noprelude: bool,
+}
+
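+/// Version of the JSON output format, emitted as the `version` field of
+/// [`SerializedUnitGraph`].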
+const VERSION: u32 = 1;
+
+#[derive(serde::Serialize)]
+struct SerializedUnitGraph<'a> {
+ version: u32,
+ units: Vec<SerializedUnit<'a>>,
+ roots: Vec<usize>,
+}
+
+#[derive(serde::Serialize)]
+struct SerializedUnit<'a> {
+ pkg_id: PackageId,
+ target: &'a Target,
+ profile: &'a Profile,
+ platform: CompileKind,
+ mode: CompileMode,
+ features: &'a Vec<InternedString>,
+ #[serde(skip_serializing_if = "std::ops::Not::not")] // hide for unstable build-std
+ is_std: bool,
+ dependencies: Vec<SerializedUnitDep>,
+}
+
+#[derive(serde::Serialize)]
+struct SerializedUnitDep {
+ index: usize,
+ extern_crate_name: InternedString,
+ // This is only set on nightly since it is unstable.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ public: Option<bool>,
+ // This is only set on nightly since it is unstable.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ noprelude: Option<bool>,
+ // Intentionally not including `unit_for` because it is a low-level
+ // internal detail that is mostly used for building the graph.
+}
+
+/// Outputs a JSON serialization of [`UnitGraph`] for the given `root_units`
+/// to the standard output.
+pub fn emit_serialized_unit_graph(
+ root_units: &[Unit],
+ unit_graph: &UnitGraph,
+ config: &Config,
+) -> CargoResult<()> {
+ let mut units: Vec<(&Unit, &Vec<UnitDep>)> = unit_graph.iter().collect();
+ units.sort_unstable();
+ // Create a map for quick lookup for dependencies.
+ let indices: HashMap<&Unit, usize> = units
+ .iter()
+ .enumerate()
+ .map(|(i, val)| (val.0, i))
+ .collect();
+ let roots = root_units.iter().map(|root| indices[root]).collect();
+ let ser_units = units
+ .iter()
+ .map(|(unit, unit_deps)| {
+ let dependencies = unit_deps
+ .iter()
+ .map(|unit_dep| {
+                    // See https://github.com/rust-lang/rust/issues/64260;
+                    // revisit this gating once that is stabilized.
+ let (public, noprelude) = if config.nightly_features_allowed {
+ (Some(unit_dep.public), Some(unit_dep.noprelude))
+ } else {
+ (None, None)
+ };
+ SerializedUnitDep {
+ index: indices[&unit_dep.unit],
+ extern_crate_name: unit_dep.extern_crate_name,
+ public,
+ noprelude,
+ }
+ })
+ .collect();
+ SerializedUnit {
+ pkg_id: unit.pkg.package_id(),
+ target: &unit.target,
+ profile: &unit.profile,
+ platform: unit.kind,
+ mode: unit.mode,
+ features: &unit.features,
+ is_std: unit.is_std,
+ dependencies,
+ }
+ })
+ .collect();
+ let s = SerializedUnitGraph {
+ version: VERSION,
+ units: ser_units,
+ roots,
+ };
+
+ let stdout = std::io::stdout();
+ let mut lock = stdout.lock();
+ serde_json::to_writer(&mut lock, &s)?;
+ drop(writeln!(lock));
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/core/dependency.rs b/src/tools/cargo/src/cargo/core/dependency.rs
new file mode 100644
index 000000000..0b3aba8ad
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/dependency.rs
@@ -0,0 +1,642 @@
+use cargo_platform::Platform;
+use log::trace;
+use semver::VersionReq;
+use serde::ser;
+use serde::Serialize;
+use std::borrow::Cow;
+use std::fmt;
+use std::path::PathBuf;
+use std::rc::Rc;
+
+use crate::core::compiler::{CompileKind, CompileTarget};
+use crate::core::{PackageId, SourceId, Summary};
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+use crate::util::toml::StringOrVec;
+use crate::util::OptVersionReq;
+
+/// Information about a dependency requested by a Cargo manifest.
+/// Cheap to copy.
+#[derive(PartialEq, Eq, Hash, Clone, Debug)]
+pub struct Dependency {
+ inner: Rc<Inner>,
+}
+
+/// The data underlying a `Dependency`.
+#[derive(PartialEq, Eq, Hash, Clone, Debug)]
+struct Inner {
+ name: InternedString,
+ source_id: SourceId,
+ /// Source ID for the registry as specified in the manifest.
+ ///
+    /// This will be `None` if the registry is not specified (i.e. a crates.io
+    /// dependency). It can differ from `source_id`, for example when both a
+    /// `path` and a `registry` are specified; for a crates.io dependency,
+    /// `source_id` will be crates.io and this will be `None`.
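+    ///
+    /// For example (illustrative), the dependency below has a `registry_id`
+    /// pointing at the alternative registry named `my-registry`:
+    ///
+    /// ```toml
+    /// [dependencies]
+    /// foo = { version = "0.1", registry = "my-registry" }
+    /// ```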
+ registry_id: Option<SourceId>,
+ req: OptVersionReq,
+ specified_req: bool,
+ kind: DepKind,
+ only_match_name: bool,
+ explicit_name_in_toml: Option<InternedString>,
+
+ optional: bool,
+ public: bool,
+ default_features: bool,
+ features: Vec<InternedString>,
+ // The presence of this information turns a dependency into an artifact dependency.
+ artifact: Option<Artifact>,
+
+ // This dependency should be used only for this platform.
+ // `None` means *all platforms*.
+ platform: Option<Platform>,
+}
+
+#[derive(Serialize)]
+struct SerializedDependency<'a> {
+ name: &'a str,
+ source: SourceId,
+ req: String,
+ kind: DepKind,
+ rename: Option<&'a str>,
+
+ optional: bool,
+ uses_default_features: bool,
+ features: &'a [InternedString],
+ #[serde(skip_serializing_if = "Option::is_none")]
+ artifact: Option<&'a Artifact>,
+ target: Option<&'a Platform>,
+ /// The registry URL this dependency is from.
+ /// If None, then it comes from the default registry (crates.io).
+ registry: Option<&'a str>,
+
+ /// The file system path for a local path dependency.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ path: Option<PathBuf>,
+}
+
+impl ser::Serialize for Dependency {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ let registry_id = self.registry_id();
+ SerializedDependency {
+ name: &*self.package_name(),
+ source: self.source_id(),
+ req: self.version_req().to_string(),
+ kind: self.kind(),
+ optional: self.is_optional(),
+ uses_default_features: self.uses_default_features(),
+ features: self.features(),
+ target: self.platform(),
+ rename: self.explicit_name_in_toml().map(|s| s.as_str()),
+ registry: registry_id.as_ref().map(|sid| sid.url().as_str()),
+ path: self.source_id().local_path(),
+ artifact: self.artifact(),
+ }
+ .serialize(s)
+ }
+}
+
+#[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug, Copy)]
+pub enum DepKind {
+ Normal,
+ Development,
+ Build,
+}
+
+impl DepKind {
+ pub fn kind_table(&self) -> &'static str {
+ match self {
+ DepKind::Normal => "dependencies",
+ DepKind::Development => "dev-dependencies",
+ DepKind::Build => "build-dependencies",
+ }
+ }
+}
+
+impl ser::Serialize for DepKind {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ match *self {
+ DepKind::Normal => None,
+ DepKind::Development => Some("dev"),
+ DepKind::Build => Some("build"),
+ }
+ .serialize(s)
+ }
+}
+
+impl Dependency {
+ /// Attempt to create a `Dependency` from an entry in the manifest.
+ pub fn parse(
+ name: impl Into<InternedString>,
+ version: Option<&str>,
+ source_id: SourceId,
+ ) -> CargoResult<Dependency> {
+ let name = name.into();
+ let (specified_req, version_req) = match version {
+ Some(v) => match VersionReq::parse(v) {
+ Ok(req) => (true, OptVersionReq::Req(req)),
+ Err(err) => {
+ return Err(anyhow::Error::new(err).context(format!(
+ "failed to parse the version requirement `{}` for dependency `{}`",
+ v, name,
+ )))
+ }
+ },
+ None => (false, OptVersionReq::Any),
+ };
+
+ let mut ret = Dependency::new_override(name, source_id);
+ {
+ let ptr = Rc::make_mut(&mut ret.inner);
+ ptr.only_match_name = false;
+ ptr.req = version_req;
+ ptr.specified_req = specified_req;
+ }
+ Ok(ret)
+ }
+
+ pub fn new_override(name: InternedString, source_id: SourceId) -> Dependency {
+ assert!(!name.is_empty());
+ Dependency {
+ inner: Rc::new(Inner {
+ name,
+ source_id,
+ registry_id: None,
+ req: OptVersionReq::Any,
+ kind: DepKind::Normal,
+ only_match_name: true,
+ optional: false,
+ public: false,
+ features: Vec::new(),
+ default_features: true,
+ specified_req: false,
+ platform: None,
+ explicit_name_in_toml: None,
+ artifact: None,
+ }),
+ }
+ }
+
+ pub fn version_req(&self) -> &OptVersionReq {
+ &self.inner.req
+ }
+
+ /// This is the name of this `Dependency` as listed in `Cargo.toml`.
+ ///
+ /// Or in other words, this is what shows up in the `[dependencies]` section
+ /// on the left hand side. This is *not* the name of the package that's
+ /// being depended on as the dependency can be renamed. For that use
+ /// `package_name` below.
+ ///
+ /// Both of the dependencies below return `foo` for `name_in_toml`:
+ ///
+ /// ```toml
+ /// [dependencies]
+ /// foo = "0.1"
+ /// ```
+ ///
+ /// and ...
+ ///
+ /// ```toml
+ /// [dependencies]
+ /// foo = { version = "0.1", package = 'bar' }
+ /// ```
+ pub fn name_in_toml(&self) -> InternedString {
+ self.explicit_name_in_toml().unwrap_or(self.inner.name)
+ }
+
+ /// The name of the package that this `Dependency` depends on.
+ ///
+ /// Usually this is what's written on the left hand side of a dependencies
+ /// section, but it can also be renamed via the `package` key.
+ ///
+ /// Both of the dependencies below return `foo` for `package_name`:
+ ///
+ /// ```toml
+ /// [dependencies]
+ /// foo = "0.1"
+ /// ```
+ ///
+ /// and ...
+ ///
+ /// ```toml
+ /// [dependencies]
+ /// bar = { version = "0.1", package = 'foo' }
+ /// ```
+ pub fn package_name(&self) -> InternedString {
+ self.inner.name
+ }
+
+ pub fn source_id(&self) -> SourceId {
+ self.inner.source_id
+ }
+
+ pub fn registry_id(&self) -> Option<SourceId> {
+ self.inner.registry_id
+ }
+
+ pub fn set_registry_id(&mut self, registry_id: SourceId) -> &mut Dependency {
+ Rc::make_mut(&mut self.inner).registry_id = Some(registry_id);
+ self
+ }
+
+ pub fn kind(&self) -> DepKind {
+ self.inner.kind
+ }
+
+ pub fn is_public(&self) -> bool {
+ self.inner.public
+ }
+
+ /// Sets whether the dependency is public.
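+    ///
+    /// A public dependency is declared in the manifest like this (illustrative;
+    /// this currently requires the unstable `public-dependency` feature):
+    ///
+    /// ```toml
+    /// [dependencies]
+    /// foo = { version = "0.1", public = true }
+    /// ```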
+ pub fn set_public(&mut self, public: bool) -> &mut Dependency {
+ if public {
+ // Setting 'public' only makes sense for normal dependencies
+ assert_eq!(self.kind(), DepKind::Normal);
+ }
+ Rc::make_mut(&mut self.inner).public = public;
+ self
+ }
+
+ pub fn specified_req(&self) -> bool {
+ self.inner.specified_req
+ }
+
+    /// If `None`, this dependency must be built for all platforms.
+    /// If `Some`, it must only be built for the specified platform.
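+    ///
+    /// For example (illustrative), the dependency below is only used when
+    /// compiling for Windows targets, so its `platform` is `Some(..)`:
+    ///
+    /// ```toml
+    /// [target.'cfg(windows)'.dependencies]
+    /// winapi = "0.3"
+    /// ```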
+ pub fn platform(&self) -> Option<&Platform> {
+ self.inner.platform.as_ref()
+ }
+
+    /// The name this dependency was renamed to in the manifest, if any.
+ ///
+ /// If the `package` key is used in `Cargo.toml` then this returns the same
+ /// value as `name_in_toml`.
+ pub fn explicit_name_in_toml(&self) -> Option<InternedString> {
+ self.inner.explicit_name_in_toml
+ }
+
+ pub fn set_kind(&mut self, kind: DepKind) -> &mut Dependency {
+ if self.is_public() {
+ // Setting 'public' only makes sense for normal dependencies
+ assert_eq!(kind, DepKind::Normal);
+ }
+ Rc::make_mut(&mut self.inner).kind = kind;
+ self
+ }
+
+ /// Sets the list of features requested for the package.
+ pub fn set_features(
+ &mut self,
+ features: impl IntoIterator<Item = impl Into<InternedString>>,
+ ) -> &mut Dependency {
+ Rc::make_mut(&mut self.inner).features = features.into_iter().map(|s| s.into()).collect();
+ self
+ }
+
+ /// Sets whether the dependency requests default features of the package.
+ pub fn set_default_features(&mut self, default_features: bool) -> &mut Dependency {
+ Rc::make_mut(&mut self.inner).default_features = default_features;
+ self
+ }
+
+ /// Sets whether the dependency is optional.
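+    ///
+    /// An optional dependency is declared in the manifest like this
+    /// (illustrative):
+    ///
+    /// ```toml
+    /// [dependencies]
+    /// foo = { version = "0.1", optional = true }
+    /// ```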
+ pub fn set_optional(&mut self, optional: bool) -> &mut Dependency {
+ Rc::make_mut(&mut self.inner).optional = optional;
+ self
+ }
+
+ /// Sets the source ID for this dependency.
+ pub fn set_source_id(&mut self, id: SourceId) -> &mut Dependency {
+ Rc::make_mut(&mut self.inner).source_id = id;
+ self
+ }
+
+ /// Sets the version requirement for this dependency.
+ pub fn set_version_req(&mut self, req: VersionReq) -> &mut Dependency {
+ Rc::make_mut(&mut self.inner).req = OptVersionReq::Req(req);
+ self
+ }
+
+ pub fn set_platform(&mut self, platform: Option<Platform>) -> &mut Dependency {
+ Rc::make_mut(&mut self.inner).platform = platform;
+ self
+ }
+
+ pub fn set_explicit_name_in_toml(
+ &mut self,
+ name: impl Into<InternedString>,
+ ) -> &mut Dependency {
+ Rc::make_mut(&mut self.inner).explicit_name_in_toml = Some(name.into());
+ self
+ }
+
+ /// Locks this dependency to depending on the specified package ID.
+ pub fn lock_to(&mut self, id: PackageId) -> &mut Dependency {
+ assert_eq!(self.inner.source_id, id.source_id());
+ trace!(
+ "locking dep from `{}` with `{}` at {} to {}",
+ self.package_name(),
+ self.version_req(),
+ self.source_id(),
+ id
+ );
+ let me = Rc::make_mut(&mut self.inner);
+ me.req.lock_to(id.version());
+
+ // Only update the `precise` of this source to preserve other
+ // information about dependency's source which may not otherwise be
+ // tested during equality/hashing.
+ me.source_id = me
+ .source_id
+ .with_precise(id.source_id().precise().map(|s| s.to_string()));
+ self
+ }
+
+ /// Locks this dependency to a specified version.
+ ///
+ /// Mainly used in dependency patching like `[patch]` or `[replace]`, which
+ /// doesn't need to lock the entire dependency to a specific [`PackageId`].
+ pub fn lock_version(&mut self, version: &semver::Version) -> &mut Dependency {
+ let me = Rc::make_mut(&mut self.inner);
+ me.req.lock_to(version);
+ self
+ }
+
+ /// Returns `true` if this is a "locked" dependency. Basically a locked
+ /// dependency has an exact version req, but not vice versa.
+ pub fn is_locked(&self) -> bool {
+ self.inner.req.is_locked()
+ }
+
+ /// Returns `false` if the dependency is only used to build the local package.
+ pub fn is_transitive(&self) -> bool {
+ match self.inner.kind {
+ DepKind::Normal | DepKind::Build => true,
+ DepKind::Development => false,
+ }
+ }
+
+ pub fn is_build(&self) -> bool {
+ matches!(self.inner.kind, DepKind::Build)
+ }
+
+ pub fn is_optional(&self) -> bool {
+ self.inner.optional
+ }
+
+ /// Returns `true` if the default features of the dependency are requested.
+ pub fn uses_default_features(&self) -> bool {
+ self.inner.default_features
+ }
+
+    /// Returns the list of features that are requested by the dependency.
+ pub fn features(&self) -> &[InternedString] {
+ &self.inner.features
+ }
+
+ /// Returns `true` if the package (`sum`) can fulfill this dependency request.
+ pub fn matches(&self, sum: &Summary) -> bool {
+ self.matches_id(sum.package_id())
+ }
+
+    /// Returns `true` if the package (`id`) can fulfill this dependency request,
+    /// ignoring whether the source matches.
+ pub fn matches_ignoring_source(&self, id: PackageId) -> bool {
+ self.package_name() == id.name() && self.version_req().matches(id.version())
+ }
+
+ /// Returns `true` if the package (`id`) can fulfill this dependency request.
+ pub fn matches_id(&self, id: PackageId) -> bool {
+ self.inner.name == id.name()
+ && (self.inner.only_match_name
+ || (self.inner.req.matches(id.version()) && self.inner.source_id == id.source_id()))
+ }
+
+ pub fn map_source(mut self, to_replace: SourceId, replace_with: SourceId) -> Dependency {
+ if self.source_id() == to_replace {
+ self.set_source_id(replace_with);
+ }
+ self
+ }
+
+ pub(crate) fn set_artifact(&mut self, artifact: Artifact) {
+ Rc::make_mut(&mut self.inner).artifact = Some(artifact);
+ }
+
+ pub(crate) fn artifact(&self) -> Option<&Artifact> {
+ self.inner.artifact.as_ref()
+ }
+
+    /// Dependencies are potential Rust libs if they are not artifact dependencies,
+    /// or if they are artifact dependencies which are also allowed to be used as
+    /// a library (`lib = true`).
+    /// Previously, every dependency was potentially seen as a library.
+ pub(crate) fn maybe_lib(&self) -> bool {
+ self.artifact().map(|a| a.is_lib).unwrap_or(true)
+ }
+}
+
+/// The presence of an artifact turns an ordinary dependency into an Artifact dependency.
+/// As such, it will build one or more artifacts of possibly various kinds,
+/// making them available at build time to rustc invocations or at run time
+/// to build scripts.
+///
+/// This information represents a requirement in the package this dependency refers to.
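+///
+/// For example (illustrative; artifact dependencies currently require the
+/// unstable `-Z bindeps` flag), the dependency below requests `bar`'s binary
+/// artifacts, keeps it usable as a library, and forces a specific target:
+///
+/// ```toml
+/// [dependencies]
+/// bar = { version = "1.0", artifact = "bin", lib = true, target = "x86_64-unknown-linux-gnu" }
+/// ```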
+#[derive(PartialEq, Eq, Hash, Clone, Debug)]
+pub struct Artifact {
+ inner: Rc<Vec<ArtifactKind>>,
+ is_lib: bool,
+ target: Option<ArtifactTarget>,
+}
+
+#[derive(Serialize)]
+pub struct SerializedArtifact<'a> {
+ kinds: &'a [ArtifactKind],
+ lib: bool,
+ target: Option<&'a str>,
+}
+
+impl ser::Serialize for Artifact {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ SerializedArtifact {
+ kinds: self.kinds(),
+ lib: self.is_lib,
+ target: self.target.as_ref().map(|t| match t {
+ ArtifactTarget::BuildDependencyAssumeTarget => "target",
+ ArtifactTarget::Force(target) => target.rustc_target().as_str(),
+ }),
+ }
+ .serialize(s)
+ }
+}
+
+impl Artifact {
+ pub(crate) fn parse(
+ artifacts: &StringOrVec,
+ is_lib: bool,
+ target: Option<&str>,
+ ) -> CargoResult<Self> {
+ let kinds = ArtifactKind::validate(
+ artifacts
+ .iter()
+ .map(|s| ArtifactKind::parse(s))
+ .collect::<Result<Vec<_>, _>>()?,
+ )?;
+ Ok(Artifact {
+ inner: Rc::new(kinds),
+ is_lib,
+ target: target.map(ArtifactTarget::parse).transpose()?,
+ })
+ }
+
+ pub(crate) fn kinds(&self) -> &[ArtifactKind] {
+ &self.inner
+ }
+
+ pub(crate) fn is_lib(&self) -> bool {
+ self.is_lib
+ }
+
+ pub(crate) fn target(&self) -> Option<ArtifactTarget> {
+ self.target
+ }
+}
+
+#[derive(PartialEq, Eq, Hash, Copy, Clone, Ord, PartialOrd, Debug)]
+pub enum ArtifactTarget {
+ /// Only applicable to build-dependencies, causing them to be built
+ /// for the given target (i.e. via `--target <triple>`) instead of for the host.
+ /// Has no effect on non-build dependencies.
+ BuildDependencyAssumeTarget,
+ /// The name of the platform triple, like `x86_64-apple-darwin`, that this
+ /// artifact will always be built for, no matter if it is a build,
+ /// normal or dev dependency.
+ Force(CompileTarget),
+}
+
+impl ArtifactTarget {
+ pub fn parse(target: &str) -> CargoResult<ArtifactTarget> {
+ Ok(match target {
+ "target" => ArtifactTarget::BuildDependencyAssumeTarget,
+ name => ArtifactTarget::Force(CompileTarget::new(name)?),
+ })
+ }
+
+ pub fn to_compile_kind(&self) -> Option<CompileKind> {
+ self.to_compile_target().map(CompileKind::Target)
+ }
+
+ pub fn to_compile_target(&self) -> Option<CompileTarget> {
+ match self {
+ ArtifactTarget::BuildDependencyAssumeTarget => None,
+ ArtifactTarget::Force(target) => Some(*target),
+ }
+ }
+
+    pub(crate) fn to_resolved_compile_kind(
+ &self,
+ root_unit_compile_kind: CompileKind,
+ ) -> CompileKind {
+ match self {
+ ArtifactTarget::Force(target) => CompileKind::Target(*target),
+ ArtifactTarget::BuildDependencyAssumeTarget => root_unit_compile_kind,
+ }
+ }
+
+ pub(crate) fn to_resolved_compile_target(
+ &self,
+ root_unit_compile_kind: CompileKind,
+ ) -> Option<CompileTarget> {
+ match self.to_resolved_compile_kind(root_unit_compile_kind) {
+ CompileKind::Host => None,
+ CompileKind::Target(target) => Some(target),
+ }
+ }
+}
+
+#[derive(PartialEq, Eq, Hash, Copy, Clone, Ord, PartialOrd, Debug)]
+pub enum ArtifactKind {
+ /// We represent all binaries in this dependency
+ AllBinaries,
+ /// We represent a single binary
+ SelectedBinary(InternedString),
+ Cdylib,
+ Staticlib,
+}
+
+impl ser::Serialize for ArtifactKind {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ let out: Cow<'_, str> = match *self {
+ ArtifactKind::SelectedBinary(name) => format!("bin:{}", name.as_str()).into(),
+ _ => self.crate_type().into(),
+ };
+ out.serialize(s)
+ }
+}
+
+impl fmt::Display for ArtifactKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(match self {
+ ArtifactKind::SelectedBinary(bin_name) => return write!(f, "bin:{bin_name}"),
+ _ => self.crate_type(),
+ })
+ }
+}
+
+impl ArtifactKind {
+    /// Returns the crate type of the artifact being built, as a string.
+    ///
+    /// Note that for `SelectedBinary` the binary name is dropped, so it is displayed as `bin`.
+ pub fn crate_type(&self) -> &'static str {
+ match self {
+ ArtifactKind::AllBinaries | ArtifactKind::SelectedBinary(_) => "bin",
+ ArtifactKind::Cdylib => "cdylib",
+ ArtifactKind::Staticlib => "staticlib",
+ }
+ }
+
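+    /// Parses a single artifact specifier as written in a manifest's `artifact`
+    /// field: `"bin"`, `"cdylib"`, `"staticlib"`, or `"bin:<name>"` for one
+    /// specific binary.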
+ fn parse(kind: &str) -> CargoResult<Self> {
+ Ok(match kind {
+ "bin" => ArtifactKind::AllBinaries,
+ "cdylib" => ArtifactKind::Cdylib,
+ "staticlib" => ArtifactKind::Staticlib,
+ _ => {
+ return kind
+ .strip_prefix("bin:")
+ .map(|bin_name| ArtifactKind::SelectedBinary(InternedString::new(bin_name)))
+ .ok_or_else(|| anyhow::anyhow!("'{}' is not a valid artifact specifier", kind))
+ }
+ })
+ }
+
+ fn validate(kinds: Vec<ArtifactKind>) -> CargoResult<Vec<ArtifactKind>> {
+ if kinds.iter().any(|k| matches!(k, ArtifactKind::AllBinaries))
+ && kinds
+ .iter()
+ .any(|k| matches!(k, ArtifactKind::SelectedBinary(_)))
+ {
+ anyhow::bail!("Cannot specify both 'bin' and 'bin:<name>' binary artifacts, as 'bin' selects all available binaries.");
+ }
+ let mut kinds_without_dupes = kinds.clone();
+ kinds_without_dupes.sort();
+ kinds_without_dupes.dedup();
+ let num_dupes = kinds.len() - kinds_without_dupes.len();
+ if num_dupes != 0 {
+ anyhow::bail!(
+ "Found {} duplicate binary artifact{}",
+ num_dupes,
+ (num_dupes > 1).then(|| "s").unwrap_or("")
+ );
+ }
+ Ok(kinds)
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/features.rs b/src/tools/cargo/src/cargo/core/features.rs
new file mode 100644
index 000000000..7f16e79cf
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/features.rs
@@ -0,0 +1,1197 @@
+//! Support for nightly features in Cargo itself.
+//!
+//! This file is the version of `feature_gate.rs` in upstream Rust for Cargo
+//! itself and is intended to be the avenue for which new features in Cargo are
+//! gated by default and then eventually stabilized. All known stable and
+//! unstable features are tracked in this file.
+//!
+//! If you're reading this then you're likely interested in adding a feature to
+//! Cargo, and the good news is that it shouldn't be too hard! First determine
+//! how the feature should be gated:
+//!
+//! * New syntax in Cargo.toml should use `cargo-features`.
+//! * New CLI options should use `-Z unstable-options`.
+//! * New functionality that doesn't have an interface yet, whose interface
+//!   has not yet been designed, or more complex features that affect multiple
+//!   parts of Cargo, should use a new `-Z` flag.
+//!
+//! See below for more details.
+//!
+//! When adding new tests for your feature, usually the tests should go into a
+//! new module of the testsuite. See
+//! <https://doc.crates.io/contrib/tests/writing.html> for more information on
+//! writing tests. Particularly, check out the "Testing Nightly Features"
+//! section for testing unstable features.
+//!
+//! After you have added your feature, be sure to update the unstable
+//! documentation at `src/doc/src/reference/unstable.md` to include a short
+//! description of how to use your new feature.
+//!
+//! And hopefully that's it!
+//!
+//! ## New Cargo.toml syntax
+//!
+//! The steps for adding new Cargo.toml syntax are:
+//!
+//! 1. Add the cargo-features unstable gate. Search below for "look here" to
+//! find the [`features!`] macro invocation and add your feature to the list.
+//!
+//! 2. Update the Cargo.toml parsing code to handle your new feature.
+//!
+//! 3. Wherever you added the new parsing code, call
+//! `features.require(Feature::my_feature_name())?` if the new syntax is
+//! used. This will return an error if the user hasn't listed the feature
+//! in `cargo-features` or this is not the nightly channel.
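+//!
+//! For example (illustrative, with a hypothetical `my-feature-name` gate), a
+//! user opts in to the new syntax by listing the feature at the top of
+//! `Cargo.toml`:
+//!
+//! ```toml
+//! cargo-features = ["my-feature-name"]
+//!
+//! [package]
+//! name = "foo"
+//! version = "0.1.0"
+//! ```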
+//!
+//! ## `-Z unstable-options`
+//!
+//! `-Z unstable-options` is intended to force the user to opt-in to new CLI
+//! flags, options, and new subcommands.
+//!
+//! The steps to add a new command-line option are:
+//!
+//! 1. Add the option to the CLI parsing code. In the help text, be sure to
+//! include `(unstable)` to note that this is an unstable option.
+//! 2. Where the CLI option is loaded, be sure to call
+//!    [`CliUnstable::fail_if_stable_opt`]. This will return an error if
+//!    `-Z unstable-options` was not passed.
+//!
+//! ## `-Z` options
+//!
+//! New `-Z` options cover all other functionality that isn't covered with
+//! `cargo-features` or `-Z unstable-options`.
+//!
+//! The steps to add a new `-Z` option are:
+//!
+//! 1. Add the option to the [`CliUnstable`] struct below. Flags can take an
+//! optional value if you want.
+//! 2. Update the [`CliUnstable::add`] function to parse the flag.
+//! 3. Wherever the new functionality is implemented, call
+//! [`Config::cli_unstable`] to get an instance of [`CliUnstable`]
+//! and check if the option has been enabled on the [`CliUnstable`] instance.
+//! Nightly gating is already handled, so no need to worry about that.
+//!
+//! ### `-Z` vs `cargo-features`
+//!
+//! In some cases there might be some changes that `cargo-features` is unable
+//! to sufficiently encompass. An example would be a syntax change in
+//! `Cargo.toml` that also impacts the index or resolver. The resolver doesn't
+//! know about `cargo-features`, so it needs a `-Z` flag to enable the
+//! experimental functionality.
+//!
+//! In those cases, you usually should introduce both a `-Z` flag (to enable
+//! the changes outside of the manifest) and a `cargo-features` entry (to
+//! enable the new syntax in `Cargo.toml`). The `cargo-features` entry ensures
+//! that any experimental syntax that gets uploaded to crates.io is clearly
+//! intended for nightly-only builds. Otherwise, users accessing those crates
+//! may get confusing errors, particularly if the syntax changes during the
+//! development cycle, and the user tries to access it with a stable release.
+//!
+//! ### `-Z` with external files
+//!
+//! Some files, such as `config.toml` config files, or the `config.json` index
+//! file, are used in a global location which can make interaction with stable
+//! releases problematic. In general, before the feature is stabilized, stable
+//! Cargo should behave roughly similar to how it behaved *before* the
+//! unstable feature was introduced. If Cargo would normally have ignored or
+//! warned about the introduction of something, then it probably should
+//! continue to do so.
+//!
+//! For example, Cargo generally ignores (or warns) about `config.toml`
+//! entries it doesn't know about. This allows a limited degree of
+//! forwards-compatibility with future versions of Cargo that add new entries.
+//!
+//! Whether or not to warn on stable may need to be decided on a case-by-case
+//! basis. For example, you may want to avoid generating a warning for options
+//! that are not critical to Cargo's operation in order to reduce the
+//! annoyance of constant warnings. However, ignoring some options may prevent
+//! proper operation, so a warning may be valuable for a user trying to
+//! diagnose why it isn't working correctly.
+//!
+//! ## Stabilization
+//!
+//! For the stabilization process, see
+//! <https://doc.crates.io/contrib/process/unstable.html#stabilization>.
+//!
+//! The steps for stabilizing are roughly:
+//!
+//! 1. Update the feature to be stable, based on the kind of feature:
+//! 1. `cargo-features`: Change the feature to `stable` in the [`features!`]
+//! macro invocation below, and include the version and a URL for the
+//! documentation.
+//! 2. `-Z unstable-options`: Find the call to [`fail_if_stable_opt`] and
+//! remove it. Be sure to update the man pages if necessary.
+//! 3. `-Z` flag: Change the parsing code in [`CliUnstable::add`] to call
+//! `stabilized_warn` or `stabilized_err` and remove the field from
+//! [`CliUnstable`]. Remove the `(unstable)` note in the clap help text if
+//! necessary.
+//! 2. Remove `masquerade_as_nightly_cargo` from any tests, and remove
+//! `cargo-features` from `Cargo.toml` test files if any. You can
+//! quickly find what needs to be removed by searching for the name
+//! of the feature, e.g. `print_im_a_teapot`
+//! 3. Update the docs in unstable.md to move the section to the bottom
+//! and summarize it similar to the other entries. Update the rest of the
+//! documentation to add the new feature.
+//!
+//! [`Config::cli_unstable`]: crate::util::config::Config::cli_unstable
+//! [`fail_if_stable_opt`]: CliUnstable::fail_if_stable_opt
+//! [`features!`]: macro.features.html
+
+use std::collections::BTreeSet;
+use std::env;
+use std::fmt::{self, Write};
+use std::str::FromStr;
+
+use anyhow::{bail, Error};
+use cargo_util::ProcessBuilder;
+use serde::{Deserialize, Serialize};
+
+use crate::core::resolver::ResolveBehavior;
+use crate::util::errors::CargoResult;
+use crate::util::{indented_lines, iter_join};
+use crate::Config;
+
+pub const HIDDEN: &str = "";
+pub const SEE_CHANNELS: &str =
+ "See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information \
+ about Rust release channels.";
+
+/// The edition of the compiler ([RFC 2052])
+///
+/// The following sections will guide you through adding and stabilizing an edition.
+///
+/// ## Adding a new edition
+///
+/// - Add the next edition to the enum.
+/// - Update every match expression that now fails to compile.
+/// - Update the [`FromStr`] impl.
+/// - Update [`CLI_VALUES`] to include the new edition.
+/// - Set [`LATEST_UNSTABLE`] to Some with the new edition.
+/// - Add an unstable feature to the [`features!`] macro invocation below for the new edition.
+/// - Gate on that new feature in [`TomlManifest::to_real_manifest`].
+/// - Update the shell completion files.
+/// - Update any failing tests (hopefully there are very few).
+/// - Update unstable.md to add a new section for this new edition (see [this example]).
+///
+/// ## Stabilization instructions
+///
+/// - Set [`LATEST_UNSTABLE`] to None.
+/// - Set [`LATEST_STABLE`] to the new version.
+/// - Update [`is_stable`] to `true`.
+/// - Set the editionNNNN feature to stable in the [`features!`] macro invocation below.
+/// - Update any tests that are affected.
+/// - Update the man page for the `--edition` flag.
+/// - Update unstable.md to move the edition section to the bottom.
+/// - Update the documentation:
+/// - Update any features impacted by the edition.
+/// - Update manifest.md#the-edition-field.
+/// - Update the `--edition` flag (options-new.md).
+/// - Rebuild man pages.
+///
+/// [RFC 2052]: https://rust-lang.github.io/rfcs/2052-epochs.html
+/// [`FromStr`]: Edition::from_str
+/// [`CLI_VALUES`]: Edition::CLI_VALUES
+/// [`LATEST_UNSTABLE`]: Edition::LATEST_UNSTABLE
+/// [`LATEST_STABLE`]: Edition::LATEST_STABLE
+/// [this example]: https://github.com/rust-lang/cargo/blob/3ebb5f15a940810f250b68821149387af583a79e/src/doc/src/reference/unstable.md?plain=1#L1238-L1264
+/// [`is_stable`]: Edition::is_stable
+/// [`TomlManifest::to_real_manifest`]: crate::util::toml::TomlManifest::to_real_manifest
+/// [`features!`]: macro.features.html
+#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq, Serialize, Deserialize)]
+pub enum Edition {
+ /// The 2015 edition
+ Edition2015,
+ /// The 2018 edition
+ Edition2018,
+ /// The 2021 edition
+ Edition2021,
+}
+
+impl Edition {
+ /// The latest edition that is unstable.
+ ///
+ /// This is `None` if there is no next unstable edition.
+ pub const LATEST_UNSTABLE: Option<Edition> = None;
+ /// The latest stable edition.
+ pub const LATEST_STABLE: Edition = Edition::Edition2021;
+ /// Possible values allowed for the `--edition` CLI flag.
+ ///
+ /// This requires a static value due to the way clap works, otherwise I
+ /// would have built this dynamically.
+ pub const CLI_VALUES: [&'static str; 3] = ["2015", "2018", "2021"];
+
+ /// Returns the first version that a particular edition was released on
+ /// stable.
+ pub(crate) fn first_version(&self) -> Option<semver::Version> {
+ use Edition::*;
+ match self {
+ Edition2015 => None,
+ Edition2018 => Some(semver::Version::new(1, 31, 0)),
+ Edition2021 => Some(semver::Version::new(1, 56, 0)),
+ }
+ }
+
+ /// Returns `true` if this edition is stable in this release.
+ pub fn is_stable(&self) -> bool {
+ use Edition::*;
+ match self {
+ Edition2015 => true,
+ Edition2018 => true,
+ Edition2021 => true,
+ }
+ }
+
+ /// Returns the previous edition from this edition.
+ ///
+ /// Returns `None` for 2015.
+ pub fn previous(&self) -> Option<Edition> {
+ use Edition::*;
+ match self {
+ Edition2015 => None,
+ Edition2018 => Some(Edition2015),
+ Edition2021 => Some(Edition2018),
+ }
+ }
+
+ /// Returns the next edition from this edition, returning the last edition
+ /// if this is already the last one.
+ pub fn saturating_next(&self) -> Edition {
+ use Edition::*;
+ match self {
+ Edition2015 => Edition2018,
+ Edition2018 => Edition2021,
+ Edition2021 => Edition2021,
+ }
+ }
+
+ /// Updates the given [`ProcessBuilder`] to include the appropriate flags
+ /// for setting the edition.
+ pub(crate) fn cmd_edition_arg(&self, cmd: &mut ProcessBuilder) {
+ if *self != Edition::Edition2015 {
+ cmd.arg(format!("--edition={}", self));
+ }
+ if !self.is_stable() {
+ cmd.arg("-Z").arg("unstable-options");
+ }
+ }
+
+ /// Whether or not this edition supports the `rust_*_compatibility` lint.
+ ///
+ /// Ideally this would not be necessary, but editions may not have any
+ /// lints, and thus `rustc` doesn't recognize it. Perhaps `rustc` could
+ /// create an empty group instead?
+ pub(crate) fn supports_compat_lint(&self) -> bool {
+ use Edition::*;
+ match self {
+ Edition2015 => false,
+ Edition2018 => true,
+ Edition2021 => true,
+ }
+ }
+
+ /// Whether or not this edition supports the `rust_*_idioms` lint.
+ ///
+ /// Ideally this would not be necessary...
+ pub(crate) fn supports_idiom_lint(&self) -> bool {
+ use Edition::*;
+ match self {
+ Edition2015 => false,
+ Edition2018 => true,
+ Edition2021 => false,
+ }
+ }
+
+ pub(crate) fn default_resolve_behavior(&self) -> ResolveBehavior {
+ if *self >= Edition::Edition2021 {
+ ResolveBehavior::V2
+ } else {
+ ResolveBehavior::V1
+ }
+ }
+}
+
+impl fmt::Display for Edition {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Edition::Edition2015 => f.write_str("2015"),
+ Edition::Edition2018 => f.write_str("2018"),
+ Edition::Edition2021 => f.write_str("2021"),
+ }
+ }
+}
+
+impl FromStr for Edition {
+ type Err = Error;
+ fn from_str(s: &str) -> Result<Self, Error> {
+ match s {
+ "2015" => Ok(Edition::Edition2015),
+ "2018" => Ok(Edition::Edition2018),
+ "2021" => Ok(Edition::Edition2021),
+ s if s.parse().map_or(false, |y: u16| y > 2021 && y < 2050) => bail!(
+ "this version of Cargo is older than the `{}` edition, \
+ and only supports `2015`, `2018`, and `2021` editions.",
+ s
+ ),
+ s => bail!(
+ "supported edition values are `2015`, `2018`, or `2021`, \
+ but `{}` is unknown",
+ s
+ ),
+ }
+ }
+}
+
+#[derive(PartialEq)]
+enum Status {
+ Stable,
+ Unstable,
+ Removed,
+}
+
+macro_rules! features {
+ (
+ $(($stab:ident, $feature:ident, $version:expr, $docs:expr),)*
+ ) => (
+ #[derive(Default, Clone, Debug)]
+ pub struct Features {
+ $($feature: bool,)*
+ activated: Vec<String>,
+ nightly_features_allowed: bool,
+ is_local: bool,
+ }
+
+ impl Feature {
+ $(
+ pub fn $feature() -> &'static Feature {
+ fn get(features: &Features) -> bool {
+ stab!($stab) == Status::Stable || features.$feature
+ }
+ static FEAT: Feature = Feature {
+ name: stringify!($feature),
+ stability: stab!($stab),
+ version: $version,
+ docs: $docs,
+ get,
+ };
+ &FEAT
+ }
+ )*
+
+ fn is_enabled(&self, features: &Features) -> bool {
+ (self.get)(features)
+ }
+ }
+
+ impl Features {
+ fn status(&mut self, feature: &str) -> Option<(&mut bool, &'static Feature)> {
+ if feature.contains("_") {
+ return None
+ }
+ let feature = feature.replace("-", "_");
+ $(
+ if feature == stringify!($feature) {
+ return Some((&mut self.$feature, Feature::$feature()))
+ }
+ )*
+ None
+ }
+ }
+ )
+}
+
+macro_rules! stab {
+ (stable) => {
+ Status::Stable
+ };
+ (unstable) => {
+ Status::Unstable
+ };
+ (removed) => {
+ Status::Removed
+ };
+}
+
+// A listing of all features in Cargo.
+//
+// "look here"
+//
+// This is the macro that lists all stable and unstable features in Cargo.
+// You'll want to add to this macro whenever you add a feature to Cargo, also
+// following the directions above.
+//
+// Note that all feature names here are valid Rust identifiers, but the `_`
+// character is translated to `-` when specified in the `cargo-features`
+// manifest entry in `Cargo.toml`.
+features! {
+ // A dummy feature that doesn't actually gate anything, but it's used in
+ // testing to ensure that we can enable stable features.
+ (stable, test_dummy_stable, "1.0", ""),
+
+ // A dummy feature that gates the usage of the `im-a-teapot` manifest
+ // entry. This is basically just intended for tests.
+ (unstable, test_dummy_unstable, "", "reference/unstable.html"),
+
+ // Downloading packages from alternative registry indexes.
+ (stable, alternative_registries, "1.34", "reference/registries.html"),
+
+ // Using editions
+ (stable, edition, "1.31", "reference/manifest.html#the-edition-field"),
+
+ // Renaming a package in the manifest via the `package` key
+ (stable, rename_dependency, "1.31", "reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml"),
+
+ // Whether a lock file is published with this crate
+ (removed, publish_lockfile, "1.37", "reference/unstable.html#publish-lockfile"),
+
+ // Overriding profiles for dependencies.
+ (stable, profile_overrides, "1.41", "reference/profiles.html#overrides"),
+
+ // "default-run" manifest option,
+ (stable, default_run, "1.37", "reference/manifest.html#the-default-run-field"),
+
+ // Declarative build scripts.
+ (unstable, metabuild, "", "reference/unstable.html#metabuild"),
+
+ // Specifying the 'public' attribute on dependencies
+ (unstable, public_dependency, "", "reference/unstable.html#public-dependency"),
+
+    // Allow specifying profiles other than 'dev', 'release', 'test', etc.
+ (stable, named_profiles, "1.57", "reference/profiles.html#custom-profiles"),
+
+ // Opt-in new-resolver behavior.
+ (stable, resolver, "1.51", "reference/resolver.html#resolver-versions"),
+
+    // Allow specifying whether binaries should be stripped.
+ (stable, strip, "1.58", "reference/profiles.html#strip-option"),
+
+ // Specifying a minimal 'rust-version' attribute for crates
+ (stable, rust_version, "1.56", "reference/manifest.html#the-rust-version-field"),
+
+ // Support for 2021 edition.
+ (stable, edition2021, "1.56", "reference/manifest.html#the-edition-field"),
+
+    // Allow specifying per-package targets (compile kinds)
+ (unstable, per_package_target, "", "reference/unstable.html#per-package-target"),
+
+    // Allow specifying which codegen backend should be used.
+ (unstable, codegen_backend, "", "reference/unstable.html#codegen-backend"),
+
+ // Allow specifying different binary name apart from the crate name
+ (unstable, different_binary_name, "", "reference/unstable.html#different-binary-name"),
+
+ // Allow specifying rustflags directly in a profile
+ (unstable, profile_rustflags, "", "reference/unstable.html#profile-rustflags-option"),
+
+    // Allow workspace members to inherit fields and dependencies from the workspace
+ (stable, workspace_inheritance, "1.64", "reference/unstable.html#workspace-inheritance"),
+}
+
+pub struct Feature {
+ name: &'static str,
+ stability: Status,
+ version: &'static str,
+ docs: &'static str,
+ get: fn(&Features) -> bool,
+}
+
+impl Features {
+ pub fn new(
+ features: &[String],
+ config: &Config,
+ warnings: &mut Vec<String>,
+ is_local: bool,
+ ) -> CargoResult<Features> {
+ let mut ret = Features::default();
+ ret.nightly_features_allowed = config.nightly_features_allowed;
+ ret.is_local = is_local;
+ for feature in features {
+ ret.add(feature, config, warnings)?;
+ ret.activated.push(feature.to_string());
+ }
+ Ok(ret)
+ }
+
+ fn add(
+ &mut self,
+ feature_name: &str,
+ config: &Config,
+ warnings: &mut Vec<String>,
+ ) -> CargoResult<()> {
+ let nightly_features_allowed = self.nightly_features_allowed;
+ let is_local = self.is_local;
+ let (slot, feature) = match self.status(feature_name) {
+ Some(p) => p,
+ None => bail!("unknown cargo feature `{}`", feature_name),
+ };
+
+ if *slot {
+ bail!(
+ "the cargo feature `{}` has already been activated",
+ feature_name
+ );
+ }
+
+ let see_docs = || {
+ let url_channel = match channel().as_str() {
+ "dev" | "nightly" => "nightly/",
+ "beta" => "beta/",
+ _ => "",
+ };
+ format!(
+ "See https://doc.rust-lang.org/{}cargo/{} for more information \
+ about using this feature.",
+ url_channel, feature.docs
+ )
+ };
+
+ match feature.stability {
+ Status::Stable => {
+ // The user can't do anything about non-local packages.
+ // Warnings are usually suppressed, but just being cautious here.
+ if is_local {
+ let warning = format!(
+ "the cargo feature `{}` has been stabilized in the {} \
+ release and is no longer necessary to be listed in the \
+ manifest\n {}",
+ feature_name,
+ feature.version,
+ see_docs()
+ );
+ warnings.push(warning);
+ }
+ }
+ Status::Unstable if !nightly_features_allowed => bail!(
+ "the cargo feature `{}` requires a nightly version of \
+ Cargo, but this is the `{}` channel\n\
+ {}\n{}",
+ feature_name,
+ channel(),
+ SEE_CHANNELS,
+ see_docs()
+ ),
+ Status::Unstable => {
+ if let Some(allow) = &config.cli_unstable().allow_features {
+ if !allow.contains(feature_name) {
+ bail!(
+ "the feature `{}` is not in the list of allowed features: [{}]",
+ feature_name,
+ iter_join(allow, ", "),
+ );
+ }
+ }
+ }
+ Status::Removed => {
+ let mut msg = format!(
+ "the cargo feature `{}` has been removed in the {} release\n\n",
+ feature_name, feature.version
+ );
+ if self.is_local {
+ drop(writeln!(
+ msg,
+ "Remove the feature from Cargo.toml to remove this error."
+ ));
+ } else {
+ drop(writeln!(
+ msg,
+ "This package cannot be used with this version of Cargo, \
+ as the unstable feature `{}` is no longer supported.",
+ feature_name
+ ));
+ }
+ drop(writeln!(msg, "{}", see_docs()));
+ bail!(msg);
+ }
+ }
+
+ *slot = true;
+
+ Ok(())
+ }
+
+ pub fn activated(&self) -> &[String] {
+ &self.activated
+ }
+
+ pub fn require(&self, feature: &Feature) -> CargoResult<()> {
+ if feature.is_enabled(self) {
+ return Ok(());
+ }
+ let feature_name = feature.name.replace("_", "-");
+ let mut msg = format!(
+ "feature `{}` is required\n\
+ \n\
+ The package requires the Cargo feature called `{}`, but \
+ that feature is not stabilized in this version of Cargo ({}).\n\
+ ",
+ feature_name,
+ feature_name,
+ crate::version(),
+ );
+
+ if self.nightly_features_allowed {
+ if self.is_local {
+ drop(writeln!(
+ msg,
+ "Consider adding `cargo-features = [\"{}\"]` \
+ to the top of Cargo.toml (above the [package] table) \
+ to tell Cargo you are opting in to use this unstable feature.",
+ feature_name
+ ));
+ } else {
+ drop(writeln!(
+ msg,
+ "Consider trying a more recent nightly release."
+ ));
+ }
+ } else {
+ drop(writeln!(
+ msg,
+ "Consider trying a newer version of Cargo \
+ (this may require the nightly release)."
+ ));
+ }
+ drop(writeln!(
+ msg,
+ "See https://doc.rust-lang.org/nightly/cargo/{} for more information \
+ about the status of this feature.",
+ feature.docs
+ ));
+
+ bail!("{}", msg);
+ }
+
+ pub fn is_enabled(&self, feature: &Feature) -> bool {
+ feature.is_enabled(self)
+ }
+}
+
+macro_rules! unstable_cli_options {
+ (
+ $(
+ $(#[$meta:meta])?
+ $element: ident: $ty: ty = ($help: expr ),
+ )*
+ ) => {
+ /// A parsed representation of all unstable flags that Cargo accepts.
+ ///
+ /// Cargo, like `rustc`, accepts a suite of `-Z` flags which are intended for
+ /// gating unstable functionality to Cargo. These flags are only available on
+ /// the nightly channel of Cargo.
+ #[derive(Default, Debug, Deserialize)]
+ #[serde(default, rename_all = "kebab-case")]
+ pub struct CliUnstable {
+ $(
+ $(#[$meta])?
+ pub $element: $ty
+ ),*
+ }
+ impl CliUnstable {
+ pub fn help() -> Vec<(&'static str, &'static str)> {
+ let fields = vec![$((stringify!($element), $help)),*];
+ fields
+ }
+ }
+ }
+}
+
+unstable_cli_options!(
+ // Permanently unstable features:
+ allow_features: Option<BTreeSet<String>> = ("Allow *only* the listed unstable features"),
+ print_im_a_teapot: bool = (HIDDEN),
+
+ // All other unstable features.
+ // Please keep this list lexicographically ordered.
+ advanced_env: bool = (HIDDEN),
+ avoid_dev_deps: bool = ("Avoid installing dev-dependencies if possible"),
+ binary_dep_depinfo: bool = ("Track changes to dependency artifacts"),
+ bindeps: bool = ("Allow Cargo packages to depend on bin, cdylib, and staticlib crates, and use the artifacts built by those crates"),
+ #[serde(deserialize_with = "deserialize_build_std")]
+ build_std: Option<Vec<String>> = ("Enable Cargo to compile the standard library itself as part of a crate graph compilation"),
+ build_std_features: Option<Vec<String>> = ("Configure features enabled for the standard library itself when building the standard library"),
+ codegen_backend: bool = ("Enable the `codegen-backend` option in profiles in .cargo/config.toml file"),
+ config_include: bool = ("Enable the `include` key in config files"),
+ credential_process: bool = ("Add a config setting to fetch registry authentication tokens by calling an external process"),
+ #[serde(deserialize_with = "deserialize_check_cfg")]
+ check_cfg: Option<(/*features:*/ bool, /*well_known_names:*/ bool, /*well_known_values:*/ bool, /*output:*/ bool)> = ("Specify scope of compile-time checking of `cfg` names/values"),
+ doctest_in_workspace: bool = ("Compile doctests with paths relative to the workspace root"),
+ doctest_xcompile: bool = ("Compile and run doctests for non-host target using runner config"),
+ dual_proc_macros: bool = ("Build proc-macros for both the host and the target"),
+ features: Option<Vec<String>> = (HIDDEN),
+ gitoxide: Option<GitoxideFeatures> = ("Use gitoxide for the given git interactions, or all of them if no argument is given"),
+ jobserver_per_rustc: bool = (HIDDEN),
+ minimal_versions: bool = ("Resolve minimal dependency versions instead of maximum"),
+ direct_minimal_versions: bool = ("Resolve minimal dependency versions instead of maximum (direct dependencies only)"),
+ mtime_on_use: bool = ("Configure Cargo to update the mtime of used files"),
+ no_index_update: bool = ("Do not update the registry index even if the cache is outdated"),
+ panic_abort_tests: bool = ("Enable support to run tests with -Cpanic=abort"),
+ profile_rustflags: bool = ("Enable the `rustflags` option in profiles in .cargo/config.toml file"),
+ host_config: bool = ("Enable the [host] section in the .cargo/config.toml file"),
+ registry_auth: bool = ("Authentication for alternative registries, and generate registry authentication tokens using asymmetric cryptography"),
+ target_applies_to_host: bool = ("Enable the `target-applies-to-host` key in the .cargo/config.toml file"),
+ rustdoc_map: bool = ("Allow passing external documentation mappings to rustdoc"),
+ separate_nightlies: bool = (HIDDEN),
+ publish_timeout: bool = ("Enable the `publish.timeout` key in .cargo/config.toml file"),
+ unstable_options: bool = ("Allow the usage of unstable options"),
+ skip_rustdoc_fingerprint: bool = (HIDDEN),
+ rustdoc_scrape_examples: bool = ("Allows Rustdoc to scrape code examples from reverse-dependencies"),
+);
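+
+// Illustrative only: each field above corresponds to a `-Z` flag of the same name with
+// underscores replaced by hyphens (see `CliUnstable::add` below), e.g.
+// `cargo +nightly build -Z avoid-dev-deps -Z allow-features=avoid-dev-deps`.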
+
+const STABILIZED_COMPILE_PROGRESS: &str = "The progress bar is now always \
+ enabled when used on an interactive console.\n\
+ See https://doc.rust-lang.org/cargo/reference/config.html#termprogresswhen \
+ for information on controlling the progress bar.";
+
+const STABILIZED_OFFLINE: &str = "Offline mode is now available via the \
+ --offline CLI option";
+
+const STABILIZED_CACHE_MESSAGES: &str = "Message caching is now always enabled.";
+
+const STABILIZED_INSTALL_UPGRADE: &str = "Packages are now always upgraded if \
+ they appear out of date.\n\
+ See https://doc.rust-lang.org/cargo/commands/cargo-install.html for more \
+ information on how upgrading works.";
+
+const STABILIZED_CONFIG_PROFILE: &str = "See \
+ https://doc.rust-lang.org/cargo/reference/config.html#profile for more \
+ information about specifying profiles in config.";
+
+const STABILIZED_CRATE_VERSIONS: &str = "The crate version is now \
+ automatically added to the documentation.";
+
+const STABILIZED_PACKAGE_FEATURES: &str = "Enhanced feature flag behavior is now \
+ available in virtual workspaces, and `member/feature-name` syntax is also \
+ always available. Other extensions require setting `resolver = \"2\"` in \
+ Cargo.toml.\n\
+ See https://doc.rust-lang.org/nightly/cargo/reference/features.html#resolver-version-2-command-line-flags \
+ for more information.";
+
+const STABILIZED_FEATURES: &str = "The new feature resolver is now available \
+ by specifying `resolver = \"2\"` in Cargo.toml.\n\
+ See https://doc.rust-lang.org/nightly/cargo/reference/features.html#feature-resolver-version-2 \
+ for more information.";
+
+const STABILIZED_EXTRA_LINK_ARG: &str = "Additional linker arguments are now \
+ supported without passing this flag.";
+
+const STABILIZED_CONFIGURABLE_ENV: &str = "The [env] section is now always enabled.";
+
+const STABILIZED_PATCH_IN_CONFIG: &str = "The patch-in-config feature is now always enabled.";
+
+const STABILIZED_NAMED_PROFILES: &str = "The named-profiles feature is now always enabled.\n\
+ See https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#custom-profiles \
+ for more information";
+
+const STABILIZED_FUTURE_INCOMPAT_REPORT: &str =
+ "The future-incompat-report feature is now always enabled.";
+
+const STABILIZED_WEAK_DEP_FEATURES: &str = "Weak dependency features are now always available.";
+
+const STABILISED_NAMESPACED_FEATURES: &str = "Namespaced features are now always available.";
+
+const STABILIZED_TIMINGS: &str = "The -Ztimings option has been stabilized as --timings.";
+
+const STABILISED_MULTITARGET: &str = "Multiple `--target` options are now always available.";
+
+const STABILIZED_TERMINAL_WIDTH: &str =
+ "The -Zterminal-width option is now always enabled for terminal output.";
+
+const STABILISED_SPARSE_REGISTRY: &str = "The sparse protocol is now the default for crates.io";
+
+fn deserialize_build_std<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
+where
+ D: serde::Deserializer<'de>,
+{
+ let crates = match <Option<Vec<String>>>::deserialize(deserializer)? {
+ Some(list) => list,
+ None => return Ok(None),
+ };
+ let v = crates.join(",");
+ Ok(Some(
+ crate::core::compiler::standard_lib::parse_unstable_flag(Some(&v)),
+ ))
+}
+
+fn deserialize_check_cfg<'de, D>(
+ deserializer: D,
+) -> Result<Option<(bool, bool, bool, bool)>, D::Error>
+where
+ D: serde::Deserializer<'de>,
+{
+ use serde::de::Error;
+ let crates = match <Option<Vec<String>>>::deserialize(deserializer)? {
+ Some(list) => list,
+ None => return Ok(None),
+ };
+
+ parse_check_cfg(crates.into_iter()).map_err(D::Error::custom)
+}
+
+#[derive(Debug, Copy, Clone, Default, Deserialize)]
+pub struct GitoxideFeatures {
+ /// All fetches are done with `gitoxide`, which includes git dependencies as well as the crates index.
+ pub fetch: bool,
+ /// When cloning the index, perform a shallow clone. Maintain shallowness upon subsequent fetches.
+ pub shallow_index: bool,
+ /// When cloning git dependencies, perform a shallow clone and maintain shallowness on subsequent fetches.
+ pub shallow_deps: bool,
+ /// Checkout git dependencies using `gitoxide` (submodules are still handled by git2 ATM, and filters
+ /// like linefeed conversions are unsupported).
+ pub checkout: bool,
+ /// A feature flag which doesn't have any meaning except for preventing
+ /// `__CARGO_USE_GITOXIDE_INSTEAD_OF_GIT2=1` builds from enabling all safe `gitoxide` features.
+ /// That way, `gitoxide` isn't actually used even though it's enabled.
+ pub internal_use_git2: bool,
+}
+
+impl GitoxideFeatures {
+ fn all() -> Self {
+ GitoxideFeatures {
+ fetch: true,
+ shallow_index: true,
+ checkout: true,
+ shallow_deps: true,
+ internal_use_git2: false,
+ }
+ }
+
+ /// Features we deem safe for everyday use: typically those with which all tests pass
+ /// AND which are backwards compatible.
+ fn safe() -> Self {
+ GitoxideFeatures {
+ fetch: true,
+ shallow_index: false,
+ checkout: true,
+ shallow_deps: false,
+ internal_use_git2: false,
+ }
+ }
+}
+
+fn parse_gitoxide(
+ it: impl Iterator<Item = impl AsRef<str>>,
+) -> CargoResult<Option<GitoxideFeatures>> {
+ let mut out = GitoxideFeatures::default();
+ let GitoxideFeatures {
+ fetch,
+ shallow_index,
+ checkout,
+ shallow_deps,
+ internal_use_git2,
+ } = &mut out;
+
+ for e in it {
+ match e.as_ref() {
+ "fetch" => *fetch = true,
+ "shallow-index" => *shallow_index = true,
+ "shallow-deps" => *shallow_deps = true,
+ "checkout" => *checkout = true,
+ "internal-use-git2" => *internal_use_git2 = true,
+ _ => {
+ bail!("unstable 'gitoxide' only takes `fetch`, 'shallow-index', 'shallow-deps' and 'checkout' as valid inputs")
+ }
+ }
+ }
+ Ok(Some(out))
+}
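+
+// For example (illustrative): `-Z gitoxide=fetch,checkout` enables just those two features,
+// while a bare `-Z gitoxide` (handled in `CliUnstable::add` below) enables `GitoxideFeatures::all()`.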
+
+fn parse_check_cfg(
+ it: impl Iterator<Item = impl AsRef<str>>,
+) -> CargoResult<Option<(bool, bool, bool, bool)>> {
+ let mut features = false;
+ let mut well_known_names = false;
+ let mut well_known_values = false;
+ let mut output = false;
+
+ for e in it {
+ match e.as_ref() {
+ "features" => features = true,
+ "names" => well_known_names = true,
+ "values" => well_known_values = true,
+ "output" => output = true,
+ _ => bail!("unstable check-cfg only takes `features`, `names`, `values` or `output` as valid inputs"),
+ }
+ }
+
+ Ok(Some((
+ features,
+ well_known_names,
+ well_known_values,
+ output,
+ )))
+}
+
+impl CliUnstable {
+ pub fn parse(
+ &mut self,
+ flags: &[String],
+ nightly_features_allowed: bool,
+ ) -> CargoResult<Vec<String>> {
+ if !flags.is_empty() && !nightly_features_allowed {
+ bail!(
+ "the `-Z` flag is only accepted on the nightly channel of Cargo, \
+ but this is the `{}` channel\n\
+ {}",
+ channel(),
+ SEE_CHANNELS
+ );
+ }
+ let mut warnings = Vec::new();
+ // We read flags twice, first to get allowed-features (if specified),
+ // and then to read the remaining unstable flags.
+ for flag in flags {
+ if flag.starts_with("allow-features=") {
+ self.add(flag, &mut warnings)?;
+ }
+ }
+ for flag in flags {
+ self.add(flag, &mut warnings)?;
+ }
+
+ if self.gitoxide.is_none() && cargo_use_gitoxide_instead_of_git2() {
+ self.gitoxide = GitoxideFeatures::safe().into();
+ }
+ Ok(warnings)
+ }
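+
+ // Illustrative: because of the two passes above, `-Z bindeps -Z allow-features=bindeps`
+ // is accepted in either order; `allow-features` is applied first and the remaining
+ // flags are then validated against it.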
+
+ fn add(&mut self, flag: &str, warnings: &mut Vec<String>) -> CargoResult<()> {
+ let mut parts = flag.splitn(2, '=');
+ let k = parts.next().unwrap();
+ let v = parts.next();
+
+ fn parse_bool(key: &str, value: Option<&str>) -> CargoResult<bool> {
+ match value {
+ None | Some("yes") => Ok(true),
+ Some("no") => Ok(false),
+ Some(s) => bail!("flag -Z{} expected `no` or `yes`, found: `{}`", key, s),
+ }
+ }
+
+ fn parse_features(value: Option<&str>) -> Vec<String> {
+ match value {
+ None => Vec::new(),
+ Some("") => Vec::new(),
+ Some(v) => v.split(',').map(|s| s.to_string()).collect(),
+ }
+ }
+
+ // Asserts that there is no argument to the flag.
+ fn parse_empty(key: &str, value: Option<&str>) -> CargoResult<bool> {
+ if let Some(v) = value {
+ bail!("flag -Z{} does not take a value, found: `{}`", key, v);
+ }
+ Ok(true)
+ }
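+
+ // Illustrative behavior of the helpers above:
+ // parse_bool("print-im-a-teapot", Some("yes")) == Ok(true)
+ // parse_features(Some("a,b")) == vec!["a".to_string(), "b".to_string()]
+ // parse_empty("advanced-env", None) == Ok(true); passing a value is an error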
+
+ let mut stabilized_warn = |key: &str, version: &str, message: &str| {
+ warnings.push(format!(
+ "flag `-Z {}` has been stabilized in the {} release, \
+ and is no longer necessary\n{}",
+ key,
+ version,
+ indented_lines(message)
+ ));
+ };
+
+ // Use this if the behavior now requires another mechanism to enable.
+ let stabilized_err = |key: &str, version: &str, message: &str| {
+ Err(anyhow::format_err!(
+ "flag `-Z {}` has been stabilized in the {} release\n{}",
+ key,
+ version,
+ indented_lines(message)
+ ))
+ };
+
+ if let Some(allowed) = &self.allow_features {
+ if k != "allow-features" && !allowed.contains(k) {
+ bail!(
+ "the feature `{}` is not in the list of allowed features: [{}]",
+ k,
+ iter_join(allowed, ", ")
+ );
+ }
+ }
+
+ match k {
+ "print-im-a-teapot" => self.print_im_a_teapot = parse_bool(k, v)?,
+ "allow-features" => self.allow_features = Some(parse_features(v).into_iter().collect()),
+ "unstable-options" => self.unstable_options = parse_empty(k, v)?,
+ "no-index-update" => self.no_index_update = parse_empty(k, v)?,
+ "avoid-dev-deps" => self.avoid_dev_deps = parse_empty(k, v)?,
+ "minimal-versions" => self.minimal_versions = parse_empty(k, v)?,
+ "direct-minimal-versions" => self.direct_minimal_versions = parse_empty(k, v)?,
+ "advanced-env" => self.advanced_env = parse_empty(k, v)?,
+ "config-include" => self.config_include = parse_empty(k, v)?,
+ "check-cfg" => {
+ self.check_cfg = v.map_or(Ok(None), |v| parse_check_cfg(v.split(',')))?
+ }
+ "dual-proc-macros" => self.dual_proc_macros = parse_empty(k, v)?,
+ // can also be set in .cargo/config or with an environment variable
+ "mtime-on-use" => self.mtime_on_use = parse_empty(k, v)?,
+ "named-profiles" => stabilized_warn(k, "1.57", STABILIZED_NAMED_PROFILES),
+ "binary-dep-depinfo" => self.binary_dep_depinfo = parse_empty(k, v)?,
+ "bindeps" => self.bindeps = parse_empty(k, v)?,
+ "build-std" => {
+ self.build_std = Some(crate::core::compiler::standard_lib::parse_unstable_flag(v))
+ }
+ "build-std-features" => self.build_std_features = Some(parse_features(v)),
+ "doctest-xcompile" => self.doctest_xcompile = parse_empty(k, v)?,
+ "doctest-in-workspace" => self.doctest_in_workspace = parse_empty(k, v)?,
+ "panic-abort-tests" => self.panic_abort_tests = parse_empty(k, v)?,
+ "jobserver-per-rustc" => self.jobserver_per_rustc = parse_empty(k, v)?,
+ "gitoxide" => {
+ self.gitoxide = v.map_or_else(
+ || Ok(Some(GitoxideFeatures::all())),
+ |v| parse_gitoxide(v.split(',')),
+ )?
+ }
+ "host-config" => self.host_config = parse_empty(k, v)?,
+ "target-applies-to-host" => self.target_applies_to_host = parse_empty(k, v)?,
+ "publish-timeout" => self.publish_timeout = parse_empty(k, v)?,
+ "features" => {
+ // `-Z features` has been stabilized since 1.51,
+ // but `-Z features=compare` is still allowed for convenience
+ // to validate that the feature resolver resolves features
+ // in the same way as the dependency resolver,
+ // until we are confident enough to remove it entirely.
+ //
+ // See rust-lang/cargo#11168
+ let feats = parse_features(v);
+ let stab_is_not_empty = feats.iter().any(|feat| {
+ matches!(
+ feat.as_str(),
+ "build_dep" | "host_dep" | "dev_dep" | "itarget" | "all"
+ )
+ });
+ if stab_is_not_empty || feats.is_empty() {
+ // Make this stabilized_err once -Zfeatures support is removed.
+ stabilized_warn(k, "1.51", STABILIZED_FEATURES);
+ }
+ self.features = Some(feats);
+ }
+ "separate-nightlies" => self.separate_nightlies = parse_empty(k, v)?,
+ "multitarget" => stabilized_warn(k, "1.64", STABILISED_MULTITARGET),
+ "rustdoc-map" => self.rustdoc_map = parse_empty(k, v)?,
+ "terminal-width" => stabilized_warn(k, "1.68", STABILIZED_TERMINAL_WIDTH),
+ "sparse-registry" => stabilized_warn(k, "1.68", STABILISED_SPARSE_REGISTRY),
+ "registry-auth" => self.registry_auth = parse_empty(k, v)?,
+ "namespaced-features" => stabilized_warn(k, "1.60", STABILISED_NAMESPACED_FEATURES),
+ "weak-dep-features" => stabilized_warn(k, "1.60", STABILIZED_WEAK_DEP_FEATURES),
+ "credential-process" => self.credential_process = parse_empty(k, v)?,
+ "rustdoc-scrape-examples" => self.rustdoc_scrape_examples = parse_empty(k, v)?,
+ "skip-rustdoc-fingerprint" => self.skip_rustdoc_fingerprint = parse_empty(k, v)?,
+ "compile-progress" => stabilized_warn(k, "1.30", STABILIZED_COMPILE_PROGRESS),
+ "offline" => stabilized_err(k, "1.36", STABILIZED_OFFLINE)?,
+ "cache-messages" => stabilized_warn(k, "1.40", STABILIZED_CACHE_MESSAGES),
+ "install-upgrade" => stabilized_warn(k, "1.41", STABILIZED_INSTALL_UPGRADE),
+ "config-profile" => stabilized_warn(k, "1.43", STABILIZED_CONFIG_PROFILE),
+ "crate-versions" => stabilized_warn(k, "1.47", STABILIZED_CRATE_VERSIONS),
+ "package-features" => stabilized_warn(k, "1.51", STABILIZED_PACKAGE_FEATURES),
+ "extra-link-arg" => stabilized_warn(k, "1.56", STABILIZED_EXTRA_LINK_ARG),
+ "configurable-env" => stabilized_warn(k, "1.56", STABILIZED_CONFIGURABLE_ENV),
+ "patch-in-config" => stabilized_warn(k, "1.56", STABILIZED_PATCH_IN_CONFIG),
+ "future-incompat-report" => {
+ stabilized_warn(k, "1.59.0", STABILIZED_FUTURE_INCOMPAT_REPORT)
+ }
+ "timings" => stabilized_warn(k, "1.60", STABILIZED_TIMINGS),
+ "codegen-backend" => self.codegen_backend = parse_empty(k, v)?,
+ "profile-rustflags" => self.profile_rustflags = parse_empty(k, v)?,
+ _ => bail!("unknown `-Z` flag specified: {}", k),
+ }
+
+ Ok(())
+ }
+
+ /// Generates an error if `-Z unstable-options` was not used for a new,
+ /// unstable command-line flag.
+ pub fn fail_if_stable_opt(&self, flag: &str, issue: u32) -> CargoResult<()> {
+ if !self.unstable_options {
+ let see = format!(
+ "See https://github.com/rust-lang/cargo/issues/{issue} for more \
+ information about the `{flag}` flag."
+ );
+ // NOTE: a `config` isn't available here, check the channel directly
+ let channel = channel();
+ if channel == "nightly" || channel == "dev" {
+ bail!(
+ "the `{flag}` flag is unstable, pass `-Z unstable-options` to enable it\n\
+ {see}"
+ );
+ } else {
+ bail!(
+ "the `{flag}` flag is unstable, and only available on the nightly channel \
+ of Cargo, but this is the `{channel}` channel\n\
+ {SEE_CHANNELS}\n\
+ {see}"
+ );
+ }
+ }
+ Ok(())
+ }
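+
+ // A hypothetical call site (the flag name and issue number are placeholders, not real):
+ //
+ // config.cli_unstable().fail_if_stable_opt("--my-unstable-flag", 12345)?;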
+
+ /// Generates an error if `-Z unstable-options` was not used for a new,
+ /// unstable subcommand.
+ pub fn fail_if_stable_command(
+ &self,
+ config: &Config,
+ command: &str,
+ issue: u32,
+ ) -> CargoResult<()> {
+ if self.unstable_options {
+ return Ok(());
+ }
+ let see = format!(
+ "See https://github.com/rust-lang/cargo/issues/{} for more \
+ information about the `cargo {}` command.",
+ issue, command
+ );
+ if config.nightly_features_allowed {
+ bail!(
+ "the `cargo {}` command is unstable, pass `-Z unstable-options` to enable it\n\
+ {}",
+ command,
+ see
+ );
+ } else {
+ bail!(
+ "the `cargo {}` command is unstable, and only available on the \
+ nightly channel of Cargo, but this is the `{}` channel\n\
+ {}\n\
+ {}",
+ command,
+ channel(),
+ SEE_CHANNELS,
+ see
+ );
+ }
+ }
+}
+
+/// Returns the current release channel ("stable", "beta", "nightly", "dev").
+pub fn channel() -> String {
+ // ALLOWED: For testing cargo itself only.
+ #[allow(clippy::disallowed_methods)]
+ if let Ok(override_channel) = env::var("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS") {
+ return override_channel;
+ }
+ // ALLOWED: the process of rustc bootstrapping reads this through
+ // `std::env`. We should make the behavior consistent. Also, we
+ // don't advertise this for bypassing nightly.
+ #[allow(clippy::disallowed_methods)]
+ if let Ok(staging) = env::var("RUSTC_BOOTSTRAP") {
+ if staging == "1" {
+ return "dev".to_string();
+ }
+ }
+ crate::version()
+ .release_channel
+ .unwrap_or_else(|| String::from("dev"))
+}
+
+/// Only for testing and developing. See ["Running with gitoxide as default git backend in tests"][1].
+///
+/// [1]: https://doc.crates.io/contrib/tests/running.html#running-with-gitoxide-as-default-git-backend-in-tests
+// ALLOWED: For testing cargo itself only.
+#[allow(clippy::disallowed_methods)]
+fn cargo_use_gitoxide_instead_of_git2() -> bool {
+ std::env::var_os("__CARGO_USE_GITOXIDE_INSTEAD_OF_GIT2").map_or(false, |value| value == "1")
+}
diff --git a/src/tools/cargo/src/cargo/core/manifest.rs b/src/tools/cargo/src/cargo/core/manifest.rs
new file mode 100644
index 000000000..182882dad
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/manifest.rs
@@ -0,0 +1,1014 @@
+use std::collections::{BTreeMap, HashMap};
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
+use std::sync::Arc;
+
+use anyhow::Context as _;
+use semver::Version;
+use serde::ser;
+use serde::Serialize;
+use url::Url;
+
+use crate::core::compiler::rustdoc::RustdocScrapeExamples;
+use crate::core::compiler::{CompileKind, CrateType};
+use crate::core::resolver::ResolveBehavior;
+use crate::core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary};
+use crate::core::{Edition, Feature, Features, WorkspaceConfig};
+use crate::util::errors::*;
+use crate::util::interning::InternedString;
+use crate::util::toml::{TomlManifest, TomlProfiles};
+use crate::util::{short_hash, Config, Filesystem};
+
+pub enum EitherManifest {
+ Real(Manifest),
+ Virtual(VirtualManifest),
+}
+
+impl EitherManifest {
+ pub(crate) fn workspace_config(&self) -> &WorkspaceConfig {
+ match *self {
+ EitherManifest::Real(ref r) => r.workspace_config(),
+ EitherManifest::Virtual(ref v) => v.workspace_config(),
+ }
+ }
+}
+
+/// Contains all the information about a package, as loaded from a `Cargo.toml`.
+///
+/// This is deserialized using the [`TomlManifest`] type.
+#[derive(Clone, Debug)]
+pub struct Manifest {
+ summary: Summary,
+ targets: Vec<Target>,
+ default_kind: Option<CompileKind>,
+ forced_kind: Option<CompileKind>,
+ links: Option<String>,
+ warnings: Warnings,
+ exclude: Vec<String>,
+ include: Vec<String>,
+ metadata: ManifestMetadata,
+ custom_metadata: Option<toml::Value>,
+ profiles: Option<TomlProfiles>,
+ publish: Option<Vec<String>>,
+ replace: Vec<(PackageIdSpec, Dependency)>,
+ patch: HashMap<Url, Vec<Dependency>>,
+ workspace: WorkspaceConfig,
+ original: Rc<TomlManifest>,
+ unstable_features: Features,
+ edition: Edition,
+ rust_version: Option<String>,
+ im_a_teapot: Option<bool>,
+ default_run: Option<String>,
+ metabuild: Option<Vec<String>>,
+ resolve_behavior: Option<ResolveBehavior>,
+}
+
+/// When parsing `Cargo.toml`, some warnings should be silenced
+/// if the manifest comes from a dependency. `DelayedWarning`
+/// allows this delayed emission of warnings.
+#[derive(Clone, Debug)]
+pub struct DelayedWarning {
+ pub message: String,
+ pub is_critical: bool,
+}
+
+#[derive(Clone, Debug)]
+pub struct Warnings(Vec<DelayedWarning>);
+
+#[derive(Clone, Debug)]
+pub struct VirtualManifest {
+ replace: Vec<(PackageIdSpec, Dependency)>,
+ patch: HashMap<Url, Vec<Dependency>>,
+ workspace: WorkspaceConfig,
+ profiles: Option<TomlProfiles>,
+ warnings: Warnings,
+ features: Features,
+ resolve_behavior: Option<ResolveBehavior>,
+}
+
+/// General metadata about a package which is just blindly uploaded to the
+/// registry.
+///
+/// Note that many of these fields can contain invalid values such as the
+/// homepage, repository, documentation, or license. These fields are not
+/// validated by cargo itself; it is up to the registry to validate them when
+/// the package is uploaded. Cargo will itself accept any valid TOML
+/// specification for these values.
+#[derive(PartialEq, Clone, Debug)]
+pub struct ManifestMetadata {
+ pub authors: Vec<String>,
+ pub keywords: Vec<String>,
+ pub categories: Vec<String>,
+ pub license: Option<String>,
+ pub license_file: Option<String>,
+ pub description: Option<String>, // Not in Markdown
+ pub readme: Option<String>, // File, not contents
+ pub homepage: Option<String>, // URL
+ pub repository: Option<String>, // URL
+ pub documentation: Option<String>, // URL
+ pub badges: BTreeMap<String, BTreeMap<String, String>>,
+ pub links: Option<String>,
+}
+
+#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub enum TargetKind {
+ Lib(Vec<CrateType>),
+ Bin,
+ Test,
+ Bench,
+ ExampleLib(Vec<CrateType>),
+ ExampleBin,
+ CustomBuild,
+}
+
+impl ser::Serialize for TargetKind {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ use self::TargetKind::*;
+ match self {
+ Lib(kinds) => s.collect_seq(kinds.iter().map(|t| t.to_string())),
+ Bin => ["bin"].serialize(s),
+ ExampleBin | ExampleLib(_) => ["example"].serialize(s),
+ Test => ["test"].serialize(s),
+ CustomBuild => ["custom-build"].serialize(s),
+ Bench => ["bench"].serialize(s),
+ }
+ }
+}
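+
+// Illustrative serialized forms: `TargetKind::Bin` becomes `["bin"]`, while
+// `TargetKind::Lib(vec![CrateType::Rlib, CrateType::Cdylib])` becomes `["rlib", "cdylib"]`.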
+
+impl fmt::Debug for TargetKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ use self::TargetKind::*;
+ match *self {
+ Lib(ref kinds) => kinds.fmt(f),
+ Bin => "bin".fmt(f),
+ ExampleBin | ExampleLib(_) => "example".fmt(f),
+ Test => "test".fmt(f),
+ CustomBuild => "custom-build".fmt(f),
+ Bench => "bench".fmt(f),
+ }
+ }
+}
+
+impl TargetKind {
+ pub fn description(&self) -> &'static str {
+ match self {
+ TargetKind::Lib(..) => "lib",
+ TargetKind::Bin => "bin",
+ TargetKind::Test => "integration-test",
+ TargetKind::ExampleBin | TargetKind::ExampleLib(..) => "example",
+ TargetKind::Bench => "bench",
+ TargetKind::CustomBuild => "build-script",
+ }
+ }
+
+ /// Returns whether production of this artifact requires the object files
+ /// from dependencies to be available.
+ ///
+ /// This only returns `false` when all we're producing is an rlib; otherwise
+ /// it returns `true`.
+ pub fn requires_upstream_objects(&self) -> bool {
+ match self {
+ TargetKind::Lib(kinds) | TargetKind::ExampleLib(kinds) => {
+ kinds.iter().any(|k| k.requires_upstream_objects())
+ }
+ _ => true,
+ }
+ }
+
+ /// Returns the arguments suitable for `--crate-type` to pass to rustc.
+ pub fn rustc_crate_types(&self) -> Vec<CrateType> {
+ match self {
+ TargetKind::Lib(kinds) | TargetKind::ExampleLib(kinds) => kinds.clone(),
+ TargetKind::CustomBuild
+ | TargetKind::Bench
+ | TargetKind::Test
+ | TargetKind::ExampleBin
+ | TargetKind::Bin => vec![CrateType::Bin],
+ }
+ }
+}
+
+/// Information about a binary, a library, an example, etc. that is part of the
+/// package.
+#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub struct Target {
+ inner: Arc<TargetInner>,
+}
+
+#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+struct TargetInner {
+ kind: TargetKind,
+ name: String,
+ // Note that `bin_name` is used for the cargo-feature `different_binary_name`
+ bin_name: Option<String>,
+ // Note that the `src_path` here is excluded from the `Hash` implementation
+ // as it's currently absolute and would otherwise be a little too brittle,
+ // causing spurious rebuilds. Instead the hash for the path that we send to the
+ // compiler is handled elsewhere.
+ src_path: TargetSourcePath,
+ required_features: Option<Vec<String>>,
+ tested: bool,
+ benched: bool,
+ doc: bool,
+ doctest: bool,
+ harness: bool, // whether to use the test harness (--test)
+ for_host: bool,
+ proc_macro: bool,
+ edition: Edition,
+ doc_scrape_examples: RustdocScrapeExamples,
+}
+
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
+pub enum TargetSourcePath {
+ Path(PathBuf),
+ Metabuild,
+}
+
+impl TargetSourcePath {
+ pub fn path(&self) -> Option<&Path> {
+ match self {
+ TargetSourcePath::Path(path) => Some(path.as_ref()),
+ TargetSourcePath::Metabuild => None,
+ }
+ }
+
+ pub fn is_path(&self) -> bool {
+ matches!(self, TargetSourcePath::Path(_))
+ }
+}
+
+impl Hash for TargetSourcePath {
+ fn hash<H: Hasher>(&self, _: &mut H) {
+ // ...
+ }
+}
+
+impl fmt::Debug for TargetSourcePath {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ TargetSourcePath::Path(path) => path.fmt(f),
+ TargetSourcePath::Metabuild => "metabuild".fmt(f),
+ }
+ }
+}
+
+impl From<PathBuf> for TargetSourcePath {
+ fn from(path: PathBuf) -> Self {
+ assert!(path.is_absolute(), "`{}` is not absolute", path.display());
+ TargetSourcePath::Path(path)
+ }
+}
+
+#[derive(Serialize)]
+struct SerializedTarget<'a> {
+ /// Is this a `--bin bin`, `--lib`, `--example ex`?
+ /// Serialized as a list of strings for historical reasons.
+ kind: &'a TargetKind,
+ /// Corresponds to `--crate-type` compiler attribute.
+ /// See <https://doc.rust-lang.org/reference/linkage.html>
+ crate_types: Vec<CrateType>,
+ name: &'a str,
+ src_path: Option<&'a PathBuf>,
+ edition: &'a str,
+ #[serde(rename = "required-features", skip_serializing_if = "Option::is_none")]
+ required_features: Option<Vec<&'a str>>,
+ /// Whether docs should be built for the target via `cargo doc`
+ /// See <https://doc.rust-lang.org/cargo/commands/cargo-doc.html#target-selection>
+ doc: bool,
+ doctest: bool,
+ /// Whether tests should be run for the target (`test` field in `Cargo.toml`)
+ test: bool,
+}
+
+impl ser::Serialize for Target {
+ fn serialize<S: ser::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
+ let src_path = match self.src_path() {
+ TargetSourcePath::Path(p) => Some(p),
+ // Unfortunately getting the correct path would require access to
+ // target_dir, which is not available here.
+ TargetSourcePath::Metabuild => None,
+ };
+ SerializedTarget {
+ kind: self.kind(),
+ crate_types: self.rustc_crate_types(),
+ name: self.name(),
+ src_path,
+ edition: &self.edition().to_string(),
+ required_features: self
+ .required_features()
+ .map(|rf| rf.iter().map(|s| s.as_str()).collect()),
+ doc: self.documented(),
+ doctest: self.doctested() && self.doctestable(),
+ test: self.tested(),
+ }
+ .serialize(s)
+ }
+}
+
+impl fmt::Debug for Target {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+compact_debug! {
+ impl fmt::Debug for TargetInner {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let (default, default_name) = {
+ match &self.kind {
+ TargetKind::Lib(kinds) => {
+ (
+ Target::lib_target(
+ &self.name,
+ kinds.clone(),
+ self.src_path.path().unwrap().to_path_buf(),
+ self.edition,
+ ).inner,
+ format!("lib_target({:?}, {:?}, {:?}, {:?})",
+ self.name, kinds, self.src_path, self.edition),
+ )
+ }
+ TargetKind::CustomBuild => {
+ match self.src_path {
+ TargetSourcePath::Path(ref path) => {
+ (
+ Target::custom_build_target(
+ &self.name,
+ path.to_path_buf(),
+ self.edition,
+ ).inner,
+ format!("custom_build_target({:?}, {:?}, {:?})",
+ self.name, path, self.edition),
+ )
+ }
+ TargetSourcePath::Metabuild => {
+ (
+ Target::metabuild_target(&self.name).inner,
+ format!("metabuild_target({:?})", self.name),
+ )
+ }
+ }
+ }
+ _ => (
+ Target::new(self.src_path.clone(), self.edition).inner,
+ format!("with_path({:?}, {:?})", self.src_path, self.edition),
+ ),
+ }
+ };
+ [debug_the_fields(
+ kind
+ name
+ bin_name
+ src_path
+ required_features
+ tested
+ benched
+ doc
+ doctest
+ harness
+ for_host
+ proc_macro
+ edition
+ doc_scrape_examples
+ )]
+ }
+ }
+}
+
+impl Manifest {
+ pub fn new(
+ summary: Summary,
+ default_kind: Option<CompileKind>,
+ forced_kind: Option<CompileKind>,
+ targets: Vec<Target>,
+ exclude: Vec<String>,
+ include: Vec<String>,
+ links: Option<String>,
+ metadata: ManifestMetadata,
+ custom_metadata: Option<toml::Value>,
+ profiles: Option<TomlProfiles>,
+ publish: Option<Vec<String>>,
+ replace: Vec<(PackageIdSpec, Dependency)>,
+ patch: HashMap<Url, Vec<Dependency>>,
+ workspace: WorkspaceConfig,
+ unstable_features: Features,
+ edition: Edition,
+ rust_version: Option<String>,
+ im_a_teapot: Option<bool>,
+ default_run: Option<String>,
+ original: Rc<TomlManifest>,
+ metabuild: Option<Vec<String>>,
+ resolve_behavior: Option<ResolveBehavior>,
+ ) -> Manifest {
+ Manifest {
+ summary,
+ default_kind,
+ forced_kind,
+ targets,
+ warnings: Warnings::new(),
+ exclude,
+ include,
+ links,
+ metadata,
+ custom_metadata,
+ profiles,
+ publish,
+ replace,
+ patch,
+ workspace,
+ unstable_features,
+ edition,
+ rust_version,
+ original,
+ im_a_teapot,
+ default_run,
+ metabuild,
+ resolve_behavior,
+ }
+ }
+
+ pub fn dependencies(&self) -> &[Dependency] {
+ self.summary.dependencies()
+ }
+ pub fn default_kind(&self) -> Option<CompileKind> {
+ self.default_kind
+ }
+ pub fn forced_kind(&self) -> Option<CompileKind> {
+ self.forced_kind
+ }
+ pub fn exclude(&self) -> &[String] {
+ &self.exclude
+ }
+ pub fn include(&self) -> &[String] {
+ &self.include
+ }
+ pub fn metadata(&self) -> &ManifestMetadata {
+ &self.metadata
+ }
+ pub fn name(&self) -> InternedString {
+ self.package_id().name()
+ }
+ pub fn package_id(&self) -> PackageId {
+ self.summary.package_id()
+ }
+ pub fn summary(&self) -> &Summary {
+ &self.summary
+ }
+ pub fn summary_mut(&mut self) -> &mut Summary {
+ &mut self.summary
+ }
+ pub fn targets(&self) -> &[Target] {
+ &self.targets
+ }
+ // It is used by cargo-c, please do not remove it
+ pub fn targets_mut(&mut self) -> &mut [Target] {
+ &mut self.targets
+ }
+ pub fn version(&self) -> &Version {
+ self.package_id().version()
+ }
+ pub fn warnings_mut(&mut self) -> &mut Warnings {
+ &mut self.warnings
+ }
+ pub fn warnings(&self) -> &Warnings {
+ &self.warnings
+ }
+ pub fn profiles(&self) -> Option<&TomlProfiles> {
+ self.profiles.as_ref()
+ }
+ pub fn publish(&self) -> &Option<Vec<String>> {
+ &self.publish
+ }
+ pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] {
+ &self.replace
+ }
+ pub fn original(&self) -> &TomlManifest {
+ &self.original
+ }
+ pub fn patch(&self) -> &HashMap<Url, Vec<Dependency>> {
+ &self.patch
+ }
+ pub fn links(&self) -> Option<&str> {
+ self.links.as_deref()
+ }
+
+ pub fn workspace_config(&self) -> &WorkspaceConfig {
+ &self.workspace
+ }
+
+ /// Unstable, nightly features that are enabled in this manifest.
+ pub fn unstable_features(&self) -> &Features {
+ &self.unstable_features
+ }
+
+ /// The style of resolver behavior to use, declared with the `resolver` field.
+ ///
+ /// Returns `None` if it is not specified.
+ pub fn resolve_behavior(&self) -> Option<ResolveBehavior> {
+ self.resolve_behavior
+ }
+
+ pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Manifest {
+ Manifest {
+ summary: self.summary.map_source(to_replace, replace_with),
+ ..self
+ }
+ }
+
+ pub fn feature_gate(&self) -> CargoResult<()> {
+ if self.im_a_teapot.is_some() {
+ self.unstable_features
+ .require(Feature::test_dummy_unstable())
+ .with_context(|| {
+ "the `im-a-teapot` manifest key is unstable and may \
+ not work properly in England"
+ })?;
+ }
+
+ if self.default_kind.is_some() || self.forced_kind.is_some() {
+ self.unstable_features
+ .require(Feature::per_package_target())
+ .with_context(|| {
+ "the `package.default-target` and `package.forced-target` \
+ manifest keys are unstable and may not work properly"
+ })?;
+ }
+
+ Ok(())
+ }
+
+ // Just a helper function to test out `-Z` flags on Cargo
+ pub fn print_teapot(&self, config: &Config) {
+ if let Some(teapot) = self.im_a_teapot {
+ if config.cli_unstable().print_im_a_teapot {
+ crate::drop_println!(config, "im-a-teapot = {}", teapot);
+ }
+ }
+ }
+
+ pub fn edition(&self) -> Edition {
+ self.edition
+ }
+
+ pub fn rust_version(&self) -> Option<&str> {
+ self.rust_version.as_deref()
+ }
+
+ pub fn custom_metadata(&self) -> Option<&toml::Value> {
+ self.custom_metadata.as_ref()
+ }
+
+ pub fn default_run(&self) -> Option<&str> {
+ self.default_run.as_deref()
+ }
+
+ pub fn metabuild(&self) -> Option<&Vec<String>> {
+ self.metabuild.as_ref()
+ }
+
+ pub fn metabuild_path(&self, target_dir: Filesystem) -> PathBuf {
+ let hash = short_hash(&self.package_id());
+ target_dir
+ .into_path_unlocked()
+ .join(".metabuild")
+ .join(format!("metabuild-{}-{}.rs", self.name(), hash))
+ }
+}
+
+impl VirtualManifest {
+ pub fn new(
+ replace: Vec<(PackageIdSpec, Dependency)>,
+ patch: HashMap<Url, Vec<Dependency>>,
+ workspace: WorkspaceConfig,
+ profiles: Option<TomlProfiles>,
+ features: Features,
+ resolve_behavior: Option<ResolveBehavior>,
+ ) -> VirtualManifest {
+ VirtualManifest {
+ replace,
+ patch,
+ workspace,
+ profiles,
+ warnings: Warnings::new(),
+ features,
+ resolve_behavior,
+ }
+ }
+
+ pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] {
+ &self.replace
+ }
+
+ pub fn patch(&self) -> &HashMap<Url, Vec<Dependency>> {
+ &self.patch
+ }
+
+ pub fn workspace_config(&self) -> &WorkspaceConfig {
+ &self.workspace
+ }
+
+ pub fn profiles(&self) -> Option<&TomlProfiles> {
+ self.profiles.as_ref()
+ }
+
+ pub fn warnings_mut(&mut self) -> &mut Warnings {
+ &mut self.warnings
+ }
+
+ pub fn warnings(&self) -> &Warnings {
+ &self.warnings
+ }
+
+ pub fn unstable_features(&self) -> &Features {
+ &self.features
+ }
+
+ /// The style of resolver behavior to use, declared with the `resolver` field.
+ ///
+ /// Returns `None` if it is not specified.
+ pub fn resolve_behavior(&self) -> Option<ResolveBehavior> {
+ self.resolve_behavior
+ }
+}
+
+impl Target {
+ fn new(src_path: TargetSourcePath, edition: Edition) -> Target {
+ Target {
+ inner: Arc::new(TargetInner {
+ kind: TargetKind::Bin,
+ name: String::new(),
+ bin_name: None,
+ src_path,
+ required_features: None,
+ doc: false,
+ doctest: false,
+ harness: true,
+ for_host: false,
+ proc_macro: false,
+ doc_scrape_examples: RustdocScrapeExamples::Unset,
+ edition,
+ tested: true,
+ benched: true,
+ }),
+ }
+ }
+
+ fn with_path(src_path: PathBuf, edition: Edition) -> Target {
+ Target::new(TargetSourcePath::from(src_path), edition)
+ }
+
+ pub fn lib_target(
+ name: &str,
+ crate_targets: Vec<CrateType>,
+ src_path: PathBuf,
+ edition: Edition,
+ ) -> Target {
+ let mut target = Target::with_path(src_path, edition);
+ target
+ .set_kind(TargetKind::Lib(crate_targets))
+ .set_name(name)
+ .set_doctest(true)
+ .set_doc(true);
+ target
+ }
+
+ pub fn bin_target(
+ name: &str,
+ bin_name: Option<String>,
+ src_path: PathBuf,
+ required_features: Option<Vec<String>>,
+ edition: Edition,
+ ) -> Target {
+ let mut target = Target::with_path(src_path, edition);
+ target
+ .set_kind(TargetKind::Bin)
+ .set_name(name)
+ .set_binary_name(bin_name)
+ .set_required_features(required_features)
+ .set_doc(true);
+ target
+ }
+
+ /// Builds a `Target` corresponding to the `build = "build.rs"` entry.
+ pub fn custom_build_target(name: &str, src_path: PathBuf, edition: Edition) -> Target {
+ let mut target = Target::with_path(src_path, edition);
+ target
+ .set_kind(TargetKind::CustomBuild)
+ .set_name(name)
+ .set_for_host(true)
+ .set_benched(false)
+ .set_tested(false)
+ .set_doc_scrape_examples(RustdocScrapeExamples::Disabled);
+ target
+ }
+
+ pub fn metabuild_target(name: &str) -> Target {
+ let mut target = Target::new(TargetSourcePath::Metabuild, Edition::Edition2018);
+ target
+ .set_kind(TargetKind::CustomBuild)
+ .set_name(name)
+ .set_for_host(true)
+ .set_benched(false)
+ .set_tested(false)
+ .set_doc_scrape_examples(RustdocScrapeExamples::Disabled);
+ target
+ }
+
+ pub fn example_target(
+ name: &str,
+ crate_targets: Vec<CrateType>,
+ src_path: PathBuf,
+ required_features: Option<Vec<String>>,
+ edition: Edition,
+ ) -> Target {
+ let kind = if crate_targets.is_empty() || crate_targets.iter().all(|t| *t == CrateType::Bin)
+ {
+ TargetKind::ExampleBin
+ } else {
+ TargetKind::ExampleLib(crate_targets)
+ };
+ let mut target = Target::with_path(src_path, edition);
+ target
+ .set_kind(kind)
+ .set_name(name)
+ .set_required_features(required_features)
+ .set_tested(false)
+ .set_benched(false);
+ target
+ }
+
+ pub fn test_target(
+ name: &str,
+ src_path: PathBuf,
+ required_features: Option<Vec<String>>,
+ edition: Edition,
+ ) -> Target {
+ let mut target = Target::with_path(src_path, edition);
+ target
+ .set_kind(TargetKind::Test)
+ .set_name(name)
+ .set_required_features(required_features)
+ .set_benched(false);
+ target
+ }
+
+ pub fn bench_target(
+ name: &str,
+ src_path: PathBuf,
+ required_features: Option<Vec<String>>,
+ edition: Edition,
+ ) -> Target {
+ let mut target = Target::with_path(src_path, edition);
+ target
+ .set_kind(TargetKind::Bench)
+ .set_name(name)
+ .set_required_features(required_features)
+ .set_tested(false);
+ target
+ }
+
+ pub fn name(&self) -> &str {
+ &self.inner.name
+ }
+ pub fn crate_name(&self) -> String {
+ self.name().replace("-", "_")
+ }
+ pub fn src_path(&self) -> &TargetSourcePath {
+ &self.inner.src_path
+ }
+ pub fn set_src_path(&mut self, src_path: TargetSourcePath) {
+ Arc::make_mut(&mut self.inner).src_path = src_path;
+ }
+ pub fn required_features(&self) -> Option<&Vec<String>> {
+ self.inner.required_features.as_ref()
+ }
+ pub fn kind(&self) -> &TargetKind {
+ &self.inner.kind
+ }
+ pub fn tested(&self) -> bool {
+ self.inner.tested
+ }
+ pub fn harness(&self) -> bool {
+ self.inner.harness
+ }
+ pub fn documented(&self) -> bool {
+ self.inner.doc
+ }
+ // A plugin, proc-macro, or build-script.
+ pub fn for_host(&self) -> bool {
+ self.inner.for_host
+ }
+ pub fn proc_macro(&self) -> bool {
+ self.inner.proc_macro
+ }
+ pub fn edition(&self) -> Edition {
+ self.inner.edition
+ }
+ pub fn doc_scrape_examples(&self) -> RustdocScrapeExamples {
+ self.inner.doc_scrape_examples
+ }
+ pub fn benched(&self) -> bool {
+ self.inner.benched
+ }
+ pub fn doctested(&self) -> bool {
+ self.inner.doctest
+ }
+
+ pub fn doctestable(&self) -> bool {
+ match self.kind() {
+ TargetKind::Lib(ref kinds) => kinds.iter().any(|k| {
+ *k == CrateType::Rlib || *k == CrateType::Lib || *k == CrateType::ProcMacro
+ }),
+ _ => false,
+ }
+ }
+
+ pub fn is_lib(&self) -> bool {
+ matches!(self.kind(), TargetKind::Lib(_))
+ }
+
+ pub fn is_dylib(&self) -> bool {
+ match self.kind() {
+ TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Dylib),
+ _ => false,
+ }
+ }
+
+ pub fn is_cdylib(&self) -> bool {
+ match self.kind() {
+ TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Cdylib),
+ _ => false,
+ }
+ }
+
+ pub fn is_staticlib(&self) -> bool {
+ match self.kind() {
+ TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Staticlib),
+ _ => false,
+ }
+ }
+
+ /// Returns whether this target produces an artifact which can be linked
+ /// into a Rust crate.
+ ///
+ /// This only returns true for certain kinds of libraries.
+ pub fn is_linkable(&self) -> bool {
+ match self.kind() {
+ TargetKind::Lib(kinds) => kinds.iter().any(|k| k.is_linkable()),
+ _ => false,
+ }
+ }
+
+ pub fn is_bin(&self) -> bool {
+ *self.kind() == TargetKind::Bin
+ }
+
+ pub fn is_example(&self) -> bool {
+ matches!(
+ self.kind(),
+ TargetKind::ExampleBin | TargetKind::ExampleLib(..)
+ )
+ }
+
+ /// Returns `true` if it is a binary or executable example.
+ /// NOTE: Tests are `false`!
+ pub fn is_executable(&self) -> bool {
+ self.is_bin() || self.is_exe_example()
+ }
+
+ /// Returns `true` if it is an executable example.
+ pub fn is_exe_example(&self) -> bool {
+ // Needed for --all-examples in contexts where only runnable examples make sense
+ matches!(self.kind(), TargetKind::ExampleBin)
+ }
+
+ pub fn is_test(&self) -> bool {
+ *self.kind() == TargetKind::Test
+ }
+ pub fn is_bench(&self) -> bool {
+ *self.kind() == TargetKind::Bench
+ }
+ pub fn is_custom_build(&self) -> bool {
+ *self.kind() == TargetKind::CustomBuild
+ }
+
+ /// Returns the arguments suitable for `--crate-type` to pass to rustc.
+ pub fn rustc_crate_types(&self) -> Vec<CrateType> {
+ self.kind().rustc_crate_types()
+ }
+
+ pub fn set_tested(&mut self, tested: bool) -> &mut Target {
+ Arc::make_mut(&mut self.inner).tested = tested;
+ self
+ }
+ pub fn set_benched(&mut self, benched: bool) -> &mut Target {
+ Arc::make_mut(&mut self.inner).benched = benched;
+ self
+ }
+ pub fn set_doctest(&mut self, doctest: bool) -> &mut Target {
+ Arc::make_mut(&mut self.inner).doctest = doctest;
+ self
+ }
+ pub fn set_for_host(&mut self, for_host: bool) -> &mut Target {
+ Arc::make_mut(&mut self.inner).for_host = for_host;
+ self
+ }
+ pub fn set_proc_macro(&mut self, proc_macro: bool) -> &mut Target {
+ Arc::make_mut(&mut self.inner).proc_macro = proc_macro;
+ self
+ }
+ pub fn set_edition(&mut self, edition: Edition) -> &mut Target {
+ Arc::make_mut(&mut self.inner).edition = edition;
+ self
+ }
+ pub fn set_doc_scrape_examples(
+ &mut self,
+ doc_scrape_examples: RustdocScrapeExamples,
+ ) -> &mut Target {
+ Arc::make_mut(&mut self.inner).doc_scrape_examples = doc_scrape_examples;
+ self
+ }
+ pub fn set_harness(&mut self, harness: bool) -> &mut Target {
+ Arc::make_mut(&mut self.inner).harness = harness;
+ self
+ }
+ pub fn set_doc(&mut self, doc: bool) -> &mut Target {
+ Arc::make_mut(&mut self.inner).doc = doc;
+ self
+ }
+ pub fn set_kind(&mut self, kind: TargetKind) -> &mut Target {
+ Arc::make_mut(&mut self.inner).kind = kind;
+ self
+ }
+ pub fn set_name(&mut self, name: &str) -> &mut Target {
+ Arc::make_mut(&mut self.inner).name = name.to_string();
+ self
+ }
+ pub fn set_binary_name(&mut self, bin_name: Option<String>) -> &mut Target {
+ Arc::make_mut(&mut self.inner).bin_name = bin_name;
+ self
+ }
+ pub fn set_required_features(&mut self, required_features: Option<Vec<String>>) -> &mut Target {
+ Arc::make_mut(&mut self.inner).required_features = required_features;
+ self
+ }
+ pub fn binary_filename(&self) -> Option<String> {
+ self.inner.bin_name.clone()
+ }
+ pub fn description_named(&self) -> String {
+ match self.kind() {
+ TargetKind::Lib(..) => "lib".to_string(),
+ TargetKind::Bin => format!("bin \"{}\"", self.name()),
+ TargetKind::Test => format!("test \"{}\"", self.name()),
+ TargetKind::Bench => format!("bench \"{}\"", self.name()),
+ TargetKind::ExampleLib(..) | TargetKind::ExampleBin => {
+ format!("example \"{}\"", self.name())
+ }
+ TargetKind::CustomBuild => "build script".to_string(),
+ }
+ }
+}
+
+impl fmt::Display for Target {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.kind() {
+ TargetKind::Lib(..) => write!(f, "Target(lib)"),
+ TargetKind::Bin => write!(f, "Target(bin: {})", self.name()),
+ TargetKind::Test => write!(f, "Target(test: {})", self.name()),
+ TargetKind::Bench => write!(f, "Target(bench: {})", self.name()),
+ TargetKind::ExampleBin | TargetKind::ExampleLib(..) => {
+ write!(f, "Target(example: {})", self.name())
+ }
+ TargetKind::CustomBuild => write!(f, "Target(script)"),
+ }
+ }
+}
+
+impl Warnings {
+ fn new() -> Warnings {
+ Warnings(Vec::new())
+ }
+
+ pub fn add_warning(&mut self, s: String) {
+ self.0.push(DelayedWarning {
+ message: s,
+ is_critical: false,
+ })
+ }
+
+ pub fn add_critical_warning(&mut self, s: String) {
+ self.0.push(DelayedWarning {
+ message: s,
+ is_critical: true,
+ })
+ }
+
+ pub fn warnings(&self) -> &[DelayedWarning] {
+ &self.0
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/mod.rs b/src/tools/cargo/src/cargo/core/mod.rs
new file mode 100644
index 000000000..e36c678c4
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/mod.rs
@@ -0,0 +1,32 @@
+pub use self::dependency::Dependency;
+pub use self::features::{CliUnstable, Edition, Feature, Features};
+pub use self::manifest::{EitherManifest, VirtualManifest};
+pub use self::manifest::{Manifest, Target, TargetKind};
+pub use self::package::{Package, PackageSet};
+pub use self::package_id::PackageId;
+pub use self::package_id_spec::PackageIdSpec;
+pub use self::registry::Registry;
+pub use self::resolver::{Resolve, ResolveVersion};
+pub use self::shell::{Shell, Verbosity};
+pub use self::source::{GitReference, QueryKind, Source, SourceId, SourceMap};
+pub use self::summary::{FeatureMap, FeatureValue, Summary};
+pub use self::workspace::{
+ find_workspace_root, resolve_relative_path, MaybePackage, Workspace, WorkspaceConfig,
+ WorkspaceRootConfig,
+};
+pub use crate::util::toml::InheritableFields;
+
+pub mod compiler;
+pub mod dependency;
+pub mod features;
+pub mod manifest;
+pub mod package;
+pub mod package_id;
+mod package_id_spec;
+pub mod profiles;
+pub mod registry;
+pub mod resolver;
+pub mod shell;
+pub mod source;
+pub mod summary;
+mod workspace;
diff --git a/src/tools/cargo/src/cargo/core/package.rs b/src/tools/cargo/src/cargo/core/package.rs
new file mode 100644
index 000000000..40ba9cdf8
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/package.rs
@@ -0,0 +1,1218 @@
+use std::cell::{Cell, Ref, RefCell, RefMut};
+use std::cmp::Ordering;
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::fmt;
+use std::hash;
+use std::mem;
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
+use std::time::{Duration, Instant};
+
+use anyhow::Context;
+use bytesize::ByteSize;
+use curl::easy::{Easy, HttpVersion};
+use curl::multi::{EasyHandle, Multi};
+use lazycell::LazyCell;
+use log::{debug, warn};
+use semver::Version;
+use serde::Serialize;
+
+use crate::core::compiler::{CompileKind, RustcTargetData};
+use crate::core::dependency::DepKind;
+use crate::core::resolver::features::ForceAllTargets;
+use crate::core::resolver::{HasDevUnits, Resolve};
+use crate::core::source::MaybePackage;
+use crate::core::{Dependency, Manifest, PackageId, SourceId, Target};
+use crate::core::{SourceMap, Summary, Workspace};
+use crate::ops;
+use crate::util::config::PackageCacheLock;
+use crate::util::errors::{CargoResult, HttpNotSuccessful, DEBUG_HEADERS};
+use crate::util::interning::InternedString;
+use crate::util::network::retry::{Retry, RetryResult};
+use crate::util::network::sleep::SleepTracker;
+use crate::util::{self, internal, Config, Progress, ProgressStyle};
+
+pub const MANIFEST_PREAMBLE: &str = "\
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# \"normalize\" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+";
+
+/// Information about a package that is available somewhere in the file system.
+///
+/// A package is a `Cargo.toml` file plus all the files that are part of it.
+#[derive(Clone)]
+pub struct Package {
+ inner: Rc<PackageInner>,
+}
+
+#[derive(Clone)]
+// TODO: is `manifest_path` a relic?
+struct PackageInner {
+ /// The package's manifest.
+ manifest: Manifest,
+ /// The root of the package.
+ manifest_path: PathBuf,
+}
+
+impl Ord for Package {
+ fn cmp(&self, other: &Package) -> Ordering {
+ self.package_id().cmp(&other.package_id())
+ }
+}
+
+impl PartialOrd for Package {
+ fn partial_cmp(&self, other: &Package) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+/// A Package in a form where `Serialize` can be derived.
+#[derive(Serialize)]
+pub struct SerializedPackage {
+ name: InternedString,
+ version: Version,
+ id: PackageId,
+ license: Option<String>,
+ license_file: Option<String>,
+ description: Option<String>,
+ source: SourceId,
+ dependencies: Vec<Dependency>,
+ targets: Vec<Target>,
+ features: BTreeMap<InternedString, Vec<InternedString>>,
+ manifest_path: PathBuf,
+ metadata: Option<toml::Value>,
+ publish: Option<Vec<String>>,
+ authors: Vec<String>,
+ categories: Vec<String>,
+ keywords: Vec<String>,
+ readme: Option<String>,
+ repository: Option<String>,
+ homepage: Option<String>,
+ documentation: Option<String>,
+ edition: String,
+ links: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ metabuild: Option<Vec<String>>,
+ default_run: Option<String>,
+ rust_version: Option<String>,
+}
+
+impl Package {
+ /// Creates a package from a manifest and its location.
+ pub fn new(manifest: Manifest, manifest_path: &Path) -> Package {
+ Package {
+ inner: Rc::new(PackageInner {
+ manifest,
+ manifest_path: manifest_path.to_path_buf(),
+ }),
+ }
+ }
+
+ /// Gets the manifest dependencies.
+ pub fn dependencies(&self) -> &[Dependency] {
+ self.manifest().dependencies()
+ }
+ /// Gets the manifest.
+ pub fn manifest(&self) -> &Manifest {
+ &self.inner.manifest
+ }
+ /// Gets the manifest.
+ pub fn manifest_mut(&mut self) -> &mut Manifest {
+ &mut Rc::make_mut(&mut self.inner).manifest
+ }
+ /// Gets the path to the manifest.
+ pub fn manifest_path(&self) -> &Path {
+ &self.inner.manifest_path
+ }
+ /// Gets the name of the package.
+ pub fn name(&self) -> InternedString {
+ self.package_id().name()
+ }
+ /// Gets the `PackageId` object for the package (fully defines a package).
+ pub fn package_id(&self) -> PackageId {
+ self.manifest().package_id()
+ }
+ /// Gets the root folder of the package.
+ pub fn root(&self) -> &Path {
+ self.manifest_path().parent().unwrap()
+ }
+ /// Gets the summary for the package.
+ pub fn summary(&self) -> &Summary {
+ self.manifest().summary()
+ }
+ /// Gets the targets specified in the manifest.
+ pub fn targets(&self) -> &[Target] {
+ self.manifest().targets()
+ }
+ /// Gets the library crate for this package, if it exists.
+ pub fn library(&self) -> Option<&Target> {
+ self.targets().iter().find(|t| t.is_lib())
+ }
+ /// Gets the current package version.
+ pub fn version(&self) -> &Version {
+ self.package_id().version()
+ }
+ /// Gets the package authors.
+ pub fn authors(&self) -> &Vec<String> {
+ &self.manifest().metadata().authors
+ }
+
+ /// Returns `None` if the package is set to publish.
+ /// Returns `Some(allowed_registries)` if publishing is limited to specified
+ /// registries or if package is set to not publish.
+ pub fn publish(&self) -> &Option<Vec<String>> {
+ self.manifest().publish()
+ }
+ /// Returns `true` if this package is a proc-macro.
+ pub fn proc_macro(&self) -> bool {
+ self.targets().iter().any(|target| target.proc_macro())
+ }
+ /// Gets the package's minimum Rust version.
+ pub fn rust_version(&self) -> Option<&str> {
+ self.manifest().rust_version()
+ }
+
+ /// Returns `true` if the package uses a custom build script for any target.
+ pub fn has_custom_build(&self) -> bool {
+ self.targets().iter().any(|t| t.is_custom_build())
+ }
+
+ pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Package {
+ Package {
+ inner: Rc::new(PackageInner {
+ manifest: self.manifest().clone().map_source(to_replace, replace_with),
+ manifest_path: self.manifest_path().to_owned(),
+ }),
+ }
+ }
+
+ pub fn to_registry_toml(&self, ws: &Workspace<'_>) -> CargoResult<String> {
+ let manifest = self
+ .manifest()
+ .original()
+ .prepare_for_publish(ws, self.root())?;
+ let toml = toml::to_string_pretty(&manifest)?;
+ Ok(format!("{}\n{}", MANIFEST_PREAMBLE, toml))
+ }
+
+ /// Returns if package should include `Cargo.lock`.
+ pub fn include_lockfile(&self) -> bool {
+ self.targets().iter().any(|t| t.is_example() || t.is_bin())
+ }
+
+ pub fn serialized(&self) -> SerializedPackage {
+ let summary = self.manifest().summary();
+ let package_id = summary.package_id();
+ let manmeta = self.manifest().metadata();
+ // Filter out metabuild targets. They are an internal implementation
+ // detail that is probably not relevant externally. There's also not a
+ // real path to show in `src_path`, and this avoids changing the format.
+ let targets: Vec<Target> = self
+ .manifest()
+ .targets()
+ .iter()
+ .filter(|t| t.src_path().is_path())
+ .cloned()
+ .collect();
+ // Convert Vec<FeatureValue> to Vec<InternedString>
+ let features = summary
+ .features()
+ .iter()
+ .map(|(k, v)| {
+ (
+ *k,
+ v.iter()
+ .map(|fv| InternedString::new(&fv.to_string()))
+ .collect(),
+ )
+ })
+ .collect();
+
+ SerializedPackage {
+ name: package_id.name(),
+ version: package_id.version().clone(),
+ id: package_id,
+ license: manmeta.license.clone(),
+ license_file: manmeta.license_file.clone(),
+ description: manmeta.description.clone(),
+ source: summary.source_id(),
+ dependencies: summary.dependencies().to_vec(),
+ targets,
+ features,
+ manifest_path: self.manifest_path().to_path_buf(),
+ metadata: self.manifest().custom_metadata().cloned(),
+ authors: manmeta.authors.clone(),
+ categories: manmeta.categories.clone(),
+ keywords: manmeta.keywords.clone(),
+ readme: manmeta.readme.clone(),
+ repository: manmeta.repository.clone(),
+ homepage: manmeta.homepage.clone(),
+ documentation: manmeta.documentation.clone(),
+ edition: self.manifest().edition().to_string(),
+ links: self.manifest().links().map(|s| s.to_owned()),
+ metabuild: self.manifest().metabuild().cloned(),
+ publish: self.publish().as_ref().cloned(),
+ default_run: self.manifest().default_run().map(|s| s.to_owned()),
+ rust_version: self.rust_version().map(|s| s.to_owned()),
+ }
+ }
+}
+
+impl fmt::Display for Package {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.summary().package_id())
+ }
+}
+
+impl fmt::Debug for Package {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Package")
+ .field("id", &self.summary().package_id())
+ .field("..", &"..")
+ .finish()
+ }
+}
+
+impl PartialEq for Package {
+ fn eq(&self, other: &Package) -> bool {
+ self.package_id() == other.package_id()
+ }
+}
+
+impl Eq for Package {}
+
+impl hash::Hash for Package {
+ fn hash<H: hash::Hasher>(&self, into: &mut H) {
+ self.package_id().hash(into)
+ }
+}
+
+/// A set of packages, with the intent to download.
+///
+/// This is primarily used to convert a set of `PackageId`s to `Package`s. It
+/// will download as needed, or use the cached download if available.
+pub struct PackageSet<'cfg> {
+ packages: HashMap<PackageId, LazyCell<Package>>,
+ sources: RefCell<SourceMap<'cfg>>,
+ config: &'cfg Config,
+ multi: Multi,
+ /// Used to prevent reusing the PackageSet to download twice.
+ downloading: Cell<bool>,
+ /// Whether or not to use curl HTTP/2 multiplexing.
+ multiplexing: bool,
+}
+
+/// Helper for downloading crates.
+pub struct Downloads<'a, 'cfg> {
+ set: &'a PackageSet<'cfg>,
+ /// When a download is started, it is added to this map. The key is a
+ /// "token" (see `Download::token`). It is removed once the download is
+ /// finished.
+ pending: HashMap<usize, (Download<'cfg>, EasyHandle)>,
+ /// Set of packages currently being downloaded. This should stay in sync
+ /// with `pending`.
+ pending_ids: HashSet<PackageId>,
+ /// Downloads that have failed and are waiting to retry again later.
+ sleeping: SleepTracker<(Download<'cfg>, Easy)>,
+ /// The final result of each download. A pair `(token, result)`. This is a
+ /// temporary holding area, needed because curl can report multiple
+ /// downloads at once, but the main loop (`wait`) is written to only
+ /// handle one at a time.
+ results: Vec<(usize, Result<(), curl::Error>)>,
+ /// The next ID to use for creating a token (see `Download::token`).
+ next: usize,
+ /// Progress bar.
+ progress: RefCell<Option<Progress<'cfg>>>,
+ /// Number of downloads that have successfully finished.
+ downloads_finished: usize,
+ /// Total bytes for all successfully downloaded packages.
+ downloaded_bytes: u64,
+ /// Size (in bytes) and package name of the largest downloaded package.
+ largest: (u64, String),
+ /// Time when downloading started.
+ start: Instant,
+ /// Indicates *all* downloads were successful.
+ success: bool,
+
+ /// Timeout management, both of timeout thresholds as well as whether or not
+ /// our connection has timed out (and accompanying message if it has).
+ ///
+ /// Note that timeout management is done manually here instead of in libcurl
+ /// because we want to apply timeouts to an entire batch of operations, not
+ /// any one particular single operation.
+ timeout: ops::HttpTimeout,
+ /// Last time bytes were received.
+ updated_at: Cell<Instant>,
+ /// This is a slow-speed check. It is reset to `now + timeout_duration`
+ /// every time at least `threshold` bytes are received. If the current
+ /// time ever exceeds `next_speed_check`, then give up and report a
+ /// timeout error.
+ next_speed_check: Cell<Instant>,
+ /// This is the slow-speed threshold byte count. It starts at the
+ /// configured threshold value (default 10), and is decremented by the
+ /// number of bytes received in each chunk. If it is <= zero, the
+ /// threshold has been met and data is being received fast enough not to
+ /// trigger a timeout; reset `next_speed_check` and set this back to the
+ /// configured threshold.
+ next_speed_check_bytes_threshold: Cell<u64>,
+ /// Global filesystem lock to ensure only one Cargo is downloading at a
+ /// time.
+ _lock: PackageCacheLock<'cfg>,
+}
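As a worked trace of the slow-speed bookkeeping described in the field comments above (assuming, hypothetically, the default threshold of 10 bytes and a 30-second window): a 4-byte chunk arrives, 4 < 10, so the deadline in `next_speed_check` is left alone and the threshold drops to 6; a 7-byte chunk then arrives, 7 >= 6, so the deadline moves to now + 30 s and the threshold resets to 10. If the deadline ever passes before enough bytes arrive, `progress` records a timeout message and aborts the transfer.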
+
+struct Download<'cfg> {
+ /// The token for this download, used as the key of the `Downloads::pending` map
+ /// and stored in `EasyHandle` as well.
+ token: usize,
+
+ /// The package that we're downloading.
+ id: PackageId,
+
+ /// Actual downloaded data, updated throughout the lifetime of this download.
+ data: RefCell<Vec<u8>>,
+
+ /// HTTP headers for debugging.
+ headers: RefCell<Vec<String>>,
+
+ /// The URL that we're downloading from, cached here for error messages and
+ /// reenqueuing.
+ url: String,
+
+ /// A descriptive string to print when we've finished downloading this crate.
+ descriptor: String,
+
+ /// Statistics updated from the progress callback in libcurl.
+ total: Cell<u64>,
+ current: Cell<u64>,
+
+ /// The moment we started this transfer at.
+ start: Instant,
+ timed_out: Cell<Option<String>>,
+
+ /// Logic used to track retrying this download if it's a spurious failure.
+ retry: Retry<'cfg>,
+}
+
+impl<'cfg> PackageSet<'cfg> {
+ pub fn new(
+ package_ids: &[PackageId],
+ sources: SourceMap<'cfg>,
+ config: &'cfg Config,
+ ) -> CargoResult<PackageSet<'cfg>> {
+ // We've enabled the `http2` feature of `curl` in Cargo, so treat
+ // failures here as fatal as it would indicate a build-time problem.
+ let mut multi = Multi::new();
+ let multiplexing = config.http_config()?.multiplexing.unwrap_or(true);
+ multi
+ .pipelining(false, multiplexing)
+ .with_context(|| "failed to enable multiplexing/pipelining in curl")?;
+
+ // let's not flood crates.io with connections
+ multi.set_max_host_connections(2)?;
+
+ Ok(PackageSet {
+ packages: package_ids
+ .iter()
+ .map(|&id| (id, LazyCell::new()))
+ .collect(),
+ sources: RefCell::new(sources),
+ config,
+ multi,
+ downloading: Cell::new(false),
+ multiplexing,
+ })
+ }
+
+ pub fn package_ids(&self) -> impl Iterator<Item = PackageId> + '_ {
+ self.packages.keys().cloned()
+ }
+
+ pub fn packages(&self) -> impl Iterator<Item = &Package> {
+ self.packages.values().filter_map(|p| p.borrow())
+ }
+
+ pub fn enable_download<'a>(&'a self) -> CargoResult<Downloads<'a, 'cfg>> {
+ assert!(!self.downloading.replace(true));
+ let timeout = ops::HttpTimeout::new(self.config)?;
+ Ok(Downloads {
+ start: Instant::now(),
+ set: self,
+ next: 0,
+ pending: HashMap::new(),
+ pending_ids: HashSet::new(),
+ sleeping: SleepTracker::new(),
+ results: Vec::new(),
+ progress: RefCell::new(Some(Progress::with_style(
+ "Downloading",
+ ProgressStyle::Ratio,
+ self.config,
+ ))),
+ downloads_finished: 0,
+ downloaded_bytes: 0,
+ largest: (0, String::new()),
+ success: false,
+ updated_at: Cell::new(Instant::now()),
+ timeout,
+ next_speed_check: Cell::new(Instant::now()),
+ next_speed_check_bytes_threshold: Cell::new(0),
+ _lock: self.config.acquire_package_cache_lock()?,
+ })
+ }
+
+ pub fn get_one(&self, id: PackageId) -> CargoResult<&Package> {
+ if let Some(pkg) = self.packages.get(&id).and_then(|slot| slot.borrow()) {
+ return Ok(pkg);
+ }
+ Ok(self.get_many(Some(id))?.remove(0))
+ }
+
+ pub fn get_many(&self, ids: impl IntoIterator<Item = PackageId>) -> CargoResult<Vec<&Package>> {
+ let mut pkgs = Vec::new();
+ let mut downloads = self.enable_download()?;
+ for id in ids {
+ pkgs.extend(downloads.start(id)?);
+ }
+ while downloads.remaining() > 0 {
+ pkgs.push(downloads.wait()?);
+ }
+ downloads.success = true;
+ Ok(pkgs)
+ }
+
+    /// Downloads any packages accessible from the given root ids.
+ pub fn download_accessible(
+ &self,
+ resolve: &Resolve,
+ root_ids: &[PackageId],
+ has_dev_units: HasDevUnits,
+ requested_kinds: &[CompileKind],
+ target_data: &RustcTargetData<'cfg>,
+ force_all_targets: ForceAllTargets,
+ ) -> CargoResult<()> {
+ fn collect_used_deps(
+ used: &mut BTreeSet<PackageId>,
+ resolve: &Resolve,
+ pkg_id: PackageId,
+ has_dev_units: HasDevUnits,
+ requested_kinds: &[CompileKind],
+ target_data: &RustcTargetData<'_>,
+ force_all_targets: ForceAllTargets,
+ ) -> CargoResult<()> {
+ if !used.insert(pkg_id) {
+ return Ok(());
+ }
+ let filtered_deps = PackageSet::filter_deps(
+ pkg_id,
+ resolve,
+ has_dev_units,
+ requested_kinds,
+ target_data,
+ force_all_targets,
+ );
+ for (pkg_id, _dep) in filtered_deps {
+ collect_used_deps(
+ used,
+ resolve,
+ pkg_id,
+ has_dev_units,
+ requested_kinds,
+ target_data,
+ force_all_targets,
+ )?;
+ }
+ Ok(())
+ }
+
+ // This is sorted by PackageId to get consistent behavior and error
+ // messages for Cargo's testsuite. Perhaps there is a better ordering
+ // that optimizes download time?
+ let mut to_download = BTreeSet::new();
+
+ for id in root_ids {
+ collect_used_deps(
+ &mut to_download,
+ resolve,
+ *id,
+ has_dev_units,
+ requested_kinds,
+ target_data,
+ force_all_targets,
+ )?;
+ }
+ self.get_many(to_download.into_iter())?;
+ Ok(())
+ }
+
+    /// Checks if there are any dependency packages that violate artifact constraints
+    /// (in which case we abort immediately), or that do not have any lib targets
+    /// (which only results in warnings).
+ pub(crate) fn warn_no_lib_packages_and_artifact_libs_overlapping_deps(
+ &self,
+ ws: &Workspace<'cfg>,
+ resolve: &Resolve,
+ root_ids: &[PackageId],
+ has_dev_units: HasDevUnits,
+ requested_kinds: &[CompileKind],
+ target_data: &RustcTargetData<'_>,
+ force_all_targets: ForceAllTargets,
+ ) -> CargoResult<()> {
+ let no_lib_pkgs: BTreeMap<PackageId, Vec<(&Package, &HashSet<Dependency>)>> = root_ids
+ .iter()
+ .map(|&root_id| {
+ let dep_pkgs_to_deps: Vec<_> = PackageSet::filter_deps(
+ root_id,
+ resolve,
+ has_dev_units,
+ requested_kinds,
+ target_data,
+ force_all_targets,
+ )
+ .collect();
+
+ let dep_pkgs_and_deps = dep_pkgs_to_deps
+ .into_iter()
+ .filter(|(_id, deps)| deps.iter().any(|dep| dep.maybe_lib()))
+ .filter_map(|(dep_package_id, deps)| {
+ self.get_one(dep_package_id).ok().and_then(|dep_pkg| {
+ (!dep_pkg.targets().iter().any(|t| t.is_lib())).then(|| (dep_pkg, deps))
+ })
+ })
+ .collect();
+ (root_id, dep_pkgs_and_deps)
+ })
+ .collect();
+
+ for (pkg_id, dep_pkgs) in no_lib_pkgs {
+ for (_dep_pkg_without_lib_target, deps) in dep_pkgs {
+ for dep in deps.iter().filter(|dep| {
+ dep.artifact()
+ .map(|artifact| artifact.is_lib())
+ .unwrap_or(true)
+ }) {
+ ws.config().shell().warn(&format!(
+ "{} ignoring invalid dependency `{}` which is missing a lib target",
+ pkg_id,
+ dep.name_in_toml(),
+ ))?;
+ }
+ }
+ }
+ Ok(())
+ }
+
+ fn filter_deps<'a>(
+ pkg_id: PackageId,
+ resolve: &'a Resolve,
+ has_dev_units: HasDevUnits,
+ requested_kinds: &'a [CompileKind],
+ target_data: &'a RustcTargetData<'_>,
+ force_all_targets: ForceAllTargets,
+ ) -> impl Iterator<Item = (PackageId, &'a HashSet<Dependency>)> + 'a {
+ resolve
+ .deps(pkg_id)
+ .filter(move |&(_id, deps)| {
+ deps.iter().any(|dep| {
+ if dep.kind() == DepKind::Development && has_dev_units == HasDevUnits::No {
+ return false;
+ }
+ if force_all_targets == ForceAllTargets::No {
+ let activated = requested_kinds
+ .iter()
+ .chain(Some(&CompileKind::Host))
+ .any(|kind| target_data.dep_platform_activated(dep, *kind));
+ if !activated {
+ return false;
+ }
+ }
+ true
+ })
+ })
+ .into_iter()
+ }
+
+ pub fn sources(&self) -> Ref<'_, SourceMap<'cfg>> {
+ self.sources.borrow()
+ }
+
+ pub fn sources_mut(&self) -> RefMut<'_, SourceMap<'cfg>> {
+ self.sources.borrow_mut()
+ }
+
+ /// Merge the given set into self.
+ pub fn add_set(&mut self, set: PackageSet<'cfg>) {
+ assert!(!self.downloading.get());
+ assert!(!set.downloading.get());
+ for (pkg_id, p_cell) in set.packages {
+ self.packages.entry(pkg_id).or_insert(p_cell);
+ }
+ let mut sources = self.sources.borrow_mut();
+ let other_sources = set.sources.into_inner();
+ sources.add_source_map(other_sources);
+ }
+}
+
+impl<'a, 'cfg> Downloads<'a, 'cfg> {
+ /// Starts to download the package for the `id` specified.
+ ///
+ /// Returns `None` if the package is queued up for download and will
+    /// eventually be returned from `wait`. Returns `Some(pkg)` if
+ /// the package is ready and doesn't need to be downloaded.
+ pub fn start(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
+ self.start_inner(id)
+ .with_context(|| format!("failed to download `{}`", id))
+ }
+
+ fn start_inner(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
+ // First up see if we've already cached this package, in which case
+ // there's nothing to do.
+ let slot = self
+ .set
+ .packages
+ .get(&id)
+ .ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?;
+ if let Some(pkg) = slot.borrow() {
+ return Ok(Some(pkg));
+ }
+
+ // Ask the original source for this `PackageId` for the corresponding
+ // package. That may immediately come back and tell us that the package
+ // is ready, or it could tell us that it needs to be downloaded.
+ let mut sources = self.set.sources.borrow_mut();
+ let source = sources
+ .get_mut(id.source_id())
+ .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
+ let pkg = source
+ .download(id)
+ .with_context(|| "unable to get packages from source")?;
+ let (url, descriptor, authorization) = match pkg {
+ MaybePackage::Ready(pkg) => {
+ debug!("{} doesn't need a download", id);
+ assert!(slot.fill(pkg).is_ok());
+ return Ok(Some(slot.borrow().unwrap()));
+ }
+ MaybePackage::Download {
+ url,
+ descriptor,
+ authorization,
+ } => (url, descriptor, authorization),
+ };
+
+ // Ok we're going to download this crate, so let's set up all our
+ // internal state and hand off an `Easy` handle to our libcurl `Multi`
+ // handle. This won't actually start the transfer, but later it'll
+        // happen during `wait`.
+ let token = self.next;
+ self.next += 1;
+ debug!("downloading {} as {}", id, token);
+ assert!(self.pending_ids.insert(id));
+
+ let (mut handle, _timeout) = ops::http_handle_and_timeout(self.set.config)?;
+ handle.get(true)?;
+ handle.url(&url)?;
+ handle.follow_location(true)?; // follow redirects
+
+ // Add authorization header.
+ if let Some(authorization) = authorization {
+ let mut headers = curl::easy::List::new();
+ headers.append(&format!("Authorization: {}", authorization))?;
+ handle.http_headers(headers)?;
+ }
+
+ // Enable HTTP/2 to be used as it'll allow true multiplexing which makes
+ // downloads much faster.
+ //
+ // Currently Cargo requests the `http2` feature of the `curl` crate
+ // which means it should always be built in. On OSX, however, we ship
+ // cargo still linked against the system libcurl. Building curl with
+ // ALPN support for HTTP/2 requires newer versions of OSX (the
+ // SecureTransport API) than we want to ship Cargo for. By linking Cargo
+ // against the system libcurl then older curl installations won't use
+ // HTTP/2 but newer ones will. All that to basically say we ignore
+ // errors here on OSX, but consider this a fatal error to not activate
+ // HTTP/2 on all other platforms.
+ if self.set.multiplexing {
+ crate::try_old_curl!(handle.http_version(HttpVersion::V2), "HTTP2");
+ } else {
+ handle.http_version(HttpVersion::V11)?;
+ }
+
+ // This is an option to `libcurl` which indicates that if there's a
+ // bunch of parallel requests to the same host they all wait until the
+ // pipelining status of the host is known. This means that we won't
+ // initiate dozens of connections to crates.io, but rather only one.
+        // Once the main one is opened we realize that pipelining and
+        // multiplexing are possible with static.crates.io. All in all this
+ // reduces the number of connections down to a more manageable state.
+ crate::try_old_curl!(handle.pipewait(true), "pipewait");
+
+ handle.write_function(move |buf| {
+ debug!("{} - {} bytes of data", token, buf.len());
+ tls::with(|downloads| {
+ if let Some(downloads) = downloads {
+ downloads.pending[&token]
+ .0
+ .data
+ .borrow_mut()
+ .extend_from_slice(buf);
+ }
+ });
+ Ok(buf.len())
+ })?;
+ handle.header_function(move |data| {
+ tls::with(|downloads| {
+ if let Some(downloads) = downloads {
+ // Headers contain trailing \r\n, trim them to make it easier
+ // to work with.
+ let h = String::from_utf8_lossy(data).trim().to_string();
+ if DEBUG_HEADERS.iter().any(|p| h.starts_with(p)) {
+ downloads.pending[&token].0.headers.borrow_mut().push(h);
+ }
+ }
+ });
+ true
+ })?;
+
+ handle.progress(true)?;
+ handle.progress_function(move |dl_total, dl_cur, _, _| {
+ tls::with(|downloads| match downloads {
+ Some(d) => d.progress(token, dl_total as u64, dl_cur as u64),
+ None => false,
+ })
+ })?;
+
+        // If the progress bar isn't enabled then it may be a while before the
+        // first crate finishes downloading, so we inform the user immediately
+        // that we're downloading crates here.
+ if self.downloads_finished == 0
+ && self.pending.is_empty()
+ && !self.progress.borrow().as_ref().unwrap().is_enabled()
+ {
+ self.set
+ .config
+ .shell()
+ .status("Downloading", "crates ...")?;
+ }
+
+ let dl = Download {
+ token,
+ data: RefCell::new(Vec::new()),
+ headers: RefCell::new(Vec::new()),
+ id,
+ url,
+ descriptor,
+ total: Cell::new(0),
+ current: Cell::new(0),
+ start: Instant::now(),
+ timed_out: Cell::new(None),
+ retry: Retry::new(self.set.config)?,
+ };
+ self.enqueue(dl, handle)?;
+ self.tick(WhyTick::DownloadStarted)?;
+
+ Ok(None)
+ }
+
+ /// Returns the number of crates that are still downloading.
+ pub fn remaining(&self) -> usize {
+ self.pending.len() + self.sleeping.len()
+ }
+
+ /// Blocks the current thread waiting for a package to finish downloading.
+ ///
+ /// This method will wait for a previously enqueued package to finish
+ /// downloading and return a reference to it after it's done downloading.
+ ///
+ /// # Panics
+ ///
+ /// This function will panic if there are no remaining downloads.
+ pub fn wait(&mut self) -> CargoResult<&'a Package> {
+ let (dl, data) = loop {
+ assert_eq!(self.pending.len(), self.pending_ids.len());
+ let (token, result) = self.wait_for_curl()?;
+ debug!("{} finished with {:?}", token, result);
+
+ let (mut dl, handle) = self
+ .pending
+ .remove(&token)
+ .expect("got a token for a non-in-progress transfer");
+ let data = mem::take(&mut *dl.data.borrow_mut());
+ let headers = mem::take(&mut *dl.headers.borrow_mut());
+ let mut handle = self.set.multi.remove(handle)?;
+ self.pending_ids.remove(&dl.id);
+
+ // Check if this was a spurious error. If it was a spurious error
+ // then we want to re-enqueue our request for another attempt and
+ // then we wait for another request to finish.
+ let ret = {
+ let timed_out = &dl.timed_out;
+ let url = &dl.url;
+ dl.retry.r#try(|| {
+ if let Err(e) = result {
+ // If this error is "aborted by callback" then that's
+ // probably because our progress callback aborted due to
+ // a timeout. We'll find out by looking at the
+ // `timed_out` field, looking for a descriptive message.
+ // If one is found we switch the error code (to ensure
+ // it's flagged as spurious) and then attach our extra
+ // information to the error.
+ if !e.is_aborted_by_callback() {
+ return Err(e.into());
+ }
+
+ return Err(match timed_out.replace(None) {
+ Some(msg) => {
+ let code = curl_sys::CURLE_OPERATION_TIMEDOUT;
+ let mut err = curl::Error::new(code);
+ err.set_extra(msg);
+ err
+ }
+ None => e,
+ }
+ .into());
+ }
+
+ let code = handle.response_code()?;
+ if code != 200 && code != 0 {
+ return Err(HttpNotSuccessful::new_from_handle(
+ &mut handle,
+ &url,
+ data,
+ headers,
+ )
+ .into());
+ }
+ Ok(data)
+ })
+ };
+ match ret {
+ RetryResult::Success(data) => break (dl, data),
+ RetryResult::Err(e) => {
+ return Err(e.context(format!("failed to download from `{}`", dl.url)))
+ }
+ RetryResult::Retry(sleep) => {
+ debug!("download retry {} for {sleep}ms", dl.url);
+ self.sleeping.push(sleep, (dl, handle));
+ }
+ }
+ };
+
+ // If the progress bar isn't enabled then we still want to provide some
+ // semblance of progress of how we're downloading crates, and if the
+ // progress bar is enabled this provides a good log of what's happening.
+ self.progress.borrow_mut().as_mut().unwrap().clear();
+ self.set
+ .config
+ .shell()
+ .status("Downloaded", &dl.descriptor)?;
+
+ self.downloads_finished += 1;
+ self.downloaded_bytes += dl.total.get();
+ if dl.total.get() > self.largest.0 {
+ self.largest = (dl.total.get(), dl.id.name().to_string());
+ }
+
+ // We're about to synchronously extract the crate below. While we're
+ // doing that our download progress won't actually be updated, nor do we
+ // have a great view into the progress of the extraction. Let's prepare
+ // the user for this CPU-heavy step if it looks like it'll take some
+ // time to do so.
+ if dl.total.get() < ByteSize::kb(400).0 {
+ self.tick(WhyTick::DownloadFinished)?;
+ } else {
+ self.tick(WhyTick::Extracting(&dl.id.name()))?;
+ }
+
+ // Inform the original source that the download is finished which
+ // should allow us to actually get the package and fill it in now.
+ let mut sources = self.set.sources.borrow_mut();
+ let source = sources
+ .get_mut(dl.id.source_id())
+ .ok_or_else(|| internal(format!("couldn't find source for `{}`", dl.id)))?;
+ let start = Instant::now();
+ let pkg = source.finish_download(dl.id, data)?;
+
+ // Assume that no time has passed while we were calling
+ // `finish_download`, update all speed checks and timeout limits of all
+ // active downloads to make sure they don't fire because of a slowly
+ // extracted tarball.
+ let finish_dur = start.elapsed();
+ self.updated_at.set(self.updated_at.get() + finish_dur);
+ self.next_speed_check
+ .set(self.next_speed_check.get() + finish_dur);
+
+ let slot = &self.set.packages[&dl.id];
+ assert!(slot.fill(pkg).is_ok());
+ Ok(slot.borrow().unwrap())
+ }
+
+ fn enqueue(&mut self, dl: Download<'cfg>, handle: Easy) -> CargoResult<()> {
+ let mut handle = self.set.multi.add(handle)?;
+ let now = Instant::now();
+ handle.set_token(dl.token)?;
+ self.updated_at.set(now);
+ self.next_speed_check.set(now + self.timeout.dur);
+ self.next_speed_check_bytes_threshold
+ .set(u64::from(self.timeout.low_speed_limit));
+ dl.timed_out.set(None);
+ dl.current.set(0);
+ dl.total.set(0);
+ self.pending.insert(dl.token, (dl, handle));
+ Ok(())
+ }
+
+ /// Block, waiting for curl. Returns a token and a `Result` for that token
+ /// (`Ok` means the download successfully finished).
+ fn wait_for_curl(&mut self) -> CargoResult<(usize, Result<(), curl::Error>)> {
+ // This is the main workhorse loop. We use libcurl's portable `wait`
+ // method to actually perform blocking. This isn't necessarily too
+ // efficient in terms of fd management, but we should only be juggling
+ // a few anyway.
+ //
+ // Here we start off by asking the `multi` handle to do some work via
+ // the `perform` method. This will actually do I/O work (non-blocking)
+ // and attempt to make progress. Afterwards we ask about the `messages`
+ // contained in the handle which will inform us if anything has finished
+ // transferring.
+ //
+ // If we've got a finished transfer after all that work we break out
+ // and process the finished transfer at the end. Otherwise we need to
+ // actually block waiting for I/O to happen, which we achieve with the
+ // `wait` method on `multi`.
+ loop {
+ self.add_sleepers()?;
+ let n = tls::set(self, || {
+ self.set
+ .multi
+ .perform()
+ .with_context(|| "failed to perform http requests")
+ })?;
+ debug!("handles remaining: {}", n);
+ let results = &mut self.results;
+ let pending = &self.pending;
+ self.set.multi.messages(|msg| {
+ let token = msg.token().expect("failed to read token");
+ let handle = &pending[&token].1;
+ if let Some(result) = msg.result_for(handle) {
+ results.push((token, result));
+ } else {
+ debug!("message without a result (?)");
+ }
+ });
+
+ if let Some(pair) = results.pop() {
+ break Ok(pair);
+ }
+ assert_ne!(self.remaining(), 0);
+ if self.pending.is_empty() {
+ let delay = self.sleeping.time_to_next().unwrap();
+ debug!("sleeping main thread for {delay:?}");
+ std::thread::sleep(delay);
+ } else {
+ let min_timeout = Duration::new(1, 0);
+ let timeout = self.set.multi.get_timeout()?.unwrap_or(min_timeout);
+ let timeout = timeout.min(min_timeout);
+ self.set
+ .multi
+ .wait(&mut [], timeout)
+ .with_context(|| "failed to wait on curl `Multi`")?;
+ }
+ }
+ }
+
+ fn add_sleepers(&mut self) -> CargoResult<()> {
+ for (dl, handle) in self.sleeping.to_retry() {
+ self.pending_ids.insert(dl.id);
+ self.enqueue(dl, handle)?;
+ }
+ Ok(())
+ }
+
+ fn progress(&self, token: usize, total: u64, cur: u64) -> bool {
+ let dl = &self.pending[&token].0;
+ dl.total.set(total);
+ let now = Instant::now();
+ if cur > dl.current.get() {
+ let delta = cur - dl.current.get();
+ let threshold = self.next_speed_check_bytes_threshold.get();
+
+ dl.current.set(cur);
+ self.updated_at.set(now);
+
+ if delta >= threshold {
+ self.next_speed_check.set(now + self.timeout.dur);
+ self.next_speed_check_bytes_threshold
+ .set(u64::from(self.timeout.low_speed_limit));
+ } else {
+ self.next_speed_check_bytes_threshold.set(threshold - delta);
+ }
+ }
+ if self.tick(WhyTick::DownloadUpdate).is_err() {
+ return false;
+ }
+
+ // If we've spent too long not actually receiving any data we time out.
+ if now > self.updated_at.get() + self.timeout.dur {
+ self.updated_at.set(now);
+ let msg = format!(
+ "failed to download any data for `{}` within {}s",
+ dl.id,
+ self.timeout.dur.as_secs()
+ );
+ dl.timed_out.set(Some(msg));
+ return false;
+ }
+
+ // If we reached the point in time that we need to check our speed
+ // limit, see if we've transferred enough data during this threshold. If
+ // it fails this check then we fail because the download is going too
+ // slowly.
+ if now >= self.next_speed_check.get() {
+ self.next_speed_check.set(now + self.timeout.dur);
+ assert!(self.next_speed_check_bytes_threshold.get() > 0);
+ let msg = format!(
+ "download of `{}` failed to transfer more \
+ than {} bytes in {}s",
+ dl.id,
+ self.timeout.low_speed_limit,
+ self.timeout.dur.as_secs()
+ );
+ dl.timed_out.set(Some(msg));
+ return false;
+ }
+
+ true
+ }
+
+ fn tick(&self, why: WhyTick<'_>) -> CargoResult<()> {
+ let mut progress = self.progress.borrow_mut();
+ let progress = progress.as_mut().unwrap();
+
+ if let WhyTick::DownloadUpdate = why {
+ if !progress.update_allowed() {
+ return Ok(());
+ }
+ }
+ let pending = self.remaining();
+ let mut msg = if pending == 1 {
+ format!("{} crate", pending)
+ } else {
+ format!("{} crates", pending)
+ };
+ match why {
+ WhyTick::Extracting(krate) => {
+ msg.push_str(&format!(", extracting {} ...", krate));
+ }
+ _ => {
+ let mut dur = Duration::new(0, 0);
+ let mut remaining = 0;
+ for (dl, _) in self.pending.values() {
+ dur += dl.start.elapsed();
+ // If the total/current look weird just throw out the data
+ // point, sounds like curl has more to learn before we have
+ // the true information.
+ if dl.total.get() >= dl.current.get() {
+ remaining += dl.total.get() - dl.current.get();
+ }
+ }
+ if remaining > 0 && dur > Duration::from_millis(500) {
+ msg.push_str(&format!(", remaining bytes: {}", ByteSize(remaining)));
+ }
+ }
+ }
+ progress.print_now(&msg)
+ }
+}
+
+#[derive(Copy, Clone)]
+enum WhyTick<'a> {
+ DownloadStarted,
+ DownloadUpdate,
+ DownloadFinished,
+ Extracting(&'a str),
+}
+
+impl<'a, 'cfg> Drop for Downloads<'a, 'cfg> {
+ fn drop(&mut self) {
+ self.set.downloading.set(false);
+ let progress = self.progress.get_mut().take().unwrap();
+        // Don't print a download summary if we're not using a progress bar;
+        // we've already printed lots of `Downloading...` items.
+ if !progress.is_enabled() {
+ return;
+ }
+ // If we didn't download anything, no need for a summary.
+ if self.downloads_finished == 0 {
+ return;
+ }
+ // If an error happened, let's not clutter up the output.
+ if !self.success {
+ return;
+ }
+ // pick the correct plural of crate(s)
+ let crate_string = if self.downloads_finished == 1 {
+ "crate"
+ } else {
+ "crates"
+ };
+ let mut status = format!(
+ "{} {} ({}) in {}",
+ self.downloads_finished,
+ crate_string,
+ ByteSize(self.downloaded_bytes),
+ util::elapsed(self.start.elapsed())
+ );
+ // print the size of largest crate if it was >1mb
+ // however don't print if only a single crate was downloaded
+ // because it is obvious that it will be the largest then
+ if self.largest.0 > ByteSize::mb(1).0 && self.downloads_finished > 1 {
+ status.push_str(&format!(
+ " (largest was `{}` at {})",
+ self.largest.1,
+ ByteSize(self.largest.0),
+ ));
+ }
+ // Clear progress before displaying final summary.
+ drop(progress);
+ drop(self.set.config.shell().status("Downloaded", status));
+ }
+}
+
+mod tls {
+ use std::cell::Cell;
+
+ use super::Downloads;
+
+ thread_local!(static PTR: Cell<usize> = Cell::new(0));
+
+ pub(crate) fn with<R>(f: impl FnOnce(Option<&Downloads<'_, '_>>) -> R) -> R {
+ let ptr = PTR.with(|p| p.get());
+ if ptr == 0 {
+ f(None)
+ } else {
+ unsafe { f(Some(&*(ptr as *const Downloads<'_, '_>))) }
+ }
+ }
+
+ pub(crate) fn set<R>(dl: &Downloads<'_, '_>, f: impl FnOnce() -> R) -> R {
+ struct Reset<'a, T: Copy>(&'a Cell<T>, T);
+
+ impl<'a, T: Copy> Drop for Reset<'a, T> {
+ fn drop(&mut self) {
+ self.0.set(self.1);
+ }
+ }
+
+ PTR.with(|p| {
+ let _reset = Reset(p, p.get());
+ p.set(dl as *const Downloads<'_, '_> as usize);
+ f()
+ })
+ }
+}
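For orientation, a minimal sketch of the download flow these types implement, mirroring `PackageSet::get_many` above. It assumes a caller (inside this module, using the same imports) that already has a `Config` and a populated `SourceMap`; the helper itself is illustrative and not part of the change.

    // Sketch: fetch a set of packages, blocking until every download finishes.
    fn fetch_all<'cfg>(
        ids: &[PackageId],
        sources: SourceMap<'cfg>,
        config: &'cfg Config,
    ) -> CargoResult<usize> {
        let set = PackageSet::new(ids, sources, config)?;
        let mut downloads = set.enable_download()?;
        for &id in ids {
            // `start` returns `Some` immediately for packages that are already cached.
            downloads.start(id)?;
        }
        let mut fetched = 0;
        while downloads.remaining() > 0 {
            // Blocks until the next in-flight (or retried) download completes.
            let _pkg = downloads.wait()?;
            fetched += 1;
        }
        // Mark success so the `Drop` summary prints, as `get_many` does.
        downloads.success = true;
        Ok(fetched)
    }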
diff --git a/src/tools/cargo/src/cargo/core/package_id.rs b/src/tools/cargo/src/cargo/core/package_id.rs
new file mode 100644
index 000000000..ee31e9c48
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/package_id.rs
@@ -0,0 +1,293 @@
+use std::collections::HashSet;
+use std::fmt::{self, Formatter};
+use std::hash;
+use std::hash::Hash;
+use std::path::Path;
+use std::ptr;
+use std::sync::Mutex;
+
+use serde::de;
+use serde::ser;
+
+use crate::core::source::SourceId;
+use crate::util::interning::InternedString;
+use crate::util::{CargoResult, ToSemver};
+
+lazy_static::lazy_static! {
+ static ref PACKAGE_ID_CACHE: Mutex<HashSet<&'static PackageIdInner>> =
+ Mutex::new(HashSet::new());
+}
+
+/// Identifier for a specific version of a package in a specific source.
+#[derive(Clone, Copy, Eq, PartialOrd, Ord)]
+pub struct PackageId {
+ inner: &'static PackageIdInner,
+}
+
+#[derive(PartialOrd, Eq, Ord)]
+struct PackageIdInner {
+ name: InternedString,
+ version: semver::Version,
+ source_id: SourceId,
+}
+
+// Custom equality that uses full equality of SourceId, rather than its custom equality,
+// and Version, which usually ignores `build` metadata.
+//
+// The `build` part of the version is usually ignored (like a "comment").
+// However, there are some cases where it is important. The download path from
+// a registry includes the build metadata, and Cargo uses PackageIds for
+// creating download paths. Including it here prevents the PackageId interner
+// from getting poisoned with PackageIds where that build metadata is missing.
+impl PartialEq for PackageIdInner {
+ fn eq(&self, other: &Self) -> bool {
+ self.name == other.name
+ && self.version.major == other.version.major
+ && self.version.minor == other.version.minor
+ && self.version.patch == other.version.patch
+ && self.version.pre == other.version.pre
+ && self.version.build == other.version.build
+ && self.source_id.full_eq(other.source_id)
+ }
+}
+
+// Custom hash that is coherent with the custom equality above.
+impl Hash for PackageIdInner {
+ fn hash<S: hash::Hasher>(&self, into: &mut S) {
+ self.name.hash(into);
+ self.version.major.hash(into);
+ self.version.minor.hash(into);
+ self.version.patch.hash(into);
+ self.version.pre.hash(into);
+ self.version.build.hash(into);
+ self.source_id.full_hash(into);
+ }
+}
+
+impl ser::Serialize for PackageId {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ s.collect_str(&format_args!(
+ "{} {} ({})",
+ self.inner.name,
+ self.inner.version,
+ self.inner.source_id.as_url()
+ ))
+ }
+}
+
+impl<'de> de::Deserialize<'de> for PackageId {
+ fn deserialize<D>(d: D) -> Result<PackageId, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ let string = String::deserialize(d)?;
+ let mut s = string.splitn(3, ' ');
+ let name = s.next().unwrap();
+ let name = InternedString::new(name);
+ let version = match s.next() {
+ Some(s) => s,
+ None => return Err(de::Error::custom("invalid serialized PackageId")),
+ };
+ let version = version.to_semver().map_err(de::Error::custom)?;
+ let url = match s.next() {
+ Some(s) => s,
+ None => return Err(de::Error::custom("invalid serialized PackageId")),
+ };
+ let url = if url.starts_with('(') && url.ends_with(')') {
+ &url[1..url.len() - 1]
+ } else {
+ return Err(de::Error::custom("invalid serialized PackageId"));
+ };
+ let source_id = SourceId::from_url(url).map_err(de::Error::custom)?;
+
+ Ok(PackageId::pure(name, version, source_id))
+ }
+}
+
+impl PartialEq for PackageId {
+ fn eq(&self, other: &PackageId) -> bool {
+ if ptr::eq(self.inner, other.inner) {
+ return true;
+ }
+ // This is here so that PackageId uses SourceId's and Version's idea
+ // of equality. PackageIdInner uses a more exact notion of equality.
+ self.inner.name == other.inner.name
+ && self.inner.version == other.inner.version
+ && self.inner.source_id == other.inner.source_id
+ }
+}
+
+impl Hash for PackageId {
+ fn hash<S: hash::Hasher>(&self, state: &mut S) {
+ // This is here (instead of derived) so that PackageId uses SourceId's
+ // and Version's idea of equality. PackageIdInner uses a more exact
+ // notion of hashing.
+ self.inner.name.hash(state);
+ self.inner.version.hash(state);
+ self.inner.source_id.hash(state);
+ }
+}
+
+impl PackageId {
+ pub fn new<T: ToSemver>(
+ name: impl Into<InternedString>,
+ version: T,
+ sid: SourceId,
+ ) -> CargoResult<PackageId> {
+ let v = version.to_semver()?;
+ Ok(PackageId::pure(name.into(), v, sid))
+ }
+
+ pub fn pure(name: InternedString, version: semver::Version, source_id: SourceId) -> PackageId {
+ let inner = PackageIdInner {
+ name,
+ version,
+ source_id,
+ };
+ let mut cache = PACKAGE_ID_CACHE.lock().unwrap();
+ let inner = cache.get(&inner).cloned().unwrap_or_else(|| {
+ let inner = Box::leak(Box::new(inner));
+ cache.insert(inner);
+ inner
+ });
+ PackageId { inner }
+ }
+
+ pub fn name(self) -> InternedString {
+ self.inner.name
+ }
+ pub fn version(self) -> &'static semver::Version {
+ &self.inner.version
+ }
+ pub fn source_id(self) -> SourceId {
+ self.inner.source_id
+ }
+
+ pub fn with_precise(self, precise: Option<String>) -> PackageId {
+ PackageId::pure(
+ self.inner.name,
+ self.inner.version.clone(),
+ self.inner.source_id.with_precise(precise),
+ )
+ }
+
+ pub fn with_source_id(self, source: SourceId) -> PackageId {
+ PackageId::pure(self.inner.name, self.inner.version.clone(), source)
+ }
+
+ pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Self {
+ if self.source_id() == to_replace {
+ self.with_source_id(replace_with)
+ } else {
+ self
+ }
+ }
+
+ /// Returns a value that implements a "stable" hashable value.
+ ///
+ /// Stable hashing removes the path prefix of the workspace from path
+ /// packages. This helps with reproducible builds, since this hash is part
+ /// of the symbol metadata, and we don't want the absolute path where the
+ /// build is performed to affect the binary output.
+ pub fn stable_hash(self, workspace: &Path) -> PackageIdStableHash<'_> {
+ PackageIdStableHash(self, workspace)
+ }
+}
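Concretely (an illustrative scenario, not from the source): two checkouts of the same workspace at, say, /home/user/ws and /tmp/ci/ws produce identical stable hashes for their path packages, because only the portion of the path below the workspace root participates in the hash that ends up in symbol metadata.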
+
+pub struct PackageIdStableHash<'a>(PackageId, &'a Path);
+
+impl<'a> Hash for PackageIdStableHash<'a> {
+ fn hash<S: hash::Hasher>(&self, state: &mut S) {
+ self.0.inner.name.hash(state);
+ self.0.inner.version.hash(state);
+ self.0.inner.source_id.stable_hash(self.1, state);
+ }
+}
+
+impl fmt::Display for PackageId {
+ fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+ write!(f, "{} v{}", self.inner.name, self.inner.version)?;
+
+ if !self.inner.source_id.is_crates_io() {
+ write!(f, " ({})", self.inner.source_id)?;
+ }
+
+ Ok(())
+ }
+}
+
+impl fmt::Debug for PackageId {
+ fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+ f.debug_struct("PackageId")
+ .field("name", &self.inner.name)
+ .field("version", &self.inner.version.to_string())
+ .field("source", &self.inner.source_id.to_string())
+ .finish()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::PackageId;
+ use crate::core::source::SourceId;
+ use crate::sources::CRATES_IO_INDEX;
+ use crate::util::IntoUrl;
+
+ #[test]
+ fn invalid_version_handled_nicely() {
+ let loc = CRATES_IO_INDEX.into_url().unwrap();
+ let repo = SourceId::for_registry(&loc).unwrap();
+
+ assert!(PackageId::new("foo", "1.0", repo).is_err());
+ assert!(PackageId::new("foo", "1", repo).is_err());
+ assert!(PackageId::new("foo", "bar", repo).is_err());
+ assert!(PackageId::new("foo", "", repo).is_err());
+ }
+
+ #[test]
+ fn debug() {
+ let loc = CRATES_IO_INDEX.into_url().unwrap();
+ let pkg_id = PackageId::new("foo", "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap();
+ assert_eq!(
+ r#"PackageId { name: "foo", version: "1.0.0", source: "registry `crates-io`" }"#,
+ format!("{:?}", pkg_id)
+ );
+
+ let expected = r#"
+PackageId {
+ name: "foo",
+ version: "1.0.0",
+ source: "registry `crates-io`",
+}
+"#
+ .trim();
+
+ // Can be removed once trailing commas in Debug have reached the stable
+ // channel.
+ let expected_without_trailing_comma = r#"
+PackageId {
+ name: "foo",
+ version: "1.0.0",
+ source: "registry `crates-io`"
+}
+"#
+ .trim();
+
+ let actual = format!("{:#?}", pkg_id);
+ if actual.ends_with(",\n}") {
+ assert_eq!(actual, expected);
+ } else {
+ assert_eq!(actual, expected_without_trailing_comma);
+ }
+ }
+
+ #[test]
+ fn display() {
+ let loc = CRATES_IO_INDEX.into_url().unwrap();
+ let pkg_id = PackageId::new("foo", "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap();
+ assert_eq!("foo v1.0.0", pkg_id.to_string());
+ }
+}
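A small illustrative sketch, building on the tests above, of how the interning in `PackageId::pure` behaves; the package name and version are arbitrary.

    let loc = CRATES_IO_INDEX.into_url().unwrap();
    let source = SourceId::for_registry(&loc).unwrap();
    let a = PackageId::new("foo", "1.0.0", source).unwrap();
    let b = PackageId::new("foo", "1.0.0", source).unwrap();
    // Both handles point at the same leaked `PackageIdInner`, so comparisons
    // hit the `ptr::eq` fast path in `PartialEq for PackageId`.
    assert_eq!(a, b);
    assert_eq!(a.name().as_str(), "foo");
    assert_eq!(a.version().to_string(), "1.0.0");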
diff --git a/src/tools/cargo/src/cargo/core/package_id_spec.rs b/src/tools/cargo/src/cargo/core/package_id_spec.rs
new file mode 100644
index 000000000..29043b963
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/package_id_spec.rs
@@ -0,0 +1,432 @@
+use std::collections::HashMap;
+use std::fmt;
+
+use anyhow::{bail, Context as _};
+use semver::Version;
+use serde::{de, ser};
+use url::Url;
+
+use crate::core::PackageId;
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+use crate::util::lev_distance;
+use crate::util::{validate_package_name, IntoUrl, ToSemver};
+
+/// Some or all of the data required to identify a package:
+///
+/// 1. the package name (a `String`, required)
+/// 2. the package version (a `Version`, optional)
+/// 3. the package source (a `Url`, optional)
+///
+/// If any of the optional fields are omitted, then the package ID may be ambiguous; there may be
+/// more than one package/version/url combo that will match. However, often just the name is
+/// sufficient to uniquely define a package ID.
+#[derive(Clone, PartialEq, Eq, Debug, Hash, Ord, PartialOrd)]
+pub struct PackageIdSpec {
+ name: InternedString,
+ version: Option<Version>,
+ url: Option<Url>,
+}
+
+impl PackageIdSpec {
+ /// Parses a spec string and returns a `PackageIdSpec` if the string was valid.
+ ///
+ /// # Examples
+ /// Some examples of valid strings
+ ///
+ /// ```
+ /// use cargo::core::PackageIdSpec;
+ ///
+ /// let specs = vec![
+ /// "https://crates.io/foo",
+ /// "https://crates.io/foo#1.2.3",
+ /// "https://crates.io/foo#bar:1.2.3",
+ /// "https://crates.io/foo#bar@1.2.3",
+ /// "foo",
+ /// "foo:1.2.3",
+ /// "foo@1.2.3",
+ /// ];
+ /// for spec in specs {
+ /// assert!(PackageIdSpec::parse(spec).is_ok());
+    /// }
+    /// ```
+ pub fn parse(spec: &str) -> CargoResult<PackageIdSpec> {
+ if spec.contains("://") {
+ if let Ok(url) = spec.into_url() {
+ return PackageIdSpec::from_url(url);
+ }
+ } else if spec.contains('/') || spec.contains('\\') {
+ let abs = std::env::current_dir().unwrap_or_default().join(spec);
+ if abs.exists() {
+ let maybe_url = Url::from_file_path(abs)
+ .map_or_else(|_| "a file:// URL".to_string(), |url| url.to_string());
+ bail!(
+ "package ID specification `{}` looks like a file path, \
+ maybe try {}",
+ spec,
+ maybe_url
+ );
+ }
+ }
+ let mut parts = spec.splitn(2, [':', '@']);
+ let name = parts.next().unwrap();
+ let version = match parts.next() {
+ Some(version) => Some(version.to_semver()?),
+ None => None,
+ };
+ validate_package_name(name, "pkgid", "")?;
+ Ok(PackageIdSpec {
+ name: InternedString::new(name),
+ version,
+ url: None,
+ })
+ }
+
+ /// Roughly equivalent to `PackageIdSpec::parse(spec)?.query(i)`
+ pub fn query_str<I>(spec: &str, i: I) -> CargoResult<PackageId>
+ where
+ I: IntoIterator<Item = PackageId>,
+ {
+ let i: Vec<_> = i.into_iter().collect();
+ let spec = PackageIdSpec::parse(spec).with_context(|| {
+ let suggestion = lev_distance::closest_msg(spec, i.iter(), |id| id.name().as_str());
+ format!("invalid package ID specification: `{}`{}", spec, suggestion)
+ })?;
+ spec.query(i)
+ }
+
+ /// Convert a `PackageId` to a `PackageIdSpec`, which will have both the `Version` and `Url`
+ /// fields filled in.
+ pub fn from_package_id(package_id: PackageId) -> PackageIdSpec {
+ PackageIdSpec {
+ name: package_id.name(),
+ version: Some(package_id.version().clone()),
+ url: Some(package_id.source_id().url().clone()),
+ }
+ }
+
+ /// Tries to convert a valid `Url` to a `PackageIdSpec`.
+ fn from_url(mut url: Url) -> CargoResult<PackageIdSpec> {
+ if url.query().is_some() {
+ bail!("cannot have a query string in a pkgid: {}", url)
+ }
+ let frag = url.fragment().map(|s| s.to_owned());
+ url.set_fragment(None);
+ let (name, version) = {
+ let mut path = url
+ .path_segments()
+ .ok_or_else(|| anyhow::format_err!("pkgid urls must have a path: {}", url))?;
+ let path_name = path.next_back().ok_or_else(|| {
+ anyhow::format_err!(
+ "pkgid urls must have at least one path \
+ component: {}",
+ url
+ )
+ })?;
+ match frag {
+ Some(fragment) => {
+ let mut parts = fragment.splitn(2, [':', '@']);
+ let name_or_version = parts.next().unwrap();
+ match parts.next() {
+ Some(part) => {
+ let version = part.to_semver()?;
+ (InternedString::new(name_or_version), Some(version))
+ }
+ None => {
+ if name_or_version.chars().next().unwrap().is_alphabetic() {
+ (InternedString::new(name_or_version), None)
+ } else {
+ let version = name_or_version.to_semver()?;
+ (InternedString::new(path_name), Some(version))
+ }
+ }
+ }
+ }
+ None => (InternedString::new(path_name), None),
+ }
+ };
+ Ok(PackageIdSpec {
+ name,
+ version,
+ url: Some(url),
+ })
+ }
+
+ pub fn name(&self) -> InternedString {
+ self.name
+ }
+
+ pub fn version(&self) -> Option<&Version> {
+ self.version.as_ref()
+ }
+
+ pub fn url(&self) -> Option<&Url> {
+ self.url.as_ref()
+ }
+
+ pub fn set_url(&mut self, url: Url) {
+ self.url = Some(url);
+ }
+
+ /// Checks whether the given `PackageId` matches the `PackageIdSpec`.
+ pub fn matches(&self, package_id: PackageId) -> bool {
+ if self.name() != package_id.name() {
+ return false;
+ }
+
+ if let Some(ref v) = self.version {
+ if v != package_id.version() {
+ return false;
+ }
+ }
+
+ match self.url {
+ Some(ref u) => u == package_id.source_id().url(),
+ None => true,
+ }
+ }
+
+ /// Checks a list of `PackageId`s to find 1 that matches this `PackageIdSpec`. If 0, 2, or
+ /// more are found, then this returns an error.
+ pub fn query<I>(&self, i: I) -> CargoResult<PackageId>
+ where
+ I: IntoIterator<Item = PackageId>,
+ {
+ let all_ids: Vec<_> = i.into_iter().collect();
+ let mut ids = all_ids.iter().copied().filter(|&id| self.matches(id));
+ let ret = match ids.next() {
+ Some(id) => id,
+ None => {
+ let mut suggestion = String::new();
+ let try_spec = |spec: PackageIdSpec, suggestion: &mut String| {
+ let try_matches: Vec<_> = all_ids
+ .iter()
+ .copied()
+ .filter(|&id| spec.matches(id))
+ .collect();
+ if !try_matches.is_empty() {
+ suggestion.push_str("\nDid you mean one of these?\n");
+ minimize(suggestion, &try_matches, self);
+ }
+ };
+ if self.url.is_some() {
+ try_spec(
+ PackageIdSpec {
+ name: self.name,
+ version: self.version.clone(),
+ url: None,
+ },
+ &mut suggestion,
+ );
+ }
+ if suggestion.is_empty() && self.version.is_some() {
+ try_spec(
+ PackageIdSpec {
+ name: self.name,
+ version: None,
+ url: None,
+ },
+ &mut suggestion,
+ );
+ }
+ if suggestion.is_empty() {
+ suggestion.push_str(&lev_distance::closest_msg(
+ &self.name,
+ all_ids.iter(),
+ |id| id.name().as_str(),
+ ));
+ }
+
+ bail!(
+ "package ID specification `{}` did not match any packages{}",
+ self,
+ suggestion
+ );
+ }
+ };
+ return match ids.next() {
+ Some(other) => {
+ let mut msg = format!(
+ "There are multiple `{}` packages in \
+ your project, and the specification \
+ `{}` is ambiguous.\n\
+ Please re-run this command \
+ with `-p <spec>` where `<spec>` is one \
+ of the following:",
+ self.name(),
+ self
+ );
+ let mut vec = vec![ret, other];
+ vec.extend(ids);
+ minimize(&mut msg, &vec, self);
+ Err(anyhow::format_err!("{}", msg))
+ }
+ None => Ok(ret),
+ };
+
+ fn minimize(msg: &mut String, ids: &[PackageId], spec: &PackageIdSpec) {
+ let mut version_cnt = HashMap::new();
+ for id in ids {
+ *version_cnt.entry(id.version()).or_insert(0) += 1;
+ }
+ for id in ids {
+ if version_cnt[id.version()] == 1 {
+ msg.push_str(&format!("\n {}@{}", spec.name(), id.version()));
+ } else {
+ msg.push_str(&format!("\n {}", PackageIdSpec::from_package_id(*id)));
+ }
+ }
+ }
+ }
+}
+
+impl fmt::Display for PackageIdSpec {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut printed_name = false;
+ match self.url {
+ Some(ref url) => {
+ write!(f, "{}", url)?;
+ if url.path_segments().unwrap().next_back().unwrap() != &*self.name {
+ printed_name = true;
+ write!(f, "#{}", self.name)?;
+ }
+ }
+ None => {
+ printed_name = true;
+ write!(f, "{}", self.name)?;
+ }
+ }
+ if let Some(ref v) = self.version {
+ write!(f, "{}{}", if printed_name { "@" } else { "#" }, v)?;
+ }
+ Ok(())
+ }
+}
+
+impl ser::Serialize for PackageIdSpec {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ self.to_string().serialize(s)
+ }
+}
+
+impl<'de> de::Deserialize<'de> for PackageIdSpec {
+ fn deserialize<D>(d: D) -> Result<PackageIdSpec, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ let string = String::deserialize(d)?;
+ PackageIdSpec::parse(&string).map_err(de::Error::custom)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::PackageIdSpec;
+ use crate::core::{PackageId, SourceId};
+ use crate::util::interning::InternedString;
+ use crate::util::ToSemver;
+ use url::Url;
+
+ #[test]
+ fn good_parsing() {
+ #[track_caller]
+ fn ok(spec: &str, expected: PackageIdSpec, expected_rendered: &str) {
+ let parsed = PackageIdSpec::parse(spec).unwrap();
+ assert_eq!(parsed, expected);
+ assert_eq!(parsed.to_string(), expected_rendered);
+ }
+
+ ok(
+ "https://crates.io/foo",
+ PackageIdSpec {
+ name: InternedString::new("foo"),
+ version: None,
+ url: Some(Url::parse("https://crates.io/foo").unwrap()),
+ },
+ "https://crates.io/foo",
+ );
+ ok(
+ "https://crates.io/foo#1.2.3",
+ PackageIdSpec {
+ name: InternedString::new("foo"),
+ version: Some("1.2.3".to_semver().unwrap()),
+ url: Some(Url::parse("https://crates.io/foo").unwrap()),
+ },
+ "https://crates.io/foo#1.2.3",
+ );
+ ok(
+ "https://crates.io/foo#bar:1.2.3",
+ PackageIdSpec {
+ name: InternedString::new("bar"),
+ version: Some("1.2.3".to_semver().unwrap()),
+ url: Some(Url::parse("https://crates.io/foo").unwrap()),
+ },
+ "https://crates.io/foo#bar@1.2.3",
+ );
+ ok(
+ "https://crates.io/foo#bar@1.2.3",
+ PackageIdSpec {
+ name: InternedString::new("bar"),
+ version: Some("1.2.3".to_semver().unwrap()),
+ url: Some(Url::parse("https://crates.io/foo").unwrap()),
+ },
+ "https://crates.io/foo#bar@1.2.3",
+ );
+ ok(
+ "foo",
+ PackageIdSpec {
+ name: InternedString::new("foo"),
+ version: None,
+ url: None,
+ },
+ "foo",
+ );
+ ok(
+ "foo:1.2.3",
+ PackageIdSpec {
+ name: InternedString::new("foo"),
+ version: Some("1.2.3".to_semver().unwrap()),
+ url: None,
+ },
+ "foo@1.2.3",
+ );
+ ok(
+ "foo@1.2.3",
+ PackageIdSpec {
+ name: InternedString::new("foo"),
+ version: Some("1.2.3".to_semver().unwrap()),
+ url: None,
+ },
+ "foo@1.2.3",
+ );
+ }
+
+ #[test]
+ fn bad_parsing() {
+ assert!(PackageIdSpec::parse("baz:").is_err());
+ assert!(PackageIdSpec::parse("baz:*").is_err());
+ assert!(PackageIdSpec::parse("baz:1.0").is_err());
+ assert!(PackageIdSpec::parse("baz@").is_err());
+ assert!(PackageIdSpec::parse("baz@*").is_err());
+ assert!(PackageIdSpec::parse("baz@1.0").is_err());
+ assert!(PackageIdSpec::parse("https://baz:1.0").is_err());
+ assert!(PackageIdSpec::parse("https://#baz:1.0").is_err());
+ }
+
+ #[test]
+ fn matching() {
+ let url = Url::parse("https://example.com").unwrap();
+ let sid = SourceId::for_registry(&url).unwrap();
+ let foo = PackageId::new("foo", "1.2.3", sid).unwrap();
+ let bar = PackageId::new("bar", "1.2.3", sid).unwrap();
+
+ assert!(PackageIdSpec::parse("foo").unwrap().matches(foo));
+ assert!(!PackageIdSpec::parse("foo").unwrap().matches(bar));
+ assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(foo));
+ assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(foo));
+ assert!(PackageIdSpec::parse("foo@1.2.3").unwrap().matches(foo));
+ assert!(!PackageIdSpec::parse("foo@1.2.2").unwrap().matches(foo));
+ }
+}
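A short sketch, using the same setup as the `matching` test above, of how `query` disambiguates; the version numbers are arbitrary.

    let url = Url::parse("https://example.com").unwrap();
    let sid = SourceId::for_registry(&url).unwrap();
    let old = PackageId::new("foo", "1.2.3", sid).unwrap();
    let new = PackageId::new("foo", "2.0.0", sid).unwrap();
    // A fully qualified spec selects exactly one id...
    let spec = PackageIdSpec::parse("foo@2.0.0").unwrap();
    assert_eq!(spec.query(vec![old, new]).unwrap(), new);
    // ...while a bare name matching several versions is reported as ambiguous.
    assert!(PackageIdSpec::parse("foo").unwrap().query(vec![old, new]).is_err());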
diff --git a/src/tools/cargo/src/cargo/core/profiles.rs b/src/tools/cargo/src/cargo/core/profiles.rs
new file mode 100644
index 000000000..51d19e32e
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/profiles.rs
@@ -0,0 +1,1358 @@
+//! # Profiles: built-in and customizable compiler flag presets
+//!
+//! [`Profiles`] is a collection of built-in profiles, and profiles defined
+//! in the root manifest and configurations.
+//!
+//! To start using a profile, most of the time you start from [`Profiles::new`],
+//! which does the following:
+//!
+//! - Create a `Profiles` by merging profiles from configs onto the profiles
+//!   from the root manifest (see [`merge_config_profiles`]).
+//! - Add built-in profiles onto it (see [`Profiles::add_root_profiles`]).
+//! - Process profile inheritance for each profile (see [`Profiles::add_maker`]).
+//!
+//! Then you can query a [`Profile`] via [`Profiles::get_profile`], which respects
+//! the profile override hierarchy described below. The [`Profile`] you get
+//! is basically an immutable struct containing the compiler flag presets.
+//!
+//! ## Profile override hierarchy
+//!
+//! Profile settings can be overridden for specific packages and build-time crates.
+//! The precedence is explained in [`ProfileMaker`].
+//! The algorithm happens within [`ProfileMaker::get_profile`].
+
+use crate::core::compiler::{CompileKind, CompileTarget, Unit};
+use crate::core::dependency::Artifact;
+use crate::core::resolver::features::FeaturesFor;
+use crate::core::{PackageId, PackageIdSpec, Resolve, Shell, Target, Workspace};
+use crate::util::interning::InternedString;
+use crate::util::toml::{ProfilePackageSpec, StringOrBool, TomlProfile, TomlProfiles, U32OrBool};
+use crate::util::{closest_msg, config, CargoResult, Config};
+use anyhow::{bail, Context as _};
+use std::collections::{BTreeMap, HashMap, HashSet};
+use std::hash::Hash;
+use std::{cmp, fmt, hash};
+
+/// Collection of all profiles.
+///
+/// To get a specific [`Profile`], you usually create this and then call [`get_profile`].
+///
+/// [`get_profile`]: Profiles::get_profile
+#[derive(Clone, Debug)]
+pub struct Profiles {
+ /// Incremental compilation can be overridden globally via:
+ /// - `CARGO_INCREMENTAL` environment variable.
+ /// - `build.incremental` config value.
+ incremental: Option<bool>,
+ /// Map of profile name to directory name for that profile.
+ dir_names: HashMap<InternedString, InternedString>,
+ /// The profile makers. Key is the profile name.
+ by_name: HashMap<InternedString, ProfileMaker>,
+ /// The original profiles written by the user in the manifest and config.
+ ///
+ /// This is here to assist with error reporting, as the `ProfileMaker`
+ /// values have the inherits chains all merged together.
+ original_profiles: BTreeMap<InternedString, TomlProfile>,
+ /// The profile the user requested to use.
+ requested_profile: InternedString,
+ /// The host target for rustc being used by this `Profiles`.
+ rustc_host: InternedString,
+}
+
+impl Profiles {
+ pub fn new(ws: &Workspace<'_>, requested_profile: InternedString) -> CargoResult<Profiles> {
+ let config = ws.config();
+ let incremental = match config.get_env_os("CARGO_INCREMENTAL") {
+ Some(v) => Some(v == "1"),
+ None => config.build_config()?.incremental,
+ };
+ let mut profiles = merge_config_profiles(ws, requested_profile)?;
+ let rustc_host = ws.config().load_global_rustc(Some(ws))?.host;
+
+ let mut profile_makers = Profiles {
+ incremental,
+ dir_names: Self::predefined_dir_names(),
+ by_name: HashMap::new(),
+ original_profiles: profiles.clone(),
+ requested_profile,
+ rustc_host,
+ };
+
+ Self::add_root_profiles(&mut profile_makers, &profiles);
+
+ // Merge with predefined profiles.
+ use std::collections::btree_map::Entry;
+ for (predef_name, mut predef_prof) in Self::predefined_profiles().into_iter() {
+ match profiles.entry(InternedString::new(predef_name)) {
+ Entry::Vacant(vac) => {
+ vac.insert(predef_prof);
+ }
+ Entry::Occupied(mut oc) => {
+ // Override predefined with the user-provided Toml.
+ let r = oc.get_mut();
+ predef_prof.merge(r);
+ *r = predef_prof;
+ }
+ }
+ }
+
+ for (name, profile) in &profiles {
+ profile_makers.add_maker(*name, profile, &profiles)?;
+ }
+ // Verify that the requested profile is defined *somewhere*.
+ // This simplifies the API (no need for CargoResult), and enforces
+ // assumptions about how config profiles are loaded.
+ profile_makers.get_profile_maker(requested_profile)?;
+ Ok(profile_makers)
+ }
+
+ /// Returns the hard-coded directory names for built-in profiles.
+ fn predefined_dir_names() -> HashMap<InternedString, InternedString> {
+ [
+ (InternedString::new("dev"), InternedString::new("debug")),
+ (InternedString::new("test"), InternedString::new("debug")),
+ (InternedString::new("bench"), InternedString::new("release")),
+ ]
+ .into()
+ }
+
+    /// Initialize `by_name` with the two "root" profiles, `dev` and
+    /// `release`, given the user's definition.
+ fn add_root_profiles(
+ profile_makers: &mut Profiles,
+ profiles: &BTreeMap<InternedString, TomlProfile>,
+ ) {
+ profile_makers.by_name.insert(
+ InternedString::new("dev"),
+ ProfileMaker::new(Profile::default_dev(), profiles.get("dev").cloned()),
+ );
+
+ profile_makers.by_name.insert(
+ InternedString::new("release"),
+ ProfileMaker::new(Profile::default_release(), profiles.get("release").cloned()),
+ );
+ }
+
+ /// Returns the built-in profiles (not including dev/release, which are
+ /// "root" profiles).
+ fn predefined_profiles() -> Vec<(&'static str, TomlProfile)> {
+ vec![
+ (
+ "bench",
+ TomlProfile {
+ inherits: Some(InternedString::new("release")),
+ ..TomlProfile::default()
+ },
+ ),
+ (
+ "test",
+ TomlProfile {
+ inherits: Some(InternedString::new("dev")),
+ ..TomlProfile::default()
+ },
+ ),
+ (
+ "doc",
+ TomlProfile {
+ inherits: Some(InternedString::new("dev")),
+ ..TomlProfile::default()
+ },
+ ),
+ ]
+ }
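In manifest terms, the predefined profiles above correspond to `inherits` chains that users can extend. As a hypothetical Cargo.toml sketch (not taken from this change), a custom profile must name its parent, which is exactly what `add_maker`/`process_chain` below enforce:

    [profile.release-lto]    # hypothetical custom profile name
    inherits = "release"     # required for every profile except `dev` and `release`
    lto = true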
+
+ /// Creates a `ProfileMaker`, and inserts it into `self.by_name`.
+ fn add_maker(
+ &mut self,
+ name: InternedString,
+ profile: &TomlProfile,
+ profiles: &BTreeMap<InternedString, TomlProfile>,
+ ) -> CargoResult<()> {
+ match &profile.dir_name {
+ None => {}
+ Some(dir_name) => {
+ self.dir_names.insert(name, dir_name.to_owned());
+ }
+ }
+
+ // dev/release are "roots" and don't inherit.
+ if name == "dev" || name == "release" {
+ if profile.inherits.is_some() {
+ bail!(
+ "`inherits` must not be specified in root profile `{}`",
+ name
+ );
+ }
+ // Already inserted from `add_root_profiles`, no need to do anything.
+ return Ok(());
+ }
+
+ // Keep track for inherits cycles.
+ let mut set = HashSet::new();
+ set.insert(name);
+ let maker = self.process_chain(name, profile, &mut set, profiles)?;
+ self.by_name.insert(name, maker);
+ Ok(())
+ }
+
+ /// Build a `ProfileMaker` by recursively following the `inherits` setting.
+ ///
+ /// * `name`: The name of the profile being processed.
+ /// * `profile`: The TOML profile being processed.
+ /// * `set`: Set of profiles that have been visited, used to detect cycles.
+ /// * `profiles`: Map of all TOML profiles.
+ ///
+ /// Returns a `ProfileMaker` to be used for the given named profile.
+ fn process_chain(
+ &mut self,
+ name: InternedString,
+ profile: &TomlProfile,
+ set: &mut HashSet<InternedString>,
+ profiles: &BTreeMap<InternedString, TomlProfile>,
+ ) -> CargoResult<ProfileMaker> {
+ let mut maker = match profile.inherits {
+ Some(inherits_name) if inherits_name == "dev" || inherits_name == "release" => {
+ // These are the root profiles added in `add_root_profiles`.
+ self.get_profile_maker(inherits_name).unwrap().clone()
+ }
+ Some(inherits_name) => {
+ if !set.insert(inherits_name) {
+ bail!(
+ "profile inheritance loop detected with profile `{}` inheriting `{}`",
+ name,
+ inherits_name
+ );
+ }
+
+ match profiles.get(&inherits_name) {
+ None => {
+ bail!(
+ "profile `{}` inherits from `{}`, but that profile is not defined",
+ name,
+ inherits_name
+ );
+ }
+ Some(parent) => self.process_chain(inherits_name, parent, set, profiles)?,
+ }
+ }
+ None => {
+ bail!(
+ "profile `{}` is missing an `inherits` directive \
+ (`inherits` is required for all profiles except `dev` or `release`)",
+ name
+ );
+ }
+ };
+ match &mut maker.toml {
+ Some(toml) => toml.merge(profile),
+ None => maker.toml = Some(profile.clone()),
+ };
+ Ok(maker)
+ }
+
+ /// Retrieves the profile for a target.
+ /// `is_member` is whether or not this package is a member of the
+ /// workspace.
+ pub fn get_profile(
+ &self,
+ pkg_id: PackageId,
+ is_member: bool,
+ is_local: bool,
+ unit_for: UnitFor,
+ kind: CompileKind,
+ ) -> Profile {
+ let maker = self.get_profile_maker(self.requested_profile).unwrap();
+ let mut profile = maker.get_profile(Some(pkg_id), is_member, unit_for.is_for_host());
+
+ // Dealing with `panic=abort` and `panic=unwind` requires some special
+ // treatment. Be sure to process all the various options here.
+ match unit_for.panic_setting() {
+ PanicSetting::AlwaysUnwind => profile.panic = PanicStrategy::Unwind,
+ PanicSetting::ReadProfile => {}
+ }
+
+ // Default macOS debug information to being stored in the "unpacked"
+ // split-debuginfo format. At the time of this writing that's the only
+ // platform which has a stable `-Csplit-debuginfo` option for rustc,
+ // and it's typically much faster than running `dsymutil` on all builds
+ // in incremental cases.
+ if let Some(debug) = profile.debuginfo.to_option() {
+ if profile.split_debuginfo.is_none() && debug > 0 {
+ let target = match &kind {
+ CompileKind::Host => self.rustc_host.as_str(),
+ CompileKind::Target(target) => target.short_name(),
+ };
+ if target.contains("-apple-") {
+ profile.split_debuginfo = Some(InternedString::new("unpacked"));
+ }
+ }
+ }
+
+ // Incremental can be globally overridden.
+ if let Some(v) = self.incremental {
+ profile.incremental = v;
+ }
+
+ // Only enable incremental compilation for sources the user can
+ // modify (aka path sources). For things that change infrequently,
+ // non-incremental builds yield better performance in the compiler
+ // itself (aka crates.io / git dependencies)
+ //
+ // (see also https://github.com/rust-lang/cargo/issues/3972)
+ if !is_local {
+ profile.incremental = false;
+ }
+ profile.name = self.requested_profile;
+ profile
+ }
+
+ /// The profile for *running* a `build.rs` script is only used for setting
+ /// a few environment variables. To ensure proper de-duplication of the
+ /// running `Unit`, this uses a stripped-down profile (so that unrelated
+ /// profile flags don't cause `build.rs` to needlessly run multiple
+ /// times).
+ pub fn get_profile_run_custom_build(&self, for_unit_profile: &Profile) -> Profile {
+ let mut result = Profile::default();
+ result.name = for_unit_profile.name;
+ result.root = for_unit_profile.root;
+ result.debuginfo = for_unit_profile.debuginfo;
+ result.opt_level = for_unit_profile.opt_level;
+ result
+ }
+
+ /// This returns the base profile. This is currently used for the
+ /// `[Finished]` line. It is not entirely accurate, since it doesn't
+ /// select for the package that was actually built.
+ pub fn base_profile(&self) -> Profile {
+ let profile_name = self.requested_profile;
+ let maker = self.get_profile_maker(profile_name).unwrap();
+ maker.get_profile(None, /*is_member*/ true, /*is_for_host*/ false)
+ }
+
+ /// Gets the directory name for a profile, like `debug` or `release`.
+ pub fn get_dir_name(&self) -> InternedString {
+ *self
+ .dir_names
+ .get(&self.requested_profile)
+ .unwrap_or(&self.requested_profile)
+ }
+
+ /// Used to check for overrides for non-existing packages.
+ pub fn validate_packages(
+ &self,
+ profiles: Option<&TomlProfiles>,
+ shell: &mut Shell,
+ resolve: &Resolve,
+ ) -> CargoResult<()> {
+ for (name, profile) in &self.by_name {
+ // If the user did not specify an override, skip this. This is here
+ // to avoid generating errors for inherited profiles which don't
+ // specify package overrides. The `by_name` profile has had the inherits
+ // chain merged, so we need to look at the original source to check
+ // if an override was specified.
+ if self
+ .original_profiles
+ .get(name)
+ .and_then(|orig| orig.package.as_ref())
+ .is_none()
+ {
+ continue;
+ }
+ let found = validate_packages_unique(resolve, name, &profile.toml)?;
+ // We intentionally do not validate unmatched packages for config
+ // profiles, in case they are defined in a central location. This
+ // iterates over the manifest profiles only.
+ if let Some(profiles) = profiles {
+ if let Some(toml_profile) = profiles.get(name) {
+ validate_packages_unmatched(shell, resolve, name, toml_profile, &found)?;
+ }
+ }
+ }
+ Ok(())
+ }
+
+ /// Returns the profile maker for the given profile name.
+ fn get_profile_maker(&self, name: InternedString) -> CargoResult<&ProfileMaker> {
+ self.by_name
+ .get(&name)
+ .ok_or_else(|| anyhow::format_err!("profile `{}` is not defined", name))
+ }
+}
+
+/// An object used for handling the profile hierarchy.
+///
+/// The precedence of profiles is (first one wins):
+///
+/// - Profiles in `.cargo/config` files (using same order as below).
+/// - `[profile.dev.package.name]` -- a named package.
+/// - `[profile.dev.package."*"]` -- this cannot apply to workspace members.
+/// - `[profile.dev.build-override]` -- this can only apply to `build.rs` scripts
+/// and their dependencies.
+/// - `[profile.dev]`
+/// - Default (hard-coded) values.
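+///
+/// As a purely illustrative example (the dependency name `image` is
+/// hypothetical), a manifest exercising several of these layers might look
+/// like:
+///
+/// ```toml
+/// [profile.dev]
+/// opt-level = 1
+///
+/// [profile.dev.build-override]
+/// opt-level = 0
+///
+/// [profile.dev.package."*"]
+/// opt-level = 2
+///
+/// [profile.dev.package.image]
+/// opt-level = 3
+/// ```
+///
+/// For a normal (non-host) unit of the non-member dependency `image`, the
+/// named-package entry wins and `opt-level = 3` is used.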
+#[derive(Debug, Clone)]
+struct ProfileMaker {
+ /// The starting, hard-coded defaults for the profile.
+ default: Profile,
+ /// The TOML profile defined in `Cargo.toml` or config.
+ ///
+ /// This is None if the user did not specify one, in which case the
+ /// `default` is used. Note that the built-in defaults for test/bench/doc
+ /// always set this since they need to declare the `inherits` value.
+ toml: Option<TomlProfile>,
+}
+
+impl ProfileMaker {
+ /// Creates a new `ProfileMaker`.
+ ///
+ /// Note that this does not process `inherits`, the caller is responsible for that.
+ fn new(default: Profile, toml: Option<TomlProfile>) -> ProfileMaker {
+ ProfileMaker { default, toml }
+ }
+
+ /// Generates a new `Profile`.
+ fn get_profile(
+ &self,
+ pkg_id: Option<PackageId>,
+ is_member: bool,
+ is_for_host: bool,
+ ) -> Profile {
+ let mut profile = self.default.clone();
+
+ // First apply profile-specific settings, things like
+ // `[profile.release]`
+ if let Some(toml) = &self.toml {
+ merge_profile(&mut profile, toml);
+ }
+
+ // Next start overriding those settings. First comes build dependencies
+ // which default to opt-level 0...
+ if is_for_host {
+ // For-host units are things like procedural macros, build scripts, and
+ // their dependencies. For these units most projects simply want them
+ // to compile quickly and the runtime doesn't matter too much since
+ // they tend to process very little data. For this reason we default
+ // them to a "compile as quickly as possible" mode which for now means
+ // basically turning down the optimization level and not limiting
+ // codegen units. This ensures that we spend little time optimizing
+ // while also enabling parallelism by not constraining codegen units.
+ profile.opt_level = InternedString::new("0");
+ profile.codegen_units = None;
+
+ // For build dependencies, we usually don't need debuginfo, and
+ // removing it will compile faster. However, that can conflict with
+ // a unit graph optimization, reusing units that are shared between
+ // build dependencies and runtime dependencies: when the runtime
+ // target is the same as the build host, we only need to build a
+ // dependency once and reuse the results, instead of building twice.
+ // We defer the choice of the debuginfo level until we can check if
+ // a unit is shared. If that's the case, we'll use the deferred value
+ // below so the unit can be reused, otherwise we can avoid emitting
+ // the unit's debuginfo.
+ if let Some(debuginfo) = profile.debuginfo.to_option() {
+ profile.debuginfo = DebugInfo::Deferred(debuginfo);
+ }
+ }
+ // ... and next comes any other sorts of overrides specified in
+ // profiles, such as `[profile.release.build-override]` or
+ // `[profile.release.package.foo]`
+ if let Some(toml) = &self.toml {
+ merge_toml_overrides(pkg_id, is_member, is_for_host, &mut profile, toml);
+ }
+ profile
+ }
+}
+
+/// Merge package and build overrides from the given TOML profile into the given `Profile`.
+fn merge_toml_overrides(
+ pkg_id: Option<PackageId>,
+ is_member: bool,
+ is_for_host: bool,
+ profile: &mut Profile,
+ toml: &TomlProfile,
+) {
+ if is_for_host {
+ if let Some(build_override) = &toml.build_override {
+ merge_profile(profile, build_override);
+ }
+ }
+ if let Some(overrides) = toml.package.as_ref() {
+ if !is_member {
+ if let Some(all) = overrides.get(&ProfilePackageSpec::All) {
+ merge_profile(profile, all);
+ }
+ }
+ if let Some(pkg_id) = pkg_id {
+ let mut matches = overrides
+ .iter()
+ .filter_map(|(key, spec_profile)| match *key {
+ ProfilePackageSpec::All => None,
+ ProfilePackageSpec::Spec(ref s) => {
+ if s.matches(pkg_id) {
+ Some(spec_profile)
+ } else {
+ None
+ }
+ }
+ });
+ if let Some(spec_profile) = matches.next() {
+ merge_profile(profile, spec_profile);
+ // `validate_packages` should ensure that there are
+ // no additional matches.
+ assert!(
+ matches.next().is_none(),
+ "package `{}` matched multiple package profile overrides",
+ pkg_id
+ );
+ }
+ }
+ }
+}
+
+/// Merge the given TOML profile into the given `Profile`.
+///
+/// Does not merge overrides (see `merge_toml_overrides`).
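+///
+/// For example (values shown purely for illustration), the `lto` key maps as
+/// follows:
+///
+/// ```toml
+/// [profile.release]
+/// lto = "thin"   # Lto::Named("thin")
+/// # lto = "off"  # Lto::Off (likewise "n", "no", and "none")
+/// # lto = true   # Lto::Bool(true), i.e. "fat" LTO
+/// ```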
+fn merge_profile(profile: &mut Profile, toml: &TomlProfile) {
+ if let Some(ref opt_level) = toml.opt_level {
+ profile.opt_level = InternedString::new(&opt_level.0);
+ }
+ match toml.lto {
+ Some(StringOrBool::Bool(b)) => profile.lto = Lto::Bool(b),
+ Some(StringOrBool::String(ref n)) if is_off(n.as_str()) => profile.lto = Lto::Off,
+ Some(StringOrBool::String(ref n)) => profile.lto = Lto::Named(InternedString::new(n)),
+ None => {}
+ }
+ if toml.codegen_backend.is_some() {
+ profile.codegen_backend = toml.codegen_backend;
+ }
+ if toml.codegen_units.is_some() {
+ profile.codegen_units = toml.codegen_units;
+ }
+ match toml.debug {
+ Some(U32OrBool::U32(debug)) => profile.debuginfo = DebugInfo::Explicit(debug),
+ Some(U32OrBool::Bool(true)) => profile.debuginfo = DebugInfo::Explicit(2),
+ Some(U32OrBool::Bool(false)) => profile.debuginfo = DebugInfo::None,
+ None => {}
+ }
+ if let Some(debug_assertions) = toml.debug_assertions {
+ profile.debug_assertions = debug_assertions;
+ }
+ if let Some(split_debuginfo) = &toml.split_debuginfo {
+ profile.split_debuginfo = Some(InternedString::new(split_debuginfo));
+ }
+ if let Some(rpath) = toml.rpath {
+ profile.rpath = rpath;
+ }
+ if let Some(panic) = &toml.panic {
+ profile.panic = match panic.as_str() {
+ "unwind" => PanicStrategy::Unwind,
+ "abort" => PanicStrategy::Abort,
+ // This should be validated in TomlProfile::validate
+ _ => panic!("Unexpected panic setting `{}`", panic),
+ };
+ }
+ if let Some(overflow_checks) = toml.overflow_checks {
+ profile.overflow_checks = overflow_checks;
+ }
+ if let Some(incremental) = toml.incremental {
+ profile.incremental = incremental;
+ }
+ if let Some(flags) = &toml.rustflags {
+ profile.rustflags = flags.clone();
+ }
+ profile.strip = match toml.strip {
+ Some(StringOrBool::Bool(true)) => Strip::Named(InternedString::new("symbols")),
+ None | Some(StringOrBool::Bool(false)) => Strip::None,
+ Some(StringOrBool::String(ref n)) if n.as_str() == "none" => Strip::None,
+ Some(StringOrBool::String(ref n)) => Strip::Named(InternedString::new(n)),
+ };
+}
+
+/// The root profile (dev/release).
+///
+/// This is currently only used for the `PROFILE` env var for build scripts
+/// for backwards compatibility. We should probably deprecate `PROFILE` and
+/// encourage using things like `DEBUG` and `OPT_LEVEL` instead.
+#[derive(Clone, Copy, Eq, PartialOrd, Ord, PartialEq, Debug)]
+pub enum ProfileRoot {
+ Release,
+ Debug,
+}
+
+/// Profile settings used to determine which compiler flags to use for a
+/// target.
+#[derive(Clone, Eq, PartialOrd, Ord, serde::Serialize)]
+pub struct Profile {
+ pub name: InternedString,
+ pub opt_level: InternedString,
+ #[serde(skip)] // named profiles are unstable
+ pub root: ProfileRoot,
+ pub lto: Lto,
+ // `None` means use rustc default.
+ pub codegen_backend: Option<InternedString>,
+ // `None` means use rustc default.
+ pub codegen_units: Option<u32>,
+ pub debuginfo: DebugInfo,
+ pub split_debuginfo: Option<InternedString>,
+ pub debug_assertions: bool,
+ pub overflow_checks: bool,
+ pub rpath: bool,
+ pub incremental: bool,
+ pub panic: PanicStrategy,
+ pub strip: Strip,
+ #[serde(skip_serializing_if = "Vec::is_empty")] // remove when `rustflags` is stabilized
+ // Note that `rustflags` is used for the cargo-feature `profile_rustflags`
+ pub rustflags: Vec<InternedString>,
+}
+
+impl Default for Profile {
+ fn default() -> Profile {
+ Profile {
+ name: InternedString::new(""),
+ opt_level: InternedString::new("0"),
+ root: ProfileRoot::Debug,
+ lto: Lto::Bool(false),
+ codegen_backend: None,
+ codegen_units: None,
+ debuginfo: DebugInfo::None,
+ debug_assertions: false,
+ split_debuginfo: None,
+ overflow_checks: false,
+ rpath: false,
+ incremental: false,
+ panic: PanicStrategy::Unwind,
+ strip: Strip::None,
+ rustflags: vec![],
+ }
+ }
+}
+
+compact_debug! {
+ impl fmt::Debug for Profile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let (default, default_name) = match self.name.as_str() {
+ "dev" => (Profile::default_dev(), "default_dev()"),
+ "release" => (Profile::default_release(), "default_release()"),
+ _ => (Profile::default(), "default()"),
+ };
+ [debug_the_fields(
+ name
+ opt_level
+ lto
+ root
+ codegen_backend
+ codegen_units
+ debuginfo
+ split_debuginfo
+ debug_assertions
+ overflow_checks
+ rpath
+ incremental
+ panic
+ strip
+ rustflags
+ )]
+ }
+ }
+}
+
+impl fmt::Display for Profile {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Profile({})", self.name)
+ }
+}
+
+impl hash::Hash for Profile {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: hash::Hasher,
+ {
+ self.comparable().hash(state);
+ }
+}
+
+impl cmp::PartialEq for Profile {
+ fn eq(&self, other: &Self) -> bool {
+ self.comparable() == other.comparable()
+ }
+}
+
+impl Profile {
+ /// Returns a built-in `dev` profile.
+ fn default_dev() -> Profile {
+ Profile {
+ name: InternedString::new("dev"),
+ root: ProfileRoot::Debug,
+ debuginfo: DebugInfo::Explicit(2),
+ debug_assertions: true,
+ overflow_checks: true,
+ incremental: true,
+ ..Profile::default()
+ }
+ }
+
+ /// Returns a built-in `release` profile.
+ fn default_release() -> Profile {
+ Profile {
+ name: InternedString::new("release"),
+ root: ProfileRoot::Release,
+ opt_level: InternedString::new("3"),
+ ..Profile::default()
+ }
+ }
+
+ /// Compares all fields except `name`, which doesn't affect compilation.
+ /// This is necessary for `Unit` deduplication for things like "test" and
+ /// "dev" which are essentially the same.
+ fn comparable(&self) -> impl Hash + Eq {
+ (
+ self.opt_level,
+ self.lto,
+ self.codegen_backend,
+ self.codegen_units,
+ self.debuginfo,
+ self.split_debuginfo,
+ self.debug_assertions,
+ self.overflow_checks,
+ self.rpath,
+ self.incremental,
+ self.panic,
+ self.strip,
+ )
+ }
+}
+
+/// The debuginfo level setting.
+///
+/// This is semantically an `Option<u32>`, and should be used as so via the
+/// [DebugInfo::to_option] method for all intents and purposes:
+/// - `DebugInfo::None` corresponds to `None`
+/// - `DebugInfo::Explicit(u32)` and `DebugInfo::Deferred` correspond to
+/// `Option<u32>::Some`
+///
+/// Internally, it's used to model a debuginfo level whose value can be deferred
+/// for optimization purposes: host dependencies usually don't need the same
+/// level as target dependencies. For dependencies that are shared between the
+/// two, however, that value also affects reuse: different debuginfo levels would
+/// cause a unit to be built twice. By deferring the choice until we know
+/// whether to choose the optimized value or the default value, we can make sure
+/// the unit is only built once and the unit graph is still optimized.
+#[derive(Debug, Copy, Clone, serde::Serialize)]
+#[serde(untagged)]
+pub enum DebugInfo {
+ /// No debuginfo level was set.
+ None,
+ /// A debuginfo level that is explicitly set, by a profile or a user.
+ Explicit(u32),
+ /// For internal purposes: a deferred debuginfo level that can be optimized
+ /// away, but has this value otherwise.
+ ///
+ /// Behaves like `Explicit` in all situations except for the default build
+ /// dependencies profile: whenever a build dependency is not shared with
+ /// runtime dependencies, this level is weakened to a lower level that is
+ /// faster to build (see [DebugInfo::weaken]).
+ ///
+ /// In all other situations, this level value will be the one to use.
+ Deferred(u32),
+}
+
+impl DebugInfo {
+ /// The main way to interact with this debuginfo level, turning it into an Option.
+ pub fn to_option(&self) -> Option<u32> {
+ match self {
+ DebugInfo::None => None,
+ DebugInfo::Explicit(v) | DebugInfo::Deferred(v) => Some(*v),
+ }
+ }
+
+ /// Returns true if the debuginfo level is high enough (at least 1). Helper
+ /// for a common operation on the usual `Option` representation.
+ pub(crate) fn is_turned_on(&self) -> bool {
+ self.to_option().unwrap_or(0) != 0
+ }
+
+ pub(crate) fn is_deferred(&self) -> bool {
+ matches!(self, DebugInfo::Deferred(_))
+ }
+
+ /// Force the deferred, preferred, debuginfo level to a finalized explicit value.
+ pub(crate) fn finalize(self) -> Self {
+ match self {
+ DebugInfo::Deferred(v) => DebugInfo::Explicit(v),
+ _ => self,
+ }
+ }
+
+ /// Reset to the lowest level: no debuginfo.
+ pub(crate) fn weaken(self) -> Self {
+ DebugInfo::None
+ }
+}
+
+impl PartialEq for DebugInfo {
+ fn eq(&self, other: &DebugInfo) -> bool {
+ self.to_option().eq(&other.to_option())
+ }
+}
+
+impl Eq for DebugInfo {}
+
+impl Hash for DebugInfo {
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ self.to_option().hash(state);
+ }
+}
+
+impl PartialOrd for DebugInfo {
+ fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+ self.to_option().partial_cmp(&other.to_option())
+ }
+}
+
+impl Ord for DebugInfo {
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ self.to_option().cmp(&other.to_option())
+ }
+}
+
+/// The link-time-optimization setting.
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
+pub enum Lto {
+ /// Explicitly no LTO, disables thin-LTO.
+ Off,
+ /// True = "Fat" LTO
+ /// False = rustc default (no args), currently "thin LTO"
+ Bool(bool),
+ /// Named LTO settings like "thin".
+ Named(InternedString),
+}
+
+impl serde::ser::Serialize for Lto {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::ser::Serializer,
+ {
+ match self {
+ Lto::Off => "off".serialize(s),
+ Lto::Bool(b) => b.to_string().serialize(s),
+ Lto::Named(n) => n.serialize(s),
+ }
+ }
+}
+
+/// The `panic` setting.
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord, serde::Serialize)]
+#[serde(rename_all = "lowercase")]
+pub enum PanicStrategy {
+ Unwind,
+ Abort,
+}
+
+impl fmt::Display for PanicStrategy {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ PanicStrategy::Unwind => "unwind",
+ PanicStrategy::Abort => "abort",
+ }
+ .fmt(f)
+ }
+}
+
+/// The setting for choosing which symbols to strip
+#[derive(
+ Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord, serde::Serialize, serde::Deserialize,
+)]
+#[serde(rename_all = "lowercase")]
+pub enum Strip {
+ /// Don't remove any symbols
+ None,
+ /// Named Strip settings
+ Named(InternedString),
+}
+
+impl fmt::Display for Strip {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Strip::None => "none",
+ Strip::Named(s) => s.as_str(),
+ }
+ .fmt(f)
+ }
+}
+
+/// Flags used in creating `Unit`s to indicate the purpose for the target, and
+/// to ensure the target's dependencies have the correct settings.
+///
+/// This means these are passed down from the root of the dependency tree to apply
+/// to most child dependencies.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
+pub struct UnitFor {
+ /// A target for `build.rs` or any of its dependencies, or a proc-macro or
+ /// any of its dependencies. This enables `build-override` profiles for
+ /// these targets.
+ ///
+ /// An invariant is that if `host_features` is true, `host` must be true.
+ ///
+ /// Note that this is `true` for `RunCustomBuild` units, even though that
+ /// unit should *not* use build-override profiles. This is a bit of a
+ /// special case. When computing the `RunCustomBuild` unit, it manually
+ /// uses the `get_profile_run_custom_build` method to get the correct
+ /// profile information for the unit. `host` needs to be true so that all
+ /// of the dependencies of that `RunCustomBuild` unit have this flag be
+ /// sticky (and forced to `true` for all further dependencies) — which is
+ /// the whole point of `UnitFor`.
+ host: bool,
+ /// A target for a build dependency or proc-macro (or any of its
+ /// dependencies). This is used for computing features of build
+ /// dependencies and proc-macros independently of other dependency kinds.
+ ///
+ /// The subtle difference between this and `host` is that the build script
+ /// for a non-host package sets this to `false` because it wants the
+ /// features of the non-host package (whereas `host` is true because the
+ /// build script is being built for the host). `host_features` becomes
+ /// `true` for build-dependencies or proc-macros, or any of their
+ /// dependencies. For example, with this dependency tree:
+ ///
+ /// ```text
+ /// foo
+ /// ├── foo build.rs
+ /// │ └── shared_dep (BUILD dependency)
+ /// │ └── shared_dep build.rs
+ /// └── shared_dep (Normal dependency)
+ /// └── shared_dep build.rs
+ /// ```
+ ///
+ /// In this example, `foo build.rs` is HOST=true, HOST_FEATURES=false.
+ /// This is so that `foo build.rs` gets the profile settings for build
+ /// scripts (HOST=true) and features of foo (HOST_FEATURES=false) because
+ /// build scripts need to know which features their package is being built
+ /// with.
+ ///
+ /// But in the case of `shared_dep`, when built as a build dependency,
+ /// both flags are true (it only wants the build-dependency features).
+ /// When `shared_dep` is built as a normal dependency, then `shared_dep
+ /// build.rs` is HOST=true, HOST_FEATURES=false for the same reasons that
+ /// foo's build script is set that way.
+ host_features: bool,
+ /// How Cargo processes the `panic` setting or profiles.
+ panic_setting: PanicSetting,
+
+ /// The compile kind of the root unit for which artifact dependencies are built.
+ /// This is required particularly for the `target = "target"` setting of artifact
+ /// dependencies, which means inheriting the `--target` specified on the command-line.
+ /// However, that is a multi-value argument and root units are already created to
+ /// reflect one unit per `--target`. Thus we have to build one artifact with the
+ /// correct target for each of these trees.
+ /// Note that this will always be set as we don't initially know if there are
+ /// artifacts that make use of it.
+ root_compile_kind: CompileKind,
+
+ /// This is only set for artifact dependencies which have their
+ /// `<target-triple>|target` set.
+ /// If so, this information is used as part of the key for resolving their features,
+ /// allowing for target-dependent feature resolution within the entire dependency tree.
+ /// Note that this target corresponds to the target used to build the units in that
+ /// dependency tree, too, but this copy of it is specifically used for feature lookup.
+ artifact_target_for_features: Option<CompileTarget>,
+}
+
+/// How Cargo processes the `panic` setting or profiles.
+///
+/// This is done to handle test/benches inheriting from dev/release,
+/// as well as forcing `for_host` units to always unwind.
+/// It also interacts with [`-Z panic-abort-tests`].
+///
+/// [`-Z panic-abort-tests`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#panic-abort-tests
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
+enum PanicSetting {
+ /// Used to force a unit to always be compiled with the `panic=unwind`
+ /// strategy, notably for build scripts, proc macros, etc.
+ AlwaysUnwind,
+
+ /// Indicates that this unit will read its `profile` setting and use
+ /// whatever is configured there.
+ ReadProfile,
+}
+
+impl UnitFor {
+ /// A unit for a normal target/dependency (i.e., not custom build,
+ /// proc macro/plugin, or test/bench).
+ pub fn new_normal(root_compile_kind: CompileKind) -> UnitFor {
+ UnitFor {
+ host: false,
+ host_features: false,
+ panic_setting: PanicSetting::ReadProfile,
+ root_compile_kind,
+ artifact_target_for_features: None,
+ }
+ }
+
+ /// A unit for a custom build script or proc-macro or its dependencies.
+ ///
+ /// The `host_features` parameter is whether or not this is for a build
+ /// dependency or proc-macro (something that requires being built "on the
+ /// host"). Build scripts for non-host units should use `false` because
+ /// they want to use the features of the package they are running for.
+ pub fn new_host(host_features: bool, root_compile_kind: CompileKind) -> UnitFor {
+ UnitFor {
+ host: true,
+ host_features,
+ // Force build scripts to always use `panic=unwind` for now to
+ // maximally share dependencies with procedural macros.
+ panic_setting: PanicSetting::AlwaysUnwind,
+ root_compile_kind,
+ artifact_target_for_features: None,
+ }
+ }
+
+ /// A unit for a compiler plugin or their dependencies.
+ pub fn new_compiler(root_compile_kind: CompileKind) -> UnitFor {
+ UnitFor {
+ host: false,
+ // The feature resolver doesn't know which dependencies are
+ // plugins, so for now plugins don't split features. Since plugins
+ // are mostly deprecated, just leave this as false.
+ host_features: false,
+ // Force plugins to use `panic=unwind` so panics in the compiler do
+ // not abort the process but instead end with a reasonable error
+ // message that involves catching the panic in the compiler.
+ panic_setting: PanicSetting::AlwaysUnwind,
+ root_compile_kind,
+ artifact_target_for_features: None,
+ }
+ }
+
+ /// A unit for a test/bench target or their dependencies.
+ ///
+ /// Note that `config` is taken here for unstable CLI features to detect
+ /// whether `panic=abort` is supported for tests. Historical versions of
+ /// rustc did not support this, but newer versions do with an unstable
+ /// compiler flag.
+ pub fn new_test(config: &Config, root_compile_kind: CompileKind) -> UnitFor {
+ UnitFor {
+ host: false,
+ host_features: false,
+ // We're testing out an unstable feature (`-Zpanic-abort-tests`)
+ // which inherits the panic setting from the dev/release profile
+ // (basically avoiding recompiles) but historical defaults required
+ // that we always unwound.
+ panic_setting: if config.cli_unstable().panic_abort_tests {
+ PanicSetting::ReadProfile
+ } else {
+ PanicSetting::AlwaysUnwind
+ },
+ root_compile_kind,
+ artifact_target_for_features: None,
+ }
+ }
+
+ /// This is a special case for unit tests of a proc-macro.
+ ///
+ /// Proc-macro unit tests are forced to be run on the host.
+ pub fn new_host_test(config: &Config, root_compile_kind: CompileKind) -> UnitFor {
+ let mut unit_for = UnitFor::new_test(config, root_compile_kind);
+ unit_for.host = true;
+ unit_for.host_features = true;
+ unit_for
+ }
+
+ /// Returns a new copy updated based on the target dependency.
+ ///
+ /// This is where the magic happens that the host/host_features settings
+ /// transition in a sticky fashion. As the dependency graph is being
+ /// built, once those flags are set, they stay set for the duration of
+ /// that portion of tree.
+ pub fn with_dependency(
+ self,
+ parent: &Unit,
+ dep_target: &Target,
+ root_compile_kind: CompileKind,
+ ) -> UnitFor {
+ // A build script or proc-macro transitions this to being built for the host.
+ let dep_for_host = dep_target.for_host();
+ // This is where feature decoupling of host versus target happens.
+ //
+ // Once host features are desired, they are always desired.
+ //
+ // A proc-macro should always use host features.
+ //
+ // Dependencies of a build script should use host features (subtle
+ // point: the build script itself does *not* use host features, that's
+ // why the parent is checked here, and not the dependency).
+ let host_features =
+ self.host_features || parent.target.is_custom_build() || dep_target.proc_macro();
+ // Build scripts and proc macros, and all of their dependencies are
+ // AlwaysUnwind.
+ let panic_setting = if dep_for_host {
+ PanicSetting::AlwaysUnwind
+ } else {
+ self.panic_setting
+ };
+ UnitFor {
+ host: self.host || dep_for_host,
+ host_features,
+ panic_setting,
+ root_compile_kind,
+ artifact_target_for_features: self.artifact_target_for_features,
+ }
+ }
+
+ pub fn for_custom_build(self) -> UnitFor {
+ UnitFor {
+ host: true,
+ host_features: self.host_features,
+ // Force build scripts to always use `panic=unwind` for now to
+ // maximally share dependencies with procedural macros.
+ panic_setting: PanicSetting::AlwaysUnwind,
+ root_compile_kind: self.root_compile_kind,
+ artifact_target_for_features: self.artifact_target_for_features,
+ }
+ }
+
+ /// Set the artifact compile target for use in features using the given `artifact`.
+ pub(crate) fn with_artifact_features(mut self, artifact: &Artifact) -> UnitFor {
+ self.artifact_target_for_features = artifact.target().and_then(|t| t.to_compile_target());
+ self
+ }
+
+ /// Set the artifact compile target as determined by a resolved compile target. This is used if `target = "target"`.
+ pub(crate) fn with_artifact_features_from_resolved_compile_kind(
+ mut self,
+ kind: Option<CompileKind>,
+ ) -> UnitFor {
+ self.artifact_target_for_features = kind.and_then(|kind| match kind {
+ CompileKind::Host => None,
+ CompileKind::Target(triple) => Some(triple),
+ });
+ self
+ }
+
+ /// Returns `true` if this unit is for a build script or any of its
+ /// dependencies, or a proc macro or any of its dependencies.
+ pub fn is_for_host(&self) -> bool {
+ self.host
+ }
+
+ pub fn is_for_host_features(&self) -> bool {
+ self.host_features
+ }
+
+ /// Returns how `panic` settings should be handled for this profile
+ fn panic_setting(&self) -> PanicSetting {
+ self.panic_setting
+ }
+
+ /// We might already contain a parent artifact compile kind for features, but we
+ /// gladly accept the one from this dependency as an override, as it defines how
+ /// the artifact is built.
+ /// If we are an artifact but don't specify a `target`, we assume the default
+ /// compile kind that is suitable in this situation.
+ pub(crate) fn map_to_features_for(&self, dep_artifact: Option<&Artifact>) -> FeaturesFor {
+ FeaturesFor::from_for_host_or_artifact_target(
+ self.is_for_host_features(),
+ match dep_artifact {
+ Some(artifact) => artifact
+ .target()
+ .and_then(|t| t.to_resolved_compile_target(self.root_compile_kind)),
+ None => self.artifact_target_for_features,
+ },
+ )
+ }
+
+ pub(crate) fn root_compile_kind(&self) -> CompileKind {
+ self.root_compile_kind
+ }
+}
+
+/// Takes the manifest profiles, and overlays the config profiles on top.
+///
+/// Returns a new copy of the profile map with all the merges complete.
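+///
+/// For instance (illustrative values only), a `.cargo/config.toml` containing
+///
+/// ```toml
+/// [profile.release]
+/// debug = true
+/// ```
+///
+/// is merged on top of the manifest's `[profile.release]` table, with the
+/// config value winning for any key set in both places.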
+fn merge_config_profiles(
+ ws: &Workspace<'_>,
+ requested_profile: InternedString,
+) -> CargoResult<BTreeMap<InternedString, TomlProfile>> {
+ let mut profiles = match ws.profiles() {
+ Some(profiles) => profiles.get_all().clone(),
+ None => BTreeMap::new(),
+ };
+ // Set of profile names to check if defined in config only.
+ let mut check_to_add = HashSet::new();
+ check_to_add.insert(requested_profile);
+ // Merge config onto manifest profiles.
+ for (name, profile) in &mut profiles {
+ if let Some(config_profile) = get_config_profile(ws, name)? {
+ profile.merge(&config_profile);
+ }
+ if let Some(inherits) = &profile.inherits {
+ check_to_add.insert(*inherits);
+ }
+ }
+ // Add the built-in profiles. This is important for things like `cargo
+ // test` which implicitly use the "dev" profile for dependencies.
+ for name in &["dev", "release", "test", "bench"] {
+ check_to_add.insert(InternedString::new(name));
+ }
+ // Add config-only profiles.
+ // Need to iterate repeatedly to get all the inherits values.
+ let mut current = HashSet::new();
+ while !check_to_add.is_empty() {
+ std::mem::swap(&mut current, &mut check_to_add);
+ for name in current.drain() {
+ if !profiles.contains_key(&name) {
+ if let Some(config_profile) = get_config_profile(ws, &name)? {
+ if let Some(inherits) = &config_profile.inherits {
+ check_to_add.insert(*inherits);
+ }
+ profiles.insert(name, config_profile);
+ }
+ }
+ }
+ }
+ Ok(profiles)
+}
+
+/// Helper for fetching a profile from config.
+fn get_config_profile(ws: &Workspace<'_>, name: &str) -> CargoResult<Option<TomlProfile>> {
+ let profile: Option<config::Value<TomlProfile>> =
+ ws.config().get(&format!("profile.{}", name))?;
+ let profile = match profile {
+ Some(profile) => profile,
+ None => return Ok(None),
+ };
+ let mut warnings = Vec::new();
+ profile
+ .val
+ .validate(
+ name,
+ ws.config().cli_unstable(),
+ ws.unstable_features(),
+ &mut warnings,
+ )
+ .with_context(|| {
+ format!(
+ "config profile `{}` is not valid (defined in `{}`)",
+ name, profile.definition
+ )
+ })?;
+ for warning in warnings {
+ ws.config().shell().warn(warning)?;
+ }
+ Ok(Some(profile.val))
+}
+
+/// Validate that a package does not match multiple package override specs.
+///
+/// For example `[profile.dev.package.bar]` and `[profile.dev.package."bar:0.5.0"]`
+/// would both match `bar:0.5.0` which would be ambiguous.
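+///
+/// Concretely (assuming the resolve contains `bar` at version 0.5.0), a
+/// manifest like the following is rejected because both specs match the same
+/// package:
+///
+/// ```toml
+/// [profile.dev.package.bar]
+/// opt-level = 3
+///
+/// [profile.dev.package."bar:0.5.0"]
+/// opt-level = 1
+/// ```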
+fn validate_packages_unique(
+ resolve: &Resolve,
+ name: &str,
+ toml: &Option<TomlProfile>,
+) -> CargoResult<HashSet<PackageIdSpec>> {
+ let toml = match toml {
+ Some(ref toml) => toml,
+ None => return Ok(HashSet::new()),
+ };
+ let overrides = match toml.package.as_ref() {
+ Some(overrides) => overrides,
+ None => return Ok(HashSet::new()),
+ };
+ // Verify that a package doesn't match multiple spec overrides.
+ let mut found = HashSet::new();
+ for pkg_id in resolve.iter() {
+ let matches: Vec<&PackageIdSpec> = overrides
+ .keys()
+ .filter_map(|key| match *key {
+ ProfilePackageSpec::All => None,
+ ProfilePackageSpec::Spec(ref spec) => {
+ if spec.matches(pkg_id) {
+ Some(spec)
+ } else {
+ None
+ }
+ }
+ })
+ .collect();
+ match matches.len() {
+ 0 => {}
+ 1 => {
+ found.insert(matches[0].clone());
+ }
+ _ => {
+ let specs = matches
+ .iter()
+ .map(|spec| spec.to_string())
+ .collect::<Vec<_>>()
+ .join(", ");
+ bail!(
+ "multiple package overrides in profile `{}` match package `{}`\n\
+ found package specs: {}",
+ name,
+ pkg_id,
+ specs
+ );
+ }
+ }
+ }
+ Ok(found)
+}
+
+/// Check for any profile override specs that do not match any known packages.
+///
+/// This helps check for typos and mistakes.
+fn validate_packages_unmatched(
+ shell: &mut Shell,
+ resolve: &Resolve,
+ name: &str,
+ toml: &TomlProfile,
+ found: &HashSet<PackageIdSpec>,
+) -> CargoResult<()> {
+ let overrides = match toml.package.as_ref() {
+ Some(overrides) => overrides,
+ None => return Ok(()),
+ };
+
+ // Verify every override matches at least one package.
+ let missing_specs = overrides.keys().filter_map(|key| {
+ if let ProfilePackageSpec::Spec(ref spec) = *key {
+ if !found.contains(spec) {
+ return Some(spec);
+ }
+ }
+ None
+ });
+ for spec in missing_specs {
+ // See if there is an exact name match.
+ let name_matches: Vec<String> = resolve
+ .iter()
+ .filter_map(|pkg_id| {
+ if pkg_id.name() == spec.name() {
+ Some(pkg_id.to_string())
+ } else {
+ None
+ }
+ })
+ .collect();
+ if name_matches.is_empty() {
+ let suggestion = closest_msg(&spec.name(), resolve.iter(), |p| p.name().as_str());
+ shell.warn(format!(
+ "profile package spec `{}` in profile `{}` did not match any packages{}",
+ spec, name, suggestion
+ ))?;
+ } else {
+ shell.warn(format!(
+ "profile package spec `{}` in profile `{}` \
+ has a version or URL that does not match any of the packages: {}",
+ spec,
+ name,
+ name_matches.join(", ")
+ ))?;
+ }
+ }
+ Ok(())
+}
+
+/// Returns `true` if a string is a toggle that turns an option off.
+fn is_off(s: &str) -> bool {
+ matches!(s, "off" | "n" | "no" | "none")
+}
diff --git a/src/tools/cargo/src/cargo/core/registry.rs b/src/tools/cargo/src/cargo/core/registry.rs
new file mode 100644
index 000000000..e20531b70
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/registry.rs
@@ -0,0 +1,937 @@
+use std::collections::{HashMap, HashSet};
+use std::task::{ready, Poll};
+
+use crate::core::PackageSet;
+use crate::core::{Dependency, PackageId, QueryKind, Source, SourceId, SourceMap, Summary};
+use crate::sources::config::SourceConfigMap;
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+use crate::util::{CanonicalUrl, Config};
+use anyhow::{bail, Context as _};
+use log::{debug, trace};
+use url::Url;
+
+/// Source of information about a group of packages.
+///
+/// See also `core::Source`.
+pub trait Registry {
+ /// Attempt to find the packages that match a dependency request.
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>>;
+
+ fn query_vec(&mut self, dep: &Dependency, kind: QueryKind) -> Poll<CargoResult<Vec<Summary>>> {
+ let mut ret = Vec::new();
+ self.query(dep, kind, &mut |s| ret.push(s)).map_ok(|()| ret)
+ }
+
+ fn describe_source(&self, source: SourceId) -> String;
+ fn is_replaced(&self, source: SourceId) -> bool;
+
+ /// Block until all outstanding Poll::Pending requests are Poll::Ready.
+ fn block_until_ready(&mut self) -> CargoResult<()>;
+}
+
+/// This structure represents a registry of known packages. It internally
+/// contains a number of `Box<Source>` instances which are used to load a
+/// `Package` from.
+///
+/// The resolution phase of Cargo uses this to drive knowledge about new
+/// packages as well as querying for lists of new packages. It is here that
+/// sources are updated (e.g., network operations) and overrides are
+/// handled.
+///
+/// The general idea behind this registry is that it is centered around the
+/// `SourceMap` structure, contained within which is a mapping of a `SourceId` to
+/// a `Source`. Each `Source` in the map has been updated (using network
+/// operations if necessary) and is ready to be queried for packages.
+pub struct PackageRegistry<'cfg> {
+ config: &'cfg Config,
+ sources: SourceMap<'cfg>,
+
+ // A list of sources which are considered "overrides" and take precedence
+ // when querying for packages.
+ overrides: Vec<SourceId>,
+
+ // Note that each SourceId does not take into account its `precise` field
+ // when hashing or testing for equality. When adding a new `SourceId`, we
+ // want to avoid duplicates in the `SourceMap` (to prevent re-updating the
+ // same git repo twice for example), but we also want to ensure that the
+ // loaded source is always updated.
+ //
+ // Sources with a `precise` field normally don't need to be updated because
+ // their contents are already on disk, but sources without a `precise` field
+ // almost always need to be updated. If we have a cached `Source` for a
+ // precise `SourceId`, then when we add a new `SourceId` that is not precise
+ // we want to ensure that the underlying source is updated.
+ //
+ // This is basically a long-winded way of saying that we want to know
+ // precisely what the keys of `sources` are, so this is a mapping of key to
+ // what exactly the key is.
+ source_ids: HashMap<SourceId, (SourceId, Kind)>,
+
+ locked: LockedMap,
+ yanked_whitelist: HashSet<PackageId>,
+ source_config: SourceConfigMap<'cfg>,
+
+ patches: HashMap<CanonicalUrl, Vec<Summary>>,
+ patches_locked: bool,
+ patches_available: HashMap<CanonicalUrl, Vec<PackageId>>,
+}
+
+/// A map of all "locked packages" which is filled in when parsing a lock file
+/// and is used to guide dependency resolution by altering summaries as they're
+/// queried from this source.
+///
+/// This map can be thought of as a glorified `Vec<MySummary>` where `MySummary`
+/// has a `PackageId` for which package it represents as well as a list of
+/// `PackageId` for the resolved dependencies. The hash map is otherwise
+/// structured though for easy access throughout this registry.
+type LockedMap = HashMap<
+ // The first level of keying done in this hash map is the source that
+ // dependencies come from, identified by a `SourceId`.
+ // The next level is keyed by the name of the package...
+ (SourceId, InternedString),
+ // ... and the value here is a list of tuples. The first element of each
+ // tuple is a package which has the source/name used to get to this
+ // point. The second element of each tuple is the list of locked
+ // dependencies that the first element has.
+ Vec<(PackageId, Vec<PackageId>)>,
+>;
+
+#[derive(PartialEq, Eq, Clone, Copy)]
+enum Kind {
+ Override,
+ Locked,
+ Normal,
+}
+
+/// Argument to `PackageRegistry::patch` which is information about a `[patch]`
+/// directive that we found in a lockfile, if present.
+pub struct LockedPatchDependency {
+ /// The original `Dependency` directive, except "locked" so its version
+ /// requirement is `=foo` and its `SourceId` has a "precise" listed.
+ pub dependency: Dependency,
+ /// The `PackageId` that was previously found in a lock file which
+ /// `dependency` matches.
+ pub package_id: PackageId,
+ /// Something only used for backwards compatibility with the v2 lock file
+ /// format where `branch=master` is considered the same as `DefaultBranch`.
+ /// For more comments on this see the code in `ops/resolve.rs`.
+ pub alt_package_id: Option<PackageId>,
+}
+
+impl<'cfg> PackageRegistry<'cfg> {
+ pub fn new(config: &'cfg Config) -> CargoResult<PackageRegistry<'cfg>> {
+ let source_config = SourceConfigMap::new(config)?;
+ Ok(PackageRegistry {
+ config,
+ sources: SourceMap::new(),
+ source_ids: HashMap::new(),
+ overrides: Vec::new(),
+ source_config,
+ locked: HashMap::new(),
+ yanked_whitelist: HashSet::new(),
+ patches: HashMap::new(),
+ patches_locked: false,
+ patches_available: HashMap::new(),
+ })
+ }
+
+ pub fn get(self, package_ids: &[PackageId]) -> CargoResult<PackageSet<'cfg>> {
+ trace!("getting packages; sources={}", self.sources.len());
+ PackageSet::new(package_ids, self.sources, self.config)
+ }
+
+ fn ensure_loaded(&mut self, namespace: SourceId, kind: Kind) -> CargoResult<()> {
+ match self.source_ids.get(&namespace) {
+ // We've previously loaded this source, and we've already locked it,
+ // so we're not allowed to change it even if `namespace` has a
+ // slightly different precise version listed.
+ Some((_, Kind::Locked)) => {
+ debug!("load/locked {}", namespace);
+ return Ok(());
+ }
+
+ // If the previous source was not a precise source, then we can be
+ // sure that it's already been updated if we've already loaded it.
+ Some((previous, _)) if previous.precise().is_none() => {
+ debug!("load/precise {}", namespace);
+ return Ok(());
+ }
+
+ // If the previous source has the same precise version as we do,
+ // then we're done, otherwise we need to move forward
+ // updating this source.
+ Some((previous, _)) => {
+ if previous.precise() == namespace.precise() {
+ debug!("load/match {}", namespace);
+ return Ok(());
+ }
+ debug!("load/mismatch {}", namespace);
+ }
+ None => {
+ debug!("load/missing {}", namespace);
+ }
+ }
+
+ self.load(namespace, kind)?;
+
+ // This isn't strictly necessary since it will be called later.
+ // However it improves error messages for sources that issue errors
+ // in `block_until_ready` because the callers here have context about
+ // which deps are being resolved.
+ self.block_until_ready()?;
+ Ok(())
+ }
+
+ pub fn add_sources(&mut self, ids: impl IntoIterator<Item = SourceId>) -> CargoResult<()> {
+ for id in ids {
+ self.ensure_loaded(id, Kind::Locked)?;
+ }
+ Ok(())
+ }
+
+ pub fn add_preloaded(&mut self, source: Box<dyn Source + 'cfg>) {
+ self.add_source(source, Kind::Locked);
+ }
+
+ fn add_source(&mut self, source: Box<dyn Source + 'cfg>, kind: Kind) {
+ let id = source.source_id();
+ self.sources.insert(source);
+ self.source_ids.insert(id, (id, kind));
+ }
+
+ pub fn add_override(&mut self, source: Box<dyn Source + 'cfg>) {
+ self.overrides.push(source.source_id());
+ self.add_source(source, Kind::Override);
+ }
+
+ pub fn add_to_yanked_whitelist(&mut self, iter: impl Iterator<Item = PackageId>) {
+ let pkgs = iter.collect::<Vec<_>>();
+ for (_, source) in self.sources.sources_mut() {
+ source.add_to_yanked_whitelist(&pkgs);
+ }
+ self.yanked_whitelist.extend(pkgs);
+ }
+
+ /// Remove all residual state from previous lock files.
+ pub fn clear_lock(&mut self) {
+ trace!("clear_lock");
+ self.locked = HashMap::new();
+ }
+
+ pub fn register_lock(&mut self, id: PackageId, deps: Vec<PackageId>) {
+ trace!("register_lock: {}", id);
+ for dep in deps.iter() {
+ trace!("\t-> {}", dep);
+ }
+ let sub_vec = self
+ .locked
+ .entry((id.source_id(), id.name()))
+ .or_insert_with(Vec::new);
+ sub_vec.push((id, deps));
+ }
+
+ /// Insert a `[patch]` section into this registry.
+ ///
+ /// This method will insert a `[patch]` section for the `url` specified,
+ /// with the given list of dependencies. The `url` specified is the URL of
+ /// the source to patch (for example this is `crates-io` in the manifest).
+ /// The `deps` is an array of all the entries in the `[patch]` section of
+ /// the manifest.
+ ///
+ /// Here the `deps` will be resolved to a precise version and stored
+ /// internally for future calls to `query` below. Each element of `deps` is a
+ /// tuple where the first element is the patch definition straight from the
+ /// manifest, and the second element is an optional locked variant of the
+ /// patch. This locked patch is the patch locked to
+ /// a specific version found in Cargo.lock. This will be `None` if
+ /// `Cargo.lock` doesn't exist, or the patch did not match any existing
+ /// entries in `Cargo.lock`.
+ ///
+ /// Note that the patch list specified here *will not* be available to
+ /// `query` until `lock_patches` is called below, which should be called
+ /// once all patches have been added.
+ ///
+ /// The return value is a `Vec` of patches that should *not* be locked.
+ /// This happens when the patch is locked, but the patch has been updated
+ /// so the locked value is no longer correct.
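+ ///
+ /// As a purely illustrative example (the `uuid` path is hypothetical), the
+ /// entries in `deps` would come from a manifest section such as:
+ ///
+ /// ```toml
+ /// [patch.crates-io]
+ /// uuid = { path = "../uuid" }
+ /// ```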
+ pub fn patch(
+ &mut self,
+ url: &Url,
+ deps: &[(&Dependency, Option<LockedPatchDependency>)],
+ ) -> CargoResult<Vec<(Dependency, PackageId)>> {
+ // NOTE: None of this code is aware of required features. If a patch
+ // is missing a required feature, you end up with an "unused patch"
+ // warning, which is very hard to understand. Ideally the warning
+ // would be tailored to indicate *why* it is unused.
+ let canonical = CanonicalUrl::new(url)?;
+
+ // Return value of patches that shouldn't be locked.
+ let mut unlock_patches = Vec::new();
+
+ // First up we need to actually resolve each `deps` specification to
+ // precisely one summary. We're not using the `query` method below as it
+ // internally uses maps we're building up as part of this method
+ // (`patches_available` and `patches`). Instead we're going straight to
+ // the source to load information from it.
+ //
+ // Remember that each dependency listed in `[patch]` has to resolve to
+ // precisely one package, so that's why we're just creating a flat list
+ // of summaries which should be the same length as `deps` above.
+
+ let mut deps_remaining: Vec<_> = deps.iter().collect();
+ let mut unlocked_summaries = Vec::new();
+ while !deps_remaining.is_empty() {
+ let mut deps_pending = Vec::new();
+ for dep_remaining in deps_remaining {
+ let (orig_patch, locked) = dep_remaining;
+
+ // Use the locked patch if it exists, otherwise use the original.
+ let dep = match locked {
+ Some(lock) => &lock.dependency,
+ None => *orig_patch,
+ };
+ debug!(
+ "registering a patch for `{}` with `{}`",
+ url,
+ dep.package_name()
+ );
+
+ if dep.features().len() != 0 || !dep.uses_default_features() {
+ self.source_config.config().shell().warn(format!(
+ "patch for `{}` uses the features mechanism. \
+ default-features and features will not take effect because the patch dependency does not support this mechanism",
+ dep.package_name()
+ ))?;
+ }
+
+ // Go straight to the source for resolving `dep`. Load it as we
+ // normally would and then ask it directly for the list of summaries
+ // corresponding to this `dep`.
+ self.ensure_loaded(dep.source_id(), Kind::Normal)
+ .with_context(|| {
+ format!(
+ "failed to load source for dependency `{}`",
+ dep.package_name()
+ )
+ })?;
+
+ let source = self
+ .sources
+ .get_mut(dep.source_id())
+ .expect("loaded source not present");
+
+ let summaries = match source.query_vec(dep, QueryKind::Exact)? {
+ Poll::Ready(deps) => deps,
+ Poll::Pending => {
+ deps_pending.push(dep_remaining);
+ continue;
+ }
+ };
+
+ let (summary, should_unlock) =
+ match summary_for_patch(orig_patch, &locked, summaries, source) {
+ Poll::Ready(x) => x,
+ Poll::Pending => {
+ deps_pending.push(dep_remaining);
+ continue;
+ }
+ }
+ .with_context(|| {
+ format!(
+ "patch for `{}` in `{}` failed to resolve",
+ orig_patch.package_name(),
+ url,
+ )
+ })
+ .with_context(|| format!("failed to resolve patches for `{}`", url))?;
+
+ debug!(
+ "patch summary is {:?} should_unlock={:?}",
+ summary, should_unlock
+ );
+ if let Some(unlock_id) = should_unlock {
+ unlock_patches.push(((*orig_patch).clone(), unlock_id));
+ }
+
+ if *summary.package_id().source_id().canonical_url() == canonical {
+ return Err(anyhow::anyhow!(
+ "patch for `{}` in `{}` points to the same source, but \
+ patches must point to different sources",
+ dep.package_name(),
+ url
+ ))
+ .context(format!("failed to resolve patches for `{}`", url));
+ }
+ unlocked_summaries.push(summary);
+ }
+
+ deps_remaining = deps_pending;
+ self.block_until_ready()?;
+ }
+
+ let mut name_and_version = HashSet::new();
+ for summary in unlocked_summaries.iter() {
+ let name = summary.package_id().name();
+ let version = summary.package_id().version();
+ if !name_and_version.insert((name, version)) {
+ bail!(
+ "cannot have two `[patch]` entries which both resolve \
+ to `{} v{}`",
+ name,
+ version
+ );
+ }
+ }
+
+ // Calculate a list of all patches available for this source which is
+ // then used later during calls to `lock` to rewrite summaries to point
+ // directly at these patched entries.
+ //
+ // Note that this is somewhat subtle where the list of `ids` for a
+ // canonical URL is extended with possibly two ids per summary. This is done
+ // to handle the transition from the v2->v3 lock file format where in
+ // v2 `Branch("master")` was considered the same as `DefaultBranch` for
+ // git dependencies. In this case if `summary.package_id()` is
+ // Branch("master") then alt_package_id will be DefaultBranch. This
+ // signifies that there's a patch available for either of those
+ // dependency directives if we see them in the dependency graph.
+ //
+ // This is a bit complicated and hopefully an edge case we can remove
+ // in the future, but for now it hopefully doesn't cause too much
+ // harm...
+ let mut ids = Vec::new();
+ for (summary, (_, lock)) in unlocked_summaries.iter().zip(deps) {
+ ids.push(summary.package_id());
+ if let Some(lock) = lock {
+ ids.extend(lock.alt_package_id);
+ }
+ }
+ self.patches_available.insert(canonical.clone(), ids);
+
+ // Note that we do not use `lock` here to lock summaries! That step
+ // happens later once `lock_patches` is invoked. In the meantime though
+ // we want to fill in the `patches_available` map (later used in the
+ // `lock` method) and otherwise store the unlocked summaries in
+ // `patches` to get locked in a future call to `lock_patches`.
+ self.patches.insert(canonical, unlocked_summaries);
+
+ Ok(unlock_patches)
+ }
+
+ /// Lock all patch summaries added via `patch`, making them available to
+ /// resolution via `query`.
+ ///
+ /// This function will internally `lock` each summary added via `patch`
+ /// above now that the full set of `patch` packages is known. This'll allow
+ /// us to correctly resolve overridden dependencies between patches
+ /// hopefully!
+ pub fn lock_patches(&mut self) {
+ assert!(!self.patches_locked);
+ for summaries in self.patches.values_mut() {
+ for summary in summaries {
+ debug!("locking patch {:?}", summary);
+ *summary = lock(&self.locked, &self.patches_available, summary.clone());
+ }
+ }
+ self.patches_locked = true;
+ }
+
+ /// Gets all patches grouped by the source URLs they are going to patch.
+ ///
+ /// These patches are mainly collected from [`patch`](Self::patch).
+ /// They might not be the same as patches actually used during dependency resolving.
+ pub fn patches(&self) -> &HashMap<CanonicalUrl, Vec<Summary>> {
+ &self.patches
+ }
+
+ fn load(&mut self, source_id: SourceId, kind: Kind) -> CargoResult<()> {
+ debug!("loading source {}", source_id);
+ let source = self
+ .source_config
+ .load(source_id, &self.yanked_whitelist)
+ .with_context(|| format!("Unable to update {}", source_id))?;
+ assert_eq!(source.source_id(), source_id);
+
+ if kind == Kind::Override {
+ self.overrides.push(source_id);
+ }
+ self.add_source(source, kind);
+
+ // If we have an imprecise version then we don't know what we're going
+ // to look for, so we always attempt to perform an update here.
+ //
+ // If we have a precise version, then we'll update lazily during the
+ // querying phase. Note that precise in this case is only
+ // `Some("locked")` as other `Some` values indicate a `cargo update
+ // --precise` request
+ if source_id.precise() != Some("locked") {
+ self.sources.get_mut(source_id).unwrap().invalidate_cache();
+ } else {
+ debug!("skipping update due to locked registry");
+ }
+ Ok(())
+ }
+
+ fn query_overrides(&mut self, dep: &Dependency) -> Poll<CargoResult<Option<Summary>>> {
+ for &s in self.overrides.iter() {
+ let src = self.sources.get_mut(s).unwrap();
+ let dep = Dependency::new_override(dep.package_name(), s);
+ let mut results = ready!(src.query_vec(&dep, QueryKind::Exact))?;
+ if !results.is_empty() {
+ return Poll::Ready(Ok(Some(results.remove(0))));
+ }
+ }
+ Poll::Ready(Ok(None))
+ }
+
+ /// This function is used to transform a summary to another locked summary
+ /// if possible. This is where the concept of a lock file comes into play.
+ ///
+ /// If a summary points at a package ID which was previously locked, then we
+ /// override the summary's ID itself, as well as all dependencies, to be
+ /// rewritten to the locked versions. This will transform the summary's
+ /// source to a precise source (listed in the locked version) as well as
+ /// transforming all of the dependencies from range requirements on
+ /// imprecise sources to exact requirements on precise sources.
+ ///
+ /// If a summary does not point at a package ID which was previously locked,
+ /// or if any dependencies were added and don't have a previously listed
+ /// version, we still want to avoid updating as many dependencies as
+ /// possible to keep the graph stable. In this case we map all of the
+ /// summary's dependencies to be rewritten to a locked version wherever
+ /// possible. If we're unable to map a dependency though, we just pass it on
+ /// through.
+ pub fn lock(&self, summary: Summary) -> Summary {
+ assert!(self.patches_locked);
+ lock(&self.locked, &self.patches_available, summary)
+ }
+
+ fn warn_bad_override(
+ &self,
+ override_summary: &Summary,
+ real_summary: &Summary,
+ ) -> CargoResult<()> {
+ let mut real_deps = real_summary.dependencies().iter().collect::<Vec<_>>();
+
+ let boilerplate = "\
+This is currently allowed but is known to produce buggy behavior with spurious
+recompiles and changes to the crate graph. Path overrides unfortunately were
+never intended to support this feature, so for now this message is just a
+warning. In the future, however, this message will become a hard error.
+
+To change the dependency graph via an override it's recommended to use the
+`[patch]` feature of Cargo instead of the path override feature. This is
+documented online at the url below for more information.
+
+https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html
+";
+
+ for dep in override_summary.dependencies() {
+ if let Some(i) = real_deps.iter().position(|d| dep == *d) {
+ real_deps.remove(i);
+ continue;
+ }
+ let msg = format!(
+ "path override for crate `{}` has altered the original list of\n\
+ dependencies; the dependency on `{}` was either added or\n\
+ modified to not match the previously resolved version\n\n\
+ {}",
+ override_summary.package_id().name(),
+ dep.package_name(),
+ boilerplate
+ );
+ self.source_config.config().shell().warn(&msg)?;
+ return Ok(());
+ }
+
+ if let Some(dep) = real_deps.get(0) {
+ let msg = format!(
+ "path override for crate `{}` has altered the original list of\n\
+ dependencies; the dependency on `{}` was removed\n\n\
+ {}",
+ override_summary.package_id().name(),
+ dep.package_name(),
+ boilerplate
+ );
+ self.source_config.config().shell().warn(&msg)?;
+ return Ok(());
+ }
+
+ Ok(())
+ }
+}
+
+impl<'cfg> Registry for PackageRegistry<'cfg> {
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ assert!(self.patches_locked);
+ let (override_summary, n, to_warn) = {
+ // Look for an override and get ready to query the real source.
+ let override_summary = ready!(self.query_overrides(dep))?;
+
+ // Next up on our list of candidates is to check the `[patch]`
+ // section of the manifest. Here we look through all patches
+ // relevant to the source that `dep` points to, and then we match
+ // name/version. Note that we don't use `dep.matches(..)` because
+ // the patches, by definition, come from a different source.
+ // This means that `dep.matches(..)` will always return false, when
+ // what we really care about is the name/version match.
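+ //
+ // (Rough precedence, highest first: path overrides, then `[patch]`
+ // entries, then the real source; that is the order this function
+ // consults them in.)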
+ let mut patches = Vec::<Summary>::new();
+ if let Some(extra) = self.patches.get(dep.source_id().canonical_url()) {
+ patches.extend(
+ extra
+ .iter()
+ .filter(|s| dep.matches_ignoring_source(s.package_id()))
+ .cloned(),
+ );
+ }
+
+ // A crucial feature of the `[patch]` feature is that we *don't*
+ // query the actual registry if we have a "locked" dependency. A
+ // locked dep basically just means a version constraint of `=a.b.c`,
+ // and because patches take priority over the actual source, if we
+ // have a candidate here we're done.
+ if patches.len() == 1 && dep.is_locked() {
+ let patch = patches.remove(0);
+ match override_summary {
+ Some(summary) => (summary, 1, Some(patch)),
+ None => {
+ f(patch);
+ return Poll::Ready(Ok(()));
+ }
+ }
+ } else {
+ if !patches.is_empty() {
+ debug!(
+ "found {} patches with an unlocked dep on `{}` at {} \
+ with `{}`, \
+ looking at sources",
+ patches.len(),
+ dep.package_name(),
+ dep.source_id(),
+ dep.version_req()
+ );
+ }
+
+ // Ensure the requested source_id is loaded
+ self.ensure_loaded(dep.source_id(), Kind::Normal)
+ .with_context(|| {
+ format!(
+ "failed to load source for dependency `{}`",
+ dep.package_name()
+ )
+ })?;
+
+ let source = self.sources.get_mut(dep.source_id());
+ match (override_summary, source) {
+ (Some(_), None) => {
+ return Poll::Ready(Err(anyhow::anyhow!("override found but no real ones")))
+ }
+ (None, None) => return Poll::Ready(Ok(())),
+
+ // If we don't have an override then we just ship
+ // everything upstairs after locking the summary
+ (None, Some(source)) => {
+ for patch in patches.iter() {
+ f(patch.clone());
+ }
+
+ // Our sources shouldn't ever come back to us with two
+ // summaries that have the same version. We could,
+ // however, have a `[patch]` section which is in use
+ // to override a version in the registry. This means
+ // that if our `summary` in this loop has the same
+ // version as something in `patches` that we've
+ // already selected, then we skip this `summary`.
+ let locked = &self.locked;
+ let all_patches = &self.patches_available;
+ let callback = &mut |summary: Summary| {
+ for patch in patches.iter() {
+ let patch = patch.package_id().version();
+ if summary.package_id().version() == patch {
+ return;
+ }
+ }
+ f(lock(locked, all_patches, summary))
+ };
+ return source.query(dep, kind, callback);
+ }
+
+ // If we have an override summary then we query the source
+ // to sanity check its results. We don't actually use any of
+ // the summaries it gives us though.
+ (Some(override_summary), Some(source)) => {
+ if !patches.is_empty() {
+ return Poll::Ready(Err(anyhow::anyhow!(
+ "found patches and a path override"
+ )));
+ }
+ let mut n = 0;
+ let mut to_warn = None;
+ {
+ let callback = &mut |summary| {
+ n += 1;
+ to_warn = Some(summary);
+ };
+ let pend = source.query(dep, kind, callback);
+ if pend.is_pending() {
+ return Poll::Pending;
+ }
+ }
+ (override_summary, n, to_warn)
+ }
+ }
+ }
+ };
+
+ if n > 1 {
+ return Poll::Ready(Err(anyhow::anyhow!(
+ "found an override with a non-locked list"
+ )));
+ } else if let Some(summary) = to_warn {
+ self.warn_bad_override(&override_summary, &summary)?;
+ }
+ f(self.lock(override_summary));
+ Poll::Ready(Ok(()))
+ }
+
+ fn describe_source(&self, id: SourceId) -> String {
+ match self.sources.get(id) {
+ Some(src) => src.describe(),
+ None => id.to_string(),
+ }
+ }
+
+ fn is_replaced(&self, id: SourceId) -> bool {
+ match self.sources.get(id) {
+ Some(src) => src.is_replaced(),
+ None => false,
+ }
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ for (source_id, source) in self.sources.sources_mut() {
+ source
+ .block_until_ready()
+ .with_context(|| format!("Unable to update {}", source_id))?;
+ }
+ Ok(())
+ }
+}
+
+fn lock(
+ locked: &LockedMap,
+ patches: &HashMap<CanonicalUrl, Vec<PackageId>>,
+ summary: Summary,
+) -> Summary {
+ let pair = locked
+ .get(&(summary.source_id(), summary.name()))
+ .and_then(|vec| vec.iter().find(|&&(id, _)| id == summary.package_id()));
+
+ trace!("locking summary of {}", summary.package_id());
+
+ // Lock the summary's ID if possible
+ let summary = match pair {
+ Some((precise, _)) => summary.override_id(*precise),
+ None => summary,
+ };
+ summary.map_dependencies(|dep| {
+ trace!(
+ "\t{}/{}/{}",
+ dep.package_name(),
+ dep.version_req(),
+ dep.source_id()
+ );
+
+ // If we've got a known set of overrides for this summary, then
+ // one of a few cases can arise:
+ //
+ // 1. We have a lock entry for this dependency from the same
+ // source as it's listed as coming from. In this case we make
+ // sure to lock to precisely the given package ID.
+ //
+ // 2. We have a lock entry for this dependency, but it's from a
+ // different source than what's listed, or the version
+ // requirement has changed. In this case we must discard the
+ // locked version because the dependency needs to be
+ // re-resolved.
+ //
+ // 3. We have a lock entry for this dependency, but it's from a
+ // different source than what's listed. This lock though happens
+ // through `[patch]`, so we want to preserve it.
+ //
+ // 4. We don't have a lock entry for this dependency, in which
+ // case it was likely an optional dependency which wasn't
+ // included previously so we just pass it through anyway.
+ //
+ // Cases 1/2 are handled by `matches_id`, case 3 is handled specially,
+ // and case 4 is handled by falling through to the logic below.
+ if let Some((_, locked_deps)) = pair {
+ let locked = locked_deps.iter().find(|&&id| {
+ // If the dependency matches the package id exactly then we've
+ // found a match, this is the id the dependency was previously
+ // locked to.
+ if dep.matches_id(id) {
+ return true;
+ }
+
+ // If the name/version doesn't match, then we definitely don't
+ // have a match whatsoever. Otherwise we need to check
+ // `[patch]`...
+ if !dep.matches_ignoring_source(id) {
+ return false;
+ }
+
+ // ... so here we look up the dependency url in the patches
+ // map, and we see if `id` is contained in the list of patches
+ // for that url. If it is then this lock is still valid,
+ // otherwise the lock is no longer valid.
+ match patches.get(dep.source_id().canonical_url()) {
+ Some(list) => list.contains(&id),
+ None => false,
+ }
+ });
+
+ if let Some(&locked) = locked {
+ trace!("\tfirst hit on {}", locked);
+ let mut dep = dep;
+
+ // If we found a locked version where the sources match, then
+ // we can `lock_to` to get an exact lock on this dependency.
+ // Otherwise we got a lock via `[patch]` so we only lock the
+ // version requirement, not the source.
+ if locked.source_id() == dep.source_id() {
+ dep.lock_to(locked);
+ } else {
+ dep.lock_version(locked.version());
+ }
+ return dep;
+ }
+ }
+
+ // If this dependency did not have a locked version, then we query
+ // all known locked packages to see if they match this dependency.
+ // If anything does then we lock it to that and move on.
+ let v = locked
+ .get(&(dep.source_id(), dep.package_name()))
+ .and_then(|vec| vec.iter().find(|&&(id, _)| dep.matches_id(id)));
+ if let Some(&(id, _)) = v {
+ trace!("\tsecond hit on {}", id);
+ let mut dep = dep;
+ dep.lock_to(id);
+ return dep;
+ }
+
+ trace!("\tnope, unlocked");
+ dep
+ })
+}
+
+/// This is a helper for selecting the summary, or generating a helpful error message.
+fn summary_for_patch(
+ orig_patch: &Dependency,
+ locked: &Option<LockedPatchDependency>,
+ mut summaries: Vec<Summary>,
+ source: &mut dyn Source,
+) -> Poll<CargoResult<(Summary, Option<PackageId>)>> {
+ if summaries.len() == 1 {
+ return Poll::Ready(Ok((summaries.pop().unwrap(), None)));
+ }
+ if summaries.len() > 1 {
+ // TODO: In the future, it might be nice to add all of these
+ // candidates so that version selection would just pick the
+ // appropriate one. However, as this is currently structured, if we
+ // added these all as patches, the unselected versions would end up in
+ // the "unused patch" listing, and trigger a warning. It might take a
+ // fair bit of restructuring to make that work cleanly, and there
+ // isn't any demand at this time to support that.
+ let mut vers: Vec<_> = summaries.iter().map(|summary| summary.version()).collect();
+ vers.sort();
+ let versions: Vec<_> = vers.into_iter().map(|v| v.to_string()).collect();
+ return Poll::Ready(Err(anyhow::anyhow!(
+ "patch for `{}` in `{}` resolved to more than one candidate\n\
+ Found versions: {}\n\
+ Update the patch definition to select only one package.\n\
+ For example, add an `=` version requirement to the patch definition, \
+ such as `version = \"={}\"`.",
+ orig_patch.package_name(),
+ orig_patch.source_id(),
+ versions.join(", "),
+ versions.last().unwrap()
+ )));
+ }
+ assert!(summaries.is_empty());
+ // No summaries found, try to help the user figure out what is wrong.
+ if let Some(locked) = locked {
+ // Since the locked patch did not match anything, try the unlocked one.
+ let orig_matches =
+ ready!(source.query_vec(orig_patch, QueryKind::Exact)).unwrap_or_else(|e| {
+ log::warn!(
+ "could not determine unlocked summaries for dep {:?}: {:?}",
+ orig_patch,
+ e
+ );
+ Vec::new()
+ });
+
+ let summary = ready!(summary_for_patch(orig_patch, &None, orig_matches, source))?;
+
+ // The unlocked version found a match. This returns a value to
+ // indicate that this entry should be unlocked.
+ return Poll::Ready(Ok((summary.0, Some(locked.package_id))));
+ }
+ // Try checking if there are *any* packages that match this by name.
+ let name_only_dep = Dependency::new_override(orig_patch.package_name(), orig_patch.source_id());
+
+ let name_summaries =
+ ready!(source.query_vec(&name_only_dep, QueryKind::Exact)).unwrap_or_else(|e| {
+ log::warn!(
+ "failed to do name-only summary query for {:?}: {:?}",
+ name_only_dep,
+ e
+ );
+ Vec::new()
+ });
+ let mut vers = name_summaries
+ .iter()
+ .map(|summary| summary.version())
+ .collect::<Vec<_>>();
+ let found = match vers.len() {
+ 0 => format!(""),
+ 1 => format!("version `{}`", vers[0]),
+ _ => {
+ vers.sort();
+ let strs: Vec<_> = vers.into_iter().map(|v| v.to_string()).collect();
+ format!("versions `{}`", strs.join(", "))
+ }
+ };
+ Poll::Ready(Err(if found.is_empty() {
+ anyhow::anyhow!(
+ "The patch location `{}` does not appear to contain any packages \
+ matching the name `{}`.",
+ orig_patch.source_id(),
+ orig_patch.package_name()
+ )
+ } else {
+ anyhow::anyhow!(
+ "The patch location `{}` contains a `{}` package with {}, but the patch \
+ definition requires `{}`.\n\
+ Check that the version in the patch location is what you expect, \
+ and update the patch definition to match.",
+ orig_patch.source_id(),
+ orig_patch.package_name(),
+ found,
+ orig_patch.version_req()
+ )
+ }))
+}
diff --git a/src/tools/cargo/src/cargo/core/resolver/conflict_cache.rs b/src/tools/cargo/src/cargo/core/resolver/conflict_cache.rs
new file mode 100644
index 000000000..10c41761d
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/resolver/conflict_cache.rs
@@ -0,0 +1,225 @@
+use std::collections::{BTreeMap, HashMap, HashSet};
+
+use log::trace;
+
+use super::types::ConflictMap;
+use crate::core::resolver::Context;
+use crate::core::{Dependency, PackageId};
+
+/// This is a trie for storing a large number of sets designed to
+/// efficiently see if any of the stored sets are a subset of a search set.
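+///
+/// For intuition, the check this accelerates is the plain subset test below;
+/// the trie just avoids scanning every stored set (sketch, not Cargo code):
+///
+/// ```text
+/// use std::collections::BTreeSet;
+/// let stored: BTreeSet<&str> = ["a", "b"].into_iter().collect();
+/// let active: BTreeSet<&str> = ["a", "b", "d"].into_iter().collect();
+/// assert!(stored.is_subset(&active)); // this stored conflict still applies
+/// ```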
+enum ConflictStoreTrie {
+ /// One of the stored sets.
+ Leaf(ConflictMap),
+ /// A map from an element to a subtrie where
+ /// all the sets in the subtrie contain that element.
+ Node(BTreeMap<PackageId, ConflictStoreTrie>),
+}
+
+impl ConflictStoreTrie {
+ /// Finds any known set of conflicts, if any,
+ /// where all elements return `Some` from `is_active` and contain the `PackageId` specified.
+ /// If more than one are activated, then it will return
+ /// one that will allow for the most jump-back.
+ fn find(
+ &self,
+ is_active: &impl Fn(PackageId) -> Option<usize>,
+ must_contain: Option<PackageId>,
+ mut max_age: usize,
+ ) -> Option<(&ConflictMap, usize)> {
+ match self {
+ ConflictStoreTrie::Leaf(c) => {
+ if must_contain.is_none() {
+ Some((c, 0))
+ } else {
+ // We did not find `must_contain`, so we need to keep looking.
+ None
+ }
+ }
+ ConflictStoreTrie::Node(m) => {
+ let mut out = None;
+ for (&pid, store) in must_contain
+ .map(|f| m.range(..=f))
+ .unwrap_or_else(|| m.range(..))
+ {
+ // If the key is active, then we need to check all of the corresponding subtrie.
+ if let Some(age_this) = is_active(pid) {
+ if age_this >= max_age && must_contain != Some(pid) {
+ // not worth looking at, it is too old.
+ continue;
+ }
+ if let Some((o, age_o)) =
+ store.find(is_active, must_contain.filter(|&f| f != pid), max_age)
+ {
+ let age = if must_contain == Some(pid) {
+ // all the results will include `must_contain`
+ // so the age of must_contain is not relevant to find the best result.
+ age_o
+ } else {
+ std::cmp::max(age_this, age_o)
+ };
+ if max_age > age {
+ // we found one that can jump back further, so replace the output.
+ out = Some((o, age));
+ // and don't look at anything older
+ max_age = age
+ }
+ }
+ }
+ // Else, if it is not active then there is no way any of the corresponding
+ // subtrie will be conflicting.
+ }
+ out
+ }
+ }
+ }
+
+ fn insert(&mut self, mut iter: impl Iterator<Item = PackageId>, con: ConflictMap) {
+ if let Some(pid) = iter.next() {
+ if let ConflictStoreTrie::Node(p) = self {
+ p.entry(pid)
+ .or_insert_with(|| ConflictStoreTrie::Node(BTreeMap::new()))
+ .insert(iter, con);
+ }
+ // Else, we already have a subset of this in the `ConflictStore`.
+ } else {
+ // We are at the end of the set we are adding, there are three cases for what to do
+ // next:
+ // 1. `self` is an empty dummy Node inserted by `or_insert_with`
+ // in which case we should replace it with `Leaf(con)`.
+ // 2. `self` is a `Node` because we previously inserted a superset of
+ // the thing we are working on (I don't know if this happens in practice)
+ // but the subset that we are working on will
+ // always match any time the larger set would have
+ // in which case we can replace it with `Leaf(con)`.
+ // 3. `self` is a `Leaf` that is in the same spot in the structure as
+ // the thing we are working on. So it is equivalent.
+ // We can replace it with `Leaf(con)`.
+ if cfg!(debug_assertions) {
+ if let ConflictStoreTrie::Leaf(c) = self {
+ let a: Vec<_> = con.keys().collect();
+ let b: Vec<_> = c.keys().collect();
+ assert_eq!(a, b);
+ }
+ }
+ *self = ConflictStoreTrie::Leaf(con)
+ }
+ }
+}
+
+pub(super) struct ConflictCache {
+ // `con_from_dep` is a cache of the reasons for each time we
+ // backtrack. For example after several backtracks we may have:
+ //
+ // con_from_dep[`foo = "^1.0.2"`] = map!{
+ // `foo=1.0.1`: map!{`foo=1.0.1`: Semver},
+ // `foo=1.0.0`: map!{`foo=1.0.0`: Semver},
+ // };
+ //
+ // This can be read as "we cannot find a candidate for dep `foo = "^1.0.2"`
+ // if either `foo=1.0.1` OR `foo=1.0.0` are activated".
+ //
+ // Another example after several backtracks we may have:
+ //
+ // con_from_dep[`foo = ">=0.8.2, <=0.9.3"`] = map!{
+ // `foo=0.8.1`: map!{
+ // `foo=0.9.4`: map!{`foo=0.8.1`: Semver, `foo=0.9.4`: Semver},
+ // }
+ // };
+ //
+ // This can be read as "we cannot find a candidate for dep `foo = ">=0.8.2,
+ // <=0.9.3"` if both `foo=0.8.1` AND `foo=0.9.4` are activated".
+ //
+ // This is used to make sure we don't queue work we know will fail. See the
+ // discussion in https://github.com/rust-lang/cargo/pull/5168 for why this
+ // is so important. The nested HashMaps act as a kind of btree that lets us
+ // look up which entries are still active without
+ // linearly scanning through the full list.
+ //
+ // Also, as a final note, this map is never removed from. It remains a
+ // global cache; any entry in it is unconditionally true regardless of the
+ // resolution history that got us here.
+ con_from_dep: HashMap<Dependency, ConflictStoreTrie>,
+ // `dep_from_pid` is an inverse-index of `con_from_dep`.
+ // For every `PackageId` this lists the `Dependency`s that mention it in `con_from_dep`.
+ dep_from_pid: HashMap<PackageId, HashSet<Dependency>>,
+}
+
+impl ConflictCache {
+ pub fn new() -> ConflictCache {
+ ConflictCache {
+ con_from_dep: HashMap::new(),
+ dep_from_pid: HashMap::new(),
+ }
+ }
+ pub fn find(
+ &self,
+ dep: &Dependency,
+ is_active: &impl Fn(PackageId) -> Option<usize>,
+ must_contain: Option<PackageId>,
+ max_age: usize,
+ ) -> Option<&ConflictMap> {
+ self.con_from_dep
+ .get(dep)?
+ .find(is_active, must_contain, max_age)
+ .map(|(c, _)| c)
+ }
+ /// Finds any known set of conflicts, if any,
+ /// which are activated in `cx` and contain the `PackageId` specified.
+ /// If more than one are activated, then it will return
+ /// one that will allow for the most jump-back.
+ pub fn find_conflicting(
+ &self,
+ cx: &Context,
+ dep: &Dependency,
+ must_contain: Option<PackageId>,
+ ) -> Option<&ConflictMap> {
+ let out = self.find(dep, &|id| cx.is_active(id), must_contain, usize::MAX);
+ if cfg!(debug_assertions) {
+ if let Some(c) = &out {
+ assert!(cx.is_conflicting(None, c).is_some());
+ if let Some(f) = must_contain {
+ assert!(c.contains_key(&f));
+ }
+ }
+ }
+ out
+ }
+ pub fn conflicting(&self, cx: &Context, dep: &Dependency) -> Option<&ConflictMap> {
+ self.find_conflicting(cx, dep, None)
+ }
+
+ /// Adds to the cache a conflict of the form:
+ /// `dep` is known to be unresolvable if
+ /// all the `PackageId` entries are activated.
+ pub fn insert(&mut self, dep: &Dependency, con: &ConflictMap) {
+ if con.values().any(|c| c.is_public_dependency()) {
+ // TODO: needs more info for back jumping
+ // for now refuse to cache it.
+ return;
+ }
+ self.con_from_dep
+ .entry(dep.clone())
+ .or_insert_with(|| ConflictStoreTrie::Node(BTreeMap::new()))
+ .insert(con.keys().cloned(), con.clone());
+
+ trace!(
+ "{} = \"{}\" adding a skip {:?}",
+ dep.package_name(),
+ dep.version_req(),
+ con
+ );
+
+ for c in con.keys() {
+ self.dep_from_pid
+ .entry(*c)
+ .or_insert_with(HashSet::new)
+ .insert(dep.clone());
+ }
+ }
+
+ pub fn dependencies_conflicting_with(&self, pid: PackageId) -> Option<&HashSet<Dependency>> {
+ self.dep_from_pid.get(&pid)
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/resolver/context.rs b/src/tools/cargo/src/cargo/core/resolver/context.rs
new file mode 100644
index 000000000..4854dcde7
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/resolver/context.rs
@@ -0,0 +1,441 @@
+use super::dep_cache::RegistryQueryer;
+use super::errors::ActivateResult;
+use super::types::{ConflictMap, ConflictReason, FeaturesSet, ResolveOpts};
+use super::RequestedFeatures;
+use crate::core::{Dependency, PackageId, SourceId, Summary};
+use crate::util::interning::InternedString;
+use crate::util::Graph;
+use anyhow::format_err;
+use log::debug;
+use std::collections::HashMap;
+use std::num::NonZeroU64;
+
+pub use super::encode::Metadata;
+pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
+pub use super::resolve::Resolve;
+
+// A `Context` is basically a bunch of local resolution information which is
+// kept around for all `BacktrackFrame` instances. As a result, this runs the
+// risk of being cloned *a lot* so we want to make this as cheap to clone as
+// possible.
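+//
+// As a sketch of why the `im_rc` collections below matter here: they clone by
+// sharing structure instead of copying, so cloning a `Context` stays cheap
+// (illustrative only, not Cargo-specific code):
+//
+//     let a: im_rc::HashMap<&str, u32> = im_rc::HashMap::new();
+//     let b = a.update("age", 1); // `a` is untouched; `b` shares its nodes
+//     assert_eq!(a.get("age"), None);
+//     assert_eq!(b.get("age"), Some(&1));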
+#[derive(Clone)]
+pub struct Context {
+ pub age: ContextAge,
+ pub activations: Activations,
+ /// list the features that are activated for each package
+ pub resolve_features: im_rc::HashMap<PackageId, FeaturesSet>,
+ /// get the package that will be linking to a native library by its links attribute
+ pub links: im_rc::HashMap<InternedString, PackageId>,
+ /// for each package the list of names it can see,
+ /// then for each name the exact version that name represents and whether the name is public.
+ pub public_dependency: Option<PublicDependency>,
+
+ /// a way to look up, for a package in `activations`, which packages required it
+ /// and all of the exact deps that it fulfilled.
+ pub parents: Graph<PackageId, im_rc::HashSet<Dependency>>,
+}
+
+/// When backtracking it can be useful to know how far back to go.
+/// The `ContextAge` of a `Context` is a monotonically increasing counter of the number
+/// of decisions made to get to this state.
+/// Several structures store the `ContextAge` when it was added,
+/// to be used in `find_candidate` for backtracking.
+pub type ContextAge = usize;
+
+/// Find the activated version of a crate based on the name, source, and semver compatibility.
+/// By storing this in a hash map we ensure that there is only one
+/// semver compatible version of each crate.
+/// This also stores the `ContextAge`.
+pub type ActivationsKey = (InternedString, SourceId, SemverCompatibility);
+pub type Activations = im_rc::HashMap<ActivationsKey, (Summary, ContextAge)>;
+
+/// A type that represents when cargo treats two Versions as compatible.
+/// Versions `a` and `b` are compatible if their left-most nonzero version
+/// component is the same.
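+///
+/// A sketch of the intended grouping (using the `From<&semver::Version>` impl
+/// below):
+///
+/// ```text
+/// let a = SemverCompatibility::from(&semver::Version::new(1, 2, 3));
+/// let b = SemverCompatibility::from(&semver::Version::new(1, 9, 0));
+/// assert_eq!(a, b); // both are Major(1)
+/// let c = SemverCompatibility::from(&semver::Version::new(0, 2, 3));
+/// let d = SemverCompatibility::from(&semver::Version::new(0, 3, 0));
+/// assert_ne!(c, d); // Minor(2) vs. Minor(3)
+/// ```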
+#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug, PartialOrd, Ord)]
+pub enum SemverCompatibility {
+ Major(NonZeroU64),
+ Minor(NonZeroU64),
+ Patch(u64),
+}
+
+impl From<&semver::Version> for SemverCompatibility {
+ fn from(ver: &semver::Version) -> Self {
+ if let Some(m) = NonZeroU64::new(ver.major) {
+ return SemverCompatibility::Major(m);
+ }
+ if let Some(m) = NonZeroU64::new(ver.minor) {
+ return SemverCompatibility::Minor(m);
+ }
+ SemverCompatibility::Patch(ver.patch)
+ }
+}
+
+impl PackageId {
+ pub fn as_activations_key(self) -> ActivationsKey {
+ (self.name(), self.source_id(), self.version().into())
+ }
+}
+
+impl Context {
+ pub fn new(check_public_visible_dependencies: bool) -> Context {
+ Context {
+ age: 0,
+ resolve_features: im_rc::HashMap::new(),
+ links: im_rc::HashMap::new(),
+ public_dependency: if check_public_visible_dependencies {
+ Some(PublicDependency::new())
+ } else {
+ None
+ },
+ parents: Graph::new(),
+ activations: im_rc::HashMap::new(),
+ }
+ }
+
+ /// Activate this summary by inserting it into our list of known activations.
+ ///
+ /// The `parent` passed in here is the parent summary/dependency edge which
+ /// caused `summary` to get activated. This may not be present for the root
+ /// crate, for example.
+ ///
+ /// Returns `true` if this summary with the given features is already activated.
+ pub fn flag_activated(
+ &mut self,
+ summary: &Summary,
+ opts: &ResolveOpts,
+ parent: Option<(&Summary, &Dependency)>,
+ ) -> ActivateResult<bool> {
+ let id = summary.package_id();
+ let age: ContextAge = self.age;
+ match self.activations.entry(id.as_activations_key()) {
+ im_rc::hashmap::Entry::Occupied(o) => {
+ debug_assert_eq!(
+ &o.get().0,
+ summary,
+ "cargo does not allow two semver compatible versions"
+ );
+ }
+ im_rc::hashmap::Entry::Vacant(v) => {
+ if let Some(link) = summary.links() {
+ if self.links.insert(link, id).is_some() {
+ return Err(format_err!(
+ "Attempting to resolve a dependency with more than \
+ one crate with links={}.\nThis will not build as \
+ is. Consider rebuilding the .lock file.",
+ &*link
+ )
+ .into());
+ }
+ }
+ v.insert((summary.clone(), age));
+
+ // If we've got a parent dependency which activated us, *and*
+ // the dependency has a different source id listed than the
+ // `summary` itself, then things get interesting. This basically
+ // means that a `[patch]` was used to augment `dep.source_id()`
+ // with `summary`.
+ //
+ // In this scenario we want to consider the activation key, as
+ // viewed from the perspective of `dep.source_id()`, as being
+ // fulfilled. This means that we need to add a second entry in
+ // the activations map for the source that was patched, in
+ // addition to the source of the actual `summary` itself.
+ //
+ // Without this it would be possible to have both 1.0.0 and
+ // 1.1.0 "from crates.io" in a dependency graph if one of those
+ // versions came from a `[patch]` source.
+ if let Some((_, dep)) = parent {
+ if dep.source_id() != id.source_id() {
+ let key = (id.name(), dep.source_id(), id.version().into());
+ let prev = self.activations.insert(key, (summary.clone(), age));
+ if let Some((previous_summary, _)) = prev {
+ return Err(
+ (previous_summary.package_id(), ConflictReason::Semver).into()
+ );
+ }
+ }
+ }
+
+ return Ok(false);
+ }
+ }
+ debug!("checking if {} is already activated", summary.package_id());
+ match &opts.features {
+ // This returns `false` for CliFeatures just for simplicity. It
+ // would take a bit of work to compare since they are not in the
+ // same format as DepFeatures (and that may be expensive
+ // performance-wise). Also, it should only occur once for a root
+ // package. The only drawback is that it may re-activate a root
+ // package again, which should only affect performance, but that
+ // should be rare. Cycles should still be detected since those
+ // will have `DepFeatures` edges.
+ RequestedFeatures::CliFeatures(_) => Ok(false),
+ RequestedFeatures::DepFeatures {
+ features,
+ uses_default_features,
+ } => {
+ let has_default_feature = summary.features().contains_key("default");
+ Ok(match self.resolve_features.get(&id) {
+ Some(prev) => {
+ features.is_subset(prev)
+ && (!uses_default_features
+ || prev.contains("default")
+ || !has_default_feature)
+ }
+ None => features.is_empty() && (!uses_default_features || !has_default_feature),
+ })
+ }
+ }
+ }
+
+ /// If the package is active returns the `ContextAge` when it was added
+ pub fn is_active(&self, id: PackageId) -> Option<ContextAge> {
+ self.activations
+ .get(&id.as_activations_key())
+ .and_then(|(s, l)| if s.package_id() == id { Some(*l) } else { None })
+ }
+
+ /// If the conflict reason on the package still applies returns the `ContextAge` when it was added
+ pub fn still_applies(&self, id: PackageId, reason: &ConflictReason) -> Option<ContextAge> {
+ self.is_active(id).and_then(|mut max| {
+ match reason {
+ ConflictReason::PublicDependency(name) => {
+ if &id == name {
+ return Some(max);
+ }
+ max = std::cmp::max(max, self.is_active(*name)?);
+ max = std::cmp::max(
+ max,
+ self.public_dependency
+ .as_ref()
+ .unwrap()
+ .can_see_item(*name, id)?,
+ );
+ }
+ ConflictReason::PubliclyExports(name) => {
+ if &id == name {
+ return Some(max);
+ }
+ max = std::cmp::max(max, self.is_active(*name)?);
+ max = std::cmp::max(
+ max,
+ self.public_dependency
+ .as_ref()
+ .unwrap()
+ .publicly_exports_item(*name, id)?,
+ );
+ }
+ _ => {}
+ }
+ Some(max)
+ })
+ }
+
+ /// Checks whether all of `parent` and the keys of `conflicting activations`
+ /// are still active.
+ /// If so returns the `ContextAge` when the newest one was added.
+ pub fn is_conflicting(
+ &self,
+ parent: Option<PackageId>,
+ conflicting_activations: &ConflictMap,
+ ) -> Option<usize> {
+ let mut max = 0;
+ if let Some(parent) = parent {
+ max = std::cmp::max(max, self.is_active(parent)?);
+ }
+
+ for (id, reason) in conflicting_activations.iter() {
+ max = std::cmp::max(max, self.still_applies(*id, reason)?);
+ }
+ Some(max)
+ }
+
+ pub fn resolve_replacements(
+ &self,
+ registry: &RegistryQueryer<'_>,
+ ) -> HashMap<PackageId, PackageId> {
+ self.activations
+ .values()
+ .filter_map(|(s, _)| registry.used_replacement_for(s.package_id()))
+ .collect()
+ }
+
+ pub fn graph(&self) -> Graph<PackageId, std::collections::HashSet<Dependency>> {
+ let mut graph: Graph<PackageId, std::collections::HashSet<Dependency>> = Graph::new();
+ self.activations
+ .values()
+ .for_each(|(r, _)| graph.add(r.package_id()));
+ for i in self.parents.iter() {
+ graph.add(*i);
+ for (o, e) in self.parents.edges(i) {
+ let old_link = graph.link(*o, *i);
+ assert!(old_link.is_empty());
+ *old_link = e.iter().cloned().collect();
+ }
+ }
+ graph
+ }
+}
+
+impl Graph<PackageId, im_rc::HashSet<Dependency>> {
+ pub fn parents_of(&self, p: PackageId) -> impl Iterator<Item = (PackageId, bool)> + '_ {
+ self.edges(&p)
+ .map(|(grand, d)| (*grand, d.iter().any(|x| x.is_public())))
+ }
+}
+
+#[derive(Clone, Debug, Default)]
+pub struct PublicDependency {
+ /// For each active package the set of all the names it can see,
+ /// for each name the exact package that name resolves to,
+ /// the `ContextAge` when it was first visible,
+ /// and the `ContextAge` when it was first exported.
+ inner: im_rc::HashMap<
+ PackageId,
+ im_rc::HashMap<InternedString, (PackageId, ContextAge, Option<ContextAge>)>,
+ >,
+}
+
+impl PublicDependency {
+ fn new() -> Self {
+ PublicDependency {
+ inner: im_rc::HashMap::new(),
+ }
+ }
+ fn publicly_exports(&self, candidate_pid: PackageId) -> Vec<PackageId> {
+ self.inner
+ .get(&candidate_pid) // if we have seen it before
+ .iter()
+ .flat_map(|x| x.values()) // all the things we have stored
+ .filter(|x| x.2.is_some()) // as publicly exported
+ .map(|x| x.0)
+ .chain(Some(candidate_pid)) // but even if not we know that everything exports itself
+ .collect()
+ }
+ fn publicly_exports_item(
+ &self,
+ candidate_pid: PackageId,
+ target: PackageId,
+ ) -> Option<ContextAge> {
+ debug_assert_ne!(candidate_pid, target);
+ let out = self
+ .inner
+ .get(&candidate_pid)
+ .and_then(|names| names.get(&target.name()))
+ .filter(|(p, _, _)| *p == target)
+ .and_then(|(_, _, age)| *age);
+ debug_assert_eq!(
+ out.is_some(),
+ self.publicly_exports(candidate_pid).contains(&target)
+ );
+ out
+ }
+ pub fn can_see_item(&self, candidate_pid: PackageId, target: PackageId) -> Option<ContextAge> {
+ self.inner
+ .get(&candidate_pid)
+ .and_then(|names| names.get(&target.name()))
+ .filter(|(p, _, _)| *p == target)
+ .map(|(_, age, _)| *age)
+ }
+ pub fn add_edge(
+ &mut self,
+ candidate_pid: PackageId,
+ parent_pid: PackageId,
+ is_public: bool,
+ age: ContextAge,
+ parents: &Graph<PackageId, im_rc::HashSet<Dependency>>,
+ ) {
+ // one tricky part is that `candidate_pid` may already be active and
+ // have public dependencies of its own. So we not only need to mark
+ // `candidate_pid` as visible to its parents but also all of its existing
+ // publicly exported dependencies.
+ for c in self.publicly_exports(candidate_pid) {
+ // for each (transitive) parent that can newly see `c`
+ let mut stack = vec![(parent_pid, is_public)];
+ while let Some((p, public)) = stack.pop() {
+ match self.inner.entry(p).or_default().entry(c.name()) {
+ im_rc::hashmap::Entry::Occupied(mut o) => {
+ // the (transitive) parent can already see something by `c`s name, it had better be `c`.
+ assert_eq!(o.get().0, c);
+ if o.get().2.is_some() {
+ // The previous time the parent saw `c`, it was a public dependency.
+ // So all of its parents already know about `c`
+ // and we can save some time by stopping now.
+ continue;
+ }
+ if public {
+ // Mark that `c` has now been seen publicly
+ let old_age = o.get().1;
+ o.insert((c, old_age, if public { Some(age) } else { None }));
+ }
+ }
+ im_rc::hashmap::Entry::Vacant(v) => {
+ // The (transitive) parent does not have anything by `c`s name,
+ // so we add `c`.
+ v.insert((c, age, if public { Some(age) } else { None }));
+ }
+ }
+ // if `candidate_pid` was a private dependency of `p` then `p`'s parents can't see `c` through `p`
+ if public {
+ // if it was public, then we add all of `p`'s parents to be checked
+ stack.extend(parents.parents_of(p));
+ }
+ }
+ }
+ }
+ pub fn can_add_edge(
+ &self,
+ b_id: PackageId,
+ parent: PackageId,
+ is_public: bool,
+ parents: &Graph<PackageId, im_rc::HashSet<Dependency>>,
+ ) -> Result<
+ (),
+ (
+ ((PackageId, ConflictReason), (PackageId, ConflictReason)),
+ Option<(PackageId, ConflictReason)>,
+ ),
+ > {
+ // one tricky part is that `candidate_pid` may already be active and
+ // have public dependencies of its own. So we not only need to check
+ // `b_id` as visible to its parents but also all of its existing
+ // publicly exported dependencies.
+ for t in self.publicly_exports(b_id) {
+ // for each (transitive) parent that can newly see `t`
+ let mut stack = vec![(parent, is_public)];
+ while let Some((p, public)) = stack.pop() {
+ // TODO: don't look at the same thing more than once
+ if let Some(o) = self.inner.get(&p).and_then(|x| x.get(&t.name())) {
+ if o.0 != t {
+ // the (transitive) parent can already see a different version by `t`s name.
+ // So, adding `b` will cause `p` to have a public dependency conflict on `t`.
+ return Err((
+ (o.0, ConflictReason::PublicDependency(p)), // p can see the other version and
+ (parent, ConflictReason::PublicDependency(p)), // p can see us
+ ))
+ .map_err(|e| {
+ if t == b_id {
+ (e, None)
+ } else {
+ (e, Some((t, ConflictReason::PubliclyExports(b_id))))
+ }
+ });
+ }
+ if o.2.is_some() {
+ // The previous time the parent saw `t`, it was a public dependency.
+ // So all of its parents already know about `t`
+ // and we can save some time by stopping now.
+ continue;
+ }
+ }
+ // if `b` was a private dependency of `p` then `p`'s parents can't see `t` through `p`
+ if public {
+ // if it was public, then we add all of `p`'s parents to be checked
+ stack.extend(parents.parents_of(p));
+ }
+ }
+ }
+ Ok(())
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/resolver/dep_cache.rs b/src/tools/cargo/src/cargo/core/resolver/dep_cache.rs
new file mode 100644
index 000000000..4fd275385
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/resolver/dep_cache.rs
@@ -0,0 +1,579 @@
+//! There are 2 sources of facts for the resolver:
+//!
+//! - The `Registry` tells us for a `Dependency` what versions are available to fulfil it.
+//! - The `Summary` tells us for a version (and features) what dependencies need to be fulfilled for it to be activated.
+//!
+//! These constitute immutable facts, the solid ground truth that all other inference depends on.
+//! Theoretically this could all be enumerated ahead of time, but we want to be lazy and only
+//! look up things we need to. The compromise is to cache the results as they are computed.
+//!
+//! This module implements that cache in all its gory detail.
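+//!
+//! The shape of that caching is the usual memoize-by-key pattern; a minimal
+//! sketch (the names here are placeholders, not the actual fields below):
+//!
+//! ```text
+//! let mut cache: HashMap<Dependency, Rc<Vec<Summary>>> = HashMap::new();
+//! let candidates = cache
+//!     .entry(dep.clone())
+//!     .or_insert_with(|| Rc::new(query_the_registry(&dep)))
+//!     .clone();
+//! ```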
+
+use crate::core::resolver::context::Context;
+use crate::core::resolver::errors::describe_path_in_context;
+use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesSet};
+use crate::core::resolver::{
+ ActivateError, ActivateResult, CliFeatures, RequestedFeatures, ResolveOpts, VersionOrdering,
+ VersionPreferences,
+};
+use crate::core::{
+ Dependency, FeatureValue, PackageId, PackageIdSpec, QueryKind, Registry, Summary,
+};
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+
+use anyhow::Context as _;
+use log::debug;
+use std::collections::{BTreeSet, HashMap, HashSet};
+use std::rc::Rc;
+use std::task::Poll;
+
+pub struct RegistryQueryer<'a> {
+ pub registry: &'a mut (dyn Registry + 'a),
+ replacements: &'a [(PackageIdSpec, Dependency)],
+ version_prefs: &'a VersionPreferences,
+ /// If set the list of dependency candidates will be sorted by minimal
+ /// versions first. That allows `cargo update -Z minimal-versions` which will
+ /// specify minimum dependency versions to be used.
+ minimal_versions: bool,
+ /// a cache of `Candidate`s that fulfil a `Dependency` (and whether `first_minimal_version`)
+ registry_cache: HashMap<(Dependency, bool), Poll<Rc<Vec<Summary>>>>,
+ /// a cache of `Dependency`s that are required for a `Summary`
+ ///
+ /// HACK: `first_minimal_version` is not kept in the cache key as it is 1:1 with
+ /// `parent.is_none()` (the first element of the cache key) and doesn't change through
+ /// execution.
+ summary_cache: HashMap<
+ (Option<PackageId>, Summary, ResolveOpts),
+ (Rc<(HashSet<InternedString>, Rc<Vec<DepInfo>>)>, bool),
+ >,
+ /// all the cases we ended up using a supplied replacement
+ used_replacements: HashMap<PackageId, Summary>,
+}
+
+impl<'a> RegistryQueryer<'a> {
+ pub fn new(
+ registry: &'a mut dyn Registry,
+ replacements: &'a [(PackageIdSpec, Dependency)],
+ version_prefs: &'a VersionPreferences,
+ minimal_versions: bool,
+ ) -> Self {
+ RegistryQueryer {
+ registry,
+ replacements,
+ version_prefs,
+ minimal_versions,
+ registry_cache: HashMap::new(),
+ summary_cache: HashMap::new(),
+ used_replacements: HashMap::new(),
+ }
+ }
+
+ pub fn reset_pending(&mut self) -> bool {
+ let mut all_ready = true;
+ self.registry_cache.retain(|_, r| {
+ if !r.is_ready() {
+ all_ready = false;
+ }
+ r.is_ready()
+ });
+ self.summary_cache.retain(|_, (_, r)| {
+ if !*r {
+ all_ready = false;
+ }
+ *r
+ });
+ all_ready
+ }
+
+ pub fn used_replacement_for(&self, p: PackageId) -> Option<(PackageId, PackageId)> {
+ self.used_replacements.get(&p).map(|r| (p, r.package_id()))
+ }
+
+ pub fn replacement_summary(&self, p: PackageId) -> Option<&Summary> {
+ self.used_replacements.get(&p)
+ }
+
+ /// Queries the `registry` to return a list of candidates for `dep`.
+ ///
+ /// This method is the location where overrides are taken into account. If
+ /// any candidates are returned which match an override then the override is
+ /// applied by performing a second query for what the override should
+ /// return.
+ pub fn query(
+ &mut self,
+ dep: &Dependency,
+ first_minimal_version: bool,
+ ) -> Poll<CargoResult<Rc<Vec<Summary>>>> {
+ let registry_cache_key = (dep.clone(), first_minimal_version);
+ if let Some(out) = self.registry_cache.get(&registry_cache_key).cloned() {
+ return out.map(Result::Ok);
+ }
+
+ let mut ret = Vec::new();
+ let ready = self.registry.query(dep, QueryKind::Exact, &mut |s| {
+ ret.push(s);
+ })?;
+ if ready.is_pending() {
+ self.registry_cache
+ .insert((dep.clone(), first_minimal_version), Poll::Pending);
+ return Poll::Pending;
+ }
+ for summary in ret.iter() {
+ let mut potential_matches = self
+ .replacements
+ .iter()
+ .filter(|&&(ref spec, _)| spec.matches(summary.package_id()));
+
+ let &(ref spec, ref dep) = match potential_matches.next() {
+ None => continue,
+ Some(replacement) => replacement,
+ };
+ debug!(
+ "found an override for {} {}",
+ dep.package_name(),
+ dep.version_req()
+ );
+
+ let mut summaries = match self.registry.query_vec(dep, QueryKind::Exact)? {
+ Poll::Ready(s) => s.into_iter(),
+ Poll::Pending => {
+ self.registry_cache
+ .insert((dep.clone(), first_minimal_version), Poll::Pending);
+ return Poll::Pending;
+ }
+ };
+ let s = summaries.next().ok_or_else(|| {
+ anyhow::format_err!(
+ "no matching package for override `{}` found\n\
+ location searched: {}\n\
+ version required: {}",
+ spec,
+ dep.source_id(),
+ dep.version_req()
+ )
+ })?;
+ let summaries = summaries.collect::<Vec<_>>();
+ if !summaries.is_empty() {
+ let bullets = summaries
+ .iter()
+ .map(|s| format!(" * {}", s.package_id()))
+ .collect::<Vec<_>>();
+ return Poll::Ready(Err(anyhow::anyhow!(
+ "the replacement specification `{}` matched \
+ multiple packages:\n * {}\n{}",
+ spec,
+ s.package_id(),
+ bullets.join("\n")
+ )));
+ }
+
+ // The dependency should be hard-coded to have the same name and an
+ // exact version requirement, so both of these assertions should
+ // never fail.
+ assert_eq!(s.version(), summary.version());
+ assert_eq!(s.name(), summary.name());
+
+ let replace = if s.source_id() == summary.source_id() {
+ debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s);
+ None
+ } else {
+ Some(s)
+ };
+ let matched_spec = spec.clone();
+
+ // Make sure no duplicates
+ if let Some(&(ref spec, _)) = potential_matches.next() {
+ return Poll::Ready(Err(anyhow::anyhow!(
+ "overlapping replacement specifications found:\n\n \
+ * {}\n * {}\n\nboth specifications match: {}",
+ matched_spec,
+ spec,
+ summary.package_id()
+ )));
+ }
+
+ for dep in summary.dependencies() {
+ debug!("\t{} => {}", dep.package_name(), dep.version_req());
+ }
+ if let Some(r) = replace {
+ self.used_replacements.insert(summary.package_id(), r);
+ }
+ }
+
+ // When we attempt versions for a package we'll want to do so in a sorted fashion to pick
+ // the "best candidates" first. VersionPreferences implements this notion.
+ let ordering = if first_minimal_version || self.minimal_versions {
+ VersionOrdering::MinimumVersionsFirst
+ } else {
+ VersionOrdering::MaximumVersionsFirst
+ };
+ let first_version = first_minimal_version;
+ self.version_prefs
+ .sort_summaries(&mut ret, ordering, first_version);
+
+ let out = Poll::Ready(Rc::new(ret));
+
+ self.registry_cache.insert(registry_cache_key, out.clone());
+
+ out.map(Result::Ok)
+ }
+
+ /// Find out what dependencies will be added by activating `candidate`,
+ /// with features described in `opts`. Then look up in the `registry`
+ /// the candidates that will fulfil each of these dependencies, as it is the
+ /// next obvious question.
+ pub fn build_deps(
+ &mut self,
+ cx: &Context,
+ parent: Option<PackageId>,
+ candidate: &Summary,
+ opts: &ResolveOpts,
+ first_minimal_version: bool,
+ ) -> ActivateResult<Rc<(HashSet<InternedString>, Rc<Vec<DepInfo>>)>> {
+ // if we have calculated a result before, then we can just return it,
+ // as it is a "pure" query of its arguments.
+ if let Some(out) = self
+ .summary_cache
+ .get(&(parent, candidate.clone(), opts.clone()))
+ {
+ return Ok(out.0.clone());
+ }
+ // First, figure out our set of dependencies based on the requested set
+ // of features. This also calculates what features we're going to enable
+ // for our own dependencies.
+ let (used_features, deps) = resolve_features(parent, candidate, opts)?;
+
+ // Next, transform all dependencies into a list of possible candidates
+ // which can satisfy that dependency.
+ let mut all_ready = true;
+ let mut deps = deps
+ .into_iter()
+ .filter_map(
+ |(dep, features)| match self.query(&dep, first_minimal_version) {
+ Poll::Ready(Ok(candidates)) => Some(Ok((dep, candidates, features))),
+ Poll::Pending => {
+ all_ready = false;
+ // we can ignore Pending deps, resolve will be repeatedly called
+ // until there are none to ignore
+ None
+ }
+ Poll::Ready(Err(e)) => Some(Err(e).with_context(|| {
+ format!(
+ "failed to get `{}` as a dependency of {}",
+ dep.package_name(),
+ describe_path_in_context(cx, &candidate.package_id()),
+ )
+ })),
+ },
+ )
+ .collect::<CargoResult<Vec<DepInfo>>>()?;
+
+ // Attempt to resolve dependencies with fewer candidates before trying
+ // dependencies with more candidates. This way if the dependency with
+ // only one candidate can't be resolved we don't have to do a bunch of
+ // work before we figure that out.
+ deps.sort_by_key(|&(_, ref a, _)| a.len());
+
+ let out = Rc::new((used_features, Rc::new(deps)));
+
+ // If we succeed we add the result to the cache so we can use it again next time.
+ // We don't cache the failure cases as they don't impl Clone.
+ self.summary_cache.insert(
+ (parent, candidate.clone(), opts.clone()),
+ (out.clone(), all_ready),
+ );
+
+ Ok(out)
+ }
+}
+
+/// Returns the features we ended up using and
+/// all dependencies and the features we want from each of them.
+pub fn resolve_features<'b>(
+ parent: Option<PackageId>,
+ s: &'b Summary,
+ opts: &'b ResolveOpts,
+) -> ActivateResult<(HashSet<InternedString>, Vec<(Dependency, FeaturesSet)>)> {
+ // First, filter by dev-dependencies.
+ let deps = s.dependencies();
+ let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps);
+
+ let reqs = build_requirements(parent, s, opts)?;
+ let mut ret = Vec::new();
+ let default_dep = BTreeSet::new();
+ let mut valid_dep_names = HashSet::new();
+
+ // Next, collect all actually enabled dependencies and their features.
+ for dep in deps {
+ // Skip optional dependencies, but not those enabled through a
+ // feature
+ if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) {
+ continue;
+ }
+ valid_dep_names.insert(dep.name_in_toml());
+ // So we want this dependency. Move the features we want from
+ // `feature_deps` to `ret` and register ourselves as using this
+ // name.
+ let mut base = reqs
+ .deps
+ .get(&dep.name_in_toml())
+ .unwrap_or(&default_dep)
+ .clone();
+ base.extend(dep.features().iter());
+ ret.push((dep.clone(), Rc::new(base)));
+ }
+
+ // This is a special case for command-line `--features
+ // dep_name/feat_name` where `dep_name` does not exist. All other
+ // validation is done either in `build_requirements` or
+ // `build_feature_map`.
+ if parent.is_none() {
+ for dep_name in reqs.deps.keys() {
+ if !valid_dep_names.contains(dep_name) {
+ let e = RequirementError::MissingDependency(*dep_name);
+ return Err(e.into_activate_error(parent, s));
+ }
+ }
+ }
+
+ Ok((reqs.into_features(), ret))
+}
+
+/// Takes requested features for a single package from the input `ResolveOpts` and
+/// recurses to find all requested features, dependencies and requested
+/// dependency features in a `Requirements` object, returning it to the resolver.
+fn build_requirements<'a, 'b: 'a>(
+ parent: Option<PackageId>,
+ s: &'a Summary,
+ opts: &'b ResolveOpts,
+) -> ActivateResult<Requirements<'a>> {
+ let mut reqs = Requirements::new(s);
+
+ let handle_default = |uses_default_features, reqs: &mut Requirements<'_>| {
+ if uses_default_features && s.features().contains_key("default") {
+ if let Err(e) = reqs.require_feature(InternedString::new("default")) {
+ return Err(e.into_activate_error(parent, s));
+ }
+ }
+ Ok(())
+ };
+
+ match &opts.features {
+ RequestedFeatures::CliFeatures(CliFeatures {
+ features,
+ all_features,
+ uses_default_features,
+ }) => {
+ if *all_features {
+ for key in s.features().keys() {
+ if let Err(e) = reqs.require_feature(*key) {
+ return Err(e.into_activate_error(parent, s));
+ }
+ }
+ }
+
+ for fv in features.iter() {
+ if let Err(e) = reqs.require_value(fv) {
+ return Err(e.into_activate_error(parent, s));
+ }
+ }
+ handle_default(*uses_default_features, &mut reqs)?;
+ }
+ RequestedFeatures::DepFeatures {
+ features,
+ uses_default_features,
+ } => {
+ for feature in features.iter() {
+ if let Err(e) = reqs.require_feature(*feature) {
+ return Err(e.into_activate_error(parent, s));
+ }
+ }
+ handle_default(*uses_default_features, &mut reqs)?;
+ }
+ }
+
+ Ok(reqs)
+}
+
+/// Set of feature and dependency requirements for a package.
+#[derive(Debug)]
+struct Requirements<'a> {
+ summary: &'a Summary,
+ /// The deps map is a mapping of dependency name to list of features enabled.
+ ///
+ /// The resolver will activate all of these dependencies, with the given
+ /// features enabled.
+ deps: HashMap<InternedString, BTreeSet<InternedString>>,
+ /// The set of features enabled on this package which is later used when
+ /// compiling to instruct the code what features were enabled.
+ features: HashSet<InternedString>,
+}
+
+/// An error for a requirement.
+///
+/// This will later be converted to an `ActivateError` depending on whether or
+/// not this is a dependency or a root package.
+enum RequirementError {
+ /// The package does not have the requested feature.
+ MissingFeature(InternedString),
+ /// The package does not have the requested dependency.
+ MissingDependency(InternedString),
+ /// A feature has a direct cycle to itself.
+ ///
+ /// Note that cycles through multiple features are allowed (but perhaps
+ /// they shouldn't be?).
+ Cycle(InternedString),
+}
+
+impl Requirements<'_> {
+ fn new(summary: &Summary) -> Requirements<'_> {
+ Requirements {
+ summary,
+ deps: HashMap::new(),
+ features: HashSet::new(),
+ }
+ }
+
+ fn into_features(self) -> HashSet<InternedString> {
+ self.features
+ }
+
+ fn require_dep_feature(
+ &mut self,
+ package: InternedString,
+ feat: InternedString,
+ weak: bool,
+ ) -> Result<(), RequirementError> {
+ // If `package` is indeed an optional dependency then we activate the
+ // feature named `package`, but otherwise if `package` is a required
+ // dependency then there's no feature associated with it.
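+ //
+ // For example (manifest sketch): with `serde = { version = "1", optional = true }`
+ // and `json = ["serde/derive"]`, requiring `json` also activates the implicit
+ // `serde` feature. The weak form `json = ["serde?/derive"]` skips that
+ // activation here; weak features are narrowed later in the feature resolver.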
+ if !weak
+ && self
+ .summary
+ .dependencies()
+ .iter()
+ .any(|dep| dep.name_in_toml() == package && dep.is_optional())
+ {
+ self.require_feature(package)?;
+ }
+ self.deps.entry(package).or_default().insert(feat);
+ Ok(())
+ }
+
+ fn require_dependency(&mut self, pkg: InternedString) {
+ self.deps.entry(pkg).or_default();
+ }
+
+ fn require_feature(&mut self, feat: InternedString) -> Result<(), RequirementError> {
+ if !self.features.insert(feat) {
+ // Already seen this feature.
+ return Ok(());
+ }
+
+ let fvs = match self.summary.features().get(&feat) {
+ Some(fvs) => fvs,
+ None => return Err(RequirementError::MissingFeature(feat)),
+ };
+
+ for fv in fvs {
+ if let FeatureValue::Feature(dep_feat) = fv {
+ if *dep_feat == feat {
+ return Err(RequirementError::Cycle(feat));
+ }
+ }
+ self.require_value(fv)?;
+ }
+ Ok(())
+ }
+
+ fn require_value(&mut self, fv: &FeatureValue) -> Result<(), RequirementError> {
+ match fv {
+ FeatureValue::Feature(feat) => self.require_feature(*feat)?,
+ FeatureValue::Dep { dep_name } => self.require_dependency(*dep_name),
+ FeatureValue::DepFeature {
+ dep_name,
+ dep_feature,
+ // Weak features are always activated in the dependency
+ // resolver. They will be narrowed inside the new feature
+ // resolver.
+ weak,
+ } => self.require_dep_feature(*dep_name, *dep_feature, *weak)?,
+ };
+ Ok(())
+ }
+}
+
+impl RequirementError {
+ fn into_activate_error(self, parent: Option<PackageId>, summary: &Summary) -> ActivateError {
+ match self {
+ RequirementError::MissingFeature(feat) => {
+ let deps: Vec<_> = summary
+ .dependencies()
+ .iter()
+ .filter(|dep| dep.name_in_toml() == feat)
+ .collect();
+ if deps.is_empty() {
+ return match parent {
+ None => ActivateError::Fatal(anyhow::format_err!(
+ "Package `{}` does not have the feature `{}`",
+ summary.package_id(),
+ feat
+ )),
+ Some(p) => ActivateError::Conflict(
+ p,
+ ConflictReason::MissingFeatures(feat.to_string()),
+ ),
+ };
+ }
+ if deps.iter().any(|dep| dep.is_optional()) {
+ match parent {
+ None => ActivateError::Fatal(anyhow::format_err!(
+ "Package `{}` does not have feature `{}`. It has an optional dependency \
+ with that name, but that dependency uses the \"dep:\" \
+ syntax in the features table, so it does not have an implicit feature with that name.",
+ summary.package_id(),
+ feat
+ )),
+ Some(p) => ActivateError::Conflict(
+ p,
+ ConflictReason::NonImplicitDependencyAsFeature(feat),
+ ),
+ }
+ } else {
+ match parent {
+ None => ActivateError::Fatal(anyhow::format_err!(
+ "Package `{}` does not have feature `{}`. It has a required dependency \
+ with that name, but only optional dependencies can be used as features.",
+ summary.package_id(),
+ feat
+ )),
+ Some(p) => ActivateError::Conflict(
+ p,
+ ConflictReason::RequiredDependencyAsFeature(feat),
+ ),
+ }
+ }
+ }
+ RequirementError::MissingDependency(dep_name) => {
+ match parent {
+ None => ActivateError::Fatal(anyhow::format_err!(
+ "package `{}` does not have a dependency named `{}`",
+ summary.package_id(),
+ dep_name
+ )),
+ // This code path currently isn't used, since `foo/bar`
+ // and `dep:` syntax is not allowed in a dependency.
+ Some(p) => ActivateError::Conflict(
+ p,
+ ConflictReason::MissingFeatures(dep_name.to_string()),
+ ),
+ }
+ }
+ RequirementError::Cycle(feat) => ActivateError::Fatal(anyhow::format_err!(
+ "cyclic feature dependency: feature `{}` depends on itself",
+ feat
+ )),
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/resolver/encode.rs b/src/tools/cargo/src/cargo/core/resolver/encode.rs
new file mode 100644
index 000000000..88d0d8296
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/resolver/encode.rs
@@ -0,0 +1,719 @@
+//! Definition of how to encode a `Resolve` into a TOML `Cargo.lock` file
+//!
+//! This module contains all machinery necessary to parse a `Resolve` from a
+//! `Cargo.lock` as well as serialize a `Resolve` to a `Cargo.lock`.
+//!
+//! ## Changing `Cargo.lock`
+//!
+//! In general Cargo is quite conservative about changing the format of
+//! `Cargo.lock`. Usage of new features in Cargo can change `Cargo.lock` at any
+//! time, but otherwise changing the serialization of `Cargo.lock` is a
+//! difficult operation that we typically avoid.
+//!
+//! The main problem with changing the format of `Cargo.lock` is that it can
+//! cause quite a bad experience for end users who use different versions of
+//! Cargo. If every PR to a project oscillates between the stable channel's
+//! encoding of Cargo.lock and the nightly channel's encoding then that's a
+//! pretty bad experience.
+//!
+//! We do, however, want to change `Cargo.lock` over time (and we have!). To do
+//! this the rules that we currently have are:
+//!
+//! * Add support for the new format to Cargo. This involves code changes in
+//! Cargo itself, likely by adding a new variant of `ResolveVersion` and
+//! branching on that where necessary. This is accompanied with tests in the
+//! `lockfile_compat` module.
+//!
+//! * Do not update `ResolveVersion::default()`. The new lockfile format will
+//! not be used yet.
+//!
+//! * Preserve the new format if found. This means that if Cargo finds the new
+//! version it'll keep using it, but otherwise it continues to use whatever
+//! format it previously found.
+//!
+//! * Wait a "long time". This is at least until the changes here hit stable
+//! Rust. Often though we wait a little longer to let the changes percolate
+//! into one or two older stable releases.
+//!
+//! * Change the return value of `ResolveVersion::default()` to the new format.
+//! This will cause new lock files to use the latest encoding as well as
+//! causing any operation which updates the lock file to update to the new
+//! format.
+//!
+//! This migration scheme in general means that we'll get *support* for a
+//! new format into Cargo ASAP, but it won't be exercised yet (except in Cargo's
+//! own tests). Eventually when stable/beta/nightly all have support for the new
+//! format (and maybe a few previous stable versions) we flip the switch.
+//! Projects on nightly will quickly start seeing changes, but
+//! stable/beta/nightly will all understand this new format and will preserve
+//! it.
+//!
+//! While this does mean that projects' `Cargo.lock` changes over time, it's
+//! typically a pretty minimal effort change that's just "check in what's
+//! there".
+//!
+//! ## Historical changes to `Cargo.lock`
+//!
+//! Listed from most recent to oldest, these are some of the changes we've made
+//! to `Cargo.lock`'s serialization format:
+//!
+//! * A `version` marker is now at the top of the lock file which is a way for
+//! super-old Cargos (at least since this was implemented) to give a formal
+//! error if they see a lock file from a super-future Cargo. Additionally as
+//! part of this change the encoding of `git` dependencies in lock files
+//! changed where `branch = "master"` is now encoded with `branch=master`
+//! instead of with nothing at all.
+//!
+//! * The entries in `dependencies` arrays have been shortened and the
+//! `checksum` field now shows up directly in `[[package]]` instead of always
+//! at the end of the file. The goal of this change was to ideally reduce
+//! merge conflicts being generated on `Cargo.lock`. Updating a version of a
+//! package now only updates two lines in the file, the checksum and the
+//! version number, most of the time. Dependency edges are specified in a
+//! compact form where possible where just the name is listed. The
+//! version/source on dependency edges are only listed if necessary to
+//! disambiguate which version or which source is in use.
+//!
+//! * A comment at the top of the file indicates that the file is a generated
+//! file and contains the special symbol `@generated` to indicate to common
+//! review tools that it's a generated file.
+//!
+//! * A `[root]` entry for the "root crate" has been removed and is instead now
+//! included in `[[package]]` like everything else.
+//!
+//! * All packages from registries contain a `checksum` which is a sha256
+//! checksum of the tarball the package is associated with. This is all stored
+//! in the `[metadata]` table of `Cargo.lock` which all versions of Cargo
+//! since 1.0 have preserved. The goal of this was to start recording
+//! checksums so mirror sources can be verified.
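+//!
+//! Put together, a present-day entry looks roughly like this (abbreviated and
+//! illustrative only; the package names are made up):
+//!
+//! ```text
+//! # This file is automatically @generated by Cargo.
+//! # It is not intended for manual editing.
+//! version = 3
+//!
+//! [[package]]
+//! name = "some-crate"
+//! version = "1.2.3"
+//! source = "registry+https://github.com/rust-lang/crates.io-index"
+//! checksum = "<sha256 of the .crate tarball>"
+//! dependencies = [
+//!  "some-dependency",
+//! ]
+//! ```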
+//!
+//! ## Other oddities about `Cargo.lock`
+//!
+//! There are a few other miscellaneous weird things about `Cargo.lock` that you
+//! may want to be aware of when reading this file:
+//!
+//! * All packages have a `source` listed to indicate where they come from. For
+//! `path` dependencies, however, no `source` is listed. There's no way we
+//! could emit a filesystem path name and have that be portable across
+//! systems, so all packages from a `path` are not listed with a `source`.
+//! Note that this also means that all packages with `path` sources must have
+//! unique names.
+//!
+//! * The `[metadata]` table in `Cargo.lock` is intended to be a generic mapping
+//! of strings to strings that's simply preserved by Cargo. This was a very
+//! early effort to be forward compatible against changes to `Cargo.lock`'s
+//! format. Nowadays this is generally considered a bad idea, though, and apart
+//! from the historical `checksum`s we don't really use it. Using it is not
+//! recommended.
+//!
+//! * The actual on-disk serialization lives in `src/cargo/ops/lockfile.rs`,
+//!   which renders a `toml::Value` in a special fashion to make sure we have
+//!   strict control over the on-disk format.
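+//!
+//! For reference, the historical V1-style `[metadata]` checksum entries
+//! described above looked roughly like the following (hypothetical package
+//! shown): the key is a serialized package id prefixed with `checksum ` and
+//! the value is either the sha256 hash or the literal string `<none>`:
+//!
+//! ```toml
+//! [metadata]
+//! "checksum example-crate 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d0d0..."
+//! ```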
+
+use super::{Resolve, ResolveVersion};
+use crate::core::{Dependency, GitReference, Package, PackageId, SourceId, Workspace};
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+use crate::util::{internal, Graph};
+use anyhow::{bail, Context as _};
+use log::debug;
+use serde::de;
+use serde::ser;
+use serde::{Deserialize, Serialize};
+use std::collections::{BTreeMap, HashMap, HashSet};
+use std::fmt;
+use std::str::FromStr;
+
+/// The `Cargo.lock` structure.
+#[derive(Serialize, Deserialize, Debug)]
+pub struct EncodableResolve {
+ version: Option<u32>,
+ package: Option<Vec<EncodableDependency>>,
+ /// `root` is optional to allow backward compatibility.
+ root: Option<EncodableDependency>,
+ metadata: Option<Metadata>,
+ #[serde(default, skip_serializing_if = "Patch::is_empty")]
+ patch: Patch,
+}
+
+#[derive(Serialize, Deserialize, Debug, Default)]
+struct Patch {
+ unused: Vec<EncodableDependency>,
+}
+
+pub type Metadata = BTreeMap<String, String>;
+
+impl EncodableResolve {
+ /// Convert a `Cargo.lock` to a Resolve.
+ ///
+ /// Note that this `Resolve` is not "complete". For example, the
+ /// dependencies do not know the difference between regular/dev/build
+ /// dependencies, so they are not filled in. It also does not include
+ /// `features`. Care should be taken when using this Resolve. One of the
+ /// primary uses is to be used with `resolve_with_previous` to guide the
+ /// resolver to create a complete Resolve.
+ pub fn into_resolve(self, original: &str, ws: &Workspace<'_>) -> CargoResult<Resolve> {
+ let path_deps = build_path_deps(ws)?;
+ let mut checksums = HashMap::new();
+
+ let mut version = match self.version {
+ Some(3) => ResolveVersion::V3,
+ Some(n) => bail!(
+ "lock file version `{}` was found, but this version of Cargo \
+ does not understand this lock file, perhaps Cargo needs \
+ to be updated?",
+ n,
+ ),
+ // Historically Cargo did not have a version indicator in lock
+ // files, so this could either be the V1 or V2 encoding. We assume
+            // an older format is being parsed until we see otherwise.
+ None => ResolveVersion::V1,
+ };
+
+ let packages = {
+ let mut packages = self.package.unwrap_or_default();
+ if let Some(root) = self.root {
+ packages.insert(0, root);
+ }
+ packages
+ };
+
+ // `PackageId`s in the lock file don't include the `source` part
+ // for workspace members, so we reconstruct proper IDs.
+ let live_pkgs = {
+ let mut live_pkgs = HashMap::new();
+ let mut all_pkgs = HashSet::new();
+ for pkg in packages.iter() {
+ let enc_id = EncodablePackageId {
+ name: pkg.name.clone(),
+ version: Some(pkg.version.clone()),
+ source: pkg.source,
+ };
+
+ if !all_pkgs.insert(enc_id.clone()) {
+ anyhow::bail!("package `{}` is specified twice in the lockfile", pkg.name);
+ }
+ let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) {
+ // We failed to find a local package in the workspace.
+ // It must have been removed and should be ignored.
+ None => {
+ debug!("path dependency now missing {} v{}", pkg.name, pkg.version);
+ continue;
+ }
+ Some(&source) => PackageId::new(&pkg.name, &pkg.version, source)?,
+ };
+
+ // If a package has a checksum listed directly on it then record
+ // that here, and we also bump our version up to 2 since V1
+ // didn't ever encode this field.
+ if let Some(cksum) = &pkg.checksum {
+ version = version.max(ResolveVersion::V2);
+ checksums.insert(id, Some(cksum.clone()));
+ }
+
+ assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none())
+ }
+ live_pkgs
+ };
+
+ // When decoding a V2 version the edges in `dependencies` aren't
+ // guaranteed to have either version or source information. This `map`
+ // is used to find package ids even if dependencies have missing
+ // information. This map is from name to version to source to actual
+ // package ID. (various levels to drill down step by step)
+ let mut map = HashMap::new();
+ for (id, _) in live_pkgs.values() {
+ map.entry(id.name().as_str())
+ .or_insert_with(HashMap::new)
+ .entry(id.version().to_string())
+ .or_insert_with(HashMap::new)
+ .insert(id.source_id(), *id);
+ }
+
+ let mut lookup_id = |enc_id: &EncodablePackageId| -> Option<PackageId> {
+ // The name of this package should always be in the larger list of
+ // all packages.
+ let by_version = map.get(enc_id.name.as_str())?;
+
+ // If the version is provided, look that up. Otherwise if the
+            // version isn't provided this is a V2 lock file and we should only
+ // have one version for this name. If we have more than one version
+ // for the name then it's ambiguous which one we'd use. That
+ // shouldn't ever actually happen but in theory bad git merges could
+ // produce invalid lock files, so silently ignore these cases.
+ let by_source = match &enc_id.version {
+ Some(version) => by_version.get(version)?,
+ None => {
+ version = version.max(ResolveVersion::V2);
+ if by_version.len() == 1 {
+ by_version.values().next().unwrap()
+ } else {
+ return None;
+ }
+ }
+ };
+
+ // This is basically the same as above. Note though that `source` is
+ // always missing for path dependencies regardless of serialization
+ // format. That means we have to handle the `None` case a bit more
+ // carefully.
+ match &enc_id.source {
+ Some(source) => by_source.get(source).cloned(),
+ None => {
+                    // Look through all possible package ids for this
+ // name/version. If there's only one `path` dependency then
+ // we are hardcoded to use that since `path` dependencies
+ // can't have a source listed.
+ let mut path_packages = by_source.values().filter(|p| p.source_id().is_path());
+ if let Some(path) = path_packages.next() {
+ if path_packages.next().is_some() {
+ return None;
+ }
+ Some(*path)
+
+ // ... otherwise if there's only one then we must be
+ // implicitly using that one due to a V2 serialization of
+ // the lock file
+ } else if by_source.len() == 1 {
+ let id = by_source.values().next().unwrap();
+ version = version.max(ResolveVersion::V2);
+ Some(*id)
+
+ // ... and failing that we probably had a bad git merge of
+ // `Cargo.lock` or something like that, so just ignore this.
+ } else {
+ None
+ }
+ }
+ }
+ };
+
+ let mut g = Graph::new();
+
+ for &(ref id, _) in live_pkgs.values() {
+ g.add(*id);
+ }
+
+ for &(ref id, pkg) in live_pkgs.values() {
+ let deps = match pkg.dependencies {
+ Some(ref deps) => deps,
+ None => continue,
+ };
+
+ for edge in deps.iter() {
+ if let Some(to_depend_on) = lookup_id(edge) {
+ g.link(*id, to_depend_on);
+ }
+ }
+ }
+
+ let replacements = {
+ let mut replacements = HashMap::new();
+ for &(ref id, pkg) in live_pkgs.values() {
+ if let Some(ref replace) = pkg.replace {
+ assert!(pkg.dependencies.is_none());
+ if let Some(replace_id) = lookup_id(replace) {
+ replacements.insert(*id, replace_id);
+ }
+ }
+ }
+ replacements
+ };
+
+ let mut metadata = self.metadata.unwrap_or_default();
+
+        // In the V1 serialization format all checksums were listed in the lock
+        // file in the `[metadata]` section, so if we're still on V1 then look
+        // for them here.
+ let prefix = "checksum ";
+ let mut to_remove = Vec::new();
+ for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) {
+ to_remove.push(k.to_string());
+ let k = &k[prefix.len()..];
+ let enc_id: EncodablePackageId = k
+ .parse()
+ .with_context(|| internal("invalid encoding of checksum in lockfile"))?;
+ let id = match lookup_id(&enc_id) {
+ Some(id) => id,
+ _ => continue,
+ };
+
+ let v = if v == "<none>" {
+ None
+ } else {
+ Some(v.to_string())
+ };
+ checksums.insert(id, v);
+ }
+        // If a `checksum` was listed in `[metadata]` but the lock file was
+        // already determined to be V2 or later, assume some sort of bad git
+        // merge happened, so discard all checksums and regenerate them later.
+ if !to_remove.is_empty() && version >= ResolveVersion::V2 {
+ checksums.drain();
+ }
+ for k in to_remove {
+ metadata.remove(&k);
+ }
+
+ let mut unused_patches = Vec::new();
+ for pkg in self.patch.unused {
+ let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) {
+ Some(&src) => PackageId::new(&pkg.name, &pkg.version, src)?,
+ None => continue,
+ };
+ unused_patches.push(id);
+ }
+
+ // We have a curious issue where in the "v1 format" we buggily had a
+ // trailing blank line at the end of lock files under some specific
+ // conditions.
+ //
+        // Cargo is trying to write new lockfiles in the "v2 format", but if you
+ // have no dependencies, for example, then the lockfile encoded won't
+ // really have any indicator that it's in the new format (no
+ // dependencies or checksums listed). This means that if you type `cargo
+ // new` followed by `cargo build` it will generate a "v2 format" lock
+ // file since none previously existed. When reading this on the next
+ // `cargo build`, however, it generates a new lock file because when
+ // reading in that lockfile we think it's the v1 format.
+ //
+ // To help fix this issue we special case here. If our lockfile only has
+ // one trailing newline, not two, *and* it only has one package, then
+ // this is actually the v2 format.
+ if original.ends_with('\n')
+ && !original.ends_with("\n\n")
+ && version == ResolveVersion::V1
+ && g.iter().count() == 1
+ {
+ version = ResolveVersion::V2;
+ }
+
+ Ok(Resolve::new(
+ g,
+ replacements,
+ HashMap::new(),
+ checksums,
+ metadata,
+ unused_patches,
+ version,
+ HashMap::new(),
+ ))
+ }
+}
+
+fn build_path_deps(ws: &Workspace<'_>) -> CargoResult<HashMap<String, SourceId>> {
+ // If a crate is **not** a path source, then we're probably in a situation
+ // such as `cargo install` with a lock file from a remote dependency. In
+ // that case we don't need to fixup any path dependencies (as they're not
+ // actually path dependencies any more), so we ignore them.
+ let members = ws
+ .members()
+ .filter(|p| p.package_id().source_id().is_path())
+ .collect::<Vec<_>>();
+
+ let mut ret = HashMap::new();
+ let mut visited = HashSet::new();
+ for member in members.iter() {
+ ret.insert(
+ member.package_id().name().to_string(),
+ member.package_id().source_id(),
+ );
+ visited.insert(member.package_id().source_id());
+ }
+ for member in members.iter() {
+ build_pkg(member, ws, &mut ret, &mut visited);
+ }
+ for deps in ws.root_patch()?.values() {
+ for dep in deps {
+ build_dep(dep, ws, &mut ret, &mut visited);
+ }
+ }
+ for &(_, ref dep) in ws.root_replace() {
+ build_dep(dep, ws, &mut ret, &mut visited);
+ }
+
+ return Ok(ret);
+
+ fn build_pkg(
+ pkg: &Package,
+ ws: &Workspace<'_>,
+ ret: &mut HashMap<String, SourceId>,
+ visited: &mut HashSet<SourceId>,
+ ) {
+ for dep in pkg.dependencies() {
+ build_dep(dep, ws, ret, visited);
+ }
+ }
+
+ fn build_dep(
+ dep: &Dependency,
+ ws: &Workspace<'_>,
+ ret: &mut HashMap<String, SourceId>,
+ visited: &mut HashSet<SourceId>,
+ ) {
+ let id = dep.source_id();
+ if visited.contains(&id) || !id.is_path() {
+ return;
+ }
+ let path = match id.url().to_file_path() {
+ Ok(p) => p.join("Cargo.toml"),
+ Err(_) => return,
+ };
+ let pkg = match ws.load(&path) {
+ Ok(p) => p,
+ Err(_) => return,
+ };
+ ret.insert(pkg.name().to_string(), pkg.package_id().source_id());
+ visited.insert(pkg.package_id().source_id());
+ build_pkg(&pkg, ws, ret, visited);
+ }
+}
+
+impl Patch {
+ fn is_empty(&self) -> bool {
+ self.unused.is_empty()
+ }
+}
+
+#[derive(Serialize, Deserialize, Debug, PartialOrd, Ord, PartialEq, Eq)]
+pub struct EncodableDependency {
+ name: String,
+ version: String,
+ source: Option<SourceId>,
+ checksum: Option<String>,
+ dependencies: Option<Vec<EncodablePackageId>>,
+ replace: Option<EncodablePackageId>,
+}
+
+#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)]
+pub struct EncodablePackageId {
+ name: String,
+ version: Option<String>,
+ source: Option<SourceId>,
+}
+
+impl fmt::Display for EncodablePackageId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.name)?;
+ if let Some(s) = &self.version {
+ write!(f, " {}", s)?;
+ }
+ if let Some(s) = &self.source {
+ write!(f, " ({})", s.as_url())?;
+ }
+ Ok(())
+ }
+}
+
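+// Note: the textual form produced by `Display` above and parsed by `FromStr`
+// below is `NAME[ VERSION][ (SOURCE-URL)]`; a registry package would round-trip
+// as something like `example-crate 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)`
+// (hypothetical name shown).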
+impl FromStr for EncodablePackageId {
+ type Err = anyhow::Error;
+
+ fn from_str(s: &str) -> CargoResult<EncodablePackageId> {
+ let mut s = s.splitn(3, ' ');
+ let name = s.next().unwrap();
+ let version = s.next();
+ let source_id = match s.next() {
+ Some(s) => {
+ if s.starts_with('(') && s.ends_with(')') {
+ Some(SourceId::from_url(&s[1..s.len() - 1])?)
+ } else {
+ anyhow::bail!("invalid serialized PackageId")
+ }
+ }
+ None => None,
+ };
+
+ Ok(EncodablePackageId {
+ name: name.to_string(),
+ version: version.map(|v| v.to_string()),
+ source: source_id,
+ })
+ }
+}
+
+impl ser::Serialize for EncodablePackageId {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ s.collect_str(self)
+ }
+}
+
+impl<'de> de::Deserialize<'de> for EncodablePackageId {
+ fn deserialize<D>(d: D) -> Result<EncodablePackageId, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ String::deserialize(d).and_then(|string| {
+ string
+ .parse::<EncodablePackageId>()
+ .map_err(de::Error::custom)
+ })
+ }
+}
+
+impl ser::Serialize for Resolve {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ let mut ids: Vec<_> = self.iter().collect();
+ ids.sort();
+
+ let state = EncodeState::new(self);
+
+ let encodable = ids
+ .iter()
+ .map(|&id| encodable_resolve_node(id, self, &state))
+ .collect::<Vec<_>>();
+
+ let mut metadata = self.metadata().clone();
+
+ if self.version() == ResolveVersion::V1 {
+ for &id in ids.iter().filter(|id| !id.source_id().is_path()) {
+ let checksum = match self.checksums()[&id] {
+ Some(ref s) => &s[..],
+ None => "<none>",
+ };
+ let id = encodable_package_id(id, &state, self.version());
+ metadata.insert(format!("checksum {}", id.to_string()), checksum.to_string());
+ }
+ }
+
+ let metadata = if metadata.is_empty() {
+ None
+ } else {
+ Some(metadata)
+ };
+
+ let patch = Patch {
+ unused: self
+ .unused_patches()
+ .iter()
+ .map(|id| EncodableDependency {
+ name: id.name().to_string(),
+ version: id.version().to_string(),
+ source: encode_source(id.source_id()),
+ dependencies: None,
+ replace: None,
+ checksum: if self.version() >= ResolveVersion::V2 {
+ self.checksums().get(id).and_then(|x| x.clone())
+ } else {
+ None
+ },
+ })
+ .collect(),
+ };
+ EncodableResolve {
+ package: Some(encodable),
+ root: None,
+ metadata,
+ patch,
+ version: match self.version() {
+ ResolveVersion::V3 => Some(3),
+ ResolveVersion::V2 | ResolveVersion::V1 => None,
+ },
+ }
+ .serialize(s)
+ }
+}
+
+pub struct EncodeState<'a> {
+ counts: Option<HashMap<InternedString, HashMap<&'a semver::Version, usize>>>,
+}
+
+impl<'a> EncodeState<'a> {
+ pub fn new(resolve: &'a Resolve) -> EncodeState<'a> {
+ let counts = if resolve.version() >= ResolveVersion::V2 {
+ let mut map = HashMap::new();
+ for id in resolve.iter() {
+ let slot = map
+ .entry(id.name())
+ .or_insert_with(HashMap::new)
+ .entry(id.version())
+ .or_insert(0);
+ *slot += 1;
+ }
+ Some(map)
+ } else {
+ None
+ };
+ EncodeState { counts }
+ }
+}
+
+fn encodable_resolve_node(
+ id: PackageId,
+ resolve: &Resolve,
+ state: &EncodeState<'_>,
+) -> EncodableDependency {
+ let (replace, deps) = match resolve.replacement(id) {
+ Some(id) => (
+ Some(encodable_package_id(id, state, resolve.version())),
+ None,
+ ),
+ None => {
+ let mut deps = resolve
+ .deps_not_replaced(id)
+ .map(|(id, _)| encodable_package_id(id, state, resolve.version()))
+ .collect::<Vec<_>>();
+ deps.sort();
+ (None, Some(deps))
+ }
+ };
+
+ EncodableDependency {
+ name: id.name().to_string(),
+ version: id.version().to_string(),
+ source: encode_source(id.source_id()),
+ dependencies: deps,
+ replace,
+ checksum: if resolve.version() >= ResolveVersion::V2 {
+ resolve.checksums().get(&id).and_then(|s| s.clone())
+ } else {
+ None
+ },
+ }
+}
+
+pub fn encodable_package_id(
+ id: PackageId,
+ state: &EncodeState<'_>,
+ resolve_version: ResolveVersion,
+) -> EncodablePackageId {
+ let mut version = Some(id.version().to_string());
+ let mut id_to_encode = id.source_id();
+ if resolve_version <= ResolveVersion::V2 {
+ if let Some(GitReference::Branch(b)) = id_to_encode.git_reference() {
+ if b == "master" {
+ id_to_encode =
+ SourceId::for_git(id_to_encode.url(), GitReference::DefaultBranch).unwrap();
+ }
+ }
+ }
+ let mut source = encode_source(id_to_encode).map(|s| s.with_precise(None));
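+    // In V2+ lock files the source can be dropped from a dependency edge when
+    // exactly one package has this name/version, and the version can be dropped
+    // too when the name maps to a single version. `counts` is only `Some` for
+    // those newer versions, so this is a no-op for V1.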
+ if let Some(counts) = &state.counts {
+ let version_counts = &counts[&id.name()];
+ if version_counts[&id.version()] == 1 {
+ source = None;
+ if version_counts.len() == 1 {
+ version = None;
+ }
+ }
+ }
+ EncodablePackageId {
+ name: id.name().to_string(),
+ version,
+ source,
+ }
+}
+
+fn encode_source(id: SourceId) -> Option<SourceId> {
+ if id.is_path() {
+ None
+ } else {
+ Some(id)
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/resolver/errors.rs b/src/tools/cargo/src/cargo/core/resolver/errors.rs
new file mode 100644
index 000000000..d75240df6
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/resolver/errors.rs
@@ -0,0 +1,439 @@
+use std::fmt;
+use std::task::Poll;
+
+use crate::core::{Dependency, PackageId, QueryKind, Registry, Summary};
+use crate::util::lev_distance::lev_distance;
+use crate::util::{Config, VersionExt};
+use anyhow::Error;
+
+use super::context::Context;
+use super::types::{ConflictMap, ConflictReason};
+
+/// Error during resolution providing a path of `PackageId`s.
+pub struct ResolveError {
+ cause: Error,
+ package_path: Vec<PackageId>,
+}
+
+impl ResolveError {
+ pub fn new<E: Into<Error>>(cause: E, package_path: Vec<PackageId>) -> Self {
+ Self {
+ cause: cause.into(),
+ package_path,
+ }
+ }
+
+ /// Returns a path of packages from the package whose requirements could not be resolved up to
+ /// the root.
+ pub fn package_path(&self) -> &[PackageId] {
+ &self.package_path
+ }
+}
+
+impl std::error::Error for ResolveError {
+ fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+ self.cause.source()
+ }
+}
+
+impl fmt::Debug for ResolveError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.cause.fmt(f)
+ }
+}
+
+impl fmt::Display for ResolveError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.cause.fmt(f)
+ }
+}
+
+pub type ActivateResult<T> = Result<T, ActivateError>;
+
+#[derive(Debug)]
+pub enum ActivateError {
+ Fatal(anyhow::Error),
+ Conflict(PackageId, ConflictReason),
+}
+
+impl From<::anyhow::Error> for ActivateError {
+ fn from(t: ::anyhow::Error) -> Self {
+ ActivateError::Fatal(t)
+ }
+}
+
+impl From<(PackageId, ConflictReason)> for ActivateError {
+ fn from(t: (PackageId, ConflictReason)) -> Self {
+ ActivateError::Conflict(t.0, t.1)
+ }
+}
+
+pub(super) fn activation_error(
+ cx: &Context,
+ registry: &mut dyn Registry,
+ parent: &Summary,
+ dep: &Dependency,
+ conflicting_activations: &ConflictMap,
+ candidates: &[Summary],
+ config: Option<&Config>,
+) -> ResolveError {
+ let to_resolve_err = |err| {
+ ResolveError::new(
+ err,
+ cx.parents
+ .path_to_bottom(&parent.package_id())
+ .into_iter()
+ .map(|(node, _)| node)
+ .cloned()
+ .collect(),
+ )
+ };
+
+ if !candidates.is_empty() {
+ let mut msg = format!("failed to select a version for `{}`.", dep.package_name());
+ msg.push_str("\n ... required by ");
+ msg.push_str(&describe_path_in_context(cx, &parent.package_id()));
+
+ msg.push_str("\nversions that meet the requirements `");
+ msg.push_str(&dep.version_req().to_string());
+ msg.push_str("` ");
+
+ if let Some(v) = dep.version_req().locked_version() {
+ msg.push_str("(locked to ");
+ msg.push_str(&v.to_string());
+ msg.push_str(") ");
+ }
+
+ msg.push_str("are: ");
+ msg.push_str(
+ &candidates
+ .iter()
+ .map(|v| v.version())
+ .map(|v| v.to_string())
+ .collect::<Vec<_>>()
+ .join(", "),
+ );
+
+ let mut conflicting_activations: Vec<_> = conflicting_activations.iter().collect();
+ conflicting_activations.sort_unstable();
+ // This is reversed to show the newest versions first. I don't know if there is
+ // a strong reason to do this, but that is how the code previously worked
+ // (see https://github.com/rust-lang/cargo/pull/5037) and I don't feel like changing it.
+ conflicting_activations.reverse();
+ // Flag used for grouping all semver errors together.
+ let mut has_semver = false;
+
+ for (p, r) in &conflicting_activations {
+ match r {
+ ConflictReason::Semver => {
+ has_semver = true;
+ }
+ ConflictReason::Links(link) => {
+ msg.push_str("\n\nthe package `");
+ msg.push_str(&*dep.package_name());
+ msg.push_str("` links to the native library `");
+ msg.push_str(link);
+ msg.push_str("`, but it conflicts with a previous package which links to `");
+ msg.push_str(link);
+ msg.push_str("` as well:\n");
+ msg.push_str(&describe_path_in_context(cx, p));
+ msg.push_str("\nOnly one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. ");
+ msg.push_str("Try to adjust your dependencies so that only one package uses the links ='");
+ msg.push_str(&*dep.package_name());
+ msg.push_str("' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.");
+ }
+ ConflictReason::MissingFeatures(features) => {
+ msg.push_str("\n\nthe package `");
+ msg.push_str(&*p.name());
+ msg.push_str("` depends on `");
+ msg.push_str(&*dep.package_name());
+ msg.push_str("`, with features: `");
+ msg.push_str(features);
+ msg.push_str("` but `");
+ msg.push_str(&*dep.package_name());
+ msg.push_str("` does not have these features.\n");
+ // p == parent so the full path is redundant.
+ }
+ ConflictReason::RequiredDependencyAsFeature(features) => {
+ msg.push_str("\n\nthe package `");
+ msg.push_str(&*p.name());
+ msg.push_str("` depends on `");
+ msg.push_str(&*dep.package_name());
+ msg.push_str("`, with features: `");
+ msg.push_str(features);
+ msg.push_str("` but `");
+ msg.push_str(&*dep.package_name());
+ msg.push_str("` does not have these features.\n");
+ msg.push_str(
+ " It has a required dependency with that name, \
+ but only optional dependencies can be used as features.\n",
+ );
+ // p == parent so the full path is redundant.
+ }
+ ConflictReason::NonImplicitDependencyAsFeature(features) => {
+ msg.push_str("\n\nthe package `");
+ msg.push_str(&*p.name());
+ msg.push_str("` depends on `");
+ msg.push_str(&*dep.package_name());
+ msg.push_str("`, with features: `");
+ msg.push_str(features);
+ msg.push_str("` but `");
+ msg.push_str(&*dep.package_name());
+ msg.push_str("` does not have these features.\n");
+ msg.push_str(
+ " It has an optional dependency with that name, \
+ but that dependency uses the \"dep:\" \
+ syntax in the features table, so it does not have an \
+ implicit feature with that name.\n",
+ );
+ // p == parent so the full path is redundant.
+ }
+ ConflictReason::PublicDependency(pkg_id) => {
+ // TODO: This needs to be implemented.
+ unimplemented!("pub dep {:?}", pkg_id);
+ }
+ ConflictReason::PubliclyExports(pkg_id) => {
+ // TODO: This needs to be implemented.
+ unimplemented!("pub exp {:?}", pkg_id);
+ }
+ }
+ }
+
+ if has_semver {
+ // Group these errors together.
+ msg.push_str("\n\nall possible versions conflict with previously selected packages.");
+ for (p, r) in &conflicting_activations {
+ if let ConflictReason::Semver = r {
+ msg.push_str("\n\n previously selected ");
+ msg.push_str(&describe_path_in_context(cx, p));
+ }
+ }
+ }
+
+ msg.push_str("\n\nfailed to select a version for `");
+ msg.push_str(&*dep.package_name());
+ msg.push_str("` which could resolve this conflict");
+
+ return to_resolve_err(anyhow::format_err!("{}", msg));
+ }
+
+ // We didn't actually find any candidates, so we need to
+ // give an error message that nothing was found.
+ //
+ // Maybe the user mistyped the ver_req? Like `dep="2"` when `dep="0.2"`
+ // was meant. So we re-query the registry with `dep="*"` so we can
+ // list a few versions that were actually found.
+ let all_req = semver::VersionReq::parse("*").unwrap();
+ let mut new_dep = dep.clone();
+ new_dep.set_version_req(all_req);
+
+ let mut candidates = loop {
+ match registry.query_vec(&new_dep, QueryKind::Exact) {
+ Poll::Ready(Ok(candidates)) => break candidates,
+ Poll::Ready(Err(e)) => return to_resolve_err(e),
+ Poll::Pending => match registry.block_until_ready() {
+ Ok(()) => continue,
+ Err(e) => return to_resolve_err(e),
+ },
+ }
+ };
+
+ candidates.sort_unstable_by(|a, b| b.version().cmp(a.version()));
+
+ let mut msg =
+ if !candidates.is_empty() {
+ let versions = {
+ let mut versions = candidates
+ .iter()
+ .take(3)
+ .map(|cand| cand.version().to_string())
+ .collect::<Vec<_>>();
+
+ if candidates.len() > 3 {
+ versions.push("...".into());
+ }
+
+ versions.join(", ")
+ };
+
+ let locked_version = dep
+ .version_req()
+ .locked_version()
+ .map(|v| format!(" (locked to {})", v))
+ .unwrap_or_default();
+
+ let mut msg = format!(
+ "failed to select a version for the requirement `{} = \"{}\"`{}\n\
+ candidate versions found which didn't match: {}\n\
+ location searched: {}\n",
+ dep.package_name(),
+ dep.version_req(),
+ locked_version,
+ versions,
+ registry.describe_source(dep.source_id()),
+ );
+ msg.push_str("required by ");
+ msg.push_str(&describe_path_in_context(cx, &parent.package_id()));
+
+ // If we have a path dependency with a locked version, then this may
+ // indicate that we updated a sub-package and forgot to run `cargo
+ // update`. In this case try to print a helpful error!
+ if dep.source_id().is_path() && dep.version_req().is_locked() {
+ msg.push_str(
+ "\nconsider running `cargo update` to update \
+ a path dependency's locked version",
+ );
+ }
+
+ if registry.is_replaced(dep.source_id()) {
+ msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?");
+ }
+
+ msg
+ } else {
+ // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing`
+ // was meant. So we try asking the registry for a `fuzzy` search for suggestions.
+ let mut candidates = loop {
+ match registry.query_vec(&new_dep, QueryKind::Fuzzy) {
+ Poll::Ready(Ok(candidates)) => break candidates,
+ Poll::Ready(Err(e)) => return to_resolve_err(e),
+ Poll::Pending => match registry.block_until_ready() {
+ Ok(()) => continue,
+ Err(e) => return to_resolve_err(e),
+ },
+ }
+ };
+
+ candidates.sort_unstable_by_key(|a| a.name());
+ candidates.dedup_by(|a, b| a.name() == b.name());
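+            // Keep only near-misses by edit distance: e.g. a typo like `serd`
+            // is distance 1 from `serde` and would be kept by the `d < 4`
+            // cutoff below, while unrelated names are dropped.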
+ let mut candidates: Vec<_> = candidates
+ .iter()
+ .map(|n| (lev_distance(&*new_dep.package_name(), &*n.name()), n))
+ .filter(|&(d, _)| d < 4)
+ .collect();
+ candidates.sort_by_key(|o| o.0);
+ let mut msg: String;
+ if candidates.is_empty() {
+ msg = format!("no matching package named `{}` found\n", dep.package_name());
+ } else {
+ msg = format!(
+ "no matching package found\nsearched package name: `{}`\n",
+ dep.package_name()
+ );
+
+ // If dependency package name is equal to the name of the candidate here
+ // it may be a prerelease package which hasn't been specified correctly
+ if dep.package_name() == candidates[0].1.name()
+ && candidates[0].1.package_id().version().is_prerelease()
+ {
+ msg.push_str("prerelease package needs to be specified explicitly\n");
+ msg.push_str(&format!(
+ "{name} = {{ version = \"{version}\" }}",
+ name = candidates[0].1.name(),
+ version = candidates[0].1.package_id().version()
+ ));
+ } else {
+ let mut names = candidates
+ .iter()
+ .take(3)
+ .map(|c| c.1.name().as_str())
+ .collect::<Vec<_>>();
+
+ if candidates.len() > 3 {
+ names.push("...");
+ }
+ // Vertically align first suggestion with missing crate name
+ // so a typo jumps out at you.
+ msg.push_str("perhaps you meant: ");
+ msg.push_str(&names.iter().enumerate().fold(
+ String::default(),
+ |acc, (i, el)| match i {
+ 0 => acc + el,
+ i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el,
+ _ => acc + ", " + el,
+ },
+ ));
+ }
+ msg.push('\n');
+ }
+ msg.push_str(&format!("location searched: {}\n", dep.source_id()));
+ msg.push_str("required by ");
+ msg.push_str(&describe_path_in_context(cx, &parent.package_id()));
+
+ msg
+ };
+
+ if let Some(config) = config {
+ if config.offline() {
+ msg.push_str(
+ "\nAs a reminder, you're using offline mode (--offline) \
+ which can sometimes cause surprising resolution failures, \
+ if this error is too confusing you may wish to retry \
+ without the offline flag.",
+ );
+ }
+ }
+
+ to_resolve_err(anyhow::format_err!("{}", msg))
+}
+
+/// Returns a string representation of the dependency chain for a particular
+/// `pkgid` within the given context.
+pub(super) fn describe_path_in_context(cx: &Context, id: &PackageId) -> String {
+ let iter = cx
+ .parents
+ .path_to_bottom(id)
+ .into_iter()
+ .map(|(p, d)| (p, d.and_then(|d| d.iter().next())));
+ describe_path(iter)
+}
+
+/// Returns a string representation of the dependency chain for a particular `pkgid`.
+///
+/// Note that all elements of `path` iterator should have `Some` dependency
+/// except the first one. It would look like:
+///
+/// (pkg0, None)
+/// -> (pkg1, dep from pkg1 satisfied by pkg0)
+/// -> (pkg2, dep from pkg2 satisfied by pkg1)
+/// -> ...
+pub(crate) fn describe_path<'a>(
+ mut path: impl Iterator<Item = (&'a PackageId, Option<&'a Dependency>)>,
+) -> String {
+ use std::fmt::Write;
+
+ if let Some(p) = path.next() {
+ let mut dep_path_desc = format!("package `{}`", p.0);
+ for (pkg, dep) in path {
+ let dep = dep.unwrap();
+ let source_kind = if dep.source_id().is_path() {
+ "path "
+ } else if dep.source_id().is_git() {
+ "git "
+ } else {
+ ""
+ };
+ let requirement = if source_kind.is_empty() {
+ format!("{} = \"{}\"", dep.name_in_toml(), dep.version_req())
+ } else {
+ dep.name_in_toml().to_string()
+ };
+ let locked_version = dep
+ .version_req()
+ .locked_version()
+ .map(|v| format!("(locked to {}) ", v))
+ .unwrap_or_default();
+
+ write!(
+ dep_path_desc,
+ "\n ... which satisfies {}dependency `{}` {}of package `{}`",
+ source_kind, requirement, locked_version, pkg
+ )
+ .unwrap();
+ }
+
+ return dep_path_desc;
+ }
+
+ String::new()
+}
diff --git a/src/tools/cargo/src/cargo/core/resolver/features.rs b/src/tools/cargo/src/cargo/core/resolver/features.rs
new file mode 100644
index 000000000..e2f2bd5c5
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/resolver/features.rs
@@ -0,0 +1,924 @@
+//! # Feature resolver
+//!
+//! This is a [new feature resolver] that runs independently of the main
+//! dependency resolver. It has several options which can enable new feature
+//! resolution behavior.
+//!
+//! One of its key characteristics is that it can avoid unifying features for
+//! shared dependencies in some situations. See [`FeatureOpts`] for the
+//! different behaviors that can be enabled. If no extra options are enabled,
+//! then it should behave exactly the same as the dependency resolver's
+//! feature resolution.
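+//!
+//! As a rough illustration (hypothetical crate and feature names), consider a
+//! manifest where the same crate shows up as both a normal and a build
+//! dependency with different features:
+//!
+//! ```toml
+//! [dependencies]
+//! shared = { version = "1", features = ["std"] }
+//!
+//! [build-dependencies]
+//! shared = { version = "1", features = ["codegen"] }
+//! ```
+//!
+//! With host-dependency decoupling enabled, the two uses of `shared` keep
+//! their own feature sets; without it, the features are unified and `shared`
+//! is built with both `std` and `codegen` everywhere.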
+//!
+//! The preferred way to engage this new resolver is via [`resolve_ws_with_opts`].
+//!
+//! This does not *replace* feature resolution in the dependency resolver, but
+//! instead acts as a second pass which can *narrow* the features selected in
+//! the dependency resolver. The dependency resolver still needs to do its own
+//! feature resolution in order to avoid selecting optional dependencies that
+//! are never enabled. The dependency resolver could, in theory, just assume
+//! all optional dependencies on all packages are enabled (and remove all
+//! knowledge of features), but that could introduce new requirements that
+//! might change old behavior or cause conflicts. Maybe some day in the future
+//! we could experiment with that, but it seems unlikely to work or be all
+//! that helpful.
+//!
+//! ## Assumptions
+//!
+//! There are many assumptions made about the dependency resolver:
+//!
+//! * Assumes feature validation has already been done during the construction
+//! of feature maps, so the feature resolver doesn't do that validation at all.
+//! * Assumes `dev-dependencies` within a dependency have been removed
+//! in the given [`Resolve`].
+//!
+//! There are probably other assumptions that I am forgetting.
+//!
+//! [new feature resolver]: https://doc.rust-lang.org/nightly/cargo/reference/resolver.html#feature-resolver-version-2
+//! [`resolve_ws_with_opts`]: crate::ops::resolve_ws_with_opts
+
+use crate::core::compiler::{CompileKind, CompileTarget, RustcTargetData};
+use crate::core::dependency::{ArtifactTarget, DepKind, Dependency};
+use crate::core::resolver::types::FeaturesSet;
+use crate::core::resolver::{Resolve, ResolveBehavior};
+use crate::core::{FeatureValue, PackageId, PackageIdSpec, PackageSet, Workspace};
+use crate::util::interning::InternedString;
+use crate::util::CargoResult;
+use anyhow::bail;
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::rc::Rc;
+
+/// The key used in various places to store features for a particular dependency.
+/// The actual discrimination happens with the [`FeaturesFor`] type.
+type PackageFeaturesKey = (PackageId, FeaturesFor);
+/// Map of activated features.
+type ActivateMap = HashMap<PackageFeaturesKey, BTreeSet<InternedString>>;
+
+/// Set of all activated features for all packages in the resolve graph.
+pub struct ResolvedFeatures {
+ activated_features: ActivateMap,
+ /// Optional dependencies that should be built.
+ ///
+ /// The value is the `name_in_toml` of the dependencies.
+ activated_dependencies: ActivateMap,
+ opts: FeatureOpts,
+}
+
+/// Options for how the feature resolver works.
+#[derive(Default)]
+pub struct FeatureOpts {
+    /// Build deps and proc-macros will not share features with other dep kinds,
+    /// and neither will artifact targets.
+    /// In other words, if true, features are only unified among dependencies of
+    /// the same kind.
+    /// If false, there is only one namespace for features, unifying all features
+    /// across all dependencies, no matter what kind.
+ decouple_host_deps: bool,
+ /// Dev dep features will not be activated unless needed.
+ decouple_dev_deps: bool,
+ /// Targets that are not in use will not activate features.
+ ignore_inactive_targets: bool,
+ /// If enabled, compare against old resolver (for testing).
+ compare: bool,
+}
+
+/// Flag to indicate if Cargo is building *any* dev units (tests, examples, etc.).
+///
+/// This disables decoupling of dev dependencies. It may be possible to relax
+/// this in the future, but it will require significant changes to how unit
+/// dependencies are computed, and can result in longer build times with
+/// `cargo test` because the lib may need to be built 3 times instead of
+/// twice.
+#[derive(Copy, Clone, PartialEq)]
+pub enum HasDevUnits {
+ Yes,
+ No,
+}
+
+/// Flag to indicate that target-specific filtering should be disabled.
+#[derive(Copy, Clone, PartialEq)]
+pub enum ForceAllTargets {
+ Yes,
+ No,
+}
+
+/// Flag to indicate if features are requested for a certain type of dependency.
+///
+/// This is primarily used for constructing a [`PackageFeaturesKey`] to decouple
+/// activated features of the same package with different types of dependency.
+#[derive(Default, Copy, Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub enum FeaturesFor {
+ /// Normal or dev dependency.
+ #[default]
+ NormalOrDev,
+ /// Build dependency or proc-macro.
+ HostDep,
+ /// Any dependency with both artifact and target specified.
+ ///
+ /// That is, `dep = { …, artifact = <crate-type>, target = <triple> }`
+ ArtifactDep(CompileTarget),
+}
+
+impl std::fmt::Display for FeaturesFor {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ FeaturesFor::HostDep => f.write_str("host"),
+ FeaturesFor::ArtifactDep(target) => f.write_str(&target.rustc_target()),
+ FeaturesFor::NormalOrDev => Ok(()),
+ }
+ }
+}
+
+impl FeaturesFor {
+ pub fn from_for_host(for_host: bool) -> FeaturesFor {
+ if for_host {
+ FeaturesFor::HostDep
+ } else {
+ FeaturesFor::NormalOrDev
+ }
+ }
+
+ pub fn from_for_host_or_artifact_target(
+ for_host: bool,
+ artifact_target: Option<CompileTarget>,
+ ) -> FeaturesFor {
+ match artifact_target {
+ Some(target) => FeaturesFor::ArtifactDep(target),
+ None => {
+ if for_host {
+ FeaturesFor::HostDep
+ } else {
+ FeaturesFor::NormalOrDev
+ }
+ }
+ }
+ }
+
+ fn apply_opts(self, opts: &FeatureOpts) -> Self {
+ if opts.decouple_host_deps {
+ self
+ } else {
+ FeaturesFor::default()
+ }
+ }
+}
+
+impl FeatureOpts {
+ pub fn new(
+ ws: &Workspace<'_>,
+ has_dev_units: HasDevUnits,
+ force_all_targets: ForceAllTargets,
+ ) -> CargoResult<FeatureOpts> {
+ let mut opts = FeatureOpts::default();
+ let unstable_flags = ws.config().cli_unstable();
+ let mut enable = |feat_opts: &Vec<String>| {
+ for opt in feat_opts {
+ match opt.as_ref() {
+ "build_dep" | "host_dep" => opts.decouple_host_deps = true,
+ "dev_dep" => opts.decouple_dev_deps = true,
+ "itarget" => opts.ignore_inactive_targets = true,
+ "all" => {
+ opts.decouple_host_deps = true;
+ opts.decouple_dev_deps = true;
+ opts.ignore_inactive_targets = true;
+ }
+ "compare" => opts.compare = true,
+ "ws" => unimplemented!(),
+ s => bail!("-Zfeatures flag `{}` is not supported", s),
+ }
+ }
+ Ok(())
+ };
+ if let Some(feat_opts) = unstable_flags.features.as_ref() {
+ enable(feat_opts)?;
+ }
+ match ws.resolve_behavior() {
+ ResolveBehavior::V1 => {}
+ ResolveBehavior::V2 => {
+ enable(&vec!["all".to_string()]).unwrap();
+ }
+ }
+ if let HasDevUnits::Yes = has_dev_units {
+ // Dev deps cannot be decoupled when they are in use.
+ opts.decouple_dev_deps = false;
+ }
+ if let ForceAllTargets::Yes = force_all_targets {
+ opts.ignore_inactive_targets = false;
+ }
+ Ok(opts)
+ }
+
+ /// Creates a new FeatureOpts for the given behavior.
+ pub fn new_behavior(behavior: ResolveBehavior, has_dev_units: HasDevUnits) -> FeatureOpts {
+ match behavior {
+ ResolveBehavior::V1 => FeatureOpts::default(),
+ ResolveBehavior::V2 => FeatureOpts {
+ decouple_host_deps: true,
+ decouple_dev_deps: has_dev_units == HasDevUnits::No,
+ ignore_inactive_targets: true,
+ compare: false,
+ },
+ }
+ }
+}
+
+/// Features flags requested for a package.
+///
+/// This should be cheap and fast to clone; it is used in the resolver for
+/// various caches.
+///
+/// This is split into enum variants because the resolver needs to handle
+/// features coming from different places (command-line and dependency
+/// declarations), but those different places have different constraints on
+/// which syntax is allowed. This helps ensure that every place dealing with
+/// features is properly handling those syntax restrictions.
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub enum RequestedFeatures {
+ /// Features requested on the command-line with flags.
+ CliFeatures(CliFeatures),
+ /// Features specified in a dependency declaration.
+ DepFeatures {
+ /// The `features` dependency field.
+ features: FeaturesSet,
+ /// The `default-features` dependency field.
+ uses_default_features: bool,
+ },
+}
+
+/// Features specified on the command-line.
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct CliFeatures {
+ /// Features from the `--features` flag.
+ pub features: Rc<BTreeSet<FeatureValue>>,
+ /// The `--all-features` flag.
+ pub all_features: bool,
+ /// Inverse of `--no-default-features` flag.
+ pub uses_default_features: bool,
+}
+
+impl CliFeatures {
+ /// Creates a new CliFeatures from the given command-line flags.
+ pub fn from_command_line(
+ features: &[String],
+ all_features: bool,
+ uses_default_features: bool,
+ ) -> CargoResult<CliFeatures> {
+ let features = Rc::new(CliFeatures::split_features(features));
+ // Some early validation to ensure correct syntax.
+ for feature in features.iter() {
+ match feature {
+ // Maybe call validate_feature_name here once it is an error?
+ FeatureValue::Feature(_) => {}
+ FeatureValue::Dep { .. } => {
+ bail!(
+ "feature `{}` is not allowed to use explicit `dep:` syntax",
+ feature
+ );
+ }
+ FeatureValue::DepFeature { dep_feature, .. } => {
+ if dep_feature.contains('/') {
+ bail!("multiple slashes in feature `{}` is not allowed", feature);
+ }
+ }
+ }
+ }
+ Ok(CliFeatures {
+ features,
+ all_features,
+ uses_default_features,
+ })
+ }
+
+ /// Creates a new CliFeatures with the given `all_features` setting.
+ pub fn new_all(all_features: bool) -> CliFeatures {
+ CliFeatures {
+ features: Rc::new(BTreeSet::new()),
+ all_features,
+ uses_default_features: true,
+ }
+ }
+
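+    /// Splits feature strings on whitespace and commas; e.g. `["foo bar", "baz,qux"]`
+    /// produces feature values for `foo`, `bar`, `baz`, and `qux`.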
+ fn split_features(features: &[String]) -> BTreeSet<FeatureValue> {
+ features
+ .iter()
+ .flat_map(|s| s.split_whitespace())
+ .flat_map(|s| s.split(','))
+ .filter(|s| !s.is_empty())
+ .map(InternedString::new)
+ .map(FeatureValue::new)
+ .collect()
+ }
+}
+
+impl ResolvedFeatures {
+ /// Returns the list of features that are enabled for the given package.
+ pub fn activated_features(
+ &self,
+ pkg_id: PackageId,
+ features_for: FeaturesFor,
+ ) -> Vec<InternedString> {
+ self.activated_features_int(pkg_id, features_for)
+ .expect("activated_features for invalid package")
+ }
+
+ /// Returns if the given dependency should be included.
+ ///
+ /// This handles dependencies disabled via `cfg` expressions and optional
+ /// dependencies which are not enabled.
+ pub fn is_dep_activated(
+ &self,
+ pkg_id: PackageId,
+ features_for: FeaturesFor,
+ dep_name: InternedString,
+ ) -> bool {
+ let key = features_for.apply_opts(&self.opts);
+ self.activated_dependencies
+ .get(&(pkg_id, key))
+ .map(|deps| deps.contains(&dep_name))
+ .unwrap_or(false)
+ }
+
+ /// Variant of `activated_features` that returns `None` if this is
+ /// not a valid pkg_id/is_build combination. Used in places which do
+ /// not know which packages are activated (like `cargo clean`).
+ pub fn activated_features_unverified(
+ &self,
+ pkg_id: PackageId,
+ features_for: FeaturesFor,
+ ) -> Option<Vec<InternedString>> {
+ self.activated_features_int(pkg_id, features_for).ok()
+ }
+
+ fn activated_features_int(
+ &self,
+ pkg_id: PackageId,
+ features_for: FeaturesFor,
+ ) -> CargoResult<Vec<InternedString>> {
+ let fk = features_for.apply_opts(&self.opts);
+ if let Some(fs) = self.activated_features.get(&(pkg_id, fk)) {
+ Ok(fs.iter().cloned().collect())
+ } else {
+ bail!("features did not find {:?} {:?}", pkg_id, fk)
+ }
+ }
+
+ /// Compares the result against the original resolver behavior.
+ ///
+ /// Used by `cargo fix --edition` to display any differences.
+ pub fn compare_legacy(&self, legacy: &ResolvedFeatures) -> DiffMap {
+ self.activated_features
+ .iter()
+ .filter_map(|((pkg_id, for_host), new_features)| {
+ let old_features = legacy
+ .activated_features
+ .get(&(*pkg_id, *for_host))
+ // The new features may have for_host entries where the old one does not.
+ .or_else(|| {
+ legacy
+ .activated_features
+ .get(&(*pkg_id, FeaturesFor::default()))
+ })
+ .map(|feats| feats.iter().cloned().collect())
+ .unwrap_or_else(|| BTreeSet::new());
+ // The new resolver should never add features.
+ assert_eq!(new_features.difference(&old_features).next(), None);
+ let removed_features: BTreeSet<_> =
+ old_features.difference(new_features).cloned().collect();
+ if removed_features.is_empty() {
+ None
+ } else {
+ Some(((*pkg_id, *for_host), removed_features))
+ }
+ })
+ .collect()
+ }
+}
+
+/// Map of differences.
+///
+/// Key is `(pkg_id, for_host)`. Value is a set of features or dependencies removed.
+pub type DiffMap = BTreeMap<PackageFeaturesKey, BTreeSet<InternedString>>;
+
+/// The new feature resolver that [`resolve`]s your project.
+///
+/// For more information, please see the [module-level documentation].
+///
+/// [`resolve`]: Self::resolve
+/// [module-level documentation]: crate::core::resolver::features
+pub struct FeatureResolver<'a, 'cfg> {
+ ws: &'a Workspace<'cfg>,
+ target_data: &'a RustcTargetData<'cfg>,
+ /// The platforms to build for, requested by the user.
+ requested_targets: &'a [CompileKind],
+ resolve: &'a Resolve,
+ package_set: &'a PackageSet<'cfg>,
+ /// Options that change how the feature resolver operates.
+ opts: FeatureOpts,
+ /// Map of features activated for each package.
+ activated_features: ActivateMap,
+ /// Map of optional dependencies activated for each package.
+ activated_dependencies: ActivateMap,
+    /// Keeps track of which packages have had their dependencies processed.
+ /// Used to avoid cycles, and to speed up processing.
+ processed_deps: HashSet<PackageFeaturesKey>,
+ /// If this is `true`, then a non-default `feature_key` needs to be tracked while
+ /// traversing the graph.
+ ///
+ /// This is only here to avoid calling `is_proc_macro` when all feature
+ /// options are disabled (because `is_proc_macro` can trigger downloads).
+ /// This has to be separate from `FeatureOpts.decouple_host_deps` because
+ /// `for_host` tracking is also needed for `itarget` to work properly.
+ track_for_host: bool,
+ /// `dep_name?/feat_name` features that will be activated if `dep_name` is
+ /// ever activated.
+ ///
+ /// The key is the `(package, for_host, dep_name)` of the package whose
+ /// dependency will trigger the addition of new features. The value is the
+ /// set of features to activate.
+ deferred_weak_dependencies:
+ HashMap<(PackageId, FeaturesFor, InternedString), HashSet<InternedString>>,
+}
+
+impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
+ /// Runs the resolution algorithm and returns a new [`ResolvedFeatures`]
+ /// with the result.
+ pub fn resolve(
+ ws: &Workspace<'cfg>,
+ target_data: &RustcTargetData<'cfg>,
+ resolve: &Resolve,
+ package_set: &'a PackageSet<'cfg>,
+ cli_features: &CliFeatures,
+ specs: &[PackageIdSpec],
+ requested_targets: &[CompileKind],
+ opts: FeatureOpts,
+ ) -> CargoResult<ResolvedFeatures> {
+ use crate::util::profile;
+ let _p = profile::start("resolve features");
+ let track_for_host = opts.decouple_host_deps || opts.ignore_inactive_targets;
+ let mut r = FeatureResolver {
+ ws,
+ target_data,
+ requested_targets,
+ resolve,
+ package_set,
+ opts,
+ activated_features: HashMap::new(),
+ activated_dependencies: HashMap::new(),
+ processed_deps: HashSet::new(),
+ track_for_host,
+ deferred_weak_dependencies: HashMap::new(),
+ };
+ r.do_resolve(specs, cli_features)?;
+ log::debug!("features={:#?}", r.activated_features);
+ if r.opts.compare {
+ r.compare();
+ }
+ Ok(ResolvedFeatures {
+ activated_features: r.activated_features,
+ activated_dependencies: r.activated_dependencies,
+ opts: r.opts,
+ })
+ }
+
+ /// Performs the process of resolving all features for the resolve graph.
+ fn do_resolve(
+ &mut self,
+ specs: &[PackageIdSpec],
+ cli_features: &CliFeatures,
+ ) -> CargoResult<()> {
+ let member_features = self.ws.members_with_features(specs, cli_features)?;
+ for (member, cli_features) in &member_features {
+ let fvs = self.fvs_from_requested(member.package_id(), cli_features);
+ let fk = if self.track_for_host && self.is_proc_macro(member.package_id()) {
+ // Also activate for normal dependencies. This is needed if the
+ // proc-macro includes other targets (like binaries or tests),
+ // or running in `cargo test`. Note that in a workspace, if
+                // the proc-macro is selected on the command line (like with
+ // `--workspace`), this forces feature unification with normal
+ // dependencies. This is part of the bigger problem where
+ // features depend on which packages are built.
+ self.activate_pkg(member.package_id(), FeaturesFor::default(), &fvs)?;
+ FeaturesFor::HostDep
+ } else {
+ FeaturesFor::default()
+ };
+ self.activate_pkg(member.package_id(), fk, &fvs)?;
+ }
+ Ok(())
+ }
+
+ /// Activates [`FeatureValue`]s on the given package.
+ ///
+ /// This is the main entrance into the recursion of feature activation
+ /// for a package.
+ fn activate_pkg(
+ &mut self,
+ pkg_id: PackageId,
+ fk: FeaturesFor,
+ fvs: &[FeatureValue],
+ ) -> CargoResult<()> {
+ log::trace!("activate_pkg {} {}", pkg_id.name(), fk);
+ // Add an empty entry to ensure everything is covered. This is intended for
+ // finding bugs where the resolver missed something it should have visited.
+ // Remove this in the future if `activated_features` uses an empty default.
+ self.activated_features
+ .entry((pkg_id, fk.apply_opts(&self.opts)))
+ .or_insert_with(BTreeSet::new);
+ for fv in fvs {
+ self.activate_fv(pkg_id, fk, fv)?;
+ }
+ if !self.processed_deps.insert((pkg_id, fk)) {
+ // Already processed dependencies. There's no need to process them
+ // again. This is primarily to avoid cycles, but also helps speed
+ // things up.
+ //
+ // This is safe because if another package comes along and adds a
+ // feature on this package, it will immediately add it (in
+ // `activate_fv`), and recurse as necessary right then and there.
+ // For example, consider we've already processed our dependencies,
+ // and another package comes along and enables one of our optional
+ // dependencies, it will do so immediately in the
+ // `FeatureValue::DepFeature` branch, and then immediately
+ // recurse into that optional dependency. This also holds true for
+ // features that enable other features.
+ return Ok(());
+ }
+ for (dep_pkg_id, deps) in self.deps(pkg_id, fk) {
+ for (dep, dep_fk) in deps {
+ if dep.is_optional() {
+ // Optional dependencies are enabled in `activate_fv` when
+ // a feature enables it.
+ continue;
+ }
+ // Recurse into the dependency.
+ let fvs = self.fvs_from_dependency(dep_pkg_id, dep);
+ self.activate_pkg(dep_pkg_id, dep_fk, &fvs)?;
+ }
+ }
+ Ok(())
+ }
+
+ /// Activate a single FeatureValue for a package.
+ fn activate_fv(
+ &mut self,
+ pkg_id: PackageId,
+ fk: FeaturesFor,
+ fv: &FeatureValue,
+ ) -> CargoResult<()> {
+ log::trace!("activate_fv {} {} {}", pkg_id.name(), fk, fv);
+ match fv {
+ FeatureValue::Feature(f) => {
+ self.activate_rec(pkg_id, fk, *f)?;
+ }
+ FeatureValue::Dep { dep_name } => {
+ self.activate_dependency(pkg_id, fk, *dep_name)?;
+ }
+ FeatureValue::DepFeature {
+ dep_name,
+ dep_feature,
+ weak,
+ } => {
+ self.activate_dep_feature(pkg_id, fk, *dep_name, *dep_feature, *weak)?;
+ }
+ }
+ Ok(())
+ }
+
+ /// Activate the given feature for the given package, and then recursively
+ /// activate any other features that feature enables.
+ fn activate_rec(
+ &mut self,
+ pkg_id: PackageId,
+ fk: FeaturesFor,
+ feature_to_enable: InternedString,
+ ) -> CargoResult<()> {
+ log::trace!(
+ "activate_rec {} {} feat={}",
+ pkg_id.name(),
+ fk,
+ feature_to_enable
+ );
+ let enabled = self
+ .activated_features
+ .entry((pkg_id, fk.apply_opts(&self.opts)))
+ .or_insert_with(BTreeSet::new);
+ if !enabled.insert(feature_to_enable) {
+ // Already enabled.
+ return Ok(());
+ }
+ let summary = self.resolve.summary(pkg_id);
+ let feature_map = summary.features();
+ let fvs = match feature_map.get(&feature_to_enable) {
+ Some(fvs) => fvs,
+ None => {
+ // TODO: this should only happen for optional dependencies.
+ // Other cases should be validated by Summary's `build_feature_map`.
+ // Figure out some way to validate this assumption.
+ log::debug!(
+ "pkg {:?} does not define feature {}",
+ pkg_id,
+ feature_to_enable
+ );
+ return Ok(());
+ }
+ };
+ for fv in fvs {
+ self.activate_fv(pkg_id, fk, fv)?;
+ }
+ Ok(())
+ }
+
+ /// Activate a dependency (`dep:dep_name` syntax).
+ fn activate_dependency(
+ &mut self,
+ pkg_id: PackageId,
+ fk: FeaturesFor,
+ dep_name: InternedString,
+ ) -> CargoResult<()> {
+ // Mark this dependency as activated.
+ let save_decoupled = fk.apply_opts(&self.opts);
+ self.activated_dependencies
+ .entry((pkg_id, save_decoupled))
+ .or_default()
+ .insert(dep_name);
+ // Check for any deferred features.
+ let to_enable = self
+ .deferred_weak_dependencies
+ .remove(&(pkg_id, fk, dep_name));
+ // Activate the optional dep.
+ for (dep_pkg_id, deps) in self.deps(pkg_id, fk) {
+ for (dep, dep_fk) in deps {
+ if dep.name_in_toml() != dep_name {
+ continue;
+ }
+ if let Some(to_enable) = &to_enable {
+ for dep_feature in to_enable {
+ log::trace!(
+ "activate deferred {} {} -> {}/{}",
+ pkg_id.name(),
+ fk,
+ dep_name,
+ dep_feature
+ );
+ let fv = FeatureValue::new(*dep_feature);
+ self.activate_fv(dep_pkg_id, dep_fk, &fv)?;
+ }
+ }
+ let fvs = self.fvs_from_dependency(dep_pkg_id, dep);
+ self.activate_pkg(dep_pkg_id, dep_fk, &fvs)?;
+ }
+ }
+ Ok(())
+ }
+
+ /// Activate a feature within a dependency (`dep_name/feat_name` syntax).
+ fn activate_dep_feature(
+ &mut self,
+ pkg_id: PackageId,
+ fk: FeaturesFor,
+ dep_name: InternedString,
+ dep_feature: InternedString,
+ weak: bool,
+ ) -> CargoResult<()> {
+ for (dep_pkg_id, deps) in self.deps(pkg_id, fk) {
+ for (dep, dep_fk) in deps {
+ if dep.name_in_toml() != dep_name {
+ continue;
+ }
+ if dep.is_optional() {
+ let save_for_host = fk.apply_opts(&self.opts);
+ if weak
+ && !self
+ .activated_dependencies
+ .get(&(pkg_id, save_for_host))
+ .map(|deps| deps.contains(&dep_name))
+ .unwrap_or(false)
+ {
+ // This is weak, but not yet activated. Defer in case
+ // something comes along later and enables it.
+ log::trace!(
+ "deferring feature {} {} -> {}/{}",
+ pkg_id.name(),
+ fk,
+ dep_name,
+ dep_feature
+ );
+ self.deferred_weak_dependencies
+ .entry((pkg_id, fk, dep_name))
+ .or_default()
+ .insert(dep_feature);
+ continue;
+ }
+
+ // Activate the dependency on self.
+ let fv = FeatureValue::Dep { dep_name };
+ self.activate_fv(pkg_id, fk, &fv)?;
+ if !weak {
+ // The old behavior before weak dependencies were
+                        // added is to also enable a feature of the same
+ // name.
+ self.activate_rec(pkg_id, fk, dep_name)?;
+ }
+ }
+ // Activate the feature on the dependency.
+ let fv = FeatureValue::new(dep_feature);
+ self.activate_fv(dep_pkg_id, dep_fk, &fv)?;
+ }
+ }
+ Ok(())
+ }
+
+ /// Returns Vec of FeatureValues from a Dependency definition.
+ fn fvs_from_dependency(&self, dep_id: PackageId, dep: &Dependency) -> Vec<FeatureValue> {
+ let summary = self.resolve.summary(dep_id);
+ let feature_map = summary.features();
+ let mut result: Vec<FeatureValue> = dep
+ .features()
+ .iter()
+ .map(|f| FeatureValue::new(*f))
+ .collect();
+ let default = InternedString::new("default");
+ if dep.uses_default_features() && feature_map.contains_key(&default) {
+ result.push(FeatureValue::Feature(default));
+ }
+ result
+ }
+
+ /// Returns Vec of FeatureValues from a set of command-line features.
+ fn fvs_from_requested(
+ &self,
+ pkg_id: PackageId,
+ cli_features: &CliFeatures,
+ ) -> Vec<FeatureValue> {
+ let summary = self.resolve.summary(pkg_id);
+ let feature_map = summary.features();
+
+ let mut result: Vec<FeatureValue> = cli_features.features.iter().cloned().collect();
+ let default = InternedString::new("default");
+ if cli_features.uses_default_features && feature_map.contains_key(&default) {
+ result.push(FeatureValue::Feature(default));
+ }
+
+ if cli_features.all_features {
+ result.extend(feature_map.keys().map(|k| FeatureValue::Feature(*k)))
+ }
+
+ result
+ }
+
+ /// Returns the dependencies for a package, filtering out inactive targets.
+ fn deps(
+ &self,
+ pkg_id: PackageId,
+ fk: FeaturesFor,
+ ) -> Vec<(PackageId, Vec<(&'a Dependency, FeaturesFor)>)> {
+ // Helper for determining if a platform is activated.
+ let platform_activated = |dep: &Dependency| -> bool {
+ // We always count platforms as activated if the target stems from an artifact
+ // dependency's target specification. This triggers in conjunction with
+ // `[target.'cfg(…)'.dependencies]` manifest sections.
+ match (dep.is_build(), fk) {
+ (true, _) | (_, FeaturesFor::HostDep) => {
+ // We always care about build-dependencies, and they are always
+ // Host. If we are computing dependencies "for a build script",
+ // even normal dependencies are host-only.
+ self.target_data
+ .dep_platform_activated(dep, CompileKind::Host)
+ }
+ (_, FeaturesFor::NormalOrDev) => self
+ .requested_targets
+ .iter()
+ .any(|kind| self.target_data.dep_platform_activated(dep, *kind)),
+ (_, FeaturesFor::ArtifactDep(target)) => self
+ .target_data
+ .dep_platform_activated(dep, CompileKind::Target(target)),
+ }
+ };
+ self.resolve
+ .deps(pkg_id)
+ .map(|(dep_id, deps)| {
+ let deps = deps
+ .iter()
+ .filter(|dep| {
+ if dep.platform().is_some()
+ && self.opts.ignore_inactive_targets
+ && !platform_activated(dep)
+ {
+ return false;
+ }
+ if self.opts.decouple_dev_deps && dep.kind() == DepKind::Development {
+ return false;
+ }
+ true
+ })
+ .flat_map(|dep| {
+ // Each `dep`endency can be built for multiple targets. For one, it
+ // may be a library target which is built as initially configured
+ // by `fk`. If it appears as build dependency, it must be built
+ // for the host.
+ //
+ // It may also be an artifact dependency,
+ // which could be built either
+ //
+ // - for a specified (aka 'forced') target, specified by
+                    //     `dep = { …, target = <triple> }`
+ // - as an artifact for use in build dependencies that should
+ // build for whichever `--target`s are specified
+ // - like a library would be built
+ //
+ // Generally, the logic for choosing a target for dependencies is
+ // unaltered and used to determine how to build non-artifacts,
+ // artifacts without a target specification and no library,
+ // or an artifact's library.
+ //
+ // All this may result in a dependency being built multiple times
+ // for various targets which are either specified in the manifest
+ // or on the cargo command-line.
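+ // As a rough illustration only (simplified, using the unstable `bindeps`
+ // artifact-dependency syntax), a forced-target artifact dependency might be
+ // declared as:
+ // `foo = { version = "1", artifact = "bin", target = "x86_64-unknown-linux-gnu" }`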
+ let lib_fk = if fk == FeaturesFor::default() {
+ (self.track_for_host && (dep.is_build() || self.is_proc_macro(dep_id)))
+ .then(|| FeaturesFor::HostDep)
+ .unwrap_or_default()
+ } else {
+ fk
+ };
+
+ // `artifact_target_keys` are produced to fulfil the needs of artifacts that have a target specification.
+ let artifact_target_keys = dep.artifact().map(|artifact| {
+ (
+ artifact.is_lib(),
+ artifact.target().map(|target| match target {
+ ArtifactTarget::Force(target) => {
+ vec![FeaturesFor::ArtifactDep(target)]
+ }
+ ArtifactTarget::BuildDependencyAssumeTarget => self
+ .requested_targets
+ .iter()
+ .map(|kind| match kind {
+ CompileKind::Host => {
+ let host_triple = self.target_data.rustc.host;
+ CompileTarget::new(&host_triple).unwrap()
+ }
+ CompileKind::Target(target) => *target,
+ })
+ .map(FeaturesFor::ArtifactDep)
+ .collect(),
+ }),
+ )
+ });
+
+ let dep_fks = match artifact_target_keys {
+ // The artifact is also a library and does specify custom
+ // targets.
+ // The library's feature key needs to be used alongside
+ // the artifact target keys.
+ Some((is_lib, Some(mut dep_fks))) if is_lib => {
+ dep_fks.push(lib_fk);
+ dep_fks
+ }
+ // The artifact is not a library, but does specify
+ // custom targets.
+ // Use only these targets' feature keys.
+ Some((_, Some(dep_fks))) => dep_fks,
+ // There is no artifact in the current dependency
+ // or there is no target specified on the artifact.
+ // Use the standard feature key without any alteration.
+ Some((_, None)) | None => vec![lib_fk],
+ };
+ dep_fks.into_iter().map(move |dep_fk| (dep, dep_fk))
+ })
+ .collect::<Vec<_>>();
+ (dep_id, deps)
+ })
+ .filter(|(_id, deps)| !deps.is_empty())
+ .collect()
+ }
+
+ /// Compare the activated features to the resolver. Used for testing.
+ fn compare(&self) {
+ let mut found = false;
+ for ((pkg_id, dep_kind), features) in &self.activated_features {
+ let r_features = self.resolve.features(*pkg_id);
+ if !r_features.iter().eq(features.iter()) {
+ crate::drop_eprintln!(
+ self.ws.config(),
+ "{}/{:?} features mismatch\nresolve: {:?}\nnew: {:?}\n",
+ pkg_id,
+ dep_kind,
+ r_features,
+ features
+ );
+ found = true;
+ }
+ }
+ if found {
+ panic!("feature mismatch");
+ }
+ }
+
+ fn is_proc_macro(&self, package_id: PackageId) -> bool {
+ self.package_set
+ .get_one(package_id)
+ .expect("packages downloaded")
+ .proc_macro()
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/resolver/mod.rs b/src/tools/cargo/src/cargo/core/resolver/mod.rs
new file mode 100644
index 000000000..b9c29fb87
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/resolver/mod.rs
@@ -0,0 +1,1152 @@
+//! Resolution of the entire dependency graph for a crate.
+//!
+//! This module implements the core logic in taking the world of crates and
+//! constraints and creating a resolved graph with locked versions for all
+//! crates and their dependencies. This is separate from the registry module
+//! which is more worried about discovering crates from various sources, this
+//! module just uses the Registry trait as a source to learn about crates from.
+//!
+//! Actually solving a constraint graph is an NP-hard problem. This algorithm
+//! is basically a nice heuristic to make sure we get roughly the best answer
+//! most of the time. The constraints that we're working with are:
+//!
+//! 1. Each crate can have any number of dependencies. Each dependency can
+//! declare a version range that it is compatible with.
+//! 2. Crates can be activated with multiple versions (e.g., show up in the
+//! dependency graph twice) so long as each pairwise instance has
+//! semver-incompatible versions.
+//!
+//! The algorithm employed here is fairly simple, we simply do a DFS, activating
+//! the "newest crate" (highest version) first and then going to the next
+//! option. The heuristics we employ are:
+//!
+//! * Never try to activate a crate version which is incompatible. This means we
+//! only try crates which will actually satisfy a dependency and we won't ever
+//! try to activate a crate that's semver compatible with something else
+//! activated (as we're only allowed to have one) nor try to activate a crate
+//! that has the same links attribute as something else
+//! activated.
+//! * Always try to activate the highest version crate first. The default
+//! dependency requirement in Cargo (e.g., when you write `foo = "0.1.2"`) is
+//! semver-compatible, so selecting the highest possible version will
+//! hopefully satisfy as many dependencies at once as possible (see the
+//! illustrative sketch below).
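+//!
+//! As an illustrative sketch only (not code from this module; it uses the
+//! external `semver` crate directly), the default requirement `foo = "0.1.2"`
+//! is the caret requirement `^0.1.2`, and every version inside that range is
+//! considered semver-compatible with the others:
+//!
+//! ```ignore
+//! let req = semver::VersionReq::parse("^0.1.2").unwrap();
+//! assert!(req.matches(&semver::Version::parse("0.1.9").unwrap())); // compatible: cannot coexist
+//! assert!(!req.matches(&semver::Version::parse("0.2.0").unwrap())); // incompatible: may coexist
+//! ```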
+//!
+//! Beyond that, what's implemented below is just a naive backtracking version
+//! which should in theory try all possible combinations of dependencies and
+//! versions to see if one works. The first resolution that works causes
+//! everything to bail out immediately and return success, and only if *nothing*
+//! works do we actually return an error up the stack.
+//!
+//! Resolution is currently performed twice:
+//! 1. With all features enabled (this is what gets saved to `Cargo.lock`)
+//! 2. With only the specific features the user selected on the command-line. Ideally this
+//! run will get removed in the future when transitioning to the new feature resolver.
+//!
+//! A new feature-specific resolver was added in 2020 which adds more sophisticated feature
+//! resolution. It is located in the [`features`] module. The original dependency resolver still
+//! performs feature unification, as it can help reduce the dependencies it has to consider during
+//! resolution (rather than assuming every optional dependency of every package is enabled).
+//! Checking if a feature is enabled must go through the new feature resolver.
+//!
+//! ## Performance
+//!
+//! Note that this is a relatively performance-critical portion of Cargo. The
+//! data that we're processing is proportional to the size of the dependency
+//! graph, which can often be quite large (e.g., take a look at Servo). To make
+//! matters worse, the DFS algorithm we've implemented is inherently quite
+//! inefficient. When we add the requirement of backtracking on top it means
+//! that we're implementing something that probably shouldn't be allocating all
+//! over the place.
+
+use std::collections::{BTreeMap, HashMap, HashSet};
+use std::mem;
+use std::rc::Rc;
+use std::time::{Duration, Instant};
+
+use log::{debug, trace};
+
+use crate::core::PackageIdSpec;
+use crate::core::{Dependency, PackageId, Registry, Summary};
+use crate::util::config::Config;
+use crate::util::errors::CargoResult;
+use crate::util::network::PollExt;
+use crate::util::profile;
+
+use self::context::Context;
+use self::dep_cache::RegistryQueryer;
+use self::features::RequestedFeatures;
+use self::types::{ConflictMap, ConflictReason, DepsFrame};
+use self::types::{FeaturesSet, RcVecIter, RemainingDeps, ResolverProgress};
+
+pub use self::encode::Metadata;
+pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
+pub use self::errors::{ActivateError, ActivateResult, ResolveError};
+pub use self::features::{CliFeatures, ForceAllTargets, HasDevUnits};
+pub use self::resolve::{Resolve, ResolveVersion};
+pub use self::types::{ResolveBehavior, ResolveOpts};
+pub use self::version_prefs::{VersionOrdering, VersionPreferences};
+
+mod conflict_cache;
+mod context;
+mod dep_cache;
+pub(crate) mod encode;
+pub(crate) mod errors;
+pub mod features;
+mod resolve;
+mod types;
+mod version_prefs;
+
+/// Builds the list of all packages required to build the first argument.
+///
+/// * `summaries` - the list of package summaries along with how to resolve
+/// their features. This is a list of all top-level packages that are intended
+/// to be part of the lock file (resolve output). These typically are a list
+/// of all workspace members.
+///
+/// * `replacements` - this is a list of `[replace]` directives found in the
+/// root of the workspace. The list here is a `PackageIdSpec` of what to
+/// replace and a `Dependency` to replace that with. In general it's no longer
+/// recommended to use `[replace]`; use `[patch]` instead, which is supported
+/// elsewhere.
+///
+/// * `registry` - this is the source from which all package summaries are
+/// loaded. It's expected that this is extensively configured ahead of time
+/// and is idempotent with our requests to it (aka returns the same results
+/// for the same query every time). Typically this is an instance of a
+/// `PackageRegistry`.
+///
+/// * `version_prefs` - this represents a preference for some versions over others,
+/// based on the lock file or other reasons such as `[patch]`es.
+///
+/// * `config` - a location to print warnings and such, or `None` if no warnings
+/// should be printed
+///
+/// * `check_public_visible_dependencies` - a flag for whether to enforce the restrictions
+/// introduced in the "public & private dependencies" RFC (1977). The current implementation
+/// makes sure that there is only one version of each name visible to each package.
+///
+/// But there are two stable ways to directly depend on different versions of the same name:
+/// 1. Use the renamed dependencies functionality
+/// 2. Use 'cfg({})' dependencies functionality
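+///
+/// For example (illustrative), option 1 above looks roughly like declaring
+/// `foo_v1 = { package = "foo", version = "1" }` alongside
+/// `foo_v2 = { package = "foo", version = "2" }` in the manifest.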
+///
+/// When we have a decision on how to implement this without breaking existing functionality,
+/// this flag can be removed.
+pub fn resolve(
+ summaries: &[(Summary, ResolveOpts)],
+ replacements: &[(PackageIdSpec, Dependency)],
+ registry: &mut dyn Registry,
+ version_prefs: &VersionPreferences,
+ config: Option<&Config>,
+ check_public_visible_dependencies: bool,
+) -> CargoResult<Resolve> {
+ let _p = profile::start("resolving");
+ let minimal_versions = match config {
+ Some(config) => config.cli_unstable().minimal_versions,
+ None => false,
+ };
+ let direct_minimal_versions = match config {
+ Some(config) => config.cli_unstable().direct_minimal_versions,
+ None => false,
+ };
+ let mut registry =
+ RegistryQueryer::new(registry, replacements, version_prefs, minimal_versions);
+ let cx = loop {
+ let cx = Context::new(check_public_visible_dependencies);
+ let cx = activate_deps_loop(
+ cx,
+ &mut registry,
+ summaries,
+ direct_minimal_versions,
+ config,
+ )?;
+ if registry.reset_pending() {
+ break cx;
+ } else {
+ registry.registry.block_until_ready()?;
+ }
+ };
+
+ let mut cksums = HashMap::new();
+ for (summary, _) in cx.activations.values() {
+ let cksum = summary.checksum().map(|s| s.to_string());
+ cksums.insert(summary.package_id(), cksum);
+ }
+ let graph = cx.graph();
+ let replacements = cx.resolve_replacements(&registry);
+ let features = cx
+ .resolve_features
+ .iter()
+ .map(|(k, v)| (*k, v.iter().cloned().collect()))
+ .collect();
+ let summaries = cx
+ .activations
+ .into_iter()
+ .map(|(_key, (summary, _age))| (summary.package_id(), summary))
+ .collect();
+ let resolve = Resolve::new(
+ graph,
+ replacements,
+ features,
+ cksums,
+ BTreeMap::new(),
+ Vec::new(),
+ ResolveVersion::default(),
+ summaries,
+ );
+
+ check_cycles(&resolve)?;
+ check_duplicate_pkgs_in_lockfile(&resolve)?;
+ trace!("resolved: {:?}", resolve);
+
+ Ok(resolve)
+}
+
+/// Recursively activates the dependencies for `summaries`, in depth-first order,
+/// backtracking across possible candidates for each dependency as necessary.
+///
+/// If all dependencies can be activated and resolved to a version in the
+/// dependency graph, `cx` is returned.
+fn activate_deps_loop(
+ mut cx: Context,
+ registry: &mut RegistryQueryer<'_>,
+ summaries: &[(Summary, ResolveOpts)],
+ direct_minimal_versions: bool,
+ config: Option<&Config>,
+) -> CargoResult<Context> {
+ let mut backtrack_stack = Vec::new();
+ let mut remaining_deps = RemainingDeps::new();
+
+ // `past_conflicting_activations` is a cache of the reasons for each time we
+ // backtrack.
+ let mut past_conflicting_activations = conflict_cache::ConflictCache::new();
+
+ // Activate all the initial summaries to kick off some work.
+ for &(ref summary, ref opts) in summaries {
+ debug!("initial activation: {}", summary.package_id());
+ let res = activate(
+ &mut cx,
+ registry,
+ None,
+ summary.clone(),
+ direct_minimal_versions,
+ opts,
+ );
+ match res {
+ Ok(Some((frame, _))) => remaining_deps.push(frame),
+ Ok(None) => (),
+ Err(ActivateError::Fatal(e)) => return Err(e),
+ Err(ActivateError::Conflict(_, _)) => panic!("bad error from activate"),
+ }
+ }
+
+ let mut printed = ResolverProgress::new();
+
+ // Main resolution loop, this is the workhorse of the resolution algorithm.
+ //
+ // You'll note that a few stacks are maintained on the side, which might
+ // seem odd when this algorithm looks like it could be implemented
+ // recursively. While correct, this is implemented iteratively to avoid
+ // blowing the stack (the recursion depth is proportional to the size of the
+ // input).
+ //
+ // The general sketch of this loop is to run until there are no dependencies
+ // left to activate, and for each dependency to attempt to activate all of
+ // its own dependencies in turn. The `backtrack_stack` is a side table of
+ // backtracking states where if we hit an error we can return to in order to
+ // attempt to continue resolving.
+ while let Some((just_here_for_the_error_messages, frame)) =
+ remaining_deps.pop_most_constrained()
+ {
+ let (mut parent, (mut dep, candidates, mut features)) = frame;
+
+ // If we spend a lot of time here (we shouldn't in most cases) then give
+ // a bit of a visual indicator as to what we're doing.
+ printed.shell_status(config)?;
+
+ trace!(
+ "{}[{}]>{} {} candidates",
+ parent.name(),
+ cx.age,
+ dep.package_name(),
+ candidates.len()
+ );
+
+ let just_here_for_the_error_messages = just_here_for_the_error_messages
+ && past_conflicting_activations
+ .conflicting(&cx, &dep)
+ .is_some();
+
+ let mut remaining_candidates = RemainingCandidates::new(&candidates);
+
+ // `conflicting_activations` stores all the reasons we were unable to
+ // activate candidates. One of these reasons will have to go away for
+ // backtracking to find a place to restart. It is also the list of
+ // things to explain in the error message if we fail to resolve.
+ //
+ // This is a map of package ID to a reason why that package caused a
+ // conflict for us.
+ let mut conflicting_activations = ConflictMap::new();
+
+ // When backtracking we don't fully update `conflicting_activations`
+ // especially for the cases that we didn't make a backtrack frame in the
+ // first place. This `backtracked` var stores whether we are continuing
+ // from a restored backtrack frame so that we can skip caching
+ // `conflicting_activations` in `past_conflicting_activations`
+ let mut backtracked = false;
+
+ loop {
+ let next = remaining_candidates.next(
+ &mut conflicting_activations,
+ &cx,
+ &dep,
+ parent.package_id(),
+ );
+
+ let (candidate, has_another) = next.ok_or(()).or_else(|_| {
+ // If we get here then our `remaining_candidates` was just
+ // exhausted, so `dep` failed to activate.
+ //
+ // It's our job here to backtrack, if possible, and find a
+ // different candidate to activate. If we can't find any
+ // candidates whatsoever then it's time to bail entirely.
+ trace!(
+ "{}[{}]>{} -- no candidates",
+ parent.name(),
+ cx.age,
+ dep.package_name()
+ );
+
+ // Use our list of `conflicting_activations` to add to our
+ // global list of past conflicting activations, effectively
+ // globally poisoning `dep` if `conflicting_activations` ever
+ // shows up again. We'll use the `past_conflicting_activations`
+ // below to determine if a dependency is poisoned and skip as
+ // much work as possible.
+ //
+ // If we're only here for the error messages then there's no
+ // need to try this as this dependency is already known to be
+ // bad.
+ //
+ // As we mentioned above with the `backtracked` variable if this
+ // local is set to `true` then our `conflicting_activations` may
+ // not be right, so we can't push into our global cache.
+ let mut generalize_conflicting_activations = None;
+ if !just_here_for_the_error_messages && !backtracked {
+ past_conflicting_activations.insert(&dep, &conflicting_activations);
+ if let Some(c) = generalize_conflicting(
+ &cx,
+ registry,
+ &mut past_conflicting_activations,
+ &parent,
+ &dep,
+ &conflicting_activations,
+ ) {
+ generalize_conflicting_activations = Some(c);
+ }
+ }
+
+ match find_candidate(
+ &cx,
+ &mut backtrack_stack,
+ &parent,
+ backtracked,
+ generalize_conflicting_activations
+ .as_ref()
+ .unwrap_or(&conflicting_activations),
+ ) {
+ Some((candidate, has_another, frame)) => {
+ // Reset all of our local variables used with the
+ // contents of `frame` to complete our backtrack.
+ cx = frame.context;
+ remaining_deps = frame.remaining_deps;
+ remaining_candidates = frame.remaining_candidates;
+ parent = frame.parent;
+ dep = frame.dep;
+ features = frame.features;
+ conflicting_activations = frame.conflicting_activations;
+ backtracked = true;
+ Ok((candidate, has_another))
+ }
+ None => {
+ debug!("no candidates found");
+ Err(errors::activation_error(
+ &cx,
+ registry.registry,
+ &parent,
+ &dep,
+ &conflicting_activations,
+ &candidates,
+ config,
+ ))
+ }
+ }
+ })?;
+
+ // If we're only here for the error messages then we know that this
+ // activation will fail one way or another. To that end if we've got
+ // more candidates we want to fast-forward to the last one as
+ // otherwise we'll just backtrack here anyway (helping us to skip
+ // some work).
+ if just_here_for_the_error_messages && !backtracked && has_another {
+ continue;
+ }
+
+ // We have a `candidate`. Create a `BacktrackFrame` so we can add it
+ // to the `backtrack_stack` later if activation succeeds.
+ //
+ // Note that if we don't actually have another candidate then there
+ // will be nothing to backtrack to so we skip construction of the
+ // frame. This is a relatively important optimization as a number of
+ // the `clone` calls below can be quite expensive, so we avoid them
+ // if we can.
+ let backtrack = if has_another {
+ Some(BacktrackFrame {
+ context: Context::clone(&cx),
+ remaining_deps: remaining_deps.clone(),
+ remaining_candidates: remaining_candidates.clone(),
+ parent: Summary::clone(&parent),
+ dep: Dependency::clone(&dep),
+ features: Rc::clone(&features),
+ conflicting_activations: conflicting_activations.clone(),
+ })
+ } else {
+ None
+ };
+
+ let pid = candidate.package_id();
+ let opts = ResolveOpts {
+ dev_deps: false,
+ features: RequestedFeatures::DepFeatures {
+ features: Rc::clone(&features),
+ uses_default_features: dep.uses_default_features(),
+ },
+ };
+ trace!(
+ "{}[{}]>{} trying {}",
+ parent.name(),
+ cx.age,
+ dep.package_name(),
+ candidate.version()
+ );
+ let direct_minimal_version = false; // this is an indirect dependency
+ let res = activate(
+ &mut cx,
+ registry,
+ Some((&parent, &dep)),
+ candidate,
+ direct_minimal_version,
+ &opts,
+ );
+
+ let successfully_activated = match res {
+ // Success! We've now activated our `candidate` in our context
+ // and we're almost ready to move on. We may want to scrap this
+ // frame in the end if it looks like it's not going to end well,
+ // so figure that out here.
+ Ok(Some((mut frame, dur))) => {
+ printed.elapsed(dur);
+
+ // Our `frame` here is a new package with its own list of
+ // dependencies. Do a sanity check here of all those
+ // dependencies by cross-referencing our global
+ // `past_conflicting_activations`. Recall that map is a
+ // global cache which lists sets of packages where, when
+ // activated, the dependency is unresolvable.
+ //
+ // If any of our frame's dependencies fit in that bucket,
+ // aka known unresolvable, then we extend our own set of
+ // conflicting activations with theirs. We can do this
+ // because the set of conflicts we found implies the
+ // dependency can't be activated which implies that we
+ // ourselves can't be activated, so we know that they
+ // conflict with us.
+ let mut has_past_conflicting_dep = just_here_for_the_error_messages;
+ if !has_past_conflicting_dep {
+ if let Some(conflicting) = frame
+ .remaining_siblings
+ .clone()
+ .filter_map(|(ref new_dep, _, _)| {
+ past_conflicting_activations.conflicting(&cx, new_dep)
+ })
+ .next()
+ {
+ // If one of our deps is known unresolvable
+ // then we will not succeed.
+ // However, if we are part of the reason that
+ // one of our deps conflicts, then
+ // we can make a stronger statement
+ // because we will definitely be activated when
+ // we try our dep.
+ conflicting_activations.extend(
+ conflicting
+ .iter()
+ .filter(|&(p, _)| p != &pid)
+ .map(|(&p, r)| (p, r.clone())),
+ );
+
+ has_past_conflicting_dep = true;
+ }
+ }
+ // If any of `remaining_deps` are known unresolvable with
+ // us activated, then we extend our own set of
+ // conflicting activations with theirs and with that dep's parent. We can
+ // do this because the set of conflicts we found implies the
+ // dependency can't be activated, which implies that we
+ // ourselves are incompatible with that dep, so we know that the dep's
+ // parent conflicts with us.
+ if !has_past_conflicting_dep {
+ if let Some(known_related_bad_deps) =
+ past_conflicting_activations.dependencies_conflicting_with(pid)
+ {
+ if let Some((other_parent, conflict)) = remaining_deps
+ .iter()
+ // for deps related to us
+ .filter(|&(_, ref other_dep)| {
+ known_related_bad_deps.contains(other_dep)
+ })
+ .filter_map(|(other_parent, other_dep)| {
+ past_conflicting_activations
+ .find_conflicting(&cx, &other_dep, Some(pid))
+ .map(|con| (other_parent, con))
+ })
+ .next()
+ {
+ let rel = conflict.get(&pid).unwrap().clone();
+
+ // The conflict we found is
+ // "other dep will not succeed if we are activated."
+ // We want to add
+ // "our dep will not succeed if other dep is in remaining_deps"
+ // but that is not how the cache is set up.
+ // So we add the less general but much faster,
+ // "our dep will not succeed if other dep's parent is activated".
+ conflicting_activations.extend(
+ conflict
+ .iter()
+ .filter(|&(p, _)| p != &pid)
+ .map(|(&p, r)| (p, r.clone())),
+ );
+ conflicting_activations.insert(other_parent, rel);
+ has_past_conflicting_dep = true;
+ }
+ }
+ }
+
+ // Ok, if we're in a "known failure" state for this frame, we
+ // may want to skip it altogether though. We don't want to
+ // skip it though in the case that we're displaying error
+ // messages to the user!
+ //
+ // Here we need to figure out whether the user would notice if we
+ // skipped this candidate (i.e., it's known to fail, aka it has a
+ // conflicting dep, and we're the last candidate). If we're
+ // here for the error messages, we can't skip it (but we can
+ // prune extra work). If we don't have any candidates in our
+ // backtrack stack then we're the last line of defense, so
+ // we'll want to present an error message for sure.
+ let activate_for_error_message = has_past_conflicting_dep && !has_another && {
+ just_here_for_the_error_messages || {
+ find_candidate(
+ &cx,
+ &mut backtrack_stack.clone(),
+ &parent,
+ backtracked,
+ &conflicting_activations,
+ )
+ .is_none()
+ }
+ };
+
+ // If we're only here for the error messages then we know
+ // one of our candidate deps will fail, meaning we will
+ // fail and that none of the backtrack frames will find a
+ // candidate that will help. Consequently let's clean up the
+ // no longer needed backtrack frames.
+ if activate_for_error_message {
+ backtrack_stack.clear();
+ }
+
+ // If we don't know for a fact that we'll fail or if we're
+ // just here for the error message then we push this frame
+ // onto our list of to-be-resolved, which will generate more
+ // work for us later on.
+ //
+ // Otherwise we're guaranteed to fail and were not here for
+ // error messages, so we skip work and don't push anything
+ // onto our stack.
+ frame.just_for_error_messages = has_past_conflicting_dep;
+ if !has_past_conflicting_dep || activate_for_error_message {
+ remaining_deps.push(frame);
+ true
+ } else {
+ trace!(
+ "{}[{}]>{} skipping {} ",
+ parent.name(),
+ cx.age,
+ dep.package_name(),
+ pid.version()
+ );
+ false
+ }
+ }
+
+ // This candidate's already activated, so there's no extra work
+ // for us to do. Let's keep going.
+ Ok(None) => true,
+
+ // We failed with a super fatal error (like a network error), so
+ // bail out as quickly as possible as we can't reliably
+ // backtrack from errors like these
+ Err(ActivateError::Fatal(e)) => return Err(e),
+
+ // We failed due to a bland conflict, bah! Record this in our
+ // frame's list of conflicting activations as to why this
+ // candidate failed, and then move on.
+ Err(ActivateError::Conflict(id, reason)) => {
+ conflicting_activations.insert(id, reason);
+ false
+ }
+ };
+
+ // If we've successfully activated then save off the backtrack frame
+ // if one was created, and otherwise break out of the inner
+ // activation loop as we're ready to move to the next dependency
+ if successfully_activated {
+ backtrack_stack.extend(backtrack);
+ break;
+ }
+
+ // We've failed to activate this dependency, oh dear! Our call to
+ // `activate` above may have altered our `cx` local variable, so
+ // restore it back if we've got a backtrack frame.
+ //
+ // If we don't have a backtrack frame then we're just using the `cx`
+ // for error messages anyway so we can live with a little
+ // imprecision.
+ if let Some(b) = backtrack {
+ cx = b.context;
+ }
+ }
+
+ // Ok phew, that loop was a big one! If we've broken out then we've
+ // successfully activated a candidate. Our stacks are all in place that
+ // we're ready to move on to the next dependency that needs activation,
+ // so loop back to the top of the function here.
+ }
+
+ Ok(cx)
+}
+
+/// Attempts to activate the summary `candidate` in the context `cx`.
+///
+/// This function will pull dependency summaries from the registry provided, and
+/// the dependencies of the package will be determined by the `opts` provided.
+/// If `candidate` was activated, this function returns the dependency frame to
+/// iterate through next.
+fn activate(
+ cx: &mut Context,
+ registry: &mut RegistryQueryer<'_>,
+ parent: Option<(&Summary, &Dependency)>,
+ candidate: Summary,
+ first_minimal_version: bool,
+ opts: &ResolveOpts,
+) -> ActivateResult<Option<(DepsFrame, Duration)>> {
+ let candidate_pid = candidate.package_id();
+ cx.age += 1;
+ if let Some((parent, dep)) = parent {
+ let parent_pid = parent.package_id();
+ // add an edge from candidate to parent in the parents graph
+ cx.parents
+ .link(candidate_pid, parent_pid)
+ // and associate dep with that edge
+ .insert(dep.clone());
+ if let Some(public_dependency) = cx.public_dependency.as_mut() {
+ public_dependency.add_edge(
+ candidate_pid,
+ parent_pid,
+ dep.is_public(),
+ cx.age,
+ &cx.parents,
+ );
+ }
+ }
+
+ let activated = cx.flag_activated(&candidate, opts, parent)?;
+
+ let candidate = match registry.replacement_summary(candidate_pid) {
+ Some(replace) => {
+ // Note the `None` for parent here since `[replace]` is a bit wonky
+ // and doesn't activate the same things that `[patch]` typically
+ // does. TBH it basically causes panics in the test suite if
+ // `parent` is passed through here, and `[replace]` is otherwise
+ // on life support, so it's not critical to fix bugs here anyway.
+ if cx.flag_activated(replace, opts, None)? && activated {
+ return Ok(None);
+ }
+ trace!(
+ "activating {} (replacing {})",
+ replace.package_id(),
+ candidate_pid
+ );
+ replace.clone()
+ }
+ None => {
+ if activated {
+ return Ok(None);
+ }
+ trace!("activating {}", candidate_pid);
+ candidate
+ }
+ };
+
+ let now = Instant::now();
+ let (used_features, deps) = &*registry.build_deps(
+ cx,
+ parent.map(|p| p.0.package_id()),
+ &candidate,
+ opts,
+ first_minimal_version,
+ )?;
+
+ // Record what list of features is active for this package.
+ if !used_features.is_empty() {
+ Rc::make_mut(
+ cx.resolve_features
+ .entry(candidate.package_id())
+ .or_insert_with(Rc::default),
+ )
+ .extend(used_features);
+ }
+
+ let frame = DepsFrame {
+ parent: candidate,
+ just_for_error_messages: false,
+ remaining_siblings: RcVecIter::new(Rc::clone(deps)),
+ };
+ Ok(Some((frame, now.elapsed())))
+}
+
+#[derive(Clone)]
+struct BacktrackFrame {
+ context: Context,
+ remaining_deps: RemainingDeps,
+ remaining_candidates: RemainingCandidates,
+ parent: Summary,
+ dep: Dependency,
+ features: FeaturesSet,
+ conflicting_activations: ConflictMap,
+}
+
+/// A helper "iterator" used to extract candidates within a current `Context` of
+/// a dependency graph.
+///
+/// This struct doesn't literally implement the `Iterator` trait (requires a few
+/// more inputs) but in general acts like one. Each `RemainingCandidates` is
+/// created with a list of candidates to choose from. When attempting to iterate
+/// over the list of candidates only *valid* candidates are returned. Validity
+/// is defined within a `Context`.
+///
+/// Candidates passed to `new` may not be returned from `next` as they could be
+/// filtered out, and as they are filtered the causes will be added to `conflicting_prev_active`.
+#[derive(Clone)]
+struct RemainingCandidates {
+ remaining: RcVecIter<Summary>,
+ // This is an inlined peekable generator
+ has_another: Option<Summary>,
+}
+
+impl RemainingCandidates {
+ fn new(candidates: &Rc<Vec<Summary>>) -> RemainingCandidates {
+ RemainingCandidates {
+ remaining: RcVecIter::new(Rc::clone(candidates)),
+ has_another: None,
+ }
+ }
+
+ /// Attempts to find another candidate to check from this list.
+ ///
+ /// This method will attempt to move this iterator forward, returning a
+ /// candidate that's possible to activate. The `cx` argument is the current
+ /// context which determines validity for candidates returned, and the `dep`
+ /// is the dependency listing that we're activating for.
+ ///
+ /// If successful a `(Candidate, bool)` pair will be returned. The
+ /// `Candidate` is the candidate to attempt to activate, and the `bool` is
+/// an indicator of whether there are remaining candidates to try or if
+ /// we've reached the end of iteration.
+ ///
+/// If we've reached the end of the iterator here then `None` will be
+/// returned. By that point `conflicting_prev_active` will have been populated
+/// with a package ID and conflict reason for each candidate that was filtered
+/// out from the original list.
+ fn next(
+ &mut self,
+ conflicting_prev_active: &mut ConflictMap,
+ cx: &Context,
+ dep: &Dependency,
+ parent: PackageId,
+ ) -> Option<(Summary, bool)> {
+ for b in self.remaining.by_ref() {
+ let b_id = b.package_id();
+ // The `links` key in the manifest dictates that there's only one
+ // package in a dependency graph, globally, with that particular
+ // `links` key. If this candidate links to something that's already
+ // linked to by a different package then we've gotta skip this.
+ if let Some(link) = b.links() {
+ if let Some(&a) = cx.links.get(&link) {
+ if a != b_id {
+ conflicting_prev_active
+ .entry(a)
+ .or_insert_with(|| ConflictReason::Links(link));
+ continue;
+ }
+ }
+ }
+
+ // Otherwise the condition for being a valid candidate relies on
+ // semver. Cargo dictates that you can't duplicate multiple
+ // semver-compatible versions of a crate. For example we can't
+ // simultaneously activate `foo 1.0.2` and `foo 1.2.0`. We can,
+ // however, activate `1.0.2` and `2.0.0`.
+ //
+ // Here we throw out our candidate if it's *compatible*, yet not
+ // equal, to all previously activated versions.
+ if let Some((a, _)) = cx.activations.get(&b_id.as_activations_key()) {
+ if *a != b {
+ conflicting_prev_active
+ .entry(a.package_id())
+ .or_insert(ConflictReason::Semver);
+ continue;
+ }
+ }
+ // We may still have to reject due to a public dependency conflict. If any of our
+ // ancestors that can see us already knows about a different crate with this name, then
+ // we have to reject this candidate. Additionally, this candidate may already have been
+ // activated and have public dependants of its own,
+ // all of which also need to be checked the same way.
+ if let Some(public_dependency) = cx.public_dependency.as_ref() {
+ if let Err(((c1, c2), c3)) =
+ public_dependency.can_add_edge(b_id, parent, dep.is_public(), &cx.parents)
+ {
+ conflicting_prev_active.insert(c1.0, c1.1);
+ conflicting_prev_active.insert(c2.0, c2.1);
+ if let Some(c3) = c3 {
+ conflicting_prev_active.insert(c3.0, c3.1);
+ }
+ continue;
+ }
+ }
+
+ // Well if we made it this far then we've got a valid dependency. We
+ // want this iterator to be inherently "peekable" so we don't
+ // necessarily return the item just yet. Instead we stash it away to
+ // get returned later, and if we replaced something then that was
+ // actually the candidate to try first so we return that.
+ if let Some(r) = mem::replace(&mut self.has_another, Some(b)) {
+ return Some((r, true));
+ }
+ }
+
+ // Alright we've entirely exhausted our list of candidates. If we've got
+ // something stashed away return that here (also indicating that there's
+ // nothing else).
+ self.has_another.take().map(|r| (r, false))
+ }
+}
+
+/// Attempts to find a new conflict that allows a better `find_candidate` than the input one.
+/// It will add the new conflict to the cache if one is found.
+fn generalize_conflicting(
+ cx: &Context,
+ registry: &mut RegistryQueryer<'_>,
+ past_conflicting_activations: &mut conflict_cache::ConflictCache,
+ parent: &Summary,
+ dep: &Dependency,
+ conflicting_activations: &ConflictMap,
+) -> Option<ConflictMap> {
+ // We need to determine the `ContextAge` that this `conflicting_activations` will jump to, and why.
+ let (backtrack_critical_age, backtrack_critical_id) = shortcircuit_max(
+ conflicting_activations
+ .keys()
+ .map(|&c| cx.is_active(c).map(|a| (a, c))),
+ )?;
+ let backtrack_critical_reason: ConflictReason =
+ conflicting_activations[&backtrack_critical_id].clone();
+
+ if backtrack_critical_reason.is_public_dependency() {
+ return None;
+ }
+
+ if cx
+ .parents
+ .is_path_from_to(&parent.package_id(), &backtrack_critical_id)
+ {
+ // We are a descendant of the trigger of the problem.
+ // The best generalization of this is to let things bubble up
+ // and let `backtrack_critical_id` figure this out.
+ return None;
+ }
+ // What parents does that critical activation have?
+ for (critical_parent, critical_parents_deps) in
+ cx.parents.edges(&backtrack_critical_id).filter(|(p, _)| {
+ // it will only help backjump further if it is older than the critical_age
+ cx.is_active(**p).expect("parent not currently active!?") < backtrack_critical_age
+ })
+ {
+ for critical_parents_dep in critical_parents_deps.iter() {
+ // We only want `first_minimal_version=true` for direct dependencies of workspace
+ // members which isn't the case here as this has a `parent`
+ let first_minimal_version = false;
+ // A dep is equivalent to one of the things it can resolve to.
+ // Thus, if all the things it can resolve to have already been determined
+ // to be conflicting, then we can just say that we conflict with the parent.
+ if let Some(others) = registry
+ .query(critical_parents_dep, first_minimal_version)
+ .expect("an already used dep now error!?")
+ .expect("an already used dep now pending!?")
+ .iter()
+ .rev() // the last one to be tried is the least likely to be in the cache, so start with that.
+ .map(|other| {
+ past_conflicting_activations
+ .find(
+ dep,
+ &|id| {
+ if id == other.package_id() {
+ // we are imagining that we used other instead
+ Some(backtrack_critical_age)
+ } else {
+ cx.is_active(id)
+ }
+ },
+ Some(other.package_id()),
+ // we only care about things that are newer than critical_age
+ backtrack_critical_age,
+ )
+ .map(|con| (other.package_id(), con))
+ })
+ .collect::<Option<Vec<(PackageId, &ConflictMap)>>>()
+ {
+ let mut con = conflicting_activations.clone();
+ // It is always valid to combine previously inserted conflicts.
+ // A and B are each known bad states that can never be activated.
+ // A + B is redundant, but it also can't be activated: if
+ // A + B were active then A would be active, and we know that is not ok.
+ for (_, other) in &others {
+ con.extend(other.iter().map(|(&id, re)| (id, re.clone())));
+ }
+ // Now that we have this combined conflict, we can do a substitution:
+ // A dep is equivalent to one of the things it can resolve to.
+ // So we can remove all the things that it resolves to and replace with the parent.
+ for (other_id, _) in &others {
+ con.remove(other_id);
+ }
+ con.insert(*critical_parent, backtrack_critical_reason);
+
+ if cfg!(debug_assertions) {
+ // the entire point is to find an older conflict, so let's make sure we did
+ let new_age = con
+ .keys()
+ .map(|&c| cx.is_active(c).expect("not currently active!?"))
+ .max()
+ .unwrap();
+ assert!(
+ new_age < backtrack_critical_age,
+ "new_age {} < backtrack_critical_age {}",
+ new_age,
+ backtrack_critical_age
+ );
+ }
+ past_conflicting_activations.insert(dep, &con);
+ return Some(con);
+ }
+ }
+ }
+ None
+}
+
+/// Returns `Some` with the largest item in the iterator.
+/// Returns `None` if any of the items are `None` or the iterator is empty.
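+///
+/// For illustration (hypothetical values): `shortcircuit_max([Some(1), Some(3)].into_iter())`
+/// is `Some(3)`, while `shortcircuit_max([Some(1), None].into_iter())` is `None`.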
+fn shortcircuit_max<I: Ord>(iter: impl Iterator<Item = Option<I>>) -> Option<I> {
+ let mut out = None;
+ for i in iter {
+ if i.is_none() {
+ return None;
+ }
+ out = std::cmp::max(out, i);
+ }
+ out
+}
+
+/// Looks through the states in `backtrack_stack` for dependencies with
+/// remaining candidates. For each one, also checks if rolling back
+/// could change the outcome of the failed resolution that caused backtracking
+/// in the first place. Namely, if we've backtracked past the parent of the
+/// failed dep, or any of the packages flagged as giving us trouble in
+/// `conflicting_activations`.
+///
+/// Read <https://github.com/rust-lang/cargo/pull/4834>
+/// for several more detailed explanations of the logic here.
+fn find_candidate(
+ cx: &Context,
+ backtrack_stack: &mut Vec<BacktrackFrame>,
+ parent: &Summary,
+ backtracked: bool,
+ conflicting_activations: &ConflictMap,
+) -> Option<(Summary, bool, BacktrackFrame)> {
+ // When we're calling this method we know that `parent` failed to
+ // activate. That means that some dependency failed to get resolved for
+ // whatever reason. Normally, that means that all of those reasons
+ // (plus maybe some extras) are listed in `conflicting_activations`.
+ //
+ // The abnormal situations are things that do not put all of the reasons in `conflicting_activations`:
+ // If we backtracked we do not know how our `conflicting_activations` related to
+ // the cause of that backtrack, so we do not update it.
+ let age = if !backtracked {
+ // we don't have abnormal situations. So we can ask `cx` for how far back we need to go.
+ // If the `conflicting_activations` does not apply to `cx`,
+ // we will just fall back to laboriously trying all possibilities, which
+ // will give us the correct answer.
+ cx.is_conflicting(Some(parent.package_id()), conflicting_activations)
+ } else {
+ None
+ };
+
+ while let Some(mut frame) = backtrack_stack.pop() {
+ let next = frame.remaining_candidates.next(
+ &mut frame.conflicting_activations,
+ &frame.context,
+ &frame.dep,
+ frame.parent.package_id(),
+ );
+ let (candidate, has_another) = match next {
+ Some(pair) => pair,
+ None => continue,
+ };
+
+ // If all members of `conflicting_activations` are still
+ // active in this back up we know that we're guaranteed to not actually
+ // make any progress. As a result if we hit this condition we can
+ // completely skip this backtrack frame and move on to the next.
+ if let Some(age) = age {
+ if frame.context.age >= age {
+ trace!(
+ "{} = \"{}\" skip as not solving {}: {:?}",
+ frame.dep.package_name(),
+ frame.dep.version_req(),
+ parent.package_id(),
+ conflicting_activations
+ );
+ // Above we used `cx` to determine that this is still going to be conflicting,
+ // but let's just double-check.
+ debug_assert!(
+ frame
+ .context
+ .is_conflicting(Some(parent.package_id()), conflicting_activations)
+ == Some(age)
+ );
+ continue;
+ } else {
+ // Above we used `cx` to determine that this is not going to be conflicting,
+ // but let's just double-check.
+ debug_assert!(frame
+ .context
+ .is_conflicting(Some(parent.package_id()), conflicting_activations)
+ .is_none());
+ }
+ }
+
+ return Some((candidate, has_another, frame));
+ }
+ None
+}
+
+fn check_cycles(resolve: &Resolve) -> CargoResult<()> {
+ // Create a simple graph representation alternative of `resolve` which has
+ // only the edges we care about. Note that `BTree*` is used to produce
+ // deterministic error messages here. Also note that the main reason for
+ // this copy of the resolve graph is to avoid edges between a crate and its
+ // dev-dependency since that doesn't count for cycles.
+ let mut graph = BTreeMap::new();
+ for id in resolve.iter() {
+ let map = graph.entry(id).or_insert_with(BTreeMap::new);
+ for (dep_id, listings) in resolve.deps_not_replaced(id) {
+ let transitive_dep = listings.iter().find(|d| d.is_transitive());
+
+ if let Some(transitive_dep) = transitive_dep.cloned() {
+ map.insert(dep_id, transitive_dep.clone());
+ resolve
+ .replacement(dep_id)
+ .map(|p| map.insert(p, transitive_dep));
+ }
+ }
+ }
+
+ // After we have the `graph` that we care about, perform a simple cycle
+ // check by visiting all nodes. We visit each node at most once and we keep
+ // track of the path through the graph as we walk it. If we walk onto the
+ // same node twice that's a cycle.
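+ //
+ // For example (illustrative), `a -> b -> a` through normal or build
+ // dependencies is reported as a cycle, while `a -> b` plus a
+ // dev-dependency from `b` back to `a` is not, since dev-dependency
+ // edges were dropped above.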
+ let mut checked = HashSet::new();
+ let mut path = Vec::new();
+ let mut visited = HashSet::new();
+ for pkg in graph.keys() {
+ if !checked.contains(pkg) {
+ visit(&graph, *pkg, &mut visited, &mut path, &mut checked)?
+ }
+ }
+ return Ok(());
+
+ fn visit(
+ graph: &BTreeMap<PackageId, BTreeMap<PackageId, Dependency>>,
+ id: PackageId,
+ visited: &mut HashSet<PackageId>,
+ path: &mut Vec<PackageId>,
+ checked: &mut HashSet<PackageId>,
+ ) -> CargoResult<()> {
+ path.push(id);
+ if !visited.insert(id) {
+ let iter = path.iter().rev().skip(1).scan(id, |child, parent| {
+ let dep = graph.get(parent).and_then(|adjacent| adjacent.get(child));
+ *child = *parent;
+ Some((parent, dep))
+ });
+ let iter = std::iter::once((&id, None)).chain(iter);
+ anyhow::bail!(
+ "cyclic package dependency: package `{}` depends on itself. Cycle:\n{}",
+ id,
+ errors::describe_path(iter),
+ );
+ }
+
+ if checked.insert(id) {
+ for dep in graph[&id].keys() {
+ visit(graph, *dep, visited, path, checked)?;
+ }
+ }
+
+ path.pop();
+ visited.remove(&id);
+ Ok(())
+ }
+}
+
+/// Checks that packages are unique when written to lock file.
+///
+/// When writing package IDs to the lock file, we apply a lossy encoding. In
+/// particular, we don't store paths of path dependencies. That means that
+/// *different* packages may collide in the lock file, hence this check.
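+///
+/// For example (illustrative), two distinct path dependencies that are both
+/// named `foo` at version `0.1.0` would encode to the same lock-file entry.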
+fn check_duplicate_pkgs_in_lockfile(resolve: &Resolve) -> CargoResult<()> {
+ let mut unique_pkg_ids = HashMap::new();
+ let state = encode::EncodeState::new(resolve);
+ for pkg_id in resolve.iter() {
+ let encodable_pkd_id = encode::encodable_package_id(pkg_id, &state, resolve.version());
+ if let Some(prev_pkg_id) = unique_pkg_ids.insert(encodable_pkd_id, pkg_id) {
+ anyhow::bail!(
+ "package collision in the lockfile: packages {} and {} are different, \
+ but only one can be written to lockfile unambiguously",
+ prev_pkg_id,
+ pkg_id
+ )
+ }
+ }
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/core/resolver/resolve.rs b/src/tools/cargo/src/cargo/core/resolver/resolve.rs
new file mode 100644
index 000000000..6ab957e92
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/resolver/resolve.rs
@@ -0,0 +1,407 @@
+use super::encode::Metadata;
+use crate::core::dependency::DepKind;
+use crate::core::{Dependency, PackageId, PackageIdSpec, Summary, Target};
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+use crate::util::Graph;
+use std::borrow::Borrow;
+use std::collections::{HashMap, HashSet};
+use std::fmt;
+
+/// Represents a fully-resolved package dependency graph. Each node in the graph
+/// is a package and edges represent dependencies between packages.
+///
+/// Each instance of `Resolve` also understands the full set of features used
+/// for each package.
+pub struct Resolve {
+ /// A graph, whose vertices are packages and edges are dependency specifications
+ /// from `Cargo.toml`. We need a `HashSet` here because the same package
+ /// might be present in both `[dependencies]` and `[build-dependencies]`.
+ graph: Graph<PackageId, HashSet<Dependency>>,
+ /// Replacements from the `[replace]` table.
+ replacements: HashMap<PackageId, PackageId>,
+ /// Inverted version of `replacements`.
+ reverse_replacements: HashMap<PackageId, PackageId>,
+ /// Features enabled for a given package.
+ features: HashMap<PackageId, Vec<InternedString>>,
+ /// Checksum for each package. A SHA256 hash of the `.crate` file used to
+ /// validate the correct crate file is used. This is `None` for sources
+ /// that do not use `.crate` files, like path or git dependencies.
+ checksums: HashMap<PackageId, Option<String>>,
+ /// "Unknown" metadata. This is a collection of extra, unrecognized data
+ /// found in the `[metadata]` section of `Cargo.lock`, preserved for
+ /// forwards compatibility.
+ metadata: Metadata,
+ /// `[patch]` entries that did not match anything, preserved in
+ /// `Cargo.lock` as the `[[patch.unused]]` table array. Tracking unused
+ /// patches helps prevent Cargo from being forced to re-update the
+ /// registry every time it runs, and keeps the resolve in a locked state
+ /// so it doesn't re-resolve the unused entries.
+ unused_patches: Vec<PackageId>,
+ /// A map from packages to a set of their public dependencies
+ public_dependencies: HashMap<PackageId, HashSet<PackageId>>,
+ /// Version of the `Cargo.lock` format, see
+ /// `cargo::core::resolver::encode` for more.
+ version: ResolveVersion,
+ summaries: HashMap<PackageId, Summary>,
+}
+
+/// A version to indicate how a `Cargo.lock` should be serialized.
+///
+/// When creating a new lockfile, the version with `#[default]` is used.
+/// If an old version of lockfile already exists, it will stay as-is.
+///
+/// It's important that, if a new version is added, this is not updated
+/// until *at least* support for that version is in a stable release of Rust.
+///
+/// This resolve version will be used for all new lock files, for example
+/// those generated by `cargo update` (update everything) or building after
+/// a `cargo new` (where no lock file previously existed). This is also used
+/// for *updated* lock files such as when a dependency is added or when a
+/// version requirement changes. In this situation Cargo's updating the lock
+/// file anyway so it takes the opportunity to bump the lock file version
+/// forward.
+///
+/// It's theorized that we can add more here over time to track larger changes
+/// to the `Cargo.lock` format, but we've yet to see how that strategy pans out.
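+///
+/// As an illustration, a V3 lock file carries an explicit `version = 3` field
+/// near the top of the file, whereas V1 and V2 lock files have no such marker
+/// and are distinguished by their encoding alone.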
+#[derive(Default, PartialEq, Eq, Clone, Copy, Debug, PartialOrd, Ord)]
+pub enum ResolveVersion {
+ /// Historical baseline for when this abstraction was added.
+ V1,
+ /// A more compact format, more amenable to avoiding source-control merge
+ /// conflicts. The `dependencies` arrays are compressed and checksums are
+ /// listed inline. Introduced in 2019 in version 1.38. New lockfiles use
+ /// V2 by default from 1.41 to 1.52.
+ V2,
+ /// A format that explicitly lists a `version` at the top of the file as
+ /// well as changing how git dependencies are encoded. Dependencies with
+ /// `branch = "master"` are no longer encoded the same way as those without
+ /// branch specifiers. Introduced in 2020 in version 1.47. New lockfiles use
+ /// V3 by default starting in 1.53.
+ #[default]
+ V3,
+}
+
+impl Resolve {
+ pub fn new(
+ graph: Graph<PackageId, HashSet<Dependency>>,
+ replacements: HashMap<PackageId, PackageId>,
+ features: HashMap<PackageId, Vec<InternedString>>,
+ checksums: HashMap<PackageId, Option<String>>,
+ metadata: Metadata,
+ unused_patches: Vec<PackageId>,
+ version: ResolveVersion,
+ summaries: HashMap<PackageId, Summary>,
+ ) -> Resolve {
+ let reverse_replacements = replacements.iter().map(|(&p, &r)| (r, p)).collect();
+ let public_dependencies = graph
+ .iter()
+ .map(|p| {
+ let public_deps = graph
+ .edges(p)
+ .filter(|(_, deps)| {
+ deps.iter()
+ .any(|d| d.kind() == DepKind::Normal && d.is_public())
+ })
+ .map(|(dep_package, _)| *dep_package)
+ .collect::<HashSet<PackageId>>();
+
+ (*p, public_deps)
+ })
+ .collect();
+
+ Resolve {
+ graph,
+ replacements,
+ features,
+ checksums,
+ metadata,
+ unused_patches,
+ reverse_replacements,
+ public_dependencies,
+ version,
+ summaries,
+ }
+ }
+
+ /// Resolves one of the paths from the given dependent package up to
+ /// the root.
+ pub fn path_to_top<'a>(
+ &'a self,
+ pkg: &'a PackageId,
+ ) -> Vec<(&'a PackageId, Option<&'a HashSet<Dependency>>)> {
+ self.graph.path_to_top(pkg)
+ }
+
+ pub fn register_used_patches(&mut self, patches: &[Summary]) {
+ for summary in patches {
+ if !self.graph.contains(&summary.package_id()) {
+ self.unused_patches.push(summary.package_id())
+ };
+ }
+ }
+
+ pub fn merge_from(&mut self, previous: &Resolve) -> CargoResult<()> {
+ // Given a previous instance of resolve, it should be forbidden to ever
+ // have checksums which *differ*. If the same package ID has differing
+ // checksums, then something has gone wrong such as:
+ //
+ // * Something got seriously corrupted
+ // * A "mirror" isn't actually a mirror as some changes were made
+ // * A replacement source wasn't actually a replacement, some changes
+ // were made
+ //
+ // In all of these cases, we want to report an error to indicate that
+ // something is awry. Normal execution (esp just using crates.io) should
+ // never run into this.
+ for (id, cksum) in previous.checksums.iter() {
+ if let Some(mine) = self.checksums.get(id) {
+ if mine == cksum {
+ continue;
+ }
+
+ // If the previous checksum wasn't calculated, the current
+ // checksum is `Some`. This may indicate that a source was
+ // erroneously replaced or was replaced with something that
+ // desires stronger checksum guarantees than can be afforded
+ // elsewhere.
+ if cksum.is_none() {
+ anyhow::bail!(
+ "\
+checksum for `{}` was not previously calculated, but a checksum could now \
+be calculated
+
+this could be indicative of a few possible situations:
+
+ * the source `{}` did not previously support checksums,
+ but was replaced with one that does
+ * newer Cargo implementations know how to checksum this source, but this
+ older implementation does not
+ * the lock file is corrupt
+",
+ id,
+ id.source_id()
+ )
+
+ // If our checksum hasn't been calculated, then it could mean
+ // that future Cargo figured out how to checksum something or
+ // more realistically we were overridden with a source that does
+ // not have checksums.
+ } else if mine.is_none() {
+ anyhow::bail!(
+ "\
+checksum for `{}` could not be calculated, but a checksum is listed in \
+the existing lock file
+
+this could be indicative of a few possible situations:
+
+ * the source `{}` supports checksums,
+ but was replaced with one that doesn't
+ * the lock file is corrupt
+
+unable to verify that `{0}` is the same as when the lockfile was generated
+",
+ id,
+ id.source_id()
+ )
+
+ // If the checksums aren't equal, and neither is None, then they
+ // must both be Some, in which case the checksum now differs.
+ // That's quite bad!
+ } else {
+ anyhow::bail!(
+ "\
+checksum for `{}` changed between lock files
+
+this could be indicative of a few possible errors:
+
+ * the lock file is corrupt
+ * a replacement source in use (e.g., a mirror) returned a different checksum
+ * the source itself may be corrupt in one way or another
+
+unable to verify that `{0}` is the same as when the lockfile was generated
+",
+ id
+ );
+ }
+ }
+ }
+
+ // Be sure to just copy over any unknown metadata.
+ self.metadata = previous.metadata.clone();
+
+ // Preserve the lockfile encoding where possible to avoid lockfile churn
+ self.version = previous.version;
+
+ Ok(())
+ }
+
+ pub fn contains<Q: ?Sized>(&self, k: &Q) -> bool
+ where
+ PackageId: Borrow<Q>,
+ Q: Ord + Eq,
+ {
+ self.graph.contains(k)
+ }
+
+ pub fn sort(&self) -> Vec<PackageId> {
+ self.graph.sort()
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = PackageId> + '_ {
+ self.graph.iter().cloned()
+ }
+
+ pub fn deps(&self, pkg: PackageId) -> impl Iterator<Item = (PackageId, &HashSet<Dependency>)> {
+ self.deps_not_replaced(pkg)
+ .map(move |(id, deps)| (self.replacement(id).unwrap_or(id), deps))
+ }
+
+ pub fn deps_not_replaced(
+ &self,
+ pkg: PackageId,
+ ) -> impl Iterator<Item = (PackageId, &HashSet<Dependency>)> {
+ self.graph.edges(&pkg).map(|(id, deps)| (*id, deps))
+ }
+
+ pub fn replacement(&self, pkg: PackageId) -> Option<PackageId> {
+ self.replacements.get(&pkg).cloned()
+ }
+
+ pub fn replacements(&self) -> &HashMap<PackageId, PackageId> {
+ &self.replacements
+ }
+
+ pub fn features(&self, pkg: PackageId) -> &[InternedString] {
+ self.features.get(&pkg).map(|v| &**v).unwrap_or(&[])
+ }
+
+ /// This is only here for legacy support; it will be removed when
+ /// switching to the new feature resolver.
+ pub fn features_clone(&self) -> HashMap<PackageId, Vec<InternedString>> {
+ self.features.clone()
+ }
+
+ pub fn is_public_dep(&self, pkg: PackageId, dep: PackageId) -> bool {
+ self.public_dependencies
+ .get(&pkg)
+ .map(|public_deps| public_deps.contains(&dep))
+ .unwrap_or_else(|| panic!("Unknown dependency {:?} for package {:?}", dep, pkg))
+ }
+
+ pub fn query(&self, spec: &str) -> CargoResult<PackageId> {
+ PackageIdSpec::query_str(spec, self.iter())
+ }
+
+ pub fn specs_to_ids(&self, specs: &[PackageIdSpec]) -> CargoResult<Vec<PackageId>> {
+ specs.iter().map(|s| s.query(self.iter())).collect()
+ }
+
+ pub fn unused_patches(&self) -> &[PackageId] {
+ &self.unused_patches
+ }
+
+ pub fn checksums(&self) -> &HashMap<PackageId, Option<String>> {
+ &self.checksums
+ }
+
+ pub fn metadata(&self) -> &Metadata {
+ &self.metadata
+ }
+
+ pub fn extern_crate_name_and_dep_name(
+ &self,
+ from: PackageId,
+ to: PackageId,
+ to_target: &Target,
+ ) -> CargoResult<(InternedString, Option<InternedString>)> {
+ let empty_set: HashSet<Dependency> = HashSet::new();
+ let deps = if from == to {
+ &empty_set
+ } else {
+ self.dependencies_listed(from, to)
+ };
+
+ let target_crate_name = || (to_target.crate_name(), None);
+ let mut name_pairs = deps.iter().map(|d| {
+ d.explicit_name_in_toml()
+ .map(|s| (s.as_str().replace("-", "_"), Some(s)))
+ .unwrap_or_else(target_crate_name)
+ });
+ let (extern_crate_name, dep_name) = name_pairs.next().unwrap_or_else(target_crate_name);
+ for (n, _) in name_pairs {
+ anyhow::ensure!(
+ n == extern_crate_name,
+ "the crate `{}` depends on crate `{}` multiple times with different names",
+ from,
+ to,
+ );
+ }
+ Ok((extern_crate_name.into(), dep_name))
+ }
+
+ fn dependencies_listed(&self, from: PackageId, to: PackageId) -> &HashSet<Dependency> {
+ // We've got a dependency from `from` to `to`, but this dependency edge
+ // may be affected by [replace]. If the `to` package is listed as the
+ // target of a replacement (aka the key of a reverse replacement map)
+ // then we try to find our dependency edge through that. If that fails
+ // then we go down below assuming it's not replaced.
+ //
+ // Note that we don't treat `from` as if it's been replaced because
+ // that's where the dependency originates from, and we only replace
+ // targets of dependencies, not the originator.
+ if let Some(replace) = self.reverse_replacements.get(&to) {
+ if let Some(deps) = self.graph.edge(&from, replace) {
+ return deps;
+ }
+ }
+ match self.graph.edge(&from, &to) {
+ Some(ret) => ret,
+ None => panic!("no Dependency listed for `{}` => `{}`", from, to),
+ }
+ }
+
+ /// Returns the version of the encoding that's being used for this lock
+ /// file.
+ pub fn version(&self) -> ResolveVersion {
+ self.version
+ }
+
+ pub fn set_version(&mut self, version: ResolveVersion) {
+ self.version = version;
+ }
+
+ pub fn summary(&self, pkg_id: PackageId) -> &Summary {
+ &self.summaries[&pkg_id]
+ }
+}
+
+impl PartialEq for Resolve {
+ fn eq(&self, other: &Resolve) -> bool {
+ macro_rules! compare {
+ ($($fields:ident)* | $($ignored:ident)*) => {
+ let Resolve { $($fields,)* $($ignored: _,)* } = self;
+ $($fields == &other.$fields)&&*
+ }
+ }
+ compare! {
+ // fields to compare
+ graph replacements reverse_replacements features
+ checksums metadata unused_patches public_dependencies summaries
+ |
+ // fields to ignore
+ version
+ }
+ }
+}
+
+impl fmt::Debug for Resolve {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ writeln!(fmt, "graph: {:?}", self.graph)?;
+ writeln!(fmt, "\nfeatures: {{")?;
+ for (pkg, features) in &self.features {
+ writeln!(fmt, " {}: {:?}", pkg, features)?;
+ }
+ write!(fmt, "}}")
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/resolver/types.rs b/src/tools/cargo/src/cargo/core/resolver/types.rs
new file mode 100644
index 000000000..40bdb6c21
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/resolver/types.rs
@@ -0,0 +1,381 @@
+use super::features::{CliFeatures, RequestedFeatures};
+use crate::core::{Dependency, PackageId, Summary};
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+use crate::util::Config;
+use std::cmp::Ordering;
+use std::collections::{BTreeMap, BTreeSet};
+use std::ops::Range;
+use std::rc::Rc;
+use std::time::{Duration, Instant};
+
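+/// Tracks how long dependency resolution has been running and, when stderr is
+/// an interactive terminal, prints a one-time `Resolving dependency graph...`
+/// status once resolution has taken noticeably long.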
+pub struct ResolverProgress {
+ ticks: u16,
+ start: Instant,
+ time_to_print: Duration,
+ printed: bool,
+ deps_time: Duration,
+ /// Provides an escape hatch for machines with slow CPUs when debugging and
+ /// testing Cargo itself.
+ /// See [rust-lang/cargo#6596](https://github.com/rust-lang/cargo/pull/6596) for more.
+ #[cfg(debug_assertions)]
+ slow_cpu_multiplier: u64,
+}
+
+impl ResolverProgress {
+ pub fn new() -> ResolverProgress {
+ ResolverProgress {
+ ticks: 0,
+ start: Instant::now(),
+ time_to_print: Duration::from_millis(500),
+ printed: false,
+ deps_time: Duration::new(0, 0),
+ // Some CI setups are much slower than the equipment used by Cargo itself:
+ // architectures without a modern processor, hardware emulation, etc.
+ // In the test code we have `slow_cpu_multiplier`, but that is not accessible here.
+ #[cfg(debug_assertions)]
+ // ALLOWED: For testing cargo itself only. However, it was communicated as a public
+ // interface to other developers, so keep it as-is; it shouldn't get a `__CARGO` prefix.
+ #[allow(clippy::disallowed_methods)]
+ slow_cpu_multiplier: std::env::var("CARGO_TEST_SLOW_CPU_MULTIPLIER")
+ .ok()
+ .and_then(|m| m.parse().ok())
+ .unwrap_or(1),
+ }
+ }
+ pub fn shell_status(&mut self, config: Option<&Config>) -> CargoResult<()> {
+ // If we spend a lot of time here (we shouldn't in most cases) then give
+ // a bit of a visual indicator as to what we're doing. Only enable this
+ // when stderr is a tty (a human is likely to be watching) to ensure we
+ // get deterministic output otherwise when observed by tools.
+ //
+ // Also note that we hit this loop a lot, so it's fairly performance
+ // sensitive. As a result try to defer a possibly expensive operation
+ // like `Instant::now` by only checking every N iterations of this loop
+ // to amortize the cost of the current time lookup.
+ self.ticks += 1;
+ if let Some(config) = config {
+ if config.shell().is_err_tty()
+ && !self.printed
+ && self.ticks % 1000 == 0
+ && self.start.elapsed() - self.deps_time > self.time_to_print
+ {
+ self.printed = true;
+ config.shell().status("Resolving", "dependency graph...")?;
+ }
+ }
+ #[cfg(debug_assertions)]
+ {
+ // The largest test in our suite takes less than 5000 ticks
+ // with all the algorithm improvements.
+ // If any of them are removed then it takes more than I am willing to measure.
+ // So let's fail the test fast if we have been running for too long.
+ assert!(
+ self.ticks < 50_000,
+ "got to 50_000 ticks in {:?}",
+ self.start.elapsed()
+ );
+ // The largest test in our suite takes less than 30 sec
+ // with all the improvements to how fast a tick can go.
+ // If any of them are removed then it takes more than I am willing to measure.
+ // So let's fail the test fast if we have been running for too long.
+ if self.ticks % 1000 == 0 {
+ assert!(
+ self.start.elapsed() - self.deps_time
+ < Duration::from_secs(self.slow_cpu_multiplier * 90)
+ );
+ }
+ }
+ Ok(())
+ }
+ pub fn elapsed(&mut self, dur: Duration) {
+ self.deps_time += dur;
+ }
+}
+
+/// The preferred way to store the set of activated features for a package.
+/// This is sorted so that it impls Hash, and owns its contents,
+/// needed so it can be part of the key for caching in the `DepsCache`.
+/// It is also cloned often as part of `Context`, hence the `Rc`.
+/// `im-rs::OrdSet` was slower for small sets like this,
+/// but this can change with improvements to std, im, or llvm.
+/// Using a consistent type for this allows us to use the highly
+/// optimized comparison operators like `is_subset` at the interfaces.
+pub type FeaturesSet = Rc<BTreeSet<InternedString>>;
+
+/// Resolver behavior, used to opt-in to new behavior that is
+/// backwards-incompatible via the `resolver` field in the manifest.
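+///
+/// A rough usage sketch (editor's illustration, marked `ignore` because the
+/// re-export path is assumed rather than taken from this file):
+///
+/// ```ignore
+/// use cargo::core::resolver::ResolveBehavior;
+///
+/// let behavior = ResolveBehavior::from_manifest("2").unwrap();
+/// assert_eq!(behavior, ResolveBehavior::V2);
+/// assert_eq!(behavior.to_manifest(), "2");
+/// ```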
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
+pub enum ResolveBehavior {
+ /// V1 is the original resolver behavior.
+ V1,
+ /// V2 adds the new feature resolver.
+ V2,
+}
+
+impl ResolveBehavior {
+ pub fn from_manifest(resolver: &str) -> CargoResult<ResolveBehavior> {
+ match resolver {
+ "1" => Ok(ResolveBehavior::V1),
+ "2" => Ok(ResolveBehavior::V2),
+ s => anyhow::bail!(
+ "`resolver` setting `{}` is not valid, valid options are \"1\" or \"2\"",
+ s
+ ),
+ }
+ }
+
+ pub fn to_manifest(&self) -> String {
+ match self {
+ ResolveBehavior::V1 => "1",
+ ResolveBehavior::V2 => "2",
+ }
+ .to_owned()
+ }
+}
+
+/// Options for how the resolve should work.
+#[derive(Clone, Debug, Eq, PartialEq, Hash)]
+pub struct ResolveOpts {
+ /// Whether or not dev-dependencies should be included.
+ ///
+ /// This may be set to `false` by things like `cargo install` or `-Z avoid-dev-deps`.
+ /// It also gets set to `false` when activating dependencies in the resolver.
+ pub dev_deps: bool,
+ /// Set of features requested on the command-line.
+ pub features: RequestedFeatures,
+}
+
+impl ResolveOpts {
+ /// Creates a ResolveOpts that resolves everything.
+ pub fn everything() -> ResolveOpts {
+ ResolveOpts {
+ dev_deps: true,
+ features: RequestedFeatures::CliFeatures(CliFeatures::new_all(true)),
+ }
+ }
+
+ pub fn new(dev_deps: bool, features: RequestedFeatures) -> ResolveOpts {
+ ResolveOpts { dev_deps, features }
+ }
+}
+
+#[derive(Clone)]
+pub struct DepsFrame {
+ pub parent: Summary,
+ pub just_for_error_messages: bool,
+ pub remaining_siblings: RcVecIter<DepInfo>,
+}
+
+impl DepsFrame {
+ /// Returns the least number of candidates that any of this frame's siblings
+ /// has.
+ ///
+ /// The `remaining_siblings` array is already sorted with the smallest
+ /// number of candidates at the front, so we just return the number of
+ /// candidates in that entry.
+ fn min_candidates(&self) -> usize {
+ self.remaining_siblings
+ .peek()
+ .map(|(_, (_, candidates, _))| candidates.len())
+ .unwrap_or(0)
+ }
+
+ pub fn flatten(&self) -> impl Iterator<Item = (PackageId, Dependency)> + '_ {
+ self.remaining_siblings
+ .clone()
+ .map(move |(d, _, _)| (self.parent.package_id(), d))
+ }
+}
+
+impl PartialEq for DepsFrame {
+ fn eq(&self, other: &DepsFrame) -> bool {
+ self.just_for_error_messages == other.just_for_error_messages
+ && self.min_candidates() == other.min_candidates()
+ }
+}
+
+impl Eq for DepsFrame {}
+
+impl PartialOrd for DepsFrame {
+ fn partial_cmp(&self, other: &DepsFrame) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for DepsFrame {
+ fn cmp(&self, other: &DepsFrame) -> Ordering {
+ self.just_for_error_messages
+ .cmp(&other.just_for_error_messages)
+ .reverse()
+ .then_with(|| self.min_candidates().cmp(&other.min_candidates()))
+ }
+}
+
+/// Note that an `OrdSet` is used for the remaining dependencies that need
+/// activation. This set is sorted by how many candidates each dependency has.
+///
+/// This helps us get through super constrained portions of the dependency
+/// graph quickly and hopefully lock down what later larger dependencies can
+/// use (those with more candidates).
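+///
+/// A sketch of how the resolver drives this structure (editor's illustration,
+/// marked `ignore`; `frame` stands in for a fully constructed `DepsFrame`):
+///
+/// ```ignore
+/// let mut remaining = RemainingDeps::new();
+/// remaining.push(frame);
+/// while let Some((just_for_errors, (parent, dep_info))) = remaining.pop_most_constrained() {
+///     // activate `dep_info` for `parent`, pushing new frames as they appear
+/// }
+/// ```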
+#[derive(Clone)]
+pub struct RemainingDeps {
+ /// A monotonic counter, increased for each new insertion.
+ time: u32,
+ /// The data is augmented by the insertion time.
+ /// This ensures that no two items will compare equal,
+ /// forcing the `OrdSet` to behave as a multiset.
+ data: im_rc::OrdSet<(DepsFrame, u32)>,
+}
+
+impl RemainingDeps {
+ pub fn new() -> RemainingDeps {
+ RemainingDeps {
+ time: 0,
+ data: im_rc::OrdSet::new(),
+ }
+ }
+ pub fn push(&mut self, x: DepsFrame) {
+ let insertion_time = self.time;
+ self.data.insert((x, insertion_time));
+ self.time += 1;
+ }
+ pub fn pop_most_constrained(&mut self) -> Option<(bool, (Summary, DepInfo))> {
+ while let Some((mut deps_frame, insertion_time)) = self.data.remove_min() {
+ let just_here_for_the_error_messages = deps_frame.just_for_error_messages;
+
+ // Figure out what our next dependency to activate is, and if nothing is
+ // listed then we're entirely done with this frame (yay!) and we can
+ // move on to the next frame.
+ if let Some(sibling) = deps_frame.remaining_siblings.next() {
+ let parent = Summary::clone(&deps_frame.parent);
+ self.data.insert((deps_frame, insertion_time));
+ return Some((just_here_for_the_error_messages, (parent, sibling)));
+ }
+ }
+ None
+ }
+ pub fn iter(&mut self) -> impl Iterator<Item = (PackageId, Dependency)> + '_ {
+ self.data.iter().flat_map(|(other, _)| other.flatten())
+ }
+}
+
+/// Information about the dependencies for a crate, a tuple of:
+///
+/// (dependency info, candidates, features activated)
+pub type DepInfo = (Dependency, Rc<Vec<Summary>>, FeaturesSet);
+
+/// All possible reasons that a package might fail to activate.
+///
+/// We maintain a list of conflicts for error reporting as well as backtracking
+/// purposes. Each reason here is why candidates may be rejected or why we may
+/// fail to resolve a dependency.
+#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)]
+pub enum ConflictReason {
+ /// There was a semver conflict. For example, we tried to activate package
+ /// 1.0.2 but 1.1.0 was already activated (i.e., a semver-compatible version
+ /// is already activated).
+ Semver,
+
+ /// The `links` key is being violated. For example, one crate in the
+ /// dependency graph has `links = "foo"` but this crate also has it, and
+ /// we're only allowed one per dependency graph.
+ Links(InternedString),
+
+ /// A dependency listed features that weren't actually available on the
+ /// candidate. For example we tried to activate feature `foo` but the
+ /// candidate we're activating didn't actually have the feature `foo`.
+ MissingFeatures(String),
+
+ /// A dependency listed a feature that ended up being a required dependency.
+ /// For example, we tried to activate feature `foo` but the
+ /// candidate we're activating didn't actually have the feature `foo`;
+ /// it had a required dependency named `foo` instead.
+ RequiredDependencyAsFeature(InternedString),
+
+ /// A dependency listed a feature for an optional dependency, but that
+ /// optional dependency is "hidden" using namespaced `dep:` syntax.
+ NonImplicitDependencyAsFeature(InternedString),
+
+ // TODO: needs more info for `activation_error`
+ // TODO: needs more info for `find_candidate`
+ /// pub dep error
+ PublicDependency(PackageId),
+ PubliclyExports(PackageId),
+}
+
+impl ConflictReason {
+ pub fn is_links(&self) -> bool {
+ matches!(self, ConflictReason::Links(_))
+ }
+
+ pub fn is_missing_features(&self) -> bool {
+ matches!(self, ConflictReason::MissingFeatures(_))
+ }
+
+ pub fn is_required_dependency_as_features(&self) -> bool {
+ matches!(self, ConflictReason::RequiredDependencyAsFeature(_))
+ }
+
+ pub fn is_public_dependency(&self) -> bool {
+ matches!(
+ self,
+ ConflictReason::PublicDependency(_) | ConflictReason::PubliclyExports(_)
+ )
+ }
+}
+
+/// A list of packages that have gotten in the way of resolving a dependency.
+/// If resolving a dependency fails then this represents an incompatibility:
+/// that dependency will never be resolved while all of these packages are active.
+/// This is useless if the packages can't be simultaneously activated for other reasons.
+pub type ConflictMap = BTreeMap<PackageId, ConflictReason>;
+
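+/// A cheaply cloneable cursor over an `Rc<Vec<T>>`.
+///
+/// Cloning copies only the `Rc` pointer and the remaining index range, so the
+/// resolver can snapshot iteration state (e.g. `remaining_siblings` in
+/// `DepsFrame`) without duplicating the underlying vector.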
+pub struct RcVecIter<T> {
+ vec: Rc<Vec<T>>,
+ rest: Range<usize>,
+}
+
+impl<T> RcVecIter<T> {
+ pub fn new(vec: Rc<Vec<T>>) -> RcVecIter<T> {
+ RcVecIter {
+ rest: 0..vec.len(),
+ vec,
+ }
+ }
+
+ fn peek(&self) -> Option<(usize, &T)> {
+ self.rest
+ .clone()
+ .next()
+ .and_then(|i| self.vec.get(i).map(|val| (i, &*val)))
+ }
+}
+
+// Not derived to avoid `T: Clone`
+impl<T> Clone for RcVecIter<T> {
+ fn clone(&self) -> RcVecIter<T> {
+ RcVecIter {
+ vec: self.vec.clone(),
+ rest: self.rest.clone(),
+ }
+ }
+}
+
+impl<T> Iterator for RcVecIter<T>
+where
+ T: Clone,
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.rest.next().and_then(|i| self.vec.get(i).cloned())
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // rest is a std::ops::Range, which is an ExactSizeIterator.
+ self.rest.size_hint()
+ }
+}
+
+impl<T: Clone> ExactSizeIterator for RcVecIter<T> {}
diff --git a/src/tools/cargo/src/cargo/core/resolver/version_prefs.rs b/src/tools/cargo/src/cargo/core/resolver/version_prefs.rs
new file mode 100644
index 000000000..73cce5db8
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/resolver/version_prefs.rs
@@ -0,0 +1,189 @@
+//! This module implements support for preferring some versions of a package
+//! over other versions.
+
+use std::cmp::Ordering;
+use std::collections::{HashMap, HashSet};
+
+use crate::core::{Dependency, PackageId, Summary};
+use crate::util::interning::InternedString;
+
+/// A collection of preferences for particular package versions.
+///
+/// This is built up with [`Self::prefer_package_id`] and [`Self::prefer_dependency`], then used to sort the set of
+/// summaries for a package during resolution via [`Self::sort_summaries`].
+///
+/// As written, a version is either "preferred" or "not preferred". Later extensions may
+/// introduce more granular preferences.
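+///
+/// A minimal sketch of the intended call pattern (editor's illustration, marked
+/// `ignore` because building real `Summary` values requires a `Config`; the
+/// variable names are hypothetical):
+///
+/// ```ignore
+/// let mut prefs = VersionPreferences::default();
+/// prefs.prefer_package_id(locked_pkg_id);  // e.g. an id carried over from Cargo.lock
+/// prefs.prefer_dependency(patch_dep);      // e.g. a `[patch]` dependency
+/// prefs.sort_summaries(&mut candidates, VersionOrdering::MaximumVersionsFirst, false);
+/// // `candidates` now starts with the preferred versions, newest first.
+/// ```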
+#[derive(Default)]
+pub struct VersionPreferences {
+ try_to_use: HashSet<PackageId>,
+ prefer_patch_deps: HashMap<InternedString, HashSet<Dependency>>,
+}
+
+pub enum VersionOrdering {
+ MaximumVersionsFirst,
+ MinimumVersionsFirst,
+}
+
+impl VersionPreferences {
+ /// Indicate that the given package (specified as a [`PackageId`]) should be preferred.
+ pub fn prefer_package_id(&mut self, pkg_id: PackageId) {
+ self.try_to_use.insert(pkg_id);
+ }
+
+ /// Indicate that the given package (specified as a [`Dependency`]) should be preferred.
+ pub fn prefer_dependency(&mut self, dep: Dependency) {
+ self.prefer_patch_deps
+ .entry(dep.package_name())
+ .or_insert_with(HashSet::new)
+ .insert(dep);
+ }
+
+ /// Sort the given vector of summaries in-place, with all summaries presumed to be for
+ /// the same package. Preferred versions appear first in the result, sorted by
+ /// `version_ordering`, followed by non-preferred versions sorted the same way.
+ pub fn sort_summaries(
+ &self,
+ summaries: &mut Vec<Summary>,
+ version_ordering: VersionOrdering,
+ first_version: bool,
+ ) {
+ let should_prefer = |pkg_id: &PackageId| {
+ self.try_to_use.contains(pkg_id)
+ || self
+ .prefer_patch_deps
+ .get(&pkg_id.name())
+ .map(|deps| deps.iter().any(|d| d.matches_id(*pkg_id)))
+ .unwrap_or(false)
+ };
+ summaries.sort_unstable_by(|a, b| {
+ let prefer_a = should_prefer(&a.package_id());
+ let prefer_b = should_prefer(&b.package_id());
+ let previous_cmp = prefer_a.cmp(&prefer_b).reverse();
+ match previous_cmp {
+ Ordering::Equal => {
+ let cmp = a.version().cmp(b.version());
+ match version_ordering {
+ VersionOrdering::MaximumVersionsFirst => cmp.reverse(),
+ VersionOrdering::MinimumVersionsFirst => cmp,
+ }
+ }
+ _ => previous_cmp,
+ }
+ });
+ if first_version {
+ let _ = summaries.split_off(1);
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::core::SourceId;
+ use crate::util::Config;
+ use std::collections::BTreeMap;
+
+ fn pkgid(name: &str, version: &str) -> PackageId {
+ let src_id =
+ SourceId::from_url("registry+https://github.com/rust-lang/crates.io-index").unwrap();
+ PackageId::new(name, version, src_id).unwrap()
+ }
+
+ fn dep(name: &str, version: &str) -> Dependency {
+ let src_id =
+ SourceId::from_url("registry+https://github.com/rust-lang/crates.io-index").unwrap();
+ Dependency::parse(name, Some(version), src_id).unwrap()
+ }
+
+ fn summ(name: &str, version: &str) -> Summary {
+ let pkg_id = pkgid(name, version);
+ let config = Config::default().unwrap();
+ let features = BTreeMap::new();
+ Summary::new(&config, pkg_id, Vec::new(), &features, None::<&String>).unwrap()
+ }
+
+ fn describe(summaries: &Vec<Summary>) -> String {
+ let strs: Vec<String> = summaries
+ .iter()
+ .map(|summary| format!("{}/{}", summary.name(), summary.version()))
+ .collect();
+ strs.join(", ")
+ }
+
+ #[test]
+ fn test_prefer_package_id() {
+ let mut vp = VersionPreferences::default();
+ vp.prefer_package_id(pkgid("foo", "1.2.3"));
+
+ let mut summaries = vec![
+ summ("foo", "1.2.4"),
+ summ("foo", "1.2.3"),
+ summ("foo", "1.1.0"),
+ summ("foo", "1.0.9"),
+ ];
+
+ vp.sort_summaries(&mut summaries, VersionOrdering::MaximumVersionsFirst, false);
+ assert_eq!(
+ describe(&summaries),
+ "foo/1.2.3, foo/1.2.4, foo/1.1.0, foo/1.0.9".to_string()
+ );
+
+ vp.sort_summaries(&mut summaries, VersionOrdering::MinimumVersionsFirst, false);
+ assert_eq!(
+ describe(&summaries),
+ "foo/1.2.3, foo/1.0.9, foo/1.1.0, foo/1.2.4".to_string()
+ );
+ }
+
+ #[test]
+ fn test_prefer_dependency() {
+ let mut vp = VersionPreferences::default();
+ vp.prefer_dependency(dep("foo", "=1.2.3"));
+
+ let mut summaries = vec![
+ summ("foo", "1.2.4"),
+ summ("foo", "1.2.3"),
+ summ("foo", "1.1.0"),
+ summ("foo", "1.0.9"),
+ ];
+
+ vp.sort_summaries(&mut summaries, VersionOrdering::MaximumVersionsFirst, false);
+ assert_eq!(
+ describe(&summaries),
+ "foo/1.2.3, foo/1.2.4, foo/1.1.0, foo/1.0.9".to_string()
+ );
+
+ vp.sort_summaries(&mut summaries, VersionOrdering::MinimumVersionsFirst, false);
+ assert_eq!(
+ describe(&summaries),
+ "foo/1.2.3, foo/1.0.9, foo/1.1.0, foo/1.2.4".to_string()
+ );
+ }
+
+ #[test]
+ fn test_prefer_both() {
+ let mut vp = VersionPreferences::default();
+ vp.prefer_package_id(pkgid("foo", "1.2.3"));
+ vp.prefer_dependency(dep("foo", "=1.1.0"));
+
+ let mut summaries = vec![
+ summ("foo", "1.2.4"),
+ summ("foo", "1.2.3"),
+ summ("foo", "1.1.0"),
+ summ("foo", "1.0.9"),
+ ];
+
+ vp.sort_summaries(&mut summaries, VersionOrdering::MaximumVersionsFirst, false);
+ assert_eq!(
+ describe(&summaries),
+ "foo/1.2.3, foo/1.1.0, foo/1.2.4, foo/1.0.9".to_string()
+ );
+
+ vp.sort_summaries(&mut summaries, VersionOrdering::MinimumVersionsFirst, false);
+ assert_eq!(
+ describe(&summaries),
+ "foo/1.1.0, foo/1.2.3, foo/1.0.9, foo/1.2.4".to_string()
+ );
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/shell.rs b/src/tools/cargo/src/cargo/core/shell.rs
new file mode 100644
index 000000000..fdae617c4
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/shell.rs
@@ -0,0 +1,630 @@
+use std::fmt;
+use std::io::prelude::*;
+
+use is_terminal::IsTerminal;
+use termcolor::Color::{Cyan, Green, Red, Yellow};
+use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor};
+
+use crate::util::errors::CargoResult;
+
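+/// The detected width of the terminal: no terminal at all, a width reported
+/// by the system, or a conservative guess for terminals that misreport their
+/// size (see the Windows implementation further down in this file).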
+pub enum TtyWidth {
+ NoTty,
+ Known(usize),
+ Guess(usize),
+}
+
+impl TtyWidth {
+ /// Returns the width of the terminal to use for diagnostics (which is
+ /// relayed to rustc via `--diagnostic-width`).
+ pub fn diagnostic_terminal_width(&self) -> Option<usize> {
+ // ALLOWED: For testing cargo itself only.
+ #[allow(clippy::disallowed_methods)]
+ if let Ok(width) = std::env::var("__CARGO_TEST_TTY_WIDTH_DO_NOT_USE_THIS") {
+ return Some(width.parse().unwrap());
+ }
+ match *self {
+ TtyWidth::NoTty | TtyWidth::Guess(_) => None,
+ TtyWidth::Known(width) => Some(width),
+ }
+ }
+
+ /// Returns the width used by progress bars for the tty.
+ pub fn progress_max_width(&self) -> Option<usize> {
+ match *self {
+ TtyWidth::NoTty => None,
+ TtyWidth::Known(width) | TtyWidth::Guess(width) => Some(width),
+ }
+ }
+}
+
+/// The requested verbosity of output.
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum Verbosity {
+ Verbose,
+ Normal,
+ Quiet,
+}
+
+/// An abstraction around console output that remembers preferences for output
+/// verbosity and color.
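+///
+/// A small usage sketch (editor's illustration, marked `ignore` so it is not
+/// compiled as a doctest):
+///
+/// ```ignore
+/// let mut shell = Shell::new();
+/// shell.status("Compiling", "foo v0.1.0")?; // right-aligned, green status
+/// shell.warn("something looks off")?;       // yellow "warning" message
+/// ```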
+pub struct Shell {
+ /// Wrapper around stdout/stderr. This helps with supporting sending
+ /// output to a memory buffer which is useful for tests.
+ output: ShellOut,
+ /// How verbose messages should be.
+ verbosity: Verbosity,
+ /// Flag that indicates the current line needs to be cleared before
+ /// printing. Used when a progress bar is currently displayed.
+ needs_clear: bool,
+}
+
+impl fmt::Debug for Shell {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.output {
+ ShellOut::Write(_) => f
+ .debug_struct("Shell")
+ .field("verbosity", &self.verbosity)
+ .finish(),
+ ShellOut::Stream { color_choice, .. } => f
+ .debug_struct("Shell")
+ .field("verbosity", &self.verbosity)
+ .field("color_choice", &color_choice)
+ .finish(),
+ }
+ }
+}
+
+/// A `Write`able object, either with or without color support
+enum ShellOut {
+ /// A plain write object without color support
+ Write(Box<dyn Write>),
+ /// Color-enabled stdio, with information on whether color should be used
+ Stream {
+ stdout: StandardStream,
+ stderr: StandardStream,
+ stderr_tty: bool,
+ color_choice: ColorChoice,
+ },
+}
+
+/// Whether messages should use color output
+#[derive(Debug, PartialEq, Clone, Copy)]
+pub enum ColorChoice {
+ /// Force color output
+ Always,
+ /// Force disable color output
+ Never,
+ /// Intelligently guess whether to use color output
+ CargoAuto,
+}
+
+impl Shell {
+ /// Creates a new shell (color choice and verbosity), defaulting to 'auto' color and verbose
+ /// output.
+ pub fn new() -> Shell {
+ let auto_clr = ColorChoice::CargoAuto;
+ Shell {
+ output: ShellOut::Stream {
+ stdout: StandardStream::stdout(auto_clr.to_termcolor_color_choice(Stream::Stdout)),
+ stderr: StandardStream::stderr(auto_clr.to_termcolor_color_choice(Stream::Stderr)),
+ color_choice: ColorChoice::CargoAuto,
+ stderr_tty: std::io::stderr().is_terminal(),
+ },
+ verbosity: Verbosity::Verbose,
+ needs_clear: false,
+ }
+ }
+
+ /// Creates a shell from a plain writable object, with no color, and max verbosity.
+ pub fn from_write(out: Box<dyn Write>) -> Shell {
+ Shell {
+ output: ShellOut::Write(out),
+ verbosity: Verbosity::Verbose,
+ needs_clear: false,
+ }
+ }
+
+ /// Prints a message, where the status will have `color` color and can be justified.
+ /// The message follows without color.
+ fn print(
+ &mut self,
+ status: &dyn fmt::Display,
+ message: Option<&dyn fmt::Display>,
+ color: Color,
+ justified: bool,
+ ) -> CargoResult<()> {
+ match self.verbosity {
+ Verbosity::Quiet => Ok(()),
+ _ => {
+ if self.needs_clear {
+ self.err_erase_line();
+ }
+ self.output
+ .message_stderr(status, message, color, justified)
+ }
+ }
+ }
+
+ /// Sets whether the next print should clear the current line.
+ pub fn set_needs_clear(&mut self, needs_clear: bool) {
+ self.needs_clear = needs_clear;
+ }
+
+ /// Returns `true` if the `needs_clear` flag is unset.
+ pub fn is_cleared(&self) -> bool {
+ !self.needs_clear
+ }
+
+ /// Returns the width of the terminal in spaces, if any.
+ pub fn err_width(&self) -> TtyWidth {
+ match self.output {
+ ShellOut::Stream {
+ stderr_tty: true, ..
+ } => imp::stderr_width(),
+ _ => TtyWidth::NoTty,
+ }
+ }
+
+ /// Returns `true` if stderr is a tty.
+ pub fn is_err_tty(&self) -> bool {
+ match self.output {
+ ShellOut::Stream { stderr_tty, .. } => stderr_tty,
+ _ => false,
+ }
+ }
+
+ /// Gets a reference to the underlying stdout writer.
+ pub fn out(&mut self) -> &mut dyn Write {
+ if self.needs_clear {
+ self.err_erase_line();
+ }
+ self.output.stdout()
+ }
+
+ /// Gets a reference to the underlying stderr writer.
+ pub fn err(&mut self) -> &mut dyn Write {
+ if self.needs_clear {
+ self.err_erase_line();
+ }
+ self.output.stderr()
+ }
+
+ /// Erase from cursor to end of line.
+ pub fn err_erase_line(&mut self) {
+ if self.err_supports_color() {
+ imp::err_erase_line(self);
+ self.needs_clear = false;
+ }
+ }
+
+ /// Shortcut to right-align and color green a status message.
+ pub fn status<T, U>(&mut self, status: T, message: U) -> CargoResult<()>
+ where
+ T: fmt::Display,
+ U: fmt::Display,
+ {
+ self.print(&status, Some(&message), Green, true)
+ }
+
+ pub fn status_header<T>(&mut self, status: T) -> CargoResult<()>
+ where
+ T: fmt::Display,
+ {
+ self.print(&status, None, Cyan, true)
+ }
+
+ /// Shortcut to right-align a status message.
+ pub fn status_with_color<T, U>(
+ &mut self,
+ status: T,
+ message: U,
+ color: Color,
+ ) -> CargoResult<()>
+ where
+ T: fmt::Display,
+ U: fmt::Display,
+ {
+ self.print(&status, Some(&message), color, true)
+ }
+
+ /// Runs the callback only if we are in verbose mode.
+ pub fn verbose<F>(&mut self, mut callback: F) -> CargoResult<()>
+ where
+ F: FnMut(&mut Shell) -> CargoResult<()>,
+ {
+ match self.verbosity {
+ Verbosity::Verbose => callback(self),
+ _ => Ok(()),
+ }
+ }
+
+ /// Runs the callback if we are not in verbose mode.
+ pub fn concise<F>(&mut self, mut callback: F) -> CargoResult<()>
+ where
+ F: FnMut(&mut Shell) -> CargoResult<()>,
+ {
+ match self.verbosity {
+ Verbosity::Verbose => Ok(()),
+ _ => callback(self),
+ }
+ }
+
+ /// Prints a red 'error' message.
+ pub fn error<T: fmt::Display>(&mut self, message: T) -> CargoResult<()> {
+ if self.needs_clear {
+ self.err_erase_line();
+ }
+ self.output
+ .message_stderr(&"error", Some(&message), Red, false)
+ }
+
+ /// Prints an amber 'warning' message.
+ pub fn warn<T: fmt::Display>(&mut self, message: T) -> CargoResult<()> {
+ match self.verbosity {
+ Verbosity::Quiet => Ok(()),
+ _ => self.print(&"warning", Some(&message), Yellow, false),
+ }
+ }
+
+ /// Prints a cyan 'note' message.
+ pub fn note<T: fmt::Display>(&mut self, message: T) -> CargoResult<()> {
+ self.print(&"note", Some(&message), Cyan, false)
+ }
+
+ /// Updates the verbosity of the shell.
+ pub fn set_verbosity(&mut self, verbosity: Verbosity) {
+ self.verbosity = verbosity;
+ }
+
+ /// Gets the verbosity of the shell.
+ pub fn verbosity(&self) -> Verbosity {
+ self.verbosity
+ }
+
+ /// Updates the color choice (always, never, or auto) from a string.
+ pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> {
+ if let ShellOut::Stream {
+ ref mut stdout,
+ ref mut stderr,
+ ref mut color_choice,
+ ..
+ } = self.output
+ {
+ let cfg = match color {
+ Some("always") => ColorChoice::Always,
+ Some("never") => ColorChoice::Never,
+
+ Some("auto") | None => ColorChoice::CargoAuto,
+
+ Some(arg) => anyhow::bail!(
+ "argument for --color must be auto, always, or \
+ never, but found `{}`",
+ arg
+ ),
+ };
+ *color_choice = cfg;
+ *stdout = StandardStream::stdout(cfg.to_termcolor_color_choice(Stream::Stdout));
+ *stderr = StandardStream::stderr(cfg.to_termcolor_color_choice(Stream::Stderr));
+ }
+ Ok(())
+ }
+
+ /// Gets the current color choice.
+ ///
+ /// If we are not using a color stream, this will always return `Never`, even if the color
+ /// choice has been set to something else.
+ pub fn color_choice(&self) -> ColorChoice {
+ match self.output {
+ ShellOut::Stream { color_choice, .. } => color_choice,
+ ShellOut::Write(_) => ColorChoice::Never,
+ }
+ }
+
+ /// Whether the shell supports color.
+ pub fn err_supports_color(&self) -> bool {
+ match &self.output {
+ ShellOut::Write(_) => false,
+ ShellOut::Stream { stderr, .. } => stderr.supports_color(),
+ }
+ }
+
+ pub fn out_supports_color(&self) -> bool {
+ match &self.output {
+ ShellOut::Write(_) => false,
+ ShellOut::Stream { stdout, .. } => stdout.supports_color(),
+ }
+ }
+
+ /// Write a styled fragment
+ ///
+ /// The caller is responsible for deciding whether [`Shell::verbosity`] affects output.
+ pub fn write_stdout(
+ &mut self,
+ fragment: impl fmt::Display,
+ color: &ColorSpec,
+ ) -> CargoResult<()> {
+ self.output.write_stdout(fragment, color)
+ }
+
+ /// Write a styled fragment
+ ///
+ /// The caller is responsible for deciding whether [`Shell::verbosity`] affects output.
+ pub fn write_stderr(
+ &mut self,
+ fragment: impl fmt::Display,
+ color: &ColorSpec,
+ ) -> CargoResult<()> {
+ self.output.write_stderr(fragment, color)
+ }
+
+ /// Prints a message to stderr and translates ANSI escape code into console colors.
+ pub fn print_ansi_stderr(&mut self, message: &[u8]) -> CargoResult<()> {
+ if self.needs_clear {
+ self.err_erase_line();
+ }
+ #[cfg(windows)]
+ {
+ if let ShellOut::Stream { stderr, .. } = &mut self.output {
+ ::fwdansi::write_ansi(stderr, message)?;
+ return Ok(());
+ }
+ }
+ self.err().write_all(message)?;
+ Ok(())
+ }
+
+ /// Prints a message to stdout and translates ANSI escape code into console colors.
+ pub fn print_ansi_stdout(&mut self, message: &[u8]) -> CargoResult<()> {
+ if self.needs_clear {
+ self.err_erase_line();
+ }
+ #[cfg(windows)]
+ {
+ if let ShellOut::Stream { stdout, .. } = &mut self.output {
+ ::fwdansi::write_ansi(stdout, message)?;
+ return Ok(());
+ }
+ }
+ self.out().write_all(message)?;
+ Ok(())
+ }
+
+ pub fn print_json<T: serde::ser::Serialize>(&mut self, obj: &T) -> CargoResult<()> {
+ // Path may fail to serialize to JSON ...
+ let encoded = serde_json::to_string(&obj)?;
+ // ... but don't fail due to a closed pipe.
+ drop(writeln!(self.out(), "{}", encoded));
+ Ok(())
+ }
+}
+
+impl Default for Shell {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl ShellOut {
+ /// Prints out a message with a status. The status comes first, and is bold plus the given
+ /// color. The status can be justified, in which case the max width that will right align is
+ /// 12 chars.
+ fn message_stderr(
+ &mut self,
+ status: &dyn fmt::Display,
+ message: Option<&dyn fmt::Display>,
+ color: Color,
+ justified: bool,
+ ) -> CargoResult<()> {
+ match *self {
+ ShellOut::Stream { ref mut stderr, .. } => {
+ stderr.reset()?;
+ stderr.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?;
+ if justified {
+ write!(stderr, "{:>12}", status)?;
+ } else {
+ write!(stderr, "{}", status)?;
+ stderr.set_color(ColorSpec::new().set_bold(true))?;
+ write!(stderr, ":")?;
+ }
+ stderr.reset()?;
+ match message {
+ Some(message) => writeln!(stderr, " {}", message)?,
+ None => write!(stderr, " ")?,
+ }
+ }
+ ShellOut::Write(ref mut w) => {
+ if justified {
+ write!(w, "{:>12}", status)?;
+ } else {
+ write!(w, "{}:", status)?;
+ }
+ match message {
+ Some(message) => writeln!(w, " {}", message)?,
+ None => write!(w, " ")?,
+ }
+ }
+ }
+ Ok(())
+ }
+
+ /// Write a styled fragment
+ fn write_stdout(&mut self, fragment: impl fmt::Display, color: &ColorSpec) -> CargoResult<()> {
+ match *self {
+ ShellOut::Stream { ref mut stdout, .. } => {
+ stdout.reset()?;
+ stdout.set_color(&color)?;
+ write!(stdout, "{}", fragment)?;
+ stdout.reset()?;
+ }
+ ShellOut::Write(ref mut w) => {
+ write!(w, "{}", fragment)?;
+ }
+ }
+ Ok(())
+ }
+
+ /// Write a styled fragment
+ fn write_stderr(&mut self, fragment: impl fmt::Display, color: &ColorSpec) -> CargoResult<()> {
+ match *self {
+ ShellOut::Stream { ref mut stderr, .. } => {
+ stderr.reset()?;
+ stderr.set_color(&color)?;
+ write!(stderr, "{}", fragment)?;
+ stderr.reset()?;
+ }
+ ShellOut::Write(ref mut w) => {
+ write!(w, "{}", fragment)?;
+ }
+ }
+ Ok(())
+ }
+
+ /// Gets stdout as a `io::Write`.
+ fn stdout(&mut self) -> &mut dyn Write {
+ match *self {
+ ShellOut::Stream { ref mut stdout, .. } => stdout,
+ ShellOut::Write(ref mut w) => w,
+ }
+ }
+
+ /// Gets stderr as a `io::Write`.
+ fn stderr(&mut self) -> &mut dyn Write {
+ match *self {
+ ShellOut::Stream { ref mut stderr, .. } => stderr,
+ ShellOut::Write(ref mut w) => w,
+ }
+ }
+}
+
+impl ColorChoice {
+ /// Converts our color choice to termcolor's version.
+ fn to_termcolor_color_choice(self, stream: Stream) -> termcolor::ColorChoice {
+ match self {
+ ColorChoice::Always => termcolor::ColorChoice::Always,
+ ColorChoice::Never => termcolor::ColorChoice::Never,
+ ColorChoice::CargoAuto => {
+ if stream.is_terminal() {
+ termcolor::ColorChoice::Auto
+ } else {
+ termcolor::ColorChoice::Never
+ }
+ }
+ }
+ }
+}
+
+enum Stream {
+ Stdout,
+ Stderr,
+}
+
+impl Stream {
+ fn is_terminal(self) -> bool {
+ match self {
+ Self::Stdout => std::io::stdout().is_terminal(),
+ Self::Stderr => std::io::stderr().is_terminal(),
+ }
+ }
+}
+
+#[cfg(unix)]
+mod imp {
+ use super::{Shell, TtyWidth};
+ use std::mem;
+
+ pub fn stderr_width() -> TtyWidth {
+ unsafe {
+ let mut winsize: libc::winsize = mem::zeroed();
+ // The .into() here is needed for FreeBSD which defines TIOCGWINSZ
+ // as c_uint but ioctl wants c_ulong.
+ if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ.into(), &mut winsize) < 0 {
+ return TtyWidth::NoTty;
+ }
+ if winsize.ws_col > 0 {
+ TtyWidth::Known(winsize.ws_col as usize)
+ } else {
+ TtyWidth::NoTty
+ }
+ }
+ }
+
+ pub fn err_erase_line(shell: &mut Shell) {
+ // This is the "EL - Erase in Line" sequence. It clears from the cursor
+ // to the end of line.
+ // https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_sequences
+ let _ = shell.output.stderr().write_all(b"\x1B[K");
+ }
+}
+
+#[cfg(windows)]
+mod imp {
+ use std::{cmp, mem, ptr};
+
+ use windows_sys::core::PCSTR;
+ use windows_sys::Win32::Foundation::CloseHandle;
+ use windows_sys::Win32::Foundation::INVALID_HANDLE_VALUE;
+ use windows_sys::Win32::Storage::FileSystem::{
+ CreateFileA, FILE_SHARE_READ, FILE_SHARE_WRITE, OPEN_EXISTING,
+ };
+ use windows_sys::Win32::System::Console::{
+ GetConsoleScreenBufferInfo, GetStdHandle, CONSOLE_SCREEN_BUFFER_INFO, STD_ERROR_HANDLE,
+ };
+ use windows_sys::Win32::System::SystemServices::{GENERIC_READ, GENERIC_WRITE};
+
+ pub(super) use super::{default_err_erase_line as err_erase_line, TtyWidth};
+
+ pub fn stderr_width() -> TtyWidth {
+ unsafe {
+ let stdout = GetStdHandle(STD_ERROR_HANDLE);
+ let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
+ if GetConsoleScreenBufferInfo(stdout, &mut csbi) != 0 {
+ return TtyWidth::Known((csbi.srWindow.Right - csbi.srWindow.Left) as usize);
+ }
+
+ // On mintty/msys/cygwin based terminals, the above fails with
+ // INVALID_HANDLE_VALUE. Use an alternate method which works
+ // in that case as well.
+ let h = CreateFileA(
+ "CONOUT$\0".as_ptr() as PCSTR,
+ GENERIC_READ | GENERIC_WRITE,
+ FILE_SHARE_READ | FILE_SHARE_WRITE,
+ ptr::null_mut(),
+ OPEN_EXISTING,
+ 0,
+ 0,
+ );
+ if h == INVALID_HANDLE_VALUE {
+ return TtyWidth::NoTty;
+ }
+
+ let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
+ let rc = GetConsoleScreenBufferInfo(h, &mut csbi);
+ CloseHandle(h);
+ if rc != 0 {
+ let width = (csbi.srWindow.Right - csbi.srWindow.Left) as usize;
+ // Unfortunately cygwin/mintty does not set the size of the
+ // backing console to match the actual window size. This
+ // always reports a size of 80 or 120 (not sure what
+ // determines that). Use a conservative max of 60 which should
+ // work in most circumstances. ConEmu does some magic to
+ // resize the console correctly, but there's no reasonable way
+ // to detect which kind of terminal we are running in, or if
+ // GetConsoleScreenBufferInfo returns accurate information.
+ return TtyWidth::Guess(cmp::min(60, width));
+ }
+
+ TtyWidth::NoTty
+ }
+ }
+}
+
+#[cfg(windows)]
+fn default_err_erase_line(shell: &mut Shell) {
+ match imp::stderr_width() {
+ TtyWidth::Known(max_width) | TtyWidth::Guess(max_width) => {
+ let blank = " ".repeat(max_width);
+ drop(write!(shell.output.stderr(), "{}\r", blank));
+ }
+ _ => (),
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/source/mod.rs b/src/tools/cargo/src/cargo/core/source/mod.rs
new file mode 100644
index 000000000..dca71b64e
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/source/mod.rs
@@ -0,0 +1,340 @@
+use std::collections::hash_map::HashMap;
+use std::fmt;
+use std::task::Poll;
+
+use crate::core::package::PackageSet;
+use crate::core::{Dependency, Package, PackageId, Summary};
+use crate::util::{CargoResult, Config};
+
+mod source_id;
+
+pub use self::source_id::{GitReference, SourceId};
+
+/// Something that finds and downloads remote packages based on names and versions.
+pub trait Source {
+ /// Returns the `SourceId` corresponding to this source.
+ fn source_id(&self) -> SourceId;
+
+ /// Returns the replaced `SourceId` corresponding to this source.
+ fn replaced_source_id(&self) -> SourceId {
+ self.source_id()
+ }
+
+ /// Returns whether or not this source will return summaries with
+ /// checksums listed.
+ fn supports_checksums(&self) -> bool;
+
+ /// Returns whether or not this source will return summaries with
+ /// the `precise` field in the source id listed.
+ fn requires_precise(&self) -> bool;
+
+ /// Attempts to find the packages that match a dependency request.
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>>;
+
+ fn query_vec(&mut self, dep: &Dependency, kind: QueryKind) -> Poll<CargoResult<Vec<Summary>>> {
+ let mut ret = Vec::new();
+ self.query(dep, kind, &mut |s| ret.push(s)).map_ok(|_| ret)
+ }
+
+ /// Ensure that the source is fully up-to-date for the current session on the next query.
+ fn invalidate_cache(&mut self);
+
+ /// If quiet, the source should not display any progress or status messages.
+ fn set_quiet(&mut self, quiet: bool);
+
+ /// Fetches the full package for the specified package ID.
+ fn download(&mut self, package: PackageId) -> CargoResult<MaybePackage>;
+
+ fn download_now(self: Box<Self>, package: PackageId, config: &Config) -> CargoResult<Package>
+ where
+ Self: std::marker::Sized,
+ {
+ let mut sources = SourceMap::new();
+ sources.insert(self);
+ let pkg_set = PackageSet::new(&[package], sources, config)?;
+ let pkg = pkg_set.get_one(package)?;
+ Ok(Package::clone(pkg))
+ }
+
+ fn finish_download(&mut self, package: PackageId, contents: Vec<u8>) -> CargoResult<Package>;
+
+ /// Generates a unique string which represents the fingerprint of the
+ /// current state of the source.
+ ///
+ /// This fingerprint is used to determine the "freshness" of the source
+ /// later on. It must be guaranteed that the fingerprint of a source is
+ /// constant if and only if the output product will remain constant.
+ ///
+ /// The `pkg` argument is the package which this fingerprint should only be
+ /// interested in for when this source may contain multiple packages.
+ fn fingerprint(&self, pkg: &Package) -> CargoResult<String>;
+
+ /// If this source supports it, verifies the source of the package
+ /// specified.
+ ///
+ /// Note that the source may also have performed other checksum-based
+ /// verification during the `download` step, but this is intended to be run
+ /// just before a crate is compiled so it may perform more expensive checks
+ /// which may not be cacheable.
+ fn verify(&self, _pkg: PackageId) -> CargoResult<()> {
+ Ok(())
+ }
+
+ /// Describes this source in a human readable fashion, used for display in
+ /// resolver error messages currently.
+ fn describe(&self) -> String;
+
+ /// Returns whether a source is being replaced by another here.
+ fn is_replaced(&self) -> bool {
+ false
+ }
+
+ /// Add a number of crates that should be whitelisted for showing up during
+ /// queries, even if they are yanked. Currently only applies to registry
+ /// sources.
+ fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]);
+
+ /// Query if a package is yanked. Only registry sources can mark packages
+ /// as yanked. This ignores the yanked whitelist.
+ fn is_yanked(&mut self, _pkg: PackageId) -> Poll<CargoResult<bool>>;
+
+ /// Block until all outstanding Poll::Pending requests are `Poll::Ready`.
+ ///
+ /// After calling this function, the source should return `Poll::Ready` for
+ /// any queries that previously returned `Poll::Pending`.
+ ///
+ /// If no queries previously returned `Poll::Pending`, and `invalidate_cache`
+ /// was not called, this function should be a no-op.
+ fn block_until_ready(&mut self) -> CargoResult<()>;
+}
+
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub enum QueryKind {
+ Exact,
+ /// Each source gets to define what `close` means for it.
+ /// Path/Git sources may return all dependencies that are at that URI,
+ /// whereas an `Index` source may return dependencies that have the same canonicalization.
+ Fuzzy,
+}
+
+pub enum MaybePackage {
+ Ready(Package),
+ Download {
+ url: String,
+ descriptor: String,
+ authorization: Option<String>,
+ },
+}
+
+impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
+ /// Forwards to `Source::source_id`.
+ fn source_id(&self) -> SourceId {
+ (**self).source_id()
+ }
+
+ /// Forwards to `Source::replaced_source_id`.
+ fn replaced_source_id(&self) -> SourceId {
+ (**self).replaced_source_id()
+ }
+
+ /// Forwards to `Source::supports_checksums`.
+ fn supports_checksums(&self) -> bool {
+ (**self).supports_checksums()
+ }
+
+ /// Forwards to `Source::requires_precise`.
+ fn requires_precise(&self) -> bool {
+ (**self).requires_precise()
+ }
+
+ /// Forwards to `Source::query`.
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ (**self).query(dep, kind, f)
+ }
+
+ fn invalidate_cache(&mut self) {
+ (**self).invalidate_cache()
+ }
+
+ fn set_quiet(&mut self, quiet: bool) {
+ (**self).set_quiet(quiet)
+ }
+
+ /// Forwards to `Source::download`.
+ fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+ (**self).download(id)
+ }
+
+ fn finish_download(&mut self, id: PackageId, data: Vec<u8>) -> CargoResult<Package> {
+ (**self).finish_download(id, data)
+ }
+
+ /// Forwards to `Source::fingerprint`.
+ fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+ (**self).fingerprint(pkg)
+ }
+
+ /// Forwards to `Source::verify`.
+ fn verify(&self, pkg: PackageId) -> CargoResult<()> {
+ (**self).verify(pkg)
+ }
+
+ fn describe(&self) -> String {
+ (**self).describe()
+ }
+
+ fn is_replaced(&self) -> bool {
+ (**self).is_replaced()
+ }
+
+ fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) {
+ (**self).add_to_yanked_whitelist(pkgs);
+ }
+
+ fn is_yanked(&mut self, pkg: PackageId) -> Poll<CargoResult<bool>> {
+ (**self).is_yanked(pkg)
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ (**self).block_until_ready()
+ }
+}
+
+impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T {
+ fn source_id(&self) -> SourceId {
+ (**self).source_id()
+ }
+
+ fn replaced_source_id(&self) -> SourceId {
+ (**self).replaced_source_id()
+ }
+
+ fn supports_checksums(&self) -> bool {
+ (**self).supports_checksums()
+ }
+
+ fn requires_precise(&self) -> bool {
+ (**self).requires_precise()
+ }
+
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ (**self).query(dep, kind, f)
+ }
+
+ fn invalidate_cache(&mut self) {
+ (**self).invalidate_cache()
+ }
+
+ fn set_quiet(&mut self, quiet: bool) {
+ (**self).set_quiet(quiet)
+ }
+
+ fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+ (**self).download(id)
+ }
+
+ fn finish_download(&mut self, id: PackageId, data: Vec<u8>) -> CargoResult<Package> {
+ (**self).finish_download(id, data)
+ }
+
+ fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+ (**self).fingerprint(pkg)
+ }
+
+ fn verify(&self, pkg: PackageId) -> CargoResult<()> {
+ (**self).verify(pkg)
+ }
+
+ fn describe(&self) -> String {
+ (**self).describe()
+ }
+
+ fn is_replaced(&self) -> bool {
+ (**self).is_replaced()
+ }
+
+ fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) {
+ (**self).add_to_yanked_whitelist(pkgs);
+ }
+
+ fn is_yanked(&mut self, pkg: PackageId) -> Poll<CargoResult<bool>> {
+ (**self).is_yanked(pkg)
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ (**self).block_until_ready()
+ }
+}
+
+/// A `HashMap` of `SourceId` -> `Box<Source>`.
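+///
+/// A brief sketch of typical use (editor's illustration, marked `ignore`
+/// because a concrete `Source` implementation is assumed):
+///
+/// ```ignore
+/// let mut map = SourceMap::new();
+/// map.insert(Box::new(some_source)); // keyed by `some_source.source_id()`
+/// let src = map.get(id);             // like `HashMap::get`
+/// ```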
+#[derive(Default)]
+pub struct SourceMap<'src> {
+ map: HashMap<SourceId, Box<dyn Source + 'src>>,
+}
+
+// `impl Debug` on source requires specialization, if even desirable at all.
+impl<'src> fmt::Debug for SourceMap<'src> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "SourceMap ")?;
+ f.debug_set().entries(self.map.keys()).finish()
+ }
+}
+
+impl<'src> SourceMap<'src> {
+ /// Creates an empty map.
+ pub fn new() -> SourceMap<'src> {
+ SourceMap {
+ map: HashMap::new(),
+ }
+ }
+
+ /// Like `HashMap::get`.
+ pub fn get(&self, id: SourceId) -> Option<&(dyn Source + 'src)> {
+ self.map.get(&id).map(|s| s.as_ref())
+ }
+
+ /// Like `HashMap::get_mut`.
+ pub fn get_mut(&mut self, id: SourceId) -> Option<&mut (dyn Source + 'src)> {
+ self.map.get_mut(&id).map(|s| s.as_mut())
+ }
+
+ /// Like `HashMap::insert`, but derives the `SourceId` key from the `Source`.
+ pub fn insert(&mut self, source: Box<dyn Source + 'src>) {
+ let id = source.source_id();
+ self.map.insert(id, source);
+ }
+
+ /// Like `HashMap::len`.
+ pub fn len(&self) -> usize {
+ self.map.len()
+ }
+
+ /// Like `HashMap::iter_mut`.
+ pub fn sources_mut<'a>(
+ &'a mut self,
+ ) -> impl Iterator<Item = (&'a SourceId, &'a mut (dyn Source + 'src))> {
+ self.map.iter_mut().map(|(a, b)| (a, &mut **b))
+ }
+
+ /// Merge the given map into self.
+ pub fn add_source_map(&mut self, other: SourceMap<'src>) {
+ for (key, value) in other.map {
+ self.map.entry(key).or_insert(value);
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/source/source_id.rs b/src/tools/cargo/src/cargo/core/source/source_id.rs
new file mode 100644
index 000000000..034d7ed59
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/source/source_id.rs
@@ -0,0 +1,891 @@
+use crate::core::PackageId;
+use crate::sources::registry::CRATES_IO_HTTP_INDEX;
+use crate::sources::{DirectorySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY};
+use crate::sources::{GitSource, PathSource, RegistrySource};
+use crate::util::{config, CanonicalUrl, CargoResult, Config, IntoUrl};
+use log::trace;
+use serde::de;
+use serde::ser;
+use std::cmp::{self, Ordering};
+use std::collections::HashSet;
+use std::fmt::{self, Formatter};
+use std::hash::{self, Hash};
+use std::path::{Path, PathBuf};
+use std::ptr;
+use std::sync::Mutex;
+use url::Url;
+
+lazy_static::lazy_static! {
+ static ref SOURCE_ID_CACHE: Mutex<HashSet<&'static SourceIdInner>> = Default::default();
+}
+
+/// Unique identifier for a source of packages.
+#[derive(Clone, Copy, Eq, Debug)]
+pub struct SourceId {
+ inner: &'static SourceIdInner,
+}
+
+#[derive(Eq, Clone, Debug)]
+struct SourceIdInner {
+ /// The source URL.
+ url: Url,
+ /// The canonical version of the above url
+ canonical_url: CanonicalUrl,
+ /// The source kind.
+ kind: SourceKind,
+ /// For example, the exact Git revision of the specified branch for a Git Source.
+ precise: Option<String>,
+ /// Name of the registry source for alternative registries
+ /// WARNING: this is not always set for alt-registries when the name is
+ /// not known.
+ name: Option<String>,
+ /// Name of the alt registry in the `[registries]` table.
+ /// WARNING: this is not always set for alt-registries when the name is
+ /// not known.
+ alt_registry_key: Option<String>,
+}
+
+/// The possible kinds of code source. Along with `SourceIdInner`, this fully defines the
+/// source.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+enum SourceKind {
+ /// A git repository.
+ Git(GitReference),
+ /// A local path.
+ Path,
+ /// A remote registry.
+ Registry,
+ /// A sparse registry.
+ SparseRegistry,
+ /// A local filesystem-based registry.
+ LocalRegistry,
+ /// A directory-based registry.
+ Directory,
+}
+
+/// Information to find a specific commit in a Git repository.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum GitReference {
+ /// From a tag.
+ Tag(String),
+ /// From a branch.
+ Branch(String),
+ /// From a specific revision.
+ Rev(String),
+ /// The default branch of the repository, the reference named `HEAD`.
+ DefaultBranch,
+}
+
+impl SourceId {
+ /// Creates a `SourceId` object from the kind and URL.
+ ///
+ /// The canonical URL will be calculated, but the precise field will not.
+ fn new(kind: SourceKind, url: Url, name: Option<&str>) -> CargoResult<SourceId> {
+ if kind == SourceKind::SparseRegistry {
+ // Sparse URLs are different because they store the kind prefix (sparse+)
+ // in the URL. This is because the prefix is necessary to differentiate
+ // from regular registries (git-based). The sparse+ prefix is included
+ // everywhere, including user-facing locations such as the `config.toml`
+ // file that defines the registry, or whenever Cargo displays it to the user.
+ assert!(url.as_str().starts_with("sparse+"));
+ }
+ let source_id = SourceId::wrap(SourceIdInner {
+ kind,
+ canonical_url: CanonicalUrl::new(&url)?,
+ url,
+ precise: None,
+ name: name.map(|n| n.into()),
+ alt_registry_key: None,
+ });
+ Ok(source_id)
+ }
+
+ fn wrap(inner: SourceIdInner) -> SourceId {
+ let mut cache = SOURCE_ID_CACHE.lock().unwrap();
+ let inner = cache.get(&inner).cloned().unwrap_or_else(|| {
+ let inner = Box::leak(Box::new(inner));
+ cache.insert(inner);
+ inner
+ });
+ SourceId { inner }
+ }
+
+ fn remote_source_kind(url: &Url) -> SourceKind {
+ if url.as_str().starts_with("sparse+") {
+ SourceKind::SparseRegistry
+ } else {
+ SourceKind::Registry
+ }
+ }
+
+ /// Parses a source URL and returns the corresponding ID.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// use cargo::core::SourceId;
+ /// SourceId::from_url("git+https://github.com/alexcrichton/\
+ /// libssh2-static-sys#80e71a3021618eb05\
+ /// 656c58fb7c5ef5f12bc747f");
+ /// ```
+ pub fn from_url(string: &str) -> CargoResult<SourceId> {
+ let mut parts = string.splitn(2, '+');
+ let kind = parts.next().unwrap();
+ let url = parts
+ .next()
+ .ok_or_else(|| anyhow::format_err!("invalid source `{}`", string))?;
+
+ match kind {
+ "git" => {
+ let mut url = url.into_url()?;
+ let mut reference = GitReference::DefaultBranch;
+ for (k, v) in url.query_pairs() {
+ match &k[..] {
+ // Map older 'ref' to branch.
+ "branch" | "ref" => reference = GitReference::Branch(v.into_owned()),
+
+ "rev" => reference = GitReference::Rev(v.into_owned()),
+ "tag" => reference = GitReference::Tag(v.into_owned()),
+ _ => {}
+ }
+ }
+ let precise = url.fragment().map(|s| s.to_owned());
+ url.set_fragment(None);
+ url.set_query(None);
+ Ok(SourceId::for_git(&url, reference)?.with_precise(precise))
+ }
+ "registry" => {
+ let url = url.into_url()?;
+ Ok(SourceId::new(SourceKind::Registry, url, None)?
+ .with_precise(Some("locked".to_string())))
+ }
+ "sparse" => {
+ let url = string.into_url()?;
+ Ok(SourceId::new(SourceKind::SparseRegistry, url, None)?
+ .with_precise(Some("locked".to_string())))
+ }
+ "path" => {
+ let url = url.into_url()?;
+ SourceId::new(SourceKind::Path, url, None)
+ }
+ kind => Err(anyhow::format_err!("unsupported source protocol: {}", kind)),
+ }
+ }
+
+ /// A view of the `SourceId` that can be `Display`ed as a URL.
+ pub fn as_url(&self) -> SourceIdAsUrl<'_> {
+ SourceIdAsUrl {
+ inner: &*self.inner,
+ }
+ }
+
+ /// Creates a `SourceId` from a filesystem path.
+ ///
+ /// `path`: an absolute path.
+ pub fn for_path(path: &Path) -> CargoResult<SourceId> {
+ let url = path.into_url()?;
+ SourceId::new(SourceKind::Path, url, None)
+ }
+
+ /// Creates a `SourceId` from a Git reference.
+ pub fn for_git(url: &Url, reference: GitReference) -> CargoResult<SourceId> {
+ SourceId::new(SourceKind::Git(reference), url.clone(), None)
+ }
+
+ /// Creates a SourceId from a remote registry URL when the registry name
+ /// cannot be determined, e.g. a user passes `--index` directly from CLI.
+ ///
+ /// Use [`SourceId::for_alt_registry`] if a name can be provided, which
+ /// generates better messages for cargo.
+ pub fn for_registry(url: &Url) -> CargoResult<SourceId> {
+ let kind = Self::remote_source_kind(url);
+ SourceId::new(kind, url.to_owned(), None)
+ }
+
+ /// Creates a `SourceId` from a remote registry URL with given name.
+ pub fn for_alt_registry(url: &Url, name: &str) -> CargoResult<SourceId> {
+ let kind = Self::remote_source_kind(url);
+ SourceId::new(kind, url.to_owned(), Some(name))
+ }
+
+ /// Creates a SourceId from a local registry path.
+ pub fn for_local_registry(path: &Path) -> CargoResult<SourceId> {
+ let url = path.into_url()?;
+ SourceId::new(SourceKind::LocalRegistry, url, None)
+ }
+
+ /// Creates a `SourceId` from a directory path.
+ pub fn for_directory(path: &Path) -> CargoResult<SourceId> {
+ let url = path.into_url()?;
+ SourceId::new(SourceKind::Directory, url, None)
+ }
+
+ /// Returns the `SourceId` corresponding to the main repository.
+ ///
+ /// This is the main cargo registry by default, but it can be overridden in
+ /// a `.cargo/config.toml`.
+ pub fn crates_io(config: &Config) -> CargoResult<SourceId> {
+ config.crates_io_source_id(|| {
+ config.check_registry_index_not_set()?;
+ let url = CRATES_IO_INDEX.into_url().unwrap();
+ SourceId::new(SourceKind::Registry, url, Some(CRATES_IO_REGISTRY))
+ })
+ }
+
+ /// Returns the `SourceId` corresponding to the main repository, using the
+ /// sparse HTTP index if allowed.
+ pub fn crates_io_maybe_sparse_http(config: &Config) -> CargoResult<SourceId> {
+ if Self::crates_io_is_sparse(config)? {
+ config.check_registry_index_not_set()?;
+ let url = CRATES_IO_HTTP_INDEX.into_url().unwrap();
+ SourceId::new(SourceKind::SparseRegistry, url, Some(CRATES_IO_REGISTRY))
+ } else {
+ Self::crates_io(config)
+ }
+ }
+
+ /// Returns whether to access crates.io over the sparse protocol.
+ pub fn crates_io_is_sparse(config: &Config) -> CargoResult<bool> {
+ let proto: Option<config::Value<String>> = config.get("registries.crates-io.protocol")?;
+ let is_sparse = match proto.as_ref().map(|v| v.val.as_str()) {
+ Some("sparse") => true,
+ Some("git") => false,
+ Some(unknown) => anyhow::bail!(
+ "unsupported registry protocol `{unknown}` (defined in {})",
+ proto.as_ref().unwrap().definition
+ ),
+ None => true,
+ };
+ Ok(is_sparse)
+ }
+
+ /// Gets the `SourceId` associated with given name of the remote registry.
+ pub fn alt_registry(config: &Config, key: &str) -> CargoResult<SourceId> {
+ if key == CRATES_IO_REGISTRY {
+ return Self::crates_io(config);
+ }
+ let url = config.get_registry_index(key)?;
+ let kind = Self::remote_source_kind(&url);
+ Ok(SourceId::wrap(SourceIdInner {
+ kind,
+ canonical_url: CanonicalUrl::new(&url)?,
+ url,
+ precise: None,
+ name: Some(key.to_string()),
+ alt_registry_key: Some(key.to_string()),
+ }))
+ }
+
+ /// Gets this source URL.
+ pub fn url(&self) -> &Url {
+ &self.inner.url
+ }
+
+ /// Gets the canonical URL of this source, used for internal comparison
+ /// purposes.
+ pub fn canonical_url(&self) -> &CanonicalUrl {
+ &self.inner.canonical_url
+ }
+
+ pub fn display_index(self) -> String {
+ if self.is_crates_io() {
+ format!("{} index", CRATES_IO_DOMAIN)
+ } else {
+ format!("`{}` index", self.display_registry_name())
+ }
+ }
+
+ pub fn display_registry_name(self) -> String {
+ if self.is_crates_io() {
+ CRATES_IO_REGISTRY.to_string()
+ } else if let Some(name) = &self.inner.name {
+ name.clone()
+ } else if self.precise().is_some() {
+ // We remove `precise` here to retrieve a permissive version of
+ // `SourceIdInner`, which may contain the registry name.
+ self.with_precise(None).display_registry_name()
+ } else {
+ url_display(self.url())
+ }
+ }
+
+ /// Gets the name of the remote registry as defined in the `[registries]` table.
+ /// WARNING: alt registries that come from Cargo.lock or `--index` will
+ /// not have a name.
+ pub fn alt_registry_key(&self) -> Option<&str> {
+ self.inner.alt_registry_key.as_deref()
+ }
+
+ /// Returns `true` if this source is from a filesystem path.
+ pub fn is_path(self) -> bool {
+ self.inner.kind == SourceKind::Path
+ }
+
+ /// Returns the local path if this is a path dependency.
+ pub fn local_path(self) -> Option<PathBuf> {
+ if self.inner.kind != SourceKind::Path {
+ return None;
+ }
+
+ Some(self.inner.url.to_file_path().unwrap())
+ }
+
+ /// Returns `true` if this source is from a registry (either local or not).
+ pub fn is_registry(self) -> bool {
+ matches!(
+ self.inner.kind,
+ SourceKind::Registry | SourceKind::SparseRegistry | SourceKind::LocalRegistry
+ )
+ }
+
+ /// Returns `true` if this source is from a sparse registry.
+ pub fn is_sparse(self) -> bool {
+ matches!(self.inner.kind, SourceKind::SparseRegistry)
+ }
+
+ /// Returns `true` if this source is a "remote" registry.
+ ///
+ /// "remote" may also mean a file URL to a git index, so it is not
+ /// necessarily "remote". This just means it is not `local-registry`.
+ pub fn is_remote_registry(self) -> bool {
+ matches!(
+ self.inner.kind,
+ SourceKind::Registry | SourceKind::SparseRegistry
+ )
+ }
+
+ /// Returns `true` if this source is from a Git repository.
+ pub fn is_git(self) -> bool {
+ matches!(self.inner.kind, SourceKind::Git(_))
+ }
+
+ /// Creates an implementation of `Source` corresponding to this ID.
+ pub fn load<'a>(
+ self,
+ config: &'a Config,
+ yanked_whitelist: &HashSet<PackageId>,
+ ) -> CargoResult<Box<dyn super::Source + 'a>> {
+ trace!("loading SourceId; {}", self);
+ match self.inner.kind {
+ SourceKind::Git(..) => Ok(Box::new(GitSource::new(self, config)?)),
+ SourceKind::Path => {
+ let path = match self.inner.url.to_file_path() {
+ Ok(p) => p,
+ Err(()) => panic!("path sources cannot be remote"),
+ };
+ Ok(Box::new(PathSource::new(&path, self, config)))
+ }
+ SourceKind::Registry | SourceKind::SparseRegistry => Ok(Box::new(
+ RegistrySource::remote(self, yanked_whitelist, config)?,
+ )),
+ SourceKind::LocalRegistry => {
+ let path = match self.inner.url.to_file_path() {
+ Ok(p) => p,
+ Err(()) => panic!("path sources cannot be remote"),
+ };
+ Ok(Box::new(RegistrySource::local(
+ self,
+ &path,
+ yanked_whitelist,
+ config,
+ )))
+ }
+ SourceKind::Directory => {
+ let path = match self.inner.url.to_file_path() {
+ Ok(p) => p,
+ Err(()) => panic!("path sources cannot be remote"),
+ };
+ Ok(Box::new(DirectorySource::new(&path, self, config)))
+ }
+ }
+ }
+
+ /// Gets the value of the precise field.
+ pub fn precise(self) -> Option<&'static str> {
+ self.inner.precise.as_deref()
+ }
+
+ /// Gets the Git reference if this is a git source, otherwise `None`.
+ pub fn git_reference(self) -> Option<&'static GitReference> {
+ match self.inner.kind {
+ SourceKind::Git(ref s) => Some(s),
+ _ => None,
+ }
+ }
+
+ /// Creates a new `SourceId` from this source with the given `precise`.
+ pub fn with_precise(self, v: Option<String>) -> SourceId {
+ SourceId::wrap(SourceIdInner {
+ precise: v,
+ ..(*self.inner).clone()
+ })
+ }
+
+ /// Returns `true` if the remote registry is the standard <https://crates.io>.
+ pub fn is_crates_io(self) -> bool {
+ match self.inner.kind {
+ SourceKind::Registry | SourceKind::SparseRegistry => {}
+ _ => return false,
+ }
+ let url = self.inner.url.as_str();
+ url == CRATES_IO_INDEX || url == CRATES_IO_HTTP_INDEX || is_overridden_crates_io_url(url)
+ }
+
+ /// Hashes `self`.
+ ///
+ /// For paths, remove the workspace prefix so the same source will give the
+ /// same hash in different locations.
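+ ///
+ /// For example (a sketch of the behavior below): a path source at
+ /// `<workspace>/crates/foo` contributes only the `crates/foo` suffix (plus
+ /// the source kind) to the hash, so the result is the same whether the
+ /// workspace is checked out under `/home/alice` or somewhere else entirely.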
+ pub fn stable_hash<S: hash::Hasher>(self, workspace: &Path, into: &mut S) {
+ if self.is_path() {
+ if let Ok(p) = self
+ .inner
+ .url
+ .to_file_path()
+ .unwrap()
+ .strip_prefix(workspace)
+ {
+ self.inner.kind.hash(into);
+ p.to_str().unwrap().hash(into);
+ return;
+ }
+ }
+ self.hash(into)
+ }
+
+ pub fn full_eq(self, other: SourceId) -> bool {
+ ptr::eq(self.inner, other.inner)
+ }
+
+ pub fn full_hash<S: hash::Hasher>(self, into: &mut S) {
+ ptr::NonNull::from(self.inner).hash(into)
+ }
+}
+
+impl PartialEq for SourceId {
+ fn eq(&self, other: &SourceId) -> bool {
+ self.cmp(other) == Ordering::Equal
+ }
+}
+
+impl PartialOrd for SourceId {
+ fn partial_cmp(&self, other: &SourceId) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+// Custom comparison defined as canonical URL equality for git sources and URL
+// equality for other sources, ignoring the `precise` and `name` fields.
+impl Ord for SourceId {
+ fn cmp(&self, other: &SourceId) -> Ordering {
+ // If our interior pointers are to the exact same `SourceIdInner` then
+ // we're guaranteed to be equal.
+ if ptr::eq(self.inner, other.inner) {
+ return Ordering::Equal;
+ }
+
+ // Sort first based on `kind`, deferring to the URL comparison below if
+ // the kinds are equal.
+ match self.inner.kind.cmp(&other.inner.kind) {
+ Ordering::Equal => {}
+ other => return other,
+ }
+
+ // If the `kind` and the `url` are equal, then for git sources we also
+ // ensure that the canonical urls are equal.
+ match (&self.inner.kind, &other.inner.kind) {
+ (SourceKind::Git(_), SourceKind::Git(_)) => {
+ self.inner.canonical_url.cmp(&other.inner.canonical_url)
+ }
+ _ => self.inner.url.cmp(&other.inner.url),
+ }
+ }
+}
+
+impl ser::Serialize for SourceId {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ if self.is_path() {
+ None::<String>.serialize(s)
+ } else {
+ s.collect_str(&self.as_url())
+ }
+ }
+}
+
+impl<'de> de::Deserialize<'de> for SourceId {
+ fn deserialize<D>(d: D) -> Result<SourceId, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ let string = String::deserialize(d)?;
+ SourceId::from_url(&string).map_err(de::Error::custom)
+ }
+}
+
+fn url_display(url: &Url) -> String {
+ if url.scheme() == "file" {
+ if let Ok(path) = url.to_file_path() {
+ if let Some(path_str) = path.to_str() {
+ return path_str.to_string();
+ }
+ }
+ }
+
+ url.as_str().to_string()
+}
+
+impl fmt::Display for SourceId {
+ fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+ match self.inner.kind {
+ SourceKind::Git(ref reference) => {
+ // Don't replace the URL display for git references,
+ // because those are kind of expected to be URLs.
+ write!(f, "{}", self.inner.url)?;
+ if let Some(pretty) = reference.pretty_ref() {
+ write!(f, "?{}", pretty)?;
+ }
+
+ if let Some(ref s) = self.inner.precise {
+ let len = cmp::min(s.len(), 8);
+ write!(f, "#{}", &s[..len])?;
+ }
+ Ok(())
+ }
+ SourceKind::Path => write!(f, "{}", url_display(&self.inner.url)),
+ SourceKind::Registry | SourceKind::SparseRegistry => {
+ write!(f, "registry `{}`", self.display_registry_name())
+ }
+ SourceKind::LocalRegistry => write!(f, "registry `{}`", url_display(&self.inner.url)),
+ SourceKind::Directory => write!(f, "dir {}", url_display(&self.inner.url)),
+ }
+ }
+}
+
+// The hash of SourceId is used in the name of some Cargo folders, so it
+// shouldn't vary. `as_str` gives the serialisation of a url (which has a spec)
+// and so insulates against possible changes in how the url crate does hashing.
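+//
+// For example, the crates.io registry cache has traditionally lived in a
+// directory whose name ends in `-1ecc6299db9ec823`, the short form of this
+// hash (see `test_cratesio_hash` below).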
+impl Hash for SourceId {
+ fn hash<S: hash::Hasher>(&self, into: &mut S) {
+ self.inner.kind.hash(into);
+ match self.inner.kind {
+ SourceKind::Git(_) => self.inner.canonical_url.hash(into),
+ _ => self.inner.url.as_str().hash(into),
+ }
+ }
+}
+
+impl Hash for SourceIdInner {
+ /// The hash of `SourceIdInner` is used to retrieve its interned value. We
+ /// only care about fields that make `SourceIdInner` unique, which are:
+ ///
+ /// - `kind`
+ /// - `precise`
+ /// - `canonical_url`
+ fn hash<S: hash::Hasher>(&self, into: &mut S) {
+ self.kind.hash(into);
+ self.precise.hash(into);
+ self.canonical_url.hash(into);
+ }
+}
+
+impl PartialEq for SourceIdInner {
+ /// This implementation must be synced with [`SourceIdInner::hash`].
+ fn eq(&self, other: &Self) -> bool {
+ self.kind == other.kind
+ && self.precise == other.precise
+ && self.canonical_url == other.canonical_url
+ }
+}
+
+// forward to `Ord`
+impl PartialOrd for SourceKind {
+ fn partial_cmp(&self, other: &SourceKind) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+// Note that this is specifically not derived on `SourceKind` although the
+// implementation here is very similar to what it might look like if it were
+// otherwise derived.
+//
+// The reason for this is somewhat obtuse. First of all the hash value of
+// `SourceKind` makes its way into `~/.cargo/registry/index/github.com-XXXX`
+// which means that a change to the hash forces all Rust users to redownload
+// the crates.io index and all their crates. We strive not to change this, so
+// that this redownloading happens as rarely as possible. How is this
+// connected to `Ord`, you might ask? That's a good
+// question!
+//
+// Since the beginning of time `SourceKind` has had `#[derive(Hash)]`. It for
+// the longest time *also* derived the `Ord` and `PartialOrd` traits. In #8522,
+// however, the implementation of `Ord` changed. This handwritten implementation
+// forgot to sync itself with the originally derived implementation, namely
+// placing git dependencies as sorted after all other dependencies instead of
+// first as before.
+//
+// This regression in #8522 (Rust 1.47) went unnoticed. When we switched back
+// to a derived implementation in #9133 (Rust 1.52 beta) we only then ironically
+// saw an issue (#9334). In #9334 it was observed that stable Rust at the time
+// (1.51) was sorting git dependencies last, whereas Rust 1.52 beta would sort
+// git dependencies first. This is because the `PartialOrd` implementation in
+// 1.51 used #8522, the buggy implementation, which put git deps last. In 1.52
+// it was (unknowingly) restored to the pre-1.47 behavior with git dependencies
+// first.
+//
+// Because the breakage was only witnessed after the original breakage, this
+// trait implementation is preserving the "broken" behavior. Put a different way:
+//
+// * Rust pre-1.47 sorted git deps first.
+// * Rust 1.47 to Rust 1.51 sorted git deps last, a breaking change (#8522) that
+// was never noticed.
+// * Rust 1.52 restored the pre-1.47 behavior (#9133, without knowing it did
+// so), and breakage was witnessed by actual users due to difference with
+// 1.51.
+// * Rust 1.52 (the source as it lives now) was fixed to match the 1.47-1.51
+// behavior (#9383), which is now considered intentionally breaking from the
+// pre-1.47 behavior.
+//
+// Note that this was all discovered when Rust 1.53 was in nightly and 1.52 was
+// in beta. #9133 was in both beta and nightly at the time of discovery. For
+// 1.52 #9383 reverted #9133, meaning 1.52 is the same as 1.51. On nightly
+// (1.53) #9397 was created to fix the regression introduced by #9133 relative
+// to the current stable (1.51).
+//
+// That's all a long-winded way of saying "it's weird that git deps hash first
+// and are sorted last, but it's the way it is right now". The author of this
+// comment chose to handwrite the `Ord` implementation instead of the `Hash`
+// implementation, but it's only required that at most one of them is
+// hand-written because the other can be derived. Perhaps one day in
+// the future someone can figure out how to remove this behavior.
+impl Ord for SourceKind {
+ fn cmp(&self, other: &SourceKind) -> Ordering {
+ match (self, other) {
+ (SourceKind::Path, SourceKind::Path) => Ordering::Equal,
+ (SourceKind::Path, _) => Ordering::Less,
+ (_, SourceKind::Path) => Ordering::Greater,
+
+ (SourceKind::Registry, SourceKind::Registry) => Ordering::Equal,
+ (SourceKind::Registry, _) => Ordering::Less,
+ (_, SourceKind::Registry) => Ordering::Greater,
+
+ (SourceKind::SparseRegistry, SourceKind::SparseRegistry) => Ordering::Equal,
+ (SourceKind::SparseRegistry, _) => Ordering::Less,
+ (_, SourceKind::SparseRegistry) => Ordering::Greater,
+
+ (SourceKind::LocalRegistry, SourceKind::LocalRegistry) => Ordering::Equal,
+ (SourceKind::LocalRegistry, _) => Ordering::Less,
+ (_, SourceKind::LocalRegistry) => Ordering::Greater,
+
+ (SourceKind::Directory, SourceKind::Directory) => Ordering::Equal,
+ (SourceKind::Directory, _) => Ordering::Less,
+ (_, SourceKind::Directory) => Ordering::Greater,
+
+ (SourceKind::Git(a), SourceKind::Git(b)) => a.cmp(b),
+ }
+ }
+}
+
+/// A `Display`able view into a `SourceId` that will write it as a URL.
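+///
+/// For illustration (formats follow the `Display` impl below): a remote
+/// registry renders as `registry+<url>`, a sparse registry keeps its
+/// `sparse+`-prefixed URL as-is, and a git source renders as
+/// `git+<url>?branch=<name>#<rev>` when a branch and a precise revision are
+/// set.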
+pub struct SourceIdAsUrl<'a> {
+ inner: &'a SourceIdInner,
+}
+
+impl<'a> fmt::Display for SourceIdAsUrl<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self.inner {
+ SourceIdInner {
+ kind: SourceKind::Path,
+ ref url,
+ ..
+ } => write!(f, "path+{}", url),
+ SourceIdInner {
+ kind: SourceKind::Git(ref reference),
+ ref url,
+ ref precise,
+ ..
+ } => {
+ write!(f, "git+{}", url)?;
+ if let Some(pretty) = reference.pretty_ref() {
+ write!(f, "?{}", pretty)?;
+ }
+ if let Some(precise) = precise.as_ref() {
+ write!(f, "#{}", precise)?;
+ }
+ Ok(())
+ }
+ SourceIdInner {
+ kind: SourceKind::Registry,
+ ref url,
+ ..
+ } => {
+ write!(f, "registry+{url}")
+ }
+ SourceIdInner {
+ kind: SourceKind::SparseRegistry,
+ ref url,
+ ..
+ } => {
+ // Sparse registry URL already includes the `sparse+` prefix
+ write!(f, "{url}")
+ }
+ SourceIdInner {
+ kind: SourceKind::LocalRegistry,
+ ref url,
+ ..
+ } => write!(f, "local-registry+{}", url),
+ SourceIdInner {
+ kind: SourceKind::Directory,
+ ref url,
+ ..
+ } => write!(f, "directory+{}", url),
+ }
+ }
+}
+
+impl GitReference {
+ /// Returns a `Display`able view of this git reference, or `None` if using
+ /// the head of the default branch.
+ pub fn pretty_ref(&self) -> Option<PrettyRef<'_>> {
+ match self {
+ GitReference::DefaultBranch => None,
+ _ => Some(PrettyRef { inner: self }),
+ }
+ }
+}
+
+/// A git reference that can be `Display`ed
+pub struct PrettyRef<'a> {
+ inner: &'a GitReference,
+}
+
+impl<'a> fmt::Display for PrettyRef<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self.inner {
+ GitReference::Branch(ref b) => write!(f, "branch={}", b),
+ GitReference::Tag(ref s) => write!(f, "tag={}", s),
+ GitReference::Rev(ref s) => write!(f, "rev={}", s),
+ GitReference::DefaultBranch => unreachable!(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::{GitReference, SourceId, SourceKind};
+ use crate::util::{Config, IntoUrl};
+
+ #[test]
+ fn github_sources_equal() {
+ let loc = "https://github.com/foo/bar".into_url().unwrap();
+ let default = SourceKind::Git(GitReference::DefaultBranch);
+ let s1 = SourceId::new(default.clone(), loc, None).unwrap();
+
+ let loc = "git://github.com/foo/bar".into_url().unwrap();
+ let s2 = SourceId::new(default, loc.clone(), None).unwrap();
+
+ assert_eq!(s1, s2);
+
+ let foo = SourceKind::Git(GitReference::Branch("foo".to_string()));
+ let s3 = SourceId::new(foo, loc, None).unwrap();
+ assert_ne!(s1, s3);
+ }
+
+ // This test checks that the hash of the `SourceId` for crates.io is a
+ // well-known value.
+ //
+ // Note that the hash value matches what the crates.io source id has hashed
+ // since long before Rust 1.30. We strive to keep this value the same across
+ // versions of Cargo because changing it means that users will need to
+ // redownload the index and all crates they use when using a new Cargo version.
+ //
+ // This isn't to say that this hash can *never* change, only that when changing
+ // this it should be explicitly done. If this hash changes accidentally and
+ // you're able to restore the hash to its original value, please do so!
+ // Otherwise please just leave a comment in your PR as to why the hash value is
+ // changing and why the old value can't be easily preserved.
+ //
+ // The hash value depends on endianness and bit-width, so we only run this test on
+ // little-endian 64-bit CPUs (such as x86-64 and ARM64) where it matches the
+ // well-known value.
+ #[test]
+ #[cfg(all(target_endian = "little", target_pointer_width = "64"))]
+ fn test_cratesio_hash() {
+ let config = Config::default().unwrap();
+ let crates_io = SourceId::crates_io(&config).unwrap();
+ assert_eq!(crate::util::hex::short_hash(&crates_io), "1ecc6299db9ec823");
+ }
+
+ // See the comment in `test_cratesio_hash`.
+ //
+ // Only test on non-Windows as paths on Windows will get different hashes.
+ #[test]
+ #[cfg(all(target_endian = "little", target_pointer_width = "64", not(windows)))]
+ fn test_stable_hash() {
+ use std::hash::Hasher;
+ use std::path::Path;
+
+ let gen_hash = |source_id: SourceId| {
+ let mut hasher = std::collections::hash_map::DefaultHasher::new();
+ source_id.stable_hash(Path::new("/tmp/ws"), &mut hasher);
+ hasher.finish()
+ };
+
+ let url = "https://my-crates.io".into_url().unwrap();
+ let source_id = SourceId::for_registry(&url).unwrap();
+ assert_eq!(gen_hash(source_id), 18108075011063494626);
+ assert_eq!(crate::util::hex::short_hash(&source_id), "fb60813d6cb8df79");
+
+ let url = "https://your-crates.io".into_url().unwrap();
+ let source_id = SourceId::for_alt_registry(&url, "alt").unwrap();
+ assert_eq!(gen_hash(source_id), 12862859764592646184);
+ assert_eq!(crate::util::hex::short_hash(&source_id), "09c10fd0cbd74bce");
+
+ let url = "sparse+https://my-crates.io".into_url().unwrap();
+ let source_id = SourceId::for_registry(&url).unwrap();
+ assert_eq!(gen_hash(source_id), 8763561830438022424);
+ assert_eq!(crate::util::hex::short_hash(&source_id), "d1ea0d96f6f759b5");
+
+ let url = "sparse+https://your-crates.io".into_url().unwrap();
+ let source_id = SourceId::for_alt_registry(&url, "alt").unwrap();
+ assert_eq!(gen_hash(source_id), 5159702466575482972);
+ assert_eq!(crate::util::hex::short_hash(&source_id), "135d23074253cb78");
+
+ let url = "file:///tmp/ws/crate".into_url().unwrap();
+ let source_id = SourceId::for_git(&url, GitReference::DefaultBranch).unwrap();
+ assert_eq!(gen_hash(source_id), 15332537265078583985);
+ assert_eq!(crate::util::hex::short_hash(&source_id), "73a808694abda756");
+
+ let path = Path::new("/tmp/ws/crate");
+
+ let source_id = SourceId::for_local_registry(path).unwrap();
+ assert_eq!(gen_hash(source_id), 18446533307730842837);
+ assert_eq!(crate::util::hex::short_hash(&source_id), "52a84cc73f6fd48b");
+
+ let source_id = SourceId::for_path(path).unwrap();
+ assert_eq!(gen_hash(source_id), 8764714075439899829);
+ assert_eq!(crate::util::hex::short_hash(&source_id), "e1ddd48578620fc1");
+
+ let source_id = SourceId::for_directory(path).unwrap();
+ assert_eq!(gen_hash(source_id), 17459999773908528552);
+ assert_eq!(crate::util::hex::short_hash(&source_id), "6568fe2c2fab5bfe");
+ }
+
+ #[test]
+ fn serde_roundtrip() {
+ let url = "sparse+https://my-crates.io/".into_url().unwrap();
+ let source_id = SourceId::for_registry(&url).unwrap();
+ let formatted = format!("{}", source_id.as_url());
+ let deserialized = SourceId::from_url(&formatted).unwrap();
+ assert_eq!(formatted, "sparse+https://my-crates.io/");
+ assert_eq!(source_id, deserialized);
+ }
+}
+
+/// Check if `url` equals to the overridden crates.io URL.
+// ALLOWED: For testing Cargo itself only.
+#[allow(clippy::disallowed_methods)]
+fn is_overridden_crates_io_url(url: &str) -> bool {
+ std::env::var("__CARGO_TEST_CRATES_IO_URL_DO_NOT_USE_THIS").map_or(false, |v| v == url)
+}
diff --git a/src/tools/cargo/src/cargo/core/summary.rs b/src/tools/cargo/src/cargo/core/summary.rs
new file mode 100644
index 000000000..8a7238e4a
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/summary.rs
@@ -0,0 +1,456 @@
+use crate::core::{Dependency, PackageId, SourceId};
+use crate::util::interning::InternedString;
+use crate::util::{CargoResult, Config};
+use anyhow::bail;
+use semver::Version;
+use std::collections::{BTreeMap, HashMap, HashSet};
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::mem;
+use std::rc::Rc;
+
+/// Subset of a `Manifest`. Contains only the most important information about
+/// a package.
+///
+/// Summaries are cloned and should not be mutated after creation.
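+///
+/// Concretely (see `Inner` below), a summary carries the package id, its
+/// dependencies, its feature map, an optional checksum, and an optional
+/// `links` key.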
+#[derive(Debug, Clone)]
+pub struct Summary {
+ inner: Rc<Inner>,
+}
+
+#[derive(Debug, Clone)]
+struct Inner {
+ package_id: PackageId,
+ dependencies: Vec<Dependency>,
+ features: Rc<FeatureMap>,
+ checksum: Option<String>,
+ links: Option<InternedString>,
+}
+
+impl Summary {
+ pub fn new(
+ config: &Config,
+ pkg_id: PackageId,
+ dependencies: Vec<Dependency>,
+ features: &BTreeMap<InternedString, Vec<InternedString>>,
+ links: Option<impl Into<InternedString>>,
+ ) -> CargoResult<Summary> {
+ // ****CAUTION**** If you change anything here that may raise a new
+ // error, be sure to coordinate that change with either the index
+ // schema field or the SummariesCache version.
+ for dep in dependencies.iter() {
+ let dep_name = dep.name_in_toml();
+ if dep.is_optional() && !dep.is_transitive() {
+ bail!(
+ "dev-dependencies are not allowed to be optional: `{}`",
+ dep_name
+ )
+ }
+ }
+ let feature_map = build_feature_map(config, pkg_id, features, &dependencies)?;
+ Ok(Summary {
+ inner: Rc::new(Inner {
+ package_id: pkg_id,
+ dependencies,
+ features: Rc::new(feature_map),
+ checksum: None,
+ links: links.map(|l| l.into()),
+ }),
+ })
+ }
+
+ pub fn package_id(&self) -> PackageId {
+ self.inner.package_id
+ }
+ pub fn name(&self) -> InternedString {
+ self.package_id().name()
+ }
+ pub fn version(&self) -> &Version {
+ self.package_id().version()
+ }
+ pub fn source_id(&self) -> SourceId {
+ self.package_id().source_id()
+ }
+ pub fn dependencies(&self) -> &[Dependency] {
+ &self.inner.dependencies
+ }
+ pub fn features(&self) -> &FeatureMap {
+ &self.inner.features
+ }
+
+ pub fn checksum(&self) -> Option<&str> {
+ self.inner.checksum.as_deref()
+ }
+ pub fn links(&self) -> Option<InternedString> {
+ self.inner.links
+ }
+
+ pub fn override_id(mut self, id: PackageId) -> Summary {
+ Rc::make_mut(&mut self.inner).package_id = id;
+ self
+ }
+
+ pub fn set_checksum(&mut self, cksum: String) {
+ Rc::make_mut(&mut self.inner).checksum = Some(cksum);
+ }
+
+ pub fn map_dependencies<F>(mut self, f: F) -> Summary
+ where
+ F: FnMut(Dependency) -> Dependency,
+ {
+ {
+ let slot = &mut Rc::make_mut(&mut self.inner).dependencies;
+ *slot = mem::take(slot).into_iter().map(f).collect();
+ }
+ self
+ }
+
+ pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Summary {
+ let me = if self.package_id().source_id() == to_replace {
+ let new_id = self.package_id().with_source_id(replace_with);
+ self.override_id(new_id)
+ } else {
+ self
+ };
+ me.map_dependencies(|dep| dep.map_source(to_replace, replace_with))
+ }
+}
+
+impl PartialEq for Summary {
+ fn eq(&self, other: &Summary) -> bool {
+ self.inner.package_id == other.inner.package_id
+ }
+}
+
+impl Eq for Summary {}
+
+impl Hash for Summary {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.inner.package_id.hash(state);
+ }
+}
+
+/// Checks features for errors, bailing out with a `CargoResult::Err` if
+/// invalid, and creates `FeatureValue`s for each feature.
+fn build_feature_map(
+ config: &Config,
+ pkg_id: PackageId,
+ features: &BTreeMap<InternedString, Vec<InternedString>>,
+ dependencies: &[Dependency],
+) -> CargoResult<FeatureMap> {
+ use self::FeatureValue::*;
+ let mut dep_map = HashMap::new();
+ for dep in dependencies.iter() {
+ dep_map
+ .entry(dep.name_in_toml())
+ .or_insert_with(Vec::new)
+ .push(dep);
+ }
+
+ let mut map: FeatureMap = features
+ .iter()
+ .map(|(feature, list)| {
+ let fvs: Vec<_> = list
+ .iter()
+ .map(|feat_value| FeatureValue::new(*feat_value))
+ .collect();
+ (*feature, fvs)
+ })
+ .collect();
+
+ // Add implicit features for optional dependencies if they weren't
+ // explicitly listed anywhere.
+ let explicitly_listed: HashSet<_> = map
+ .values()
+ .flatten()
+ .filter_map(|fv| match fv {
+ Dep { dep_name } => Some(*dep_name),
+ _ => None,
+ })
+ .collect();
+ for dep in dependencies {
+ if !dep.is_optional() {
+ continue;
+ }
+ let dep_name_in_toml = dep.name_in_toml();
+ if features.contains_key(&dep_name_in_toml) || explicitly_listed.contains(&dep_name_in_toml)
+ {
+ continue;
+ }
+ let fv = Dep {
+ dep_name: dep_name_in_toml,
+ };
+ map.insert(dep_name_in_toml, vec![fv]);
+ }
+
+ // Validate features are listed properly.
+ for (feature, fvs) in &map {
+ if feature.starts_with("dep:") {
+ bail!(
+ "feature named `{}` is not allowed to start with `dep:`",
+ feature
+ );
+ }
+ if feature.contains('/') {
+ bail!(
+ "feature named `{}` is not allowed to contain slashes",
+ feature
+ );
+ }
+ validate_feature_name(config, pkg_id, feature)?;
+ for fv in fvs {
+ // Find data for the referenced dependency...
+ let dep_data = {
+ match fv {
+ Feature(dep_name) | Dep { dep_name, .. } | DepFeature { dep_name, .. } => {
+ dep_map.get(dep_name)
+ }
+ }
+ };
+ let is_optional_dep = dep_data
+ .iter()
+ .flat_map(|d| d.iter())
+ .any(|d| d.is_optional());
+ let is_any_dep = dep_data.is_some();
+ match fv {
+ Feature(f) => {
+ if !features.contains_key(f) {
+ if !is_any_dep {
+ bail!(
+ "feature `{}` includes `{}` which is neither a dependency \
+ nor another feature",
+ feature,
+ fv
+ );
+ }
+ if is_optional_dep {
+ if !map.contains_key(f) {
+ bail!(
+ "feature `{}` includes `{}`, but `{}` is an \
+ optional dependency without an implicit feature\n\
+ Use `dep:{}` to enable the dependency.",
+ feature,
+ fv,
+ f,
+ f
+ );
+ }
+ } else {
+ bail!("feature `{}` includes `{}`, but `{}` is not an optional dependency\n\
+ A non-optional dependency of the same name is defined; \
+ consider adding `optional = true` to its definition.",
+ feature, fv, f);
+ }
+ }
+ }
+ Dep { dep_name } => {
+ if !is_any_dep {
+ bail!(
+ "feature `{}` includes `{}`, but `{}` is not listed as a dependency",
+ feature,
+ fv,
+ dep_name
+ );
+ }
+ if !is_optional_dep {
+ bail!(
+ "feature `{}` includes `{}`, but `{}` is not an optional dependency\n\
+ A non-optional dependency of the same name is defined; \
+ consider adding `optional = true` to its definition.",
+ feature,
+ fv,
+ dep_name
+ );
+ }
+ }
+ DepFeature {
+ dep_name,
+ dep_feature,
+ weak,
+ ..
+ } => {
+ // Early check for some unlikely syntax.
+ if dep_feature.contains('/') {
+ bail!(
+ "multiple slashes in feature `{}` (included by feature `{}`) are not allowed",
+ fv,
+ feature
+ );
+ }
+
+ // dep: cannot be combined with /
+ if let Some(stripped_dep) = dep_name.strip_prefix("dep:") {
+ let has_other_dep = explicitly_listed.contains(stripped_dep);
+ let is_optional = dep_map
+ .get(stripped_dep)
+ .iter()
+ .flat_map(|d| d.iter())
+ .any(|d| d.is_optional());
+ let extra_help = if *weak || has_other_dep || !is_optional {
+ // In this case, the user should just remove dep:.
+ // Note that "hiding" an optional dependency
+ // wouldn't work with just a single `dep:foo?/bar`
+ // because there would not be any way to enable
+ // `foo`.
+ String::new()
+ } else {
+ format!(
+ "\nIf the intent is to avoid creating an implicit feature \
+ `{stripped_dep}` for an optional dependency, \
+ then consider replacing this with two values:\n \
+ \"dep:{stripped_dep}\", \"{stripped_dep}/{dep_feature}\""
+ )
+ };
+ bail!(
+ "feature `{feature}` includes `{fv}` with both `dep:` and `/`\n\
+ To fix this, remove the `dep:` prefix.{extra_help}"
+ )
+ }
+
+ // Validation of the feature name will be performed in the resolver.
+ if !is_any_dep {
+ bail!(
+ "feature `{}` includes `{}`, but `{}` is not a dependency",
+ feature,
+ fv,
+ dep_name
+ );
+ }
+ if *weak && !is_optional_dep {
+ bail!("feature `{}` includes `{}` with a `?`, but `{}` is not an optional dependency\n\
+ A non-optional dependency of the same name is defined; \
+ consider removing the `?` or changing the dependency to be optional",
+ feature, fv, dep_name);
+ }
+ }
+ }
+ }
+ }
+
+ // Make sure every optional dep is mentioned at least once.
+ let used: HashSet<_> = map
+ .values()
+ .flatten()
+ .filter_map(|fv| match fv {
+ Dep { dep_name } | DepFeature { dep_name, .. } => Some(dep_name),
+ _ => None,
+ })
+ .collect();
+ if let Some(dep) = dependencies
+ .iter()
+ .find(|dep| dep.is_optional() && !used.contains(&dep.name_in_toml()))
+ {
+ bail!(
+ "optional dependency `{}` is not included in any feature\n\
+ Make sure that `dep:{}` is included in one of features in the [features] table.",
+ dep.name_in_toml(),
+ dep.name_in_toml(),
+ );
+ }
+
+ Ok(map)
+}
+
+/// FeatureValue represents the types of dependencies a feature can have.
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub enum FeatureValue {
+ /// A feature enabling another feature.
+ Feature(InternedString),
+ /// A feature enabling a dependency with `dep:dep_name` syntax.
+ Dep { dep_name: InternedString },
+ /// A feature enabling a feature on a dependency with `crate_name/feat_name` syntax.
+ DepFeature {
+ dep_name: InternedString,
+ dep_feature: InternedString,
+ /// If `true`, indicates the `?` syntax is used, which means this will
+ /// not automatically enable the dependency unless the dependency is
+ /// activated through some other means.
+ weak: bool,
+ },
+}
+
+impl FeatureValue {
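+ /// Parses a feature value from its string form.
+ ///
+ /// For illustration (a sketch of the parsing below): `"serde"` becomes
+ /// `Feature`, `"dep:serde"` becomes `Dep`, `"serde/derive"` becomes a
+ /// non-weak `DepFeature`, and `"serde?/derive"` becomes a weak
+ /// `DepFeature`.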
+ pub fn new(feature: InternedString) -> FeatureValue {
+ match feature.find('/') {
+ Some(pos) => {
+ let (dep, dep_feat) = feature.split_at(pos);
+ let dep_feat = &dep_feat[1..];
+ let (dep, weak) = if let Some(dep) = dep.strip_suffix('?') {
+ (dep, true)
+ } else {
+ (dep, false)
+ };
+ FeatureValue::DepFeature {
+ dep_name: InternedString::new(dep),
+ dep_feature: InternedString::new(dep_feat),
+ weak,
+ }
+ }
+ None => {
+ if let Some(dep_name) = feature.strip_prefix("dep:") {
+ FeatureValue::Dep {
+ dep_name: InternedString::new(dep_name),
+ }
+ } else {
+ FeatureValue::Feature(feature)
+ }
+ }
+ }
+ }
+
+ /// Returns `true` if this feature explicitly used `dep:` syntax.
+ pub fn has_dep_prefix(&self) -> bool {
+ matches!(self, FeatureValue::Dep { .. })
+ }
+}
+
+impl fmt::Display for FeatureValue {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ use self::FeatureValue::*;
+ match self {
+ Feature(feat) => write!(f, "{}", feat),
+ Dep { dep_name } => write!(f, "dep:{}", dep_name),
+ DepFeature {
+ dep_name,
+ dep_feature,
+ weak,
+ } => {
+ let weak = if *weak { "?" } else { "" };
+ write!(f, "{}{}/{}", dep_name, weak, dep_feature)
+ }
+ }
+ }
+}
+
+pub type FeatureMap = BTreeMap<InternedString, Vec<FeatureValue>>;
+
+fn validate_feature_name(config: &Config, pkg_id: PackageId, name: &str) -> CargoResult<()> {
+ let mut chars = name.chars();
+ const FUTURE: &str = "This was previously accepted but is being phased out; \
+ it will become a hard error in a future release.\n\
+ For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, \
+ and please leave a comment if this will be a problem for your project.";
+ if let Some(ch) = chars.next() {
+ if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_' || ch.is_digit(10)) {
+ config.shell().warn(&format!(
+ "invalid character `{}` in feature `{}` in package {}, \
+ the first character must be a Unicode XID start character or digit \
+ (most letters or `_` or `0` to `9`)\n\
+ {}",
+ ch, name, pkg_id, FUTURE
+ ))?;
+ }
+ }
+ for ch in chars {
+ if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-' || ch == '+' || ch == '.') {
+ config.shell().warn(&format!(
+ "invalid character `{}` in feature `{}` in package {}, \
+ characters must be Unicode XID characters, `+`, or `.` \
+ (numbers, `+`, `-`, `_`, `.`, or most letters)\n\
+ {}",
+ ch, name, pkg_id, FUTURE
+ ))?;
+ }
+ }
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/core/workspace.rs b/src/tools/cargo/src/cargo/core/workspace.rs
new file mode 100644
index 000000000..12b78a69c
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/workspace.rs
@@ -0,0 +1,1789 @@
+use std::cell::RefCell;
+use std::collections::hash_map::{Entry, HashMap};
+use std::collections::{BTreeMap, BTreeSet, HashSet};
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
+
+use anyhow::{anyhow, bail, Context as _};
+use glob::glob;
+use itertools::Itertools;
+use log::debug;
+use url::Url;
+
+use crate::core::compiler::Unit;
+use crate::core::features::Features;
+use crate::core::registry::PackageRegistry;
+use crate::core::resolver::features::CliFeatures;
+use crate::core::resolver::ResolveBehavior;
+use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec};
+use crate::core::{EitherManifest, Package, SourceId, VirtualManifest};
+use crate::ops;
+use crate::sources::{PathSource, CRATES_IO_INDEX, CRATES_IO_REGISTRY};
+use crate::util::errors::{CargoResult, ManifestError};
+use crate::util::interning::InternedString;
+use crate::util::lev_distance;
+use crate::util::toml::{read_manifest, InheritableFields, TomlDependency, TomlProfiles};
+use crate::util::{config::ConfigRelativePath, Config, Filesystem, IntoUrl};
+use cargo_util::paths;
+use cargo_util::paths::normalize_path;
+use pathdiff::diff_paths;
+
+/// The core abstraction in Cargo for working with a workspace of crates.
+///
+/// A workspace is often created very early on and then threaded through all
+/// other functions. It's typically through this object that the current
+/// package is loaded and/or learned about.
+#[derive(Debug)]
+pub struct Workspace<'cfg> {
+ config: &'cfg Config,
+
+ // This is the path to the manifest the current cargo subcommand was
+ // invoked with, i.e. the `--manifest-path` argument to Cargo, and it
+ // points to the "main crate" that we're going to worry about.
+ current_manifest: PathBuf,
+
+ // A list of packages found in this workspace. Always includes at least the
+ // package mentioned by `current_manifest`.
+ packages: Packages<'cfg>,
+
+ // If this workspace includes more than one crate, this points to the root
+ // of the workspace. This is `None` in the case that `[workspace]` is
+ // missing, `package.workspace` is missing, and no `Cargo.toml` above
+ // `current_manifest` was found on the filesystem with `[workspace]`.
+ root_manifest: Option<PathBuf>,
+
+ // Shared target directory for all the packages of this workspace.
+ // `None` if the default path of `root/target` should be used.
+ target_dir: Option<Filesystem>,
+
+ // List of members in this workspace with a listing of all their manifest
+ // paths. The packages themselves can be looked up through the `packages`
+ // set above.
+ members: Vec<PathBuf>,
+ member_ids: HashSet<PackageId>,
+
+ // The subset of `members` that are used by the
+ // `build`, `check`, `test`, and `bench` subcommands
+ // when no package is selected with `--package` / `-p` and `--workspace`
+ // is not used.
+ //
+ // This is set by the `default-members` config
+ // in the `[workspace]` section.
+ // When unset, this is the same as `members` for virtual workspaces
+ // (`--workspace` is implied)
+ // or only the root package for non-virtual workspaces.
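+ //
+ // For illustration (a sketch): this corresponds to a root manifest entry
+ // such as `default-members = ["crates/cli"]` in the `[workspace]` table.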
+ default_members: Vec<PathBuf>,
+
+ // `true` if this is a temporary workspace created for the purposes of the
+ // `cargo install` or `cargo package` commands.
+ is_ephemeral: bool,
+
+ // `true` if this workspace should enforce optional dependencies even when
+ // not needed; false if this workspace should only enforce dependencies
+ // needed by the current configuration (such as in cargo install). In some
+ // cases `false` also results in the non-enforcement of dev-dependencies.
+ require_optional_deps: bool,
+
+ // A cache of loaded packages for particular paths which is disjoint from
+ // `packages` up above, used in the `load` method down below.
+ loaded_packages: RefCell<HashMap<PathBuf, Package>>,
+
+ // If `true`, then the resolver will ignore any existing `Cargo.lock`
+ // file. This is set for `cargo install` without `--locked`.
+ ignore_lock: bool,
+
+ /// The resolver behavior specified with the `resolver` field.
+ resolve_behavior: ResolveBehavior,
+
+ /// Workspace-level custom metadata
+ custom_metadata: Option<toml::Value>,
+}
+
+// Separate structure for tracking loaded packages (to avoid loading anything
+// twice); it is kept apart from `Workspace` to help appease the borrow checker.
+#[derive(Debug)]
+struct Packages<'cfg> {
+ config: &'cfg Config,
+ packages: HashMap<PathBuf, MaybePackage>,
+}
+
+#[derive(Debug)]
+pub enum MaybePackage {
+ Package(Package),
+ Virtual(VirtualManifest),
+}
+
+/// Configuration of a workspace in a manifest.
+#[derive(Debug, Clone)]
+pub enum WorkspaceConfig {
+ /// Indicates that `[workspace]` was present and the members were
+ /// optionally specified as well.
+ Root(WorkspaceRootConfig),
+
+ /// Indicates that `[workspace]` was present and the `root` field is the
+ /// optional value of `package.workspace`, if present.
+ Member { root: Option<String> },
+}
+
+impl WorkspaceConfig {
+ pub fn inheritable(&self) -> Option<&InheritableFields> {
+ match self {
+ WorkspaceConfig::Root(root) => Some(&root.inheritable_fields),
+ WorkspaceConfig::Member { .. } => None,
+ }
+ }
+
+ /// Returns the path of the workspace root based on this `[workspace]` configuration.
+ ///
+ /// Returns `None` if the root is not explicitly known.
+ ///
+ /// * `self_path` is the path of the manifest where this `WorkspaceConfig` is located.
+ /// * `look_from` is the path where discovery started (usually the current
+ /// working directory), used for `workspace.exclude` checking.
+ fn get_ws_root(&self, self_path: &Path, look_from: &Path) -> Option<PathBuf> {
+ match self {
+ WorkspaceConfig::Root(ances_root_config) => {
+ debug!("find_root - found a root checking exclusion");
+ if !ances_root_config.is_excluded(look_from) {
+ debug!("find_root - found!");
+ Some(self_path.to_owned())
+ } else {
+ None
+ }
+ }
+ WorkspaceConfig::Member {
+ root: Some(path_to_root),
+ } => {
+ debug!("find_root - found pointer");
+ Some(read_root_pointer(self_path, path_to_root))
+ }
+ WorkspaceConfig::Member { .. } => None,
+ }
+ }
+}
+
+/// Intermediate configuration of a workspace root in a manifest.
+///
+/// Knows the Workspace Root path, as well as `members` and `exclude` lists of path patterns, which
+/// together tell if some path is recognized as a member by this root or not.
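+///
+/// For illustration (a sketch; the fields below mirror the manifest keys):
+/// these typically come from a root manifest with `members = ["crates/*"]`
+/// and `exclude = ["crates/experimental"]` under `[workspace]`.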
+#[derive(Debug, Clone)]
+pub struct WorkspaceRootConfig {
+ root_dir: PathBuf,
+ members: Option<Vec<String>>,
+ default_members: Option<Vec<String>>,
+ exclude: Vec<String>,
+ inheritable_fields: InheritableFields,
+ custom_metadata: Option<toml::Value>,
+}
+
+impl<'cfg> Workspace<'cfg> {
+ /// Creates a new workspace given the target manifest pointed to by
+ /// `manifest_path`.
+ ///
+ /// This function will construct the entire workspace by determining the
+ /// root and all member packages. It will then validate the workspace
+ /// before returning it, so `Ok` is only returned for valid workspaces.
+ pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult<Workspace<'cfg>> {
+ let mut ws = Workspace::new_default(manifest_path.to_path_buf(), config);
+ ws.target_dir = config.target_dir()?;
+
+ if manifest_path.is_relative() {
+ bail!(
+ "manifest_path:{:?} is not an absolute path. Please provide an absolute path.",
+ manifest_path
+ )
+ } else {
+ ws.root_manifest = ws.find_root(manifest_path)?;
+ }
+
+ ws.custom_metadata = ws
+ .load_workspace_config()?
+ .and_then(|cfg| cfg.custom_metadata);
+ ws.find_members()?;
+ ws.set_resolve_behavior();
+ ws.validate()?;
+ Ok(ws)
+ }
+
+ fn new_default(current_manifest: PathBuf, config: &'cfg Config) -> Workspace<'cfg> {
+ Workspace {
+ config,
+ current_manifest,
+ packages: Packages {
+ config,
+ packages: HashMap::new(),
+ },
+ root_manifest: None,
+ target_dir: None,
+ members: Vec::new(),
+ member_ids: HashSet::new(),
+ default_members: Vec::new(),
+ is_ephemeral: false,
+ require_optional_deps: true,
+ loaded_packages: RefCell::new(HashMap::new()),
+ ignore_lock: false,
+ resolve_behavior: ResolveBehavior::V1,
+ custom_metadata: None,
+ }
+ }
+
+ pub fn new_virtual(
+ root_path: PathBuf,
+ current_manifest: PathBuf,
+ manifest: VirtualManifest,
+ config: &'cfg Config,
+ ) -> CargoResult<Workspace<'cfg>> {
+ let mut ws = Workspace::new_default(current_manifest, config);
+ ws.root_manifest = Some(root_path.join("Cargo.toml"));
+ ws.target_dir = config.target_dir()?;
+ ws.packages
+ .packages
+ .insert(root_path, MaybePackage::Virtual(manifest));
+ ws.find_members()?;
+ ws.set_resolve_behavior();
+ // TODO: validation does not work because it walks up the directory
+ // tree looking for the root which is a fake file that doesn't exist.
+ Ok(ws)
+ }
+
+ /// Creates a "temporary workspace" from one package which only contains
+ /// that package.
+ ///
+ /// This constructor will not touch the filesystem and only creates an
+ /// in-memory workspace. That is, all configuration is ignored, it's just
+ /// intended for that one package.
+ ///
+ /// This is currently only used in niche situations like `cargo install` or
+ /// `cargo package`.
+ pub fn ephemeral(
+ package: Package,
+ config: &'cfg Config,
+ target_dir: Option<Filesystem>,
+ require_optional_deps: bool,
+ ) -> CargoResult<Workspace<'cfg>> {
+ let mut ws = Workspace::new_default(package.manifest_path().to_path_buf(), config);
+ ws.is_ephemeral = true;
+ ws.require_optional_deps = require_optional_deps;
+ let key = ws.current_manifest.parent().unwrap();
+ let id = package.package_id();
+ let package = MaybePackage::Package(package);
+ ws.packages.packages.insert(key.to_path_buf(), package);
+ ws.target_dir = if let Some(dir) = target_dir {
+ Some(dir)
+ } else {
+ ws.config.target_dir()?
+ };
+ ws.members.push(ws.current_manifest.clone());
+ ws.member_ids.insert(id);
+ ws.default_members.push(ws.current_manifest.clone());
+ ws.set_resolve_behavior();
+ Ok(ws)
+ }
+
+ fn set_resolve_behavior(&mut self) {
+ // - If resolver is specified in the workspace definition, use that.
+ // - If the root package specifies the resolver, use that.
+ // - If the root package specifies edition 2021, use v2.
+ // - Otherwise, use the default v1.
+ self.resolve_behavior = match self.root_maybe() {
+ MaybePackage::Package(p) => p
+ .manifest()
+ .resolve_behavior()
+ .unwrap_or_else(|| p.manifest().edition().default_resolve_behavior()),
+ MaybePackage::Virtual(vm) => vm.resolve_behavior().unwrap_or(ResolveBehavior::V1),
+ }
+ }
+
+ /// Returns the current package of this workspace.
+ ///
+ /// Note that this can return an error if the current manifest is
+ /// actually a "virtual Cargo.toml", in which case an error is returned
+ /// indicating that something else should be passed.
+ pub fn current(&self) -> CargoResult<&Package> {
+ let pkg = self.current_opt().ok_or_else(|| {
+ anyhow::format_err!(
+ "manifest path `{}` is a virtual manifest, but this \
+ command requires running against an actual package in \
+ this workspace",
+ self.current_manifest.display()
+ )
+ })?;
+ Ok(pkg)
+ }
+
+ pub fn current_mut(&mut self) -> CargoResult<&mut Package> {
+ let cm = self.current_manifest.clone();
+ let pkg = self.current_opt_mut().ok_or_else(|| {
+ anyhow::format_err!(
+ "manifest path `{}` is a virtual manifest, but this \
+ command requires running against an actual package in \
+ this workspace",
+ cm.display()
+ )
+ })?;
+ Ok(pkg)
+ }
+
+ pub fn current_opt(&self) -> Option<&Package> {
+ match *self.packages.get(&self.current_manifest) {
+ MaybePackage::Package(ref p) => Some(p),
+ MaybePackage::Virtual(..) => None,
+ }
+ }
+
+ pub fn current_opt_mut(&mut self) -> Option<&mut Package> {
+ match *self.packages.get_mut(&self.current_manifest) {
+ MaybePackage::Package(ref mut p) => Some(p),
+ MaybePackage::Virtual(..) => None,
+ }
+ }
+
+ pub fn is_virtual(&self) -> bool {
+ match *self.packages.get(&self.current_manifest) {
+ MaybePackage::Package(..) => false,
+ MaybePackage::Virtual(..) => true,
+ }
+ }
+
+ /// Returns the `Config` this workspace is associated with.
+ pub fn config(&self) -> &'cfg Config {
+ self.config
+ }
+
+ pub fn profiles(&self) -> Option<&TomlProfiles> {
+ match self.root_maybe() {
+ MaybePackage::Package(p) => p.manifest().profiles(),
+ MaybePackage::Virtual(vm) => vm.profiles(),
+ }
+ }
+
+ /// Returns the root path of this workspace.
+ ///
+ /// That is, this returns the path of the directory containing the
+ /// `Cargo.toml` which is the root of this workspace.
+ pub fn root(&self) -> &Path {
+ self.root_manifest().parent().unwrap()
+ }
+
+ /// Returns the path of the `Cargo.toml` which is the root of this
+ /// workspace.
+ pub fn root_manifest(&self) -> &Path {
+ self.root_manifest
+ .as_ref()
+ .unwrap_or(&self.current_manifest)
+ }
+
+ /// Returns the root Package or VirtualManifest.
+ pub fn root_maybe(&self) -> &MaybePackage {
+ self.packages.get(self.root_manifest())
+ }
+
+ pub fn target_dir(&self) -> Filesystem {
+ self.target_dir
+ .clone()
+ .unwrap_or_else(|| Filesystem::new(self.root().join("target")))
+ }
+
+ /// Returns the root `[replace]` section of this workspace.
+ ///
+ /// This may be from a virtual crate or an actual crate.
+ pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] {
+ match self.root_maybe() {
+ MaybePackage::Package(p) => p.manifest().replace(),
+ MaybePackage::Virtual(vm) => vm.replace(),
+ }
+ }
+
+ fn config_patch(&self) -> CargoResult<HashMap<Url, Vec<Dependency>>> {
+ let config_patch: Option<
+ BTreeMap<String, BTreeMap<String, TomlDependency<ConfigRelativePath>>>,
+ > = self.config.get("patch")?;
+
+ let source = SourceId::for_path(self.root())?;
+
+ let mut warnings = Vec::new();
+ let mut nested_paths = Vec::new();
+
+ let mut patch = HashMap::new();
+ for (url, deps) in config_patch.into_iter().flatten() {
+ let url = match &url[..] {
+ CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(),
+ url => self
+ .config
+ .get_registry_index(url)
+ .or_else(|_| url.into_url())
+ .with_context(|| {
+ format!("[patch] entry `{}` should be a URL or registry name", url)
+ })?,
+ };
+ patch.insert(
+ url,
+ deps.iter()
+ .map(|(name, dep)| {
+ dep.to_dependency_split(
+ name,
+ source,
+ &mut nested_paths,
+ self.config,
+ &mut warnings,
+ /* platform */ None,
+ // NOTE: Since we use ConfigRelativePath, this root isn't used as
+ // any relative paths are resolved before they'd be joined with root.
+ Path::new("unused-relative-path"),
+ self.unstable_features(),
+ /* kind */ None,
+ )
+ })
+ .collect::<CargoResult<Vec<_>>>()?,
+ );
+ }
+
+ for message in warnings {
+ self.config
+ .shell()
+ .warn(format!("[patch] in cargo config: {}", message))?
+ }
+
+ Ok(patch)
+ }
+
+ /// Returns the root `[patch]` section of this workspace.
+ ///
+ /// This may be from a virtual crate or an actual crate.
+ pub fn root_patch(&self) -> CargoResult<HashMap<Url, Vec<Dependency>>> {
+ let from_manifest = match self.root_maybe() {
+ MaybePackage::Package(p) => p.manifest().patch(),
+ MaybePackage::Virtual(vm) => vm.patch(),
+ };
+
+ let from_config = self.config_patch()?;
+ if from_config.is_empty() {
+ return Ok(from_manifest.clone());
+ }
+ if from_manifest.is_empty() {
+ return Ok(from_config);
+ }
+
+ // We could just chain from_manifest and from_config,
+ // but that's not quite right as it won't deal with overlaps.
+ let mut combined = from_config;
+ for (url, deps_from_manifest) in from_manifest {
+ if let Some(deps_from_config) = combined.get_mut(url) {
+ // We want from_config to take precedence for each patched name.
+ // NOTE: This is inefficient if the number of patches is large!
+ let mut from_manifest_pruned = deps_from_manifest.clone();
+ for dep_from_config in &mut *deps_from_config {
+ if let Some(i) = from_manifest_pruned.iter().position(|dep_from_manifest| {
+ // XXX: should this also take into account version numbers?
+ dep_from_config.name_in_toml() == dep_from_manifest.name_in_toml()
+ }) {
+ from_manifest_pruned.swap_remove(i);
+ }
+ }
+ // Whatever is left does not exist in manifest dependencies.
+ deps_from_config.extend(from_manifest_pruned);
+ } else {
+ combined.insert(url.clone(), deps_from_manifest.clone());
+ }
+ }
+ Ok(combined)
+ }
+
+ /// Returns an iterator over all packages in this workspace
+ pub fn members(&self) -> impl Iterator<Item = &Package> {
+ let packages = &self.packages;
+ self.members
+ .iter()
+ .filter_map(move |path| match packages.get(path) {
+ &MaybePackage::Package(ref p) => Some(p),
+ _ => None,
+ })
+ }
+
+ /// Returns a mutable iterator over all packages in this workspace
+ pub fn members_mut(&mut self) -> impl Iterator<Item = &mut Package> {
+ let packages = &mut self.packages.packages;
+ let members: HashSet<_> = self
+ .members
+ .iter()
+ .map(|path| path.parent().unwrap().to_owned())
+ .collect();
+
+ packages.iter_mut().filter_map(move |(path, package)| {
+ if members.contains(path) {
+ if let MaybePackage::Package(ref mut p) = package {
+ return Some(p);
+ }
+ }
+
+ None
+ })
+ }
+
+ /// Returns an iterator over default packages in this workspace
+ pub fn default_members<'a>(&'a self) -> impl Iterator<Item = &Package> {
+ let packages = &self.packages;
+ self.default_members
+ .iter()
+ .filter_map(move |path| match packages.get(path) {
+ &MaybePackage::Package(ref p) => Some(p),
+ _ => None,
+ })
+ }
+
+ /// Returns an iterator over default packages in this workspace
+ pub fn default_members_mut(&mut self) -> impl Iterator<Item = &mut Package> {
+ let packages = &mut self.packages.packages;
+ let members: HashSet<_> = self
+ .default_members
+ .iter()
+ .map(|path| path.parent().unwrap().to_owned())
+ .collect();
+
+ packages.iter_mut().filter_map(move |(path, package)| {
+ if members.contains(path) {
+ if let MaybePackage::Package(ref mut p) = package {
+ return Some(p);
+ }
+ }
+
+ None
+ })
+ }
+
+ /// Returns true if the package is a member of the workspace.
+ pub fn is_member(&self, pkg: &Package) -> bool {
+ self.member_ids.contains(&pkg.package_id())
+ }
+
+ pub fn is_ephemeral(&self) -> bool {
+ self.is_ephemeral
+ }
+
+ pub fn require_optional_deps(&self) -> bool {
+ self.require_optional_deps
+ }
+
+ pub fn set_require_optional_deps(
+ &mut self,
+ require_optional_deps: bool,
+ ) -> &mut Workspace<'cfg> {
+ self.require_optional_deps = require_optional_deps;
+ self
+ }
+
+ pub fn ignore_lock(&self) -> bool {
+ self.ignore_lock
+ }
+
+ pub fn set_ignore_lock(&mut self, ignore_lock: bool) -> &mut Workspace<'cfg> {
+ self.ignore_lock = ignore_lock;
+ self
+ }
+
+ pub fn custom_metadata(&self) -> Option<&toml::Value> {
+ self.custom_metadata.as_ref()
+ }
+
+ pub fn load_workspace_config(&mut self) -> CargoResult<Option<WorkspaceRootConfig>> {
+ // If we didn't find a root, it must mean there is no [workspace] section, and thus no
+ // metadata.
+ if let Some(root_path) = &self.root_manifest {
+ let root_package = self.packages.load(root_path)?;
+ match root_package.workspace_config() {
+ WorkspaceConfig::Root(ref root_config) => {
+ return Ok(Some(root_config.clone()));
+ }
+
+ _ => bail!(
+ "root of a workspace inferred but wasn't a root: {}",
+ root_path.display()
+ ),
+ }
+ }
+
+ Ok(None)
+ }
+
+ /// Finds the root of a workspace for the crate whose manifest is located
+ /// at `manifest_path`.
+ ///
+ /// This will parse the `Cargo.toml` at `manifest_path` and then interpret
+ /// the workspace configuration, optionally walking up the filesystem
+ /// looking for other workspace roots.
+ ///
+ /// Returns an error if `manifest_path` isn't actually a valid manifest or
+ /// if some other transient error happens.
+ fn find_root(&mut self, manifest_path: &Path) -> CargoResult<Option<PathBuf>> {
+ let current = self.packages.load(manifest_path)?;
+ match current
+ .workspace_config()
+ .get_ws_root(manifest_path, manifest_path)
+ {
+ Some(root_path) => {
+ debug!("find_root - is root {}", manifest_path.display());
+ Ok(Some(root_path))
+ }
+ None => find_workspace_root_with_loader(manifest_path, self.config, |self_path| {
+ Ok(self
+ .packages
+ .load(self_path)?
+ .workspace_config()
+ .get_ws_root(self_path, manifest_path))
+ }),
+ }
+ }
+
+ /// After the root of a workspace has been located, probes for all members
+ /// of a workspace.
+ ///
+ /// If the `workspace.members` configuration is present, then this just
+ /// verifies that those are all valid packages to point to. Otherwise, this
+ /// will transitively follow all `path` dependencies looking for members of
+ /// the workspace.
+ fn find_members(&mut self) -> CargoResult<()> {
+ let workspace_config = match self.load_workspace_config()? {
+ Some(workspace_config) => workspace_config,
+ None => {
+ debug!("find_members - only me as a member");
+ self.members.push(self.current_manifest.clone());
+ self.default_members.push(self.current_manifest.clone());
+ if let Ok(pkg) = self.current() {
+ let id = pkg.package_id();
+ self.member_ids.insert(id);
+ }
+ return Ok(());
+ }
+ };
+
+ // self.root_manifest must be Some to have retrieved workspace_config
+ let root_manifest_path = self.root_manifest.clone().unwrap();
+
+ let members_paths =
+ workspace_config.members_paths(workspace_config.members.as_ref().unwrap_or(&vec![]))?;
+ let default_members_paths = if root_manifest_path == self.current_manifest {
+ if let Some(ref default) = workspace_config.default_members {
+ Some(workspace_config.members_paths(default)?)
+ } else {
+ None
+ }
+ } else {
+ None
+ };
+
+ for path in &members_paths {
+ self.find_path_deps(&path.join("Cargo.toml"), &root_manifest_path, false)
+ .with_context(|| {
+ format!(
+ "failed to load manifest for workspace member `{}`",
+ path.display()
+ )
+ })?;
+ }
+
+ self.find_path_deps(&root_manifest_path, &root_manifest_path, false)?;
+
+ if let Some(default) = default_members_paths {
+ for path in default {
+ let normalized_path = paths::normalize_path(&path);
+ let manifest_path = normalized_path.join("Cargo.toml");
+ if !self.members.contains(&manifest_path) {
+ // default-members are allowed to be excluded, but they
+ // still must be referred to by the original (unfiltered)
+ // members list. Note that we aren't testing against the
+ // manifest path, both because `members_paths` doesn't
+ // include `/Cargo.toml`, and because excluded paths may not
+ // be crates.
+ let exclude = members_paths.contains(&normalized_path)
+ && workspace_config.is_excluded(&normalized_path);
+ if exclude {
+ continue;
+ }
+ bail!(
+ "package `{}` is listed in workspace’s default-members \
+ but is not a member.",
+ path.display()
+ )
+ }
+ self.default_members.push(manifest_path)
+ }
+ } else if self.is_virtual() {
+ self.default_members = self.members.clone()
+ } else {
+ self.default_members.push(self.current_manifest.clone())
+ }
+
+ Ok(())
+ }
+
+ fn find_path_deps(
+ &mut self,
+ manifest_path: &Path,
+ root_manifest: &Path,
+ is_path_dep: bool,
+ ) -> CargoResult<()> {
+ let manifest_path = paths::normalize_path(manifest_path);
+ if self.members.contains(&manifest_path) {
+ return Ok(());
+ }
+ if is_path_dep
+ && !manifest_path.parent().unwrap().starts_with(self.root())
+ && self.find_root(&manifest_path)? != self.root_manifest
+ {
+ // If `manifest_path` is a path dependency outside of the workspace,
+ // don't add it, or any of its dependencies, as members.
+ return Ok(());
+ }
+
+ if let WorkspaceConfig::Root(ref root_config) =
+ *self.packages.load(root_manifest)?.workspace_config()
+ {
+ if root_config.is_excluded(&manifest_path) {
+ return Ok(());
+ }
+ }
+
+ debug!("find_members - {}", manifest_path.display());
+ self.members.push(manifest_path.clone());
+
+ let candidates = {
+ let pkg = match *self.packages.load(&manifest_path)? {
+ MaybePackage::Package(ref p) => p,
+ MaybePackage::Virtual(_) => return Ok(()),
+ };
+ self.member_ids.insert(pkg.package_id());
+ pkg.dependencies()
+ .iter()
+ .map(|d| (d.source_id(), d.package_name()))
+ .filter(|(s, _)| s.is_path())
+ .filter_map(|(s, n)| s.url().to_file_path().ok().map(|p| (p, n)))
+ .map(|(p, n)| (p.join("Cargo.toml"), n))
+ .collect::<Vec<_>>()
+ };
+ for (path, name) in candidates {
+ self.find_path_deps(&path, root_manifest, true)
+ .with_context(|| format!("failed to load manifest for dependency `{}`", name))
+ .map_err(|err| ManifestError::new(err, manifest_path.clone()))?;
+ }
+ Ok(())
+ }
+
+ /// Returns the unstable nightly-only features enabled via `cargo-features` in the manifest.
+ pub fn unstable_features(&self) -> &Features {
+ match self.root_maybe() {
+ MaybePackage::Package(p) => p.manifest().unstable_features(),
+ MaybePackage::Virtual(vm) => vm.unstable_features(),
+ }
+ }
+
+ pub fn resolve_behavior(&self) -> ResolveBehavior {
+ self.resolve_behavior
+ }
+
+ /// Returns `true` if this workspace uses the new CLI features behavior.
+ ///
+ /// The old behavior only allowed choosing the features from the package
+ /// in the current directory, regardless of which packages were chosen
+ /// with the -p flags. The new behavior allows selecting features from the
+ /// packages chosen on the command line (with -p or --workspace flags),
+ /// ignoring whatever is in the current directory.
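+ ///
+ /// For illustration (a sketch restating the above): under the new
+ /// behavior, `cargo check -p foo --features some-feature-of-foo` resolves
+ /// the feature against `foo` no matter which member directory the command
+ /// is run from; under the old behavior the feature had to exist in the
+ /// package of the current directory.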
+ pub fn allows_new_cli_feature_behavior(&self) -> bool {
+ self.is_virtual()
+ || match self.resolve_behavior() {
+ ResolveBehavior::V1 => false,
+ ResolveBehavior::V2 => true,
+ }
+ }
+
+ /// Validates a workspace, ensuring that a number of invariants are upheld:
+ ///
+ /// 1. A workspace only has one root.
+ /// 2. All workspace members agree on this one root as the root.
+ /// 3. The current crate is a member of this workspace.
+ fn validate(&mut self) -> CargoResult<()> {
+ // The rest of the checks require a VirtualManifest or multiple members.
+ if self.root_manifest.is_none() {
+ return Ok(());
+ }
+
+ self.validate_unique_names()?;
+ self.validate_workspace_roots()?;
+ self.validate_members()?;
+ self.error_if_manifest_not_in_members()?;
+ self.validate_manifest()
+ }
+
+ fn validate_unique_names(&self) -> CargoResult<()> {
+ let mut names = BTreeMap::new();
+ for member in self.members.iter() {
+ let package = self.packages.get(member);
+ let name = match *package {
+ MaybePackage::Package(ref p) => p.name(),
+ MaybePackage::Virtual(_) => continue,
+ };
+ if let Some(prev) = names.insert(name, member) {
+ bail!(
+ "two packages named `{}` in this workspace:\n\
+ - {}\n\
+ - {}",
+ name,
+ prev.display(),
+ member.display()
+ );
+ }
+ }
+ Ok(())
+ }
+
+ fn validate_workspace_roots(&self) -> CargoResult<()> {
+ let roots: Vec<PathBuf> = self
+ .members
+ .iter()
+ .filter(|&member| {
+ let config = self.packages.get(member).workspace_config();
+ matches!(config, WorkspaceConfig::Root(_))
+ })
+ .map(|member| member.parent().unwrap().to_path_buf())
+ .collect();
+ match roots.len() {
+ 1 => Ok(()),
+ 0 => bail!(
+ "`package.workspace` configuration points to a crate \
+ which is not configured with [workspace]: \n\
+ configuration at: {}\n\
+ points to: {}",
+ self.current_manifest.display(),
+ self.root_manifest.as_ref().unwrap().display()
+ ),
+ _ => {
+ bail!(
+ "multiple workspace roots found in the same workspace:\n{}",
+ roots
+ .iter()
+ .map(|r| format!(" {}", r.display()))
+ .collect::<Vec<_>>()
+ .join("\n")
+ );
+ }
+ }
+ }
+
+ fn validate_members(&mut self) -> CargoResult<()> {
+ for member in self.members.clone() {
+ let root = self.find_root(&member)?;
+ if root == self.root_manifest {
+ continue;
+ }
+
+ match root {
+ Some(root) => {
+ bail!(
+ "package `{}` is a member of the wrong workspace\n\
+ expected: {}\n\
+ actual: {}",
+ member.display(),
+ self.root_manifest.as_ref().unwrap().display(),
+ root.display()
+ );
+ }
+ None => {
+ bail!(
+ "workspace member `{}` is not hierarchically below \
+ the workspace root `{}`",
+ member.display(),
+ self.root_manifest.as_ref().unwrap().display()
+ );
+ }
+ }
+ }
+ Ok(())
+ }
+
+ fn error_if_manifest_not_in_members(&mut self) -> CargoResult<()> {
+ if self.members.contains(&self.current_manifest) {
+ return Ok(());
+ }
+
+ let root = self.root_manifest.as_ref().unwrap();
+ let root_dir = root.parent().unwrap();
+ let current_dir = self.current_manifest.parent().unwrap();
+ let root_pkg = self.packages.get(root);
+
+ // FIXME: Make this more generic by using a relative path resolver between member and root.
+ let members_msg = match current_dir.strip_prefix(root_dir) {
+ Ok(rel) => format!(
+ "this may be fixable by adding `{}` to the \
+ `workspace.members` array of the manifest \
+ located at: {}",
+ rel.display(),
+ root.display()
+ ),
+ Err(_) => format!(
+ "this may be fixable by adding a member to \
+ the `workspace.members` array of the \
+ manifest located at: {}",
+ root.display()
+ ),
+ };
+ let extra = match *root_pkg {
+ MaybePackage::Virtual(_) => members_msg,
+ MaybePackage::Package(ref p) => {
+ let has_members_list = match *p.manifest().workspace_config() {
+ WorkspaceConfig::Root(ref root_config) => root_config.has_members_list(),
+ WorkspaceConfig::Member { .. } => unreachable!(),
+ };
+ if !has_members_list {
+ format!(
+ "this may be fixable by ensuring that this \
+ crate is depended on by the workspace \
+ root: {}",
+ root.display()
+ )
+ } else {
+ members_msg
+ }
+ }
+ };
+ bail!(
+ "current package believes it's in a workspace when it's not:\n\
+ current: {}\n\
+ workspace: {}\n\n{}\n\
+ Alternatively, to keep it out of the workspace, add the package \
+ to the `workspace.exclude` array, or add an empty `[workspace]` \
+ table to the package's manifest.",
+ self.current_manifest.display(),
+ root.display(),
+ extra
+ );
+ }
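+
+ // A minimal illustration of the two opt-out remedies mentioned above, assuming a
+ // hypothetical layout where the stray package lives at `tools/standalone`:
+ //
+ //     # workspace root Cargo.toml
+ //     [workspace]
+ //     members = ["crates/*"]
+ //     exclude = ["tools/standalone"]
+ //
+ //     # or, in tools/standalone/Cargo.toml, opt out with an empty table:
+ //     [workspace]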
+
+ fn validate_manifest(&mut self) -> CargoResult<()> {
+ if let Some(ref root_manifest) = self.root_manifest {
+ for pkg in self
+ .members()
+ .filter(|p| p.manifest_path() != root_manifest)
+ {
+ let manifest = pkg.manifest();
+ let emit_warning = |what| -> CargoResult<()> {
+ let msg = format!(
+ "{} for the non root package will be ignored, \
+ specify {} at the workspace root:\n\
+ package: {}\n\
+ workspace: {}",
+ what,
+ what,
+ pkg.manifest_path().display(),
+ root_manifest.display(),
+ );
+ self.config.shell().warn(&msg)
+ };
+ if manifest.original().has_profiles() {
+ emit_warning("profiles")?;
+ }
+ if !manifest.replace().is_empty() {
+ emit_warning("replace")?;
+ }
+ if !manifest.patch().is_empty() {
+ emit_warning("patch")?;
+ }
+ if let Some(behavior) = manifest.resolve_behavior() {
+ if behavior != self.resolve_behavior {
+ // Only warn if they don't match.
+ emit_warning("resolver")?;
+ }
+ }
+ }
+ }
+ Ok(())
+ }
+
+ pub fn load(&self, manifest_path: &Path) -> CargoResult<Package> {
+ match self.packages.maybe_get(manifest_path) {
+ Some(&MaybePackage::Package(ref p)) => return Ok(p.clone()),
+ Some(&MaybePackage::Virtual(_)) => bail!("cannot load workspace root"),
+ None => {}
+ }
+
+ let mut loaded = self.loaded_packages.borrow_mut();
+ if let Some(p) = loaded.get(manifest_path).cloned() {
+ return Ok(p);
+ }
+ let source_id = SourceId::for_path(manifest_path.parent().unwrap())?;
+ let (package, _nested_paths) = ops::read_package(manifest_path, source_id, self.config)?;
+ loaded.insert(manifest_path.to_path_buf(), package.clone());
+ Ok(package)
+ }
+
+ /// Preload the provided registry with already loaded packages.
+ ///
+ /// A workspace may load packages during construction/parsing/early phases
+ /// for various operations, and this preload step avoids doubly-loading and
+ /// parsing crates on the filesystem by inserting them all into the registry
+ /// with their in-memory formats.
+ pub fn preload(&self, registry: &mut PackageRegistry<'cfg>) {
+ // These can get weird as this generally represents a workspace during
+ // `cargo install`. Things like git repositories will actually have a
+ // `PathSource` with multiple entries in it, so the logic below is
+ // mostly just an optimization for normal `cargo build` in workspaces
+ // during development.
+ if self.is_ephemeral {
+ return;
+ }
+
+ for pkg in self.packages.packages.values() {
+ let pkg = match *pkg {
+ MaybePackage::Package(ref p) => p.clone(),
+ MaybePackage::Virtual(_) => continue,
+ };
+ let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), self.config);
+ src.preload_with(pkg);
+ registry.add_preloaded(Box::new(src));
+ }
+ }
+
+ pub fn emit_warnings(&self) -> CargoResult<()> {
+ for (path, maybe_pkg) in &self.packages.packages {
+ let warnings = match maybe_pkg {
+ MaybePackage::Package(pkg) => pkg.manifest().warnings().warnings(),
+ MaybePackage::Virtual(vm) => vm.warnings().warnings(),
+ };
+ let path = path.join("Cargo.toml");
+ for warning in warnings {
+ if warning.is_critical {
+ let err = anyhow::format_err!("{}", warning.message);
+ let cx =
+ anyhow::format_err!("failed to parse manifest at `{}`", path.display());
+ return Err(err.context(cx));
+ } else {
+ let msg = if self.root_manifest.is_none() {
+ warning.message.to_string()
+ } else {
+ // In a workspace, it can be confusing where a warning
+ // originated, so include the path.
+ format!("{}: {}", path.display(), warning.message)
+ };
+ self.config.shell().warn(msg)?
+ }
+ }
+ }
+ Ok(())
+ }
+
+ pub fn set_target_dir(&mut self, target_dir: Filesystem) {
+ self.target_dir = Some(target_dir);
+ }
+
+ /// Returns a Vec of `(&Package, RequestedFeatures)` tuples that
+ /// represent the workspace members that were requested on the command-line.
+ ///
+ /// `specs` may be empty, which indicates it should return all workspace
+ /// members. In this case, `cli_features.all_features` must be
+ /// `true`. This is used for generating `Cargo.lock`, which must include
+ /// all members with all features enabled.
+ pub fn members_with_features(
+ &self,
+ specs: &[PackageIdSpec],
+ cli_features: &CliFeatures,
+ ) -> CargoResult<Vec<(&Package, CliFeatures)>> {
+ assert!(
+ !specs.is_empty() || cli_features.all_features,
+ "no specs requires all_features"
+ );
+ if specs.is_empty() {
+ // When resolving the entire workspace, resolve each member with
+ // all features enabled.
+ return Ok(self
+ .members()
+ .map(|m| (m, CliFeatures::new_all(true)))
+ .collect());
+ }
+ if self.allows_new_cli_feature_behavior() {
+ self.members_with_features_new(specs, cli_features)
+ } else {
+ Ok(self.members_with_features_old(specs, cli_features))
+ }
+ }
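+
+ // A sketch of the calling convention, assuming hypothetical `ws`, `specs`, and
+ // `cli_features` bindings:
+ //
+ //     // Lockfile generation: empty specs, so every member gets all features.
+ //     let all = ws.members_with_features(&[], &CliFeatures::new_all(true))?;
+ //
+ //     // `-p`-style selection: only matching members, with the CLI features.
+ //     let selected = ws.members_with_features(&specs, &cli_features)?;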
+
+ /// Returns the requested features for the given member.
+ /// This filters out any named features that the member does not have.
+ fn collect_matching_features(
+ member: &Package,
+ cli_features: &CliFeatures,
+ found_features: &mut BTreeSet<FeatureValue>,
+ ) -> CliFeatures {
+ if cli_features.features.is_empty() {
+ return cli_features.clone();
+ }
+
+ // Only include features this member defines.
+ let summary = member.summary();
+
+ // Features defined in the manifest
+ let summary_features = summary.features();
+
+ // Dependency name -> dependency
+ let dependencies: BTreeMap<InternedString, &Dependency> = summary
+ .dependencies()
+ .iter()
+ .map(|dep| (dep.name_in_toml(), dep))
+ .collect();
+
+ // Features that enable optional dependencies
+ let optional_dependency_names: BTreeSet<_> = dependencies
+ .iter()
+ .filter(|(_, dep)| dep.is_optional())
+ .map(|(name, _)| name)
+ .copied()
+ .collect();
+
+ let mut features = BTreeSet::new();
+
+ // Checks if a member contains the given feature.
+ let summary_or_opt_dependency_feature = |feature: &InternedString| -> bool {
+ summary_features.contains_key(feature) || optional_dependency_names.contains(feature)
+ };
+
+ for feature in cli_features.features.iter() {
+ match feature {
+ FeatureValue::Feature(f) => {
+ if summary_or_opt_dependency_feature(f) {
+ // feature exists in this member.
+ features.insert(feature.clone());
+ found_features.insert(feature.clone());
+ }
+ }
+ // This should be enforced by CliFeatures.
+ FeatureValue::Dep { .. } => panic!("unexpected dep: syntax {}", feature),
+ FeatureValue::DepFeature {
+ dep_name,
+ dep_feature,
+ weak: _,
+ } => {
+ if dependencies.contains_key(dep_name) {
+ // pkg/feat for a dependency.
+ // Will rely on the dependency resolver to validate `dep_feature`.
+ features.insert(feature.clone());
+ found_features.insert(feature.clone());
+ } else if *dep_name == member.name()
+ && summary_or_opt_dependency_feature(dep_feature)
+ {
+ // member/feat where "feat" is a feature in member.
+ //
+ // `weak` can be ignored here, because the member
+ // either is or isn't being built.
+ features.insert(FeatureValue::Feature(*dep_feature));
+ found_features.insert(feature.clone());
+ }
+ }
+ }
+ }
+ CliFeatures {
+ features: Rc::new(features),
+ all_features: cli_features.all_features,
+ uses_default_features: cli_features.uses_default_features,
+ }
+ }
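+
+ // A worked example of the filtering above, assuming a hypothetical member that
+ // defines a `serde` feature and an optional `regex` dependency: requesting
+ // `--features serde,regex,sedre` keeps `{serde, regex}` for that member and records
+ // them in `found_features`, while the misspelled `sedre` is left for
+ // `report_unknown_features_error` to diagnose.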
+
+ fn report_unknown_features_error(
+ &self,
+ specs: &[PackageIdSpec],
+ cli_features: &CliFeatures,
+ found_features: &BTreeSet<FeatureValue>,
+ ) -> CargoResult<()> {
+ // Keeps track of which features were contained in summary of `member` to suggest similar features in errors
+ let mut summary_features: Vec<InternedString> = Default::default();
+
+ // Keeps track of `member` dependencies (`dep/feature`) and their feature names to suggest similar features in errors
+ let mut dependencies_features: BTreeMap<InternedString, &[InternedString]> =
+ Default::default();
+
+ // Keeps track of `member` optional dependency names (which can be enabled with a feature) to suggest similar features in errors
+ let mut optional_dependency_names: Vec<InternedString> = Default::default();
+
+ // Keeps track of which features were contained in summary of `member` to suggest similar features in errors
+ let mut summary_features_per_member: BTreeMap<&Package, BTreeSet<InternedString>> =
+ Default::default();
+
+ // Keeps track of `member` optional dependencies (which can be enabled with a feature) to suggest similar features in errors
+ let mut optional_dependency_names_per_member: BTreeMap<&Package, BTreeSet<InternedString>> =
+ Default::default();
+
+ for member in self
+ .members()
+ .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id())))
+ {
+ // Only include features this member defines.
+ let summary = member.summary();
+
+ // Features defined in the manifest
+ summary_features.extend(summary.features().keys());
+ summary_features_per_member
+ .insert(member, summary.features().keys().copied().collect());
+
+ // Dependency name -> dependency
+ let dependencies: BTreeMap<InternedString, &Dependency> = summary
+ .dependencies()
+ .iter()
+ .map(|dep| (dep.name_in_toml(), dep))
+ .collect();
+
+ dependencies_features.extend(
+ dependencies
+ .iter()
+ .map(|(name, dep)| (*name, dep.features())),
+ );
+
+ // Features that enable optional dependencies
+ let optional_dependency_names_raw: BTreeSet<_> = dependencies
+ .iter()
+ .filter(|(_, dep)| dep.is_optional())
+ .map(|(name, _)| name)
+ .copied()
+ .collect();
+
+ optional_dependency_names.extend(optional_dependency_names_raw.iter());
+ optional_dependency_names_per_member.insert(member, optional_dependency_names_raw);
+ }
+
+ let levenshtein_test =
+ |a: InternedString, b: InternedString| lev_distance(a.as_str(), b.as_str()) < 4;
+
+ let suggestions: Vec<_> = cli_features
+ .features
+ .difference(found_features)
+ .map(|feature| match feature {
+ // Simple feature, check if any of the optional dependency features or member features are close enough
+ FeatureValue::Feature(typo) => {
+ // Finds member features which are similar to the requested feature.
+ let summary_features = summary_features
+ .iter()
+ .filter(move |feature| levenshtein_test(**feature, *typo));
+
+ // Finds optional dependencies whose name is similar to the feature
+ let optional_dependency_features = optional_dependency_names
+ .iter()
+ .filter(move |feature| levenshtein_test(**feature, *typo));
+
+ summary_features
+ .chain(optional_dependency_features)
+ .map(|s| s.to_string())
+ .collect::<Vec<_>>()
+ }
+ FeatureValue::Dep { .. } => panic!("unexpected dep: syntax {}", feature),
+ FeatureValue::DepFeature {
+ dep_name,
+ dep_feature,
+ weak: _,
+ } => {
+ // Finds the set of `pkg/feat` entries that are very similar to the current `pkg/feat`.
+ let pkg_feat_similar = dependencies_features
+ .iter()
+ .filter(|(name, _)| levenshtein_test(**name, *dep_name))
+ .map(|(name, features)| {
+ (
+ name,
+ features
+ .iter()
+ .filter(|feature| levenshtein_test(**feature, *dep_feature))
+ .collect::<Vec<_>>(),
+ )
+ })
+ .map(|(name, features)| {
+ features
+ .into_iter()
+ .map(move |feature| format!("{}/{}", name, feature))
+ })
+ .flatten();
+
+ // Finds the set of `member/optional_dep` features whose name is similar to the current `pkg/feat`.
+ let optional_dependency_features = optional_dependency_names_per_member
+ .iter()
+ .filter(|(package, _)| levenshtein_test(package.name(), *dep_name))
+ .map(|(package, optional_dependencies)| {
+ optional_dependencies
+ .into_iter()
+ .filter(|optional_dependency| {
+ levenshtein_test(**optional_dependency, *dep_name)
+ })
+ .map(move |optional_dependency| {
+ format!("{}/{}", package.name(), optional_dependency)
+ })
+ })
+ .flatten();
+
+ // Finds the set of `member/feat` features whose name is similar to the current `pkg/feat`.
+ let summary_features = summary_features_per_member
+ .iter()
+ .filter(|(package, _)| levenshtein_test(package.name(), *dep_name))
+ .map(|(package, summary_features)| {
+ summary_features
+ .into_iter()
+ .filter(|summary_feature| {
+ levenshtein_test(**summary_feature, *dep_feature)
+ })
+ .map(move |summary_feature| {
+ format!("{}/{}", package.name(), summary_feature)
+ })
+ })
+ .flatten();
+
+ pkg_feat_similar
+ .chain(optional_dependency_features)
+ .chain(summary_features)
+ .collect::<Vec<_>>()
+ }
+ })
+ .map(|v| v.into_iter())
+ .flatten()
+ .unique()
+ .filter(|element| {
+ let feature = FeatureValue::new(InternedString::new(element));
+ !cli_features.features.contains(&feature) && !found_features.contains(&feature)
+ })
+ .sorted()
+ .take(5)
+ .collect();
+
+ let unknown: Vec<_> = cli_features
+ .features
+ .difference(found_features)
+ .map(|feature| feature.to_string())
+ .sorted()
+ .collect();
+
+ if suggestions.is_empty() {
+ bail!(
+ "none of the selected packages contains these features: {}",
+ unknown.join(", ")
+ );
+ } else {
+ bail!(
+ "none of the selected packages contains these features: {}, did you mean: {}?",
+ unknown.join(", "),
+ suggestions.join(", ")
+ );
+ }
+ }
+
+ /// New command-line feature selection behavior with resolver = "2" or the
+ /// root of a virtual workspace. See `allows_new_cli_feature_behavior`.
+ fn members_with_features_new(
+ &self,
+ specs: &[PackageIdSpec],
+ cli_features: &CliFeatures,
+ ) -> CargoResult<Vec<(&Package, CliFeatures)>> {
+ // Keeps track of which features matched any member, to produce an error
+ // if any of them did not match anywhere.
+ let mut found_features = Default::default();
+
+ let members: Vec<(&Package, CliFeatures)> = self
+ .members()
+ .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id())))
+ .map(|m| {
+ (
+ m,
+ Workspace::collect_matching_features(m, cli_features, &mut found_features),
+ )
+ })
+ .collect();
+
+ if members.is_empty() {
+ // `cargo build -p foo`, where `foo` is not a member.
+ // Do not allow any command-line flags (defaults only).
+ if !(cli_features.features.is_empty()
+ && !cli_features.all_features
+ && cli_features.uses_default_features)
+ {
+ bail!("cannot specify features for packages outside of workspace");
+ }
+ // Add all members from the workspace so we can ensure `-p nonmember`
+ // is in the resolve graph.
+ return Ok(self
+ .members()
+ .map(|m| (m, CliFeatures::new_all(false)))
+ .collect());
+ }
+ if *cli_features.features != found_features {
+ self.report_unknown_features_error(specs, cli_features, &found_features)?;
+ }
+ Ok(members)
+ }
+
+ /// This is the "old" behavior for command-line feature selection.
+ /// See `allows_new_cli_feature_behavior`.
+ fn members_with_features_old(
+ &self,
+ specs: &[PackageIdSpec],
+ cli_features: &CliFeatures,
+ ) -> Vec<(&Package, CliFeatures)> {
+ // Split off any features with the syntax `member-name/feature-name` into a map
+ // so that those features can be applied directly to those workspace-members.
+ let mut member_specific_features: HashMap<InternedString, BTreeSet<FeatureValue>> =
+ HashMap::new();
+ // Features for the member in the current directory.
+ let mut cwd_features = BTreeSet::new();
+ for feature in cli_features.features.iter() {
+ match feature {
+ FeatureValue::Feature(_) => {
+ cwd_features.insert(feature.clone());
+ }
+ // This should be enforced by CliFeatures.
+ FeatureValue::Dep { .. } => panic!("unexpected dep: syntax {}", feature),
+ FeatureValue::DepFeature {
+ dep_name,
+ dep_feature,
+ weak: _,
+ } => {
+ // I think weak can be ignored here.
+ // * With `--features member?/feat -p member`, the ? doesn't
+ // really mean anything (either the member is built or it isn't).
+ // * With `--features nonmember?/feat`, cwd_features will
+ // handle processing it correctly.
+ let is_member = self.members().any(|member| {
+ // Check if `dep_name` is a member of the workspace, but isn't associated with the current package.
+ self.current_opt() != Some(member) && member.name() == *dep_name
+ });
+ if is_member && specs.iter().any(|spec| spec.name() == *dep_name) {
+ member_specific_features
+ .entry(*dep_name)
+ .or_default()
+ .insert(FeatureValue::Feature(*dep_feature));
+ } else {
+ cwd_features.insert(feature.clone());
+ }
+ }
+ }
+ }
+
+ let ms: Vec<_> = self
+ .members()
+ .filter_map(|member| {
+ let member_id = member.package_id();
+ match self.current_opt() {
+ // The features passed on the command-line only apply to
+ // the "current" package (determined by the cwd).
+ Some(current) if member_id == current.package_id() => {
+ let feats = CliFeatures {
+ features: Rc::new(cwd_features.clone()),
+ all_features: cli_features.all_features,
+ uses_default_features: cli_features.uses_default_features,
+ };
+ Some((member, feats))
+ }
+ _ => {
+ // Ignore members that are not enabled on the command-line.
+ if specs.iter().any(|spec| spec.matches(member_id)) {
+ // -p for a workspace member that is not the "current"
+ // one.
+ //
+ // The odd behavior here is due to backwards
+ // compatibility. `--features` and
+ // `--no-default-features` used to only apply to the
+ // "current" package. As an extension, this allows
+ // member-name/feature-name to set member-specific
+ // features, which should be backwards-compatible.
+ let feats = CliFeatures {
+ features: Rc::new(
+ member_specific_features
+ .remove(member.name().as_str())
+ .unwrap_or_default(),
+ ),
+ uses_default_features: true,
+ all_features: cli_features.all_features,
+ };
+ Some((member, feats))
+ } else {
+ // This member was not requested on the command-line, skip.
+ None
+ }
+ }
+ }
+ })
+ .collect();
+
+ // If any member-specific features were not removed while iterating over members,
+ // some features would be ignored.
+ assert!(member_specific_features.is_empty());
+
+ ms
+ }
+
+ /// Returns true if `unit` should depend on the output of Docscrape units.
+ pub fn unit_needs_doc_scrape(&self, unit: &Unit) -> bool {
+ // We do not add scraped units for Host units, as they're either build scripts
+ // (not documented) or proc macros (have no scrape-able exports). Additionally,
+ // naively passing a proc macro's unit_for to new_unit_dep will currently cause
+ // Cargo to panic, see issue #10545.
+ self.is_member(&unit.pkg) && !(unit.target.for_host() || unit.pkg.proc_macro())
+ }
+}
+
+impl<'cfg> Packages<'cfg> {
+ fn get(&self, manifest_path: &Path) -> &MaybePackage {
+ self.maybe_get(manifest_path).unwrap()
+ }
+
+ fn get_mut(&mut self, manifest_path: &Path) -> &mut MaybePackage {
+ self.maybe_get_mut(manifest_path).unwrap()
+ }
+
+ fn maybe_get(&self, manifest_path: &Path) -> Option<&MaybePackage> {
+ self.packages.get(manifest_path.parent().unwrap())
+ }
+
+ fn maybe_get_mut(&mut self, manifest_path: &Path) -> Option<&mut MaybePackage> {
+ self.packages.get_mut(manifest_path.parent().unwrap())
+ }
+
+ fn load(&mut self, manifest_path: &Path) -> CargoResult<&MaybePackage> {
+ let key = manifest_path.parent().unwrap();
+ match self.packages.entry(key.to_path_buf()) {
+ Entry::Occupied(e) => Ok(e.into_mut()),
+ Entry::Vacant(v) => {
+ let source_id = SourceId::for_path(key)?;
+ let (manifest, _nested_paths) =
+ read_manifest(manifest_path, source_id, self.config)?;
+ Ok(v.insert(match manifest {
+ EitherManifest::Real(manifest) => {
+ MaybePackage::Package(Package::new(manifest, manifest_path))
+ }
+ EitherManifest::Virtual(vm) => MaybePackage::Virtual(vm),
+ }))
+ }
+ }
+ }
+}
+
+impl MaybePackage {
+ fn workspace_config(&self) -> &WorkspaceConfig {
+ match *self {
+ MaybePackage::Package(ref p) => p.manifest().workspace_config(),
+ MaybePackage::Virtual(ref vm) => vm.workspace_config(),
+ }
+ }
+}
+
+impl WorkspaceRootConfig {
+ /// Creates a new Intermediate Workspace Root configuration.
+ pub fn new(
+ root_dir: &Path,
+ members: &Option<Vec<String>>,
+ default_members: &Option<Vec<String>>,
+ exclude: &Option<Vec<String>>,
+ inheritable: &Option<InheritableFields>,
+ custom_metadata: &Option<toml::Value>,
+ ) -> WorkspaceRootConfig {
+ WorkspaceRootConfig {
+ root_dir: root_dir.to_path_buf(),
+ members: members.clone(),
+ default_members: default_members.clone(),
+ exclude: exclude.clone().unwrap_or_default(),
+ inheritable_fields: inheritable.clone().unwrap_or_default(),
+ custom_metadata: custom_metadata.clone(),
+ }
+ }
+ /// Checks the path against the `excluded` list.
+ ///
+ /// This method does **not** consider the `members` list.
+ fn is_excluded(&self, manifest_path: &Path) -> bool {
+ let excluded = self
+ .exclude
+ .iter()
+ .any(|ex| manifest_path.starts_with(self.root_dir.join(ex)));
+
+ let explicit_member = match self.members {
+ Some(ref members) => members
+ .iter()
+ .any(|mem| manifest_path.starts_with(self.root_dir.join(mem))),
+ None => false,
+ };
+
+ !explicit_member && excluded
+ }
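+
+ // An illustration of the precedence above, assuming hypothetical entries
+ // `members = ["crates/foo"]` and `exclude = ["crates"]`: `crates/foo/Cargo.toml`
+ // is not excluded because the explicit member entry wins, while
+ // `crates/bar/Cargo.toml` is excluded.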
+
+ fn has_members_list(&self) -> bool {
+ self.members.is_some()
+ }
+
+ fn members_paths(&self, globs: &[String]) -> CargoResult<Vec<PathBuf>> {
+ let mut expanded_list = Vec::new();
+
+ for glob in globs {
+ let pathbuf = self.root_dir.join(glob);
+ let expanded_paths = Self::expand_member_path(&pathbuf)?;
+
+ // If glob does not find any valid paths, then put the original
+ // path in the expanded list to maintain backwards compatibility.
+ if expanded_paths.is_empty() {
+ expanded_list.push(pathbuf);
+ } else {
+ // Some OSes can create system support files anywhere.
+ // (e.g. macOS creates a `.DS_Store` file if you visit a directory using Finder.)
+ // Such files can unexpectedly be reported as member paths.
+ // Check and filter out non-directory paths to prevent such accidental,
+ // unwanted paths from being pushed as members.
+ for expanded_path in expanded_paths {
+ if expanded_path.is_dir() {
+ expanded_list.push(expanded_path);
+ }
+ }
+ }
+ }
+
+ Ok(expanded_list)
+ }
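+
+ // A sketch of the expansion behavior, assuming a hypothetical
+ // `members = ["crates/*"]` entry: existing directories such as `crates/foo` are
+ // pushed, stray files like `crates/.DS_Store` are filtered out, and a pattern
+ // matching nothing (e.g. `missing/*`) falls back to pushing the literal path for
+ // backwards compatibility.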
+
+ fn expand_member_path(path: &Path) -> CargoResult<Vec<PathBuf>> {
+ let path = match path.to_str() {
+ Some(p) => p,
+ None => return Ok(Vec::new()),
+ };
+ let res = glob(path).with_context(|| format!("could not parse pattern `{}`", &path))?;
+ let res = res
+ .map(|p| p.with_context(|| format!("unable to match path to pattern `{}`", &path)))
+ .collect::<Result<Vec<_>, _>>()?;
+ Ok(res)
+ }
+
+ pub fn inheritable(&self) -> &InheritableFields {
+ &self.inheritable_fields
+ }
+}
+
+pub fn resolve_relative_path(
+ label: &str,
+ old_root: &Path,
+ new_root: &Path,
+ rel_path: &str,
+) -> CargoResult<String> {
+ let joined_path = normalize_path(&old_root.join(rel_path));
+ match diff_paths(joined_path, new_root) {
+ None => Err(anyhow!(
+ "`{}` was defined in {} but could not be resolved with {}",
+ label,
+ old_root.display(),
+ new_root.display()
+ )),
+ Some(path) => Ok(path
+ .to_str()
+ .ok_or_else(|| {
+ anyhow!(
+ "`{}` resolved to non-UTF value (`{}`)",
+ label,
+ path.display()
+ )
+ })?
+ .to_owned()),
+ }
+}
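+
+// A worked example with hypothetical paths: rebasing an inherited `license-file`
+// from a workspace root onto a member directory.
+//
+//     // joined = normalize_path("/ws/LICENSE")
+//     // result = diff_paths("/ws/LICENSE", "/ws/crates/foo") == "../../LICENSE"
+//     let s = resolve_relative_path(
+//         "license-file",
+//         Path::new("/ws"),
+//         Path::new("/ws/crates/foo"),
+//         "LICENSE",
+//     )?;
+//     assert_eq!(s, "../../LICENSE");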
+
+/// Finds the path of the root of the workspace.
+pub fn find_workspace_root(manifest_path: &Path, config: &Config) -> CargoResult<Option<PathBuf>> {
+ find_workspace_root_with_loader(manifest_path, config, |self_path| {
+ let key = self_path.parent().unwrap();
+ let source_id = SourceId::for_path(key)?;
+ let (manifest, _nested_paths) = read_manifest(self_path, source_id, config)?;
+ Ok(manifest
+ .workspace_config()
+ .get_ws_root(self_path, manifest_path))
+ })
+}
+
+/// Finds the path of the root of the workspace.
+///
+/// This uses a callback to determine if the given path tells us what the
+/// workspace root is.
+fn find_workspace_root_with_loader(
+ manifest_path: &Path,
+ config: &Config,
+ mut loader: impl FnMut(&Path) -> CargoResult<Option<PathBuf>>,
+) -> CargoResult<Option<PathBuf>> {
+ // Check if there are any workspace roots that have already been found that would work
+ {
+ let roots = config.ws_roots.borrow();
+ // Iterate through the manifest's parent directories until we find a workspace
+ // root. Note we skip the first item since that is just the path itself.
+ for current in manifest_path.ancestors().skip(1) {
+ if let Some(ws_config) = roots.get(current) {
+ if !ws_config.is_excluded(manifest_path) {
+ // Add `Cargo.toml` since ws_root is the root and not the file
+ return Ok(Some(current.join("Cargo.toml")));
+ }
+ }
+ }
+ }
+
+ for ances_manifest_path in find_root_iter(manifest_path, config) {
+ debug!("find_root - trying {}", ances_manifest_path.display());
+ if let Some(ws_root_path) = loader(&ances_manifest_path)? {
+ return Ok(Some(ws_root_path));
+ }
+ }
+ Ok(None)
+}
+
+fn read_root_pointer(member_manifest: &Path, root_link: &str) -> PathBuf {
+ let path = member_manifest
+ .parent()
+ .unwrap()
+ .join(root_link)
+ .join("Cargo.toml");
+ debug!("find_root - pointer {}", path.display());
+ paths::normalize_path(&path)
+}
+
+fn find_root_iter<'a>(
+ manifest_path: &'a Path,
+ config: &'a Config,
+) -> impl Iterator<Item = PathBuf> + 'a {
+ LookBehind::new(paths::ancestors(manifest_path, None).skip(2))
+ .take_while(|path| !path.curr.ends_with("target/package"))
+ // Don't walk across `CARGO_HOME` when we're looking for the
+ // workspace root. Sometimes a package will be organized with
+ // `CARGO_HOME` pointing inside of the workspace root or in the
+ // current package, but we don't want to mistakenly try to put
+ // crates.io crates into the workspace by accident.
+ .take_while(|path| {
+ if let Some(last) = path.last {
+ config.home() != last
+ } else {
+ true
+ }
+ })
+ .map(|path| path.curr.join("Cargo.toml"))
+ .filter(|ances_manifest_path| ances_manifest_path.exists())
+}
+
+struct LookBehindWindow<'a, T: ?Sized> {
+ curr: &'a T,
+ last: Option<&'a T>,
+}
+
+struct LookBehind<'a, T: ?Sized, K: Iterator<Item = &'a T>> {
+ iter: K,
+ last: Option<&'a T>,
+}
+
+impl<'a, T: ?Sized, K: Iterator<Item = &'a T>> LookBehind<'a, T, K> {
+ fn new(items: K) -> Self {
+ Self {
+ iter: items,
+ last: None,
+ }
+ }
+}
+
+impl<'a, T: ?Sized, K: Iterator<Item = &'a T>> Iterator for LookBehind<'a, T, K> {
+ type Item = LookBehindWindow<'a, T>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.iter.next() {
+ None => None,
+ Some(next) => {
+ let last = self.last;
+ self.last = Some(next);
+ Some(LookBehindWindow { curr: next, last })
+ }
+ }
+ }
+}
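+
+// A usage sketch for `LookBehind` with hypothetical directories: each window pairs
+// the current ancestor with the one visited just before it, which is how
+// `find_root_iter` knows which directory it came from when checking `CARGO_HOME`.
+//
+//     let dirs = [Path::new("/ws/crates/foo"), Path::new("/ws/crates"), Path::new("/ws")];
+//     let mut windows = LookBehind::new(dirs.iter().copied());
+//     let first = windows.next().unwrap();
+//     assert_eq!(first.curr, Path::new("/ws/crates/foo"));
+//     assert!(first.last.is_none());
+//     let second = windows.next().unwrap();
+//     assert_eq!(second.last, Some(Path::new("/ws/crates/foo")));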
diff --git a/src/tools/cargo/src/cargo/lib.rs b/src/tools/cargo/src/cargo/lib.rs
new file mode 100644
index 000000000..f4c671fd3
--- /dev/null
+++ b/src/tools/cargo/src/cargo/lib.rs
@@ -0,0 +1,235 @@
+// For various reasons, some idioms are still allow'ed, but we would like to
+// test and enforce them.
+#![warn(rust_2018_idioms)]
+// Due to some of the default clippy lints being somewhat subjective and not
+// necessarily an improvement, we prefer to not use them at this time.
+#![allow(clippy::all)]
+#![warn(clippy::disallowed_methods)]
+#![warn(clippy::self_named_module_files)]
+#![allow(rustdoc::private_intra_doc_links)]
+
+//! # Cargo as a library
+//!
+//! There are two places you can find API documentation of cargo-the-library,
+//!
+//! - <https://docs.rs/cargo>: targeted at external tool developers using cargo-the-library
+//! - Released with every rustc release
+//! - <https://doc.rust-lang.org/nightly/nightly-rustc/cargo>: targeted at cargo contributors
+//! - Updated on each update of the `cargo` submodule in `rust-lang/rust`
+//!
+//! **WARNING:** Using Cargo as a library has drawbacks; in particular, the API is unstable,
+//! and there is no clear path to stabilizing it soon at the time of writing. See [The Cargo Book:
+//! External tools] for more on this topic.
+//!
+//! ## Overview
+//!
+//! Major components of cargo include:
+//!
+//! - [`ops`]:
+//! Every major operation is implemented here. Each command is a thin wrapper around ops.
+//! - [`ops::cargo_compile`]:
+//! This is the entry point for all the compilation commands. This is a
+//! good place to start if you want to follow how compilation starts and
+//! flows to completion.
+//! - [`ops::resolve`]:
+//! Top-level API for dependency and feature resolver (e.g. [`ops::resolve_ws`])
+//! - [`core::resolver`]: The core algorithm
+//! - [`core::compiler`]:
+//! This is the code responsible for running `rustc` and `rustdoc`.
+//! - [`core::compiler::build_context`]:
+//! The [`BuildContext`][core::compiler::BuildContext] is the result of the "front end" of the
+//! build process. This contains the graph of work to perform and any settings necessary for
+//! `rustc`. After this is built, the next stage of building is handled in
+//! [`Context`][core::compiler::Context].
+//! - [`core::compiler::context`]:
+//! The `Context` is the mutable state used during the build process. This
+//! is the core of the build process, and everything is coordinated through
+//! this.
+//! - [`core::compiler::fingerprint`]:
+//! The `fingerprint` module contains all the code that handles detecting
+//! if a crate needs to be recompiled.
+//! - [`core::source`]:
+//! The [`core::Source`] trait is an abstraction over different sources of packages.
+//! Sources are uniquely identified by a [`core::SourceId`]. Sources are implemented in the [`sources`]
+//! directory.
+//! - [`util`]:
+//! This directory contains generally-useful utility modules.
+//! - [`util::config`]:
+//! This directory contains the config parser. It makes heavy use of
+//! [serde](https://serde.rs/) to merge and translate config values. The
+//! [`util::Config`] is usually accessed from the
+//! [`core::Workspace`]
+//! though references to it are scattered around for more convenient access.
+//! - [`util::toml`]:
+//! This directory contains the code for parsing `Cargo.toml` files.
+//! - [`ops::lockfile`]:
+//! This is where `Cargo.lock` files are loaded and saved.
+//!
+//! Related crates:
+//! - [`cargo-platform`](https://crates.io/crates/cargo-platform)
+//! ([nightly docs](https://doc.rust-lang.org/nightly/nightly-rustc/cargo_platform)):
+//! This library handles parsing `cfg` expressions.
+//! - [`cargo-util`](https://crates.io/crates/cargo-util)
+//! ([nightly docs](https://doc.rust-lang.org/nightly/nightly-rustc/cargo_util)):
+//! This contains general utility code that is shared between cargo and the testsuite
+//! - [`crates-io`](https://crates.io/crates/crates-io)
+//! ([nightly docs](https://doc.rust-lang.org/nightly/nightly-rustc/crates_io)):
+//! This contains code for accessing the crates.io API.
+//! - [`home`](https://crates.io/crates/home):
+//! This library is shared between cargo and rustup and is used for finding their home directories.
+//! This is not directly depended upon with a `path` dependency; cargo uses the version from crates.io.
+//! It is intended to be versioned and published independently of Rust's release system.
+//! Whenever a change needs to be made, bump the version in Cargo.toml and `cargo publish` it manually, and then update cargo's `Cargo.toml` to depend on the new version.
+//! - [`cargo-test-support`](https://github.com/rust-lang/cargo/tree/master/crates/cargo-test-support)
+//! ([nightly docs](https://doc.rust-lang.org/nightly/nightly-rustc/cargo_test_support/index.html)):
+//! This contains a variety of code to support writing tests
+//! - [`cargo-test-macro`](https://github.com/rust-lang/cargo/tree/master/crates/cargo-test-macro)
+//! ([nightly docs](https://doc.rust-lang.org/nightly/nightly-rustc/cargo_test_macro/index.html)):
+//! This is the `#[cargo_test]` proc-macro used by the test suite to define tests.
+//! - [`credential`](https://github.com/rust-lang/cargo/tree/master/crates/credential)
+//! This subdirectory contains several packages for implementing the
+//! experimental
+//! [credential-process](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#credential-process)
+//! feature.
+//! - [`mdman`](https://github.com/rust-lang/cargo/tree/master/crates/mdman)
+//! ([nightly docs](https://doc.rust-lang.org/nightly/nightly-rustc/mdman/index.html)):
+//! This is a utility for generating cargo's man pages. See [Building the man
+//! pages](https://github.com/rust-lang/cargo/tree/master/src/doc#building-the-man-pages)
+//! for more information.
+//! - [`resolver-tests`](https://github.com/rust-lang/cargo/tree/master/crates/resolver-tests)
+//! This is a dedicated package that defines tests for the [dependency
+//! resolver][core::resolver].
+//!
+//! ### File Overview
+//!
+//! Files that interact with cargo include
+//!
+//! - Package
+//! - `Cargo.toml`: User-written project manifest, loaded with [`util::toml::TomlManifest`] and then
+//! translated to [`core::manifest::Manifest`], which may be stored in a [`core::Package`].
+//! - This is editable with [`util::toml_mut::manifest::LocalManifest`]
+//! - `Cargo.lock`: Generally loaded with [`ops::resolve_ws`] or a variant of it into a [`core::resolver::Resolve`]
+//! - At the lowest level, [`ops::load_pkg_lockfile`] and [`ops::write_pkg_lockfile`] are used
+//! - See [`core::resolver::encode`] for versioning of `Cargo.lock`
+//! - `target/`: Used for build artifacts and abstracted with [`core::compiler::layout`]. `Layout` handles locking the target directory and providing paths to parts inside. There is a separate `Layout` for each build `target`.
+//! - `target/debug/.fingerprint`: Tracks whether or not a crate needs to be rebuilt. See [`core::compiler::fingerprint`]
+//! - `$CARGO_HOME/`:
+//! - `registry/`: Package registry cache which is managed in [`sources::registry`]. Be careful
+//! as the lock [`util::Config::acquire_package_cache_lock`] must be manually acquired.
+//! - `index/`: Fast-to-access crate metadata (no need to download / extract `*.crate` files)
+//! - `cache/*/*.crate`: Local cache of published crates
+//! - `src/*/*`: Extracted from `*.crate` by [`sources::registry::RegistrySource`]
+//! - `git/`: Git source cache. See [`sources::git`].
+//! - `**/.cargo/config.toml`: Environment dependent (env variables, files) configuration. See
+//! [`util::config`]
+//!
+//! ## Contributing to Cargo documentation
+//!
+//! The Cargo team is always improving both the external and internal documentation.
+//! If you spot anything that could be better, don't hesitate to discuss it with the team on
+//! the Zulip [`t-cargo` stream], or [submit an issue] right on GitHub.
+//! There is also an issue label [`A-documenting-cargo-itself`],
+//! which is generally for the user-facing [The Cargo Book],
+//! but the Cargo team welcomes any enhancement to the [Cargo Contributor Guide]
+//! and this API documentation as well.
+//!
+//! [The Cargo Book: External tools]: https://doc.rust-lang.org/stable/cargo/reference/external-tools.html
+//! [Cargo Architecture Overview]: https://doc.crates.io/contrib/architecture
+//! [`t-cargo` stream]: https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo
+//! [submit an issue]: https://github.com/rust-lang/cargo/issues/new/choose
+//! [`A-documenting-cargo-itself`]: https://github.com/rust-lang/cargo/labels/A-documenting-cargo-itself
+//! [The Cargo Book]: https://doc.rust-lang.org/cargo/
+//! [Cargo Contributor Guide]: https://doc.crates.io/contrib/
+
+use crate::core::shell::Verbosity::Verbose;
+use crate::core::Shell;
+use anyhow::Error;
+use log::debug;
+
+pub use crate::util::errors::{AlreadyPrintedError, InternalError, VerboseError};
+pub use crate::util::{indented_lines, CargoResult, CliError, CliResult, Config};
+pub use crate::version::version;
+
+pub const CARGO_ENV: &str = "CARGO";
+
+#[macro_use]
+mod macros;
+
+pub mod core;
+pub mod ops;
+pub mod sources;
+pub mod util;
+mod version;
+
+pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! {
+ debug!("exit_with_error; err={:?}", err);
+
+ if let Some(ref err) = err.error {
+ if let Some(clap_err) = err.downcast_ref::<clap::Error>() {
+ let exit_code = if clap_err.use_stderr() { 1 } else { 0 };
+ let _ = clap_err.print();
+ std::process::exit(exit_code)
+ }
+ }
+
+ let CliError { error, exit_code } = err;
+ if let Some(error) = error {
+ display_error(&error, shell);
+ }
+
+ std::process::exit(exit_code)
+}
+
+/// Displays an error, and all its causes, to stderr.
+pub fn display_error(err: &Error, shell: &mut Shell) {
+ debug!("display_error; err={:?}", err);
+ _display_error(err, shell, true);
+ if err
+ .chain()
+ .any(|e| e.downcast_ref::<InternalError>().is_some())
+ {
+ drop(shell.note("this is an unexpected cargo internal error"));
+ drop(
+ shell.note(
+ "we would appreciate a bug report: https://github.com/rust-lang/cargo/issues/",
+ ),
+ );
+ drop(shell.note(format!("cargo {}", version())));
+ // Once backtraces are stabilized, this should print out a backtrace
+ // if it is available.
+ }
+}
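+
+// A usage sketch, assuming a hypothetical fallible entry point `run()` returning
+// `anyhow::Result<()>`:
+//
+//     let mut shell = Shell::new();
+//     if let Err(err) = run() {
+//         cargo::display_error(&err, &mut shell);
+//         std::process::exit(101);
+//     }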
+
+/// Displays a warning, with an error object providing detailed information
+/// and context.
+pub fn display_warning_with_error(warning: &str, err: &Error, shell: &mut Shell) {
+ drop(shell.warn(warning));
+ drop(writeln!(shell.err()));
+ _display_error(err, shell, false);
+}
+
+fn _display_error(err: &Error, shell: &mut Shell, as_err: bool) -> bool {
+ for (i, err) in err.chain().enumerate() {
+ // If we're not in verbose mode then only print cause chain until one
+ // marked as `VerboseError` appears.
+ //
+ // Generally the top error shouldn't be verbose, but check it anyways.
+ if shell.verbosity() != Verbose && err.is::<VerboseError>() {
+ return true;
+ }
+ if err.is::<AlreadyPrintedError>() {
+ break;
+ }
+ if i == 0 {
+ if as_err {
+ drop(shell.error(&err));
+ } else {
+ drop(writeln!(shell.err(), "{}", err));
+ }
+ } else {
+ drop(writeln!(shell.err(), "\nCaused by:"));
+ drop(write!(shell.err(), "{}", indented_lines(&err.to_string())));
+ }
+ }
+ false
+}
diff --git a/src/tools/cargo/src/cargo/macros.rs b/src/tools/cargo/src/cargo/macros.rs
new file mode 100644
index 000000000..3ebf3b37f
--- /dev/null
+++ b/src/tools/cargo/src/cargo/macros.rs
@@ -0,0 +1,49 @@
+use std::fmt;
+
+macro_rules! compact_debug {
+ (
+ impl fmt::Debug for $ty:ident {
+ fn fmt(&$this:ident, f: &mut fmt::Formatter) -> fmt::Result {
+ let (default, default_name) = $e:expr;
+ [debug_the_fields($($field:ident)*)]
+ }
+ }
+ ) => (
+
+ impl fmt::Debug for $ty {
+ fn fmt(&$this, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Try printing a pretty version where we collapse as many fields as
+ // possible, indicating that they're equivalent to a function call
+ // that's hopefully enough to indicate what each value is without
+ // actually dumping everything so verbosely.
+ let mut s = f.debug_struct(stringify!($ty));
+ let (default, default_name) = $e;
+ let mut any_default = false;
+
+ // Exhaustively match so when fields are added we get a compile
+ // failure
+ let $ty { $($field),* } = $this;
+ $(
+ if *$field == default.$field {
+ any_default = true;
+ } else {
+ s.field(stringify!($field), $field);
+ }
+ )*
+
+ if any_default {
+ s.field("..", &crate::macros::DisplayAsDebug(default_name));
+ }
+ s.finish()
+ }
+ }
+ )
+}
+
+pub struct DisplayAsDebug<T>(pub T);
+
+impl<T: fmt::Display> fmt::Debug for DisplayAsDebug<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.0, f)
+ }
+}
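+
+// An invocation sketch, assuming a hypothetical `Target` type with exactly the
+// fields `name`, `kind`, and `src_path`: fields equal to the provided default are
+// collapsed into a single `..` entry that prints `default_name`.
+//
+//     compact_debug! {
+//         impl fmt::Debug for Target {
+//             fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+//                 let (default, default_name) = (Target::lib_default(), "Target::lib_default()");
+//                 [debug_the_fields(name kind src_path)]
+//             }
+//         }
+//     }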
diff --git a/src/tools/cargo/src/cargo/ops/cargo_add/crate_spec.rs b/src/tools/cargo/src/cargo/ops/cargo_add/crate_spec.rs
new file mode 100644
index 000000000..f07e2fae5
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_add/crate_spec.rs
@@ -0,0 +1,63 @@
+//! Crate name parsing.
+
+use anyhow::Context as _;
+
+use super::Dependency;
+use crate::util::toml_mut::dependency::RegistrySource;
+use crate::util::validate_package_name;
+use crate::CargoResult;
+
+/// User-specified crate
+///
+/// This can be a
+/// - Name (e.g. `docopt`)
+/// - Name and a version req (e.g. `docopt@^0.8`)
+/// - Path
+#[derive(Debug)]
+pub struct CrateSpec {
+ /// Crate name
+ name: String,
+ /// Optional version requirement
+ version_req: Option<String>,
+}
+
+impl CrateSpec {
+ /// Converts a string to a `CrateSpec`
+ pub fn resolve(pkg_id: &str) -> CargoResult<Self> {
+ let (name, version) = pkg_id
+ .split_once('@')
+ .map(|(n, v)| (n, Some(v)))
+ .unwrap_or((pkg_id, None));
+
+ validate_package_name(name, "dependency name", "")?;
+
+ if let Some(version) = version {
+ semver::VersionReq::parse(version)
+ .with_context(|| format!("invalid version requirement `{version}`"))?;
+ }
+
+ let id = Self {
+ name: name.to_owned(),
+ version_req: version.map(|s| s.to_owned()),
+ };
+
+ Ok(id)
+ }
+
+ /// Generate a dependency entry for this crate specifier
+ pub fn to_dependency(&self) -> CargoResult<Dependency> {
+ let mut dep = Dependency::new(self.name());
+ if let Some(version_req) = self.version_req() {
+ dep = dep.set_source(RegistrySource::new(version_req));
+ }
+ Ok(dep)
+ }
+
+ pub fn name(&self) -> &str {
+ &self.name
+ }
+
+ pub fn version_req(&self) -> Option<&str> {
+ self.version_req.as_deref()
+ }
+}
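+
+// An example of the `name@version` split performed by `resolve`, using the same
+// `docopt` spelling as the doc comment above:
+//
+//     let spec = CrateSpec::resolve("docopt@^0.8")?;
+//     assert_eq!(spec.name(), "docopt");
+//     assert_eq!(spec.version_req(), Some("^0.8"));
+//
+//     // Without an `@`, the whole string is the name and no requirement is recorded.
+//     let bare = CrateSpec::resolve("docopt")?;
+//     assert!(bare.version_req().is_none());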
diff --git a/src/tools/cargo/src/cargo/ops/cargo_add/mod.rs b/src/tools/cargo/src/cargo/ops/cargo_add/mod.rs
new file mode 100644
index 000000000..5c519ac09
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_add/mod.rs
@@ -0,0 +1,971 @@
+//! Core of cargo-add command
+
+mod crate_spec;
+
+use std::collections::BTreeMap;
+use std::collections::BTreeSet;
+use std::collections::VecDeque;
+use std::fmt::Write;
+use std::path::Path;
+
+use anyhow::Context as _;
+use cargo_util::paths;
+use indexmap::IndexSet;
+use itertools::Itertools;
+use termcolor::Color::Green;
+use termcolor::Color::Red;
+use termcolor::ColorSpec;
+use toml_edit::Item as TomlItem;
+
+use crate::core::dependency::DepKind;
+use crate::core::registry::PackageRegistry;
+use crate::core::FeatureValue;
+use crate::core::Package;
+use crate::core::QueryKind;
+use crate::core::Registry;
+use crate::core::Shell;
+use crate::core::Summary;
+use crate::core::Workspace;
+use crate::util::toml_mut::dependency::Dependency;
+use crate::util::toml_mut::dependency::GitSource;
+use crate::util::toml_mut::dependency::MaybeWorkspace;
+use crate::util::toml_mut::dependency::PathSource;
+use crate::util::toml_mut::dependency::Source;
+use crate::util::toml_mut::dependency::WorkspaceSource;
+use crate::util::toml_mut::manifest::DepTable;
+use crate::util::toml_mut::manifest::LocalManifest;
+use crate::CargoResult;
+use crate::Config;
+use crate_spec::CrateSpec;
+
+/// Information on what dependencies should be added
+#[derive(Clone, Debug)]
+pub struct AddOptions<'a> {
+ /// Configuration information for cargo operations
+ pub config: &'a Config,
+ /// Package to add dependencies to
+ pub spec: &'a Package,
+ /// Dependencies to add or modify
+ pub dependencies: Vec<DepOp>,
+ /// Which dependency section to add these to
+ pub section: DepTable,
+ /// Act as if dependencies will be added, without actually modifying the manifest
+ pub dry_run: bool,
+}
+
+/// Add dependencies to a manifest
+pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<()> {
+ let dep_table = options
+ .section
+ .to_table()
+ .into_iter()
+ .map(String::from)
+ .collect::<Vec<_>>();
+
+ let manifest_path = options.spec.manifest_path().to_path_buf();
+ let mut manifest = LocalManifest::try_new(&manifest_path)?;
+ let original_raw_manifest = manifest.to_string();
+ let legacy = manifest.get_legacy_sections();
+ if !legacy.is_empty() {
+ anyhow::bail!(
+ "Deprecated dependency sections are unsupported: {}",
+ legacy.join(", ")
+ );
+ }
+
+ let mut registry = PackageRegistry::new(options.config)?;
+
+ let deps = {
+ let _lock = options.config.acquire_package_cache_lock()?;
+ registry.lock_patches();
+ options
+ .dependencies
+ .iter()
+ .map(|raw| {
+ resolve_dependency(
+ &manifest,
+ raw,
+ workspace,
+ &options.section,
+ options.config,
+ &mut registry,
+ )
+ })
+ .collect::<CargoResult<Vec<_>>>()?
+ };
+
+ let was_sorted = manifest
+ .get_table(&dep_table)
+ .map(TomlItem::as_table)
+ .map_or(true, |table_option| {
+ table_option.map_or(true, |table| {
+ is_sorted(table.get_values().iter_mut().map(|(key, _)| {
+ // get_values key paths always have at least one key.
+ key.remove(0)
+ }))
+ })
+ });
+ for dep in deps {
+ print_action_msg(&mut options.config.shell(), &dep, &dep_table)?;
+ if let Some(Source::Path(src)) = dep.source() {
+ if src.path == manifest.path.parent().unwrap_or_else(|| Path::new("")) {
+ anyhow::bail!(
+ "cannot add `{}` as a dependency to itself",
+ manifest.package_name()?
+ )
+ }
+ }
+
+ let available_features = dep
+ .available_features
+ .keys()
+ .map(|s| s.as_ref())
+ .collect::<BTreeSet<&str>>();
+ let mut unknown_features: Vec<&str> = Vec::new();
+ if let Some(req_feats) = dep.features.as_ref() {
+ let req_feats: BTreeSet<_> = req_feats.iter().map(|s| s.as_str()).collect();
+ unknown_features.extend(req_feats.difference(&available_features).copied());
+ }
+ if let Some(inherited_features) = dep.inherited_features.as_ref() {
+ let inherited_features: BTreeSet<_> =
+ inherited_features.iter().map(|s| s.as_str()).collect();
+ unknown_features.extend(inherited_features.difference(&available_features).copied());
+ }
+
+ unknown_features.sort();
+
+ if !unknown_features.is_empty() {
+ let (mut activated, mut deactivated) = dep.features();
+ // Since the unknown features have been added to the DependencyUI we need to remove
+ // them to present the "correct" features that can be specified for the crate.
+ deactivated.retain(|f| !unknown_features.contains(f));
+ activated.retain(|f| !unknown_features.contains(f));
+
+ let mut message = format!(
+ "unrecognized feature{} for crate {}: {}\n",
+ if unknown_features.len() == 1 { "" } else { "s" },
+ dep.name,
+ unknown_features.iter().format(", "),
+ );
+ if activated.is_empty() && deactivated.is_empty() {
+ write!(message, "no features available for crate {}", dep.name)?;
+ } else {
+ if !deactivated.is_empty() {
+ writeln!(
+ message,
+ "disabled features:\n {}",
+ deactivated
+ .iter()
+ .map(|s| s.to_string())
+ .coalesce(|x, y| if x.len() + y.len() < 78 {
+ Ok(format!("{x}, {y}"))
+ } else {
+ Err((x, y))
+ })
+ .into_iter()
+ .format("\n ")
+ )?
+ }
+ if !activated.is_empty() {
+ writeln!(
+ message,
+ "enabled features:\n {}",
+ activated
+ .iter()
+ .map(|s| s.to_string())
+ .coalesce(|x, y| if x.len() + y.len() < 78 {
+ Ok(format!("{x}, {y}"))
+ } else {
+ Err((x, y))
+ })
+ .into_iter()
+ .format("\n ")
+ )?
+ }
+ }
+ anyhow::bail!(message.trim().to_owned());
+ }
+
+ print_dep_table_msg(&mut options.config.shell(), &dep)?;
+
+ manifest.insert_into_table(&dep_table, &dep)?;
+ manifest.gc_dep(dep.toml_key());
+ }
+
+ if was_sorted {
+ if let Some(table) = manifest
+ .get_table_mut(&dep_table)
+ .ok()
+ .and_then(TomlItem::as_table_like_mut)
+ {
+ table.sort_values();
+ }
+ }
+
+ if options.config.locked() {
+ let new_raw_manifest = manifest.to_string();
+ if original_raw_manifest != new_raw_manifest {
+ anyhow::bail!(
+ "the manifest file {} needs to be updated but --locked was passed to prevent this",
+ manifest.path.display()
+ );
+ }
+ }
+
+ if options.dry_run {
+ options.config.shell().warn("aborting add due to dry run")?;
+ } else {
+ manifest.write()?;
+ }
+
+ Ok(())
+}
+
+/// Dependency entry operation
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct DepOp {
+ /// Describes the crate
+ pub crate_spec: Option<String>,
+ /// Dependency key, overriding the package name in crate_spec
+ pub rename: Option<String>,
+
+ /// Feature flags to activate
+ pub features: Option<IndexSet<String>>,
+ /// Whether the default feature should be activated
+ pub default_features: Option<bool>,
+
+ /// Whether dependency is optional
+ pub optional: Option<bool>,
+
+ /// Registry for looking up dependency version
+ pub registry: Option<String>,
+
+ /// Git repo for dependency
+ pub path: Option<String>,
+ /// Git repo for dependency
+ pub git: Option<String>,
+ /// Specify an alternative git branch
+ pub branch: Option<String>,
+ /// Specify a specific git rev
+ pub rev: Option<String>,
+ /// Specify a specific git tag
+ pub tag: Option<String>,
+}
+
+fn resolve_dependency(
+ manifest: &LocalManifest,
+ arg: &DepOp,
+ ws: &Workspace<'_>,
+ section: &DepTable,
+ config: &Config,
+ registry: &mut PackageRegistry<'_>,
+) -> CargoResult<DependencyUI> {
+ let crate_spec = arg
+ .crate_spec
+ .as_deref()
+ .map(CrateSpec::resolve)
+ .transpose()?;
+ let mut selected_dep = if let Some(url) = &arg.git {
+ let mut src = GitSource::new(url);
+ if let Some(branch) = &arg.branch {
+ src = src.set_branch(branch);
+ }
+ if let Some(tag) = &arg.tag {
+ src = src.set_tag(tag);
+ }
+ if let Some(rev) = &arg.rev {
+ src = src.set_rev(rev);
+ }
+
+ let selected = if let Some(crate_spec) = &crate_spec {
+ if let Some(v) = crate_spec.version_req() {
+ // crate specifier includes a version (e.g. `docopt@0.8`)
+ anyhow::bail!("cannot specify a git URL (`{url}`) with a version (`{v}`).");
+ }
+ let dependency = crate_spec.to_dependency()?.set_source(src);
+ let selected = select_package(&dependency, config, registry)?;
+ if dependency.name != selected.name {
+ config.shell().warn(format!(
+ "translating `{}` to `{}`",
+ dependency.name, selected.name,
+ ))?;
+ }
+ selected
+ } else {
+ let mut source = crate::sources::GitSource::new(src.source_id()?, config)?;
+ let packages = source.read_packages()?;
+ let package = infer_package_for_git_source(packages, &src)?;
+ Dependency::from(package.summary())
+ };
+ selected
+ } else if let Some(raw_path) = &arg.path {
+ let path = paths::normalize_path(&std::env::current_dir()?.join(raw_path));
+ let src = PathSource::new(&path);
+
+ let selected = if let Some(crate_spec) = &crate_spec {
+ if let Some(v) = crate_spec.version_req() {
+ // crate specifier includes a version (e.g. `docopt@0.8`)
+ anyhow::bail!("cannot specify a path (`{raw_path}`) with a version (`{v}`).");
+ }
+ let dependency = crate_spec.to_dependency()?.set_source(src);
+ let selected = select_package(&dependency, config, registry)?;
+ if dependency.name != selected.name {
+ config.shell().warn(format!(
+ "translating `{}` to `{}`",
+ dependency.name, selected.name,
+ ))?;
+ }
+ selected
+ } else {
+ let source = crate::sources::PathSource::new(&path, src.source_id()?, config);
+ let package = source
+ .read_packages()?
+ .pop()
+ .expect("read_packages errors when no packages");
+ Dependency::from(package.summary())
+ };
+ selected
+ } else if let Some(crate_spec) = &crate_spec {
+ crate_spec.to_dependency()?
+ } else {
+ anyhow::bail!("dependency name is required");
+ };
+ selected_dep = populate_dependency(selected_dep, arg);
+
+ let old_dep = get_existing_dependency(manifest, selected_dep.toml_key(), section)?;
+ let mut dependency = if let Some(mut old_dep) = old_dep.clone() {
+ if old_dep.name != selected_dep.name {
+ // Assuming most existing keys are not relevant when the package changes
+ if selected_dep.optional.is_none() {
+ selected_dep.optional = old_dep.optional;
+ }
+ selected_dep
+ } else {
+ if selected_dep.source().is_some() {
+ // Overwrite with `crate_spec`
+ old_dep.source = selected_dep.source;
+ }
+ populate_dependency(old_dep, arg)
+ }
+ } else {
+ selected_dep
+ };
+
+ if dependency.source().is_none() {
+ // Checking for a workspace dependency happens first since a member could be specified
+ // in the workspace dependencies table as a dependency
+ if let Some(_dep) = find_workspace_dep(dependency.toml_key(), ws.root_manifest()).ok() {
+ dependency = dependency.set_source(WorkspaceSource::new());
+ } else if let Some(package) = ws.members().find(|p| p.name().as_str() == dependency.name) {
+ // Only special-case workspaces when the user doesn't provide any extra
+ // information, otherwise, trust the user.
+ let mut src = PathSource::new(package.root());
+ // dev-dependencies do not need the version populated
+ if section.kind() != DepKind::Development {
+ let op = "";
+ let v = format!("{op}{version}", version = package.version());
+ src = src.set_version(v);
+ }
+ dependency = dependency.set_source(src);
+ } else {
+ let latest = get_latest_dependency(&dependency, false, config, registry)?;
+
+ if dependency.name != latest.name {
+ config.shell().warn(format!(
+ "translating `{}` to `{}`",
+ dependency.name, latest.name,
+ ))?;
+ dependency.name = latest.name; // Normalize the name
+ }
+ dependency = dependency.set_source(latest.source.expect("latest always has a source"));
+ }
+ }
+
+ if let Some(Source::Workspace(_)) = dependency.source() {
+ check_invalid_ws_keys(dependency.toml_key(), arg)?;
+ }
+
+ let version_required = dependency.source().and_then(|s| s.as_registry()).is_some();
+ let version_optional_in_section = section.kind() == DepKind::Development;
+ let preserve_existing_version = old_dep
+ .as_ref()
+ .map(|d| d.version().is_some())
+ .unwrap_or(false);
+ if !version_required && !preserve_existing_version && version_optional_in_section {
+ // dev-dependencies do not need the version populated
+ dependency = dependency.clear_version();
+ }
+
+ let query = dependency.query(config)?;
+ let query = match query {
+ MaybeWorkspace::Workspace(_workspace) => {
+ let dep = find_workspace_dep(dependency.toml_key(), ws.root_manifest())?;
+ if let Some(features) = dep.features.clone() {
+ dependency = dependency.set_inherited_features(features);
+ }
+ let query = dep.query(config)?;
+ match query {
+ MaybeWorkspace::Workspace(_) => {
+ unreachable!("This should have been caught when parsing a workspace root")
+ }
+ MaybeWorkspace::Other(query) => query,
+ }
+ }
+ MaybeWorkspace::Other(query) => query,
+ };
+
+ let dependency = populate_available_features(dependency, &query, registry)?;
+
+ Ok(dependency)
+}
+
+/// When `{ workspace = true }` is used, you cannot define other keys that configure
+/// the source of the dependency, such as `version`, `registry`, `registry-index`,
+/// `path`, `git`, `branch`, `tag`, `rev`, or `package`. You also cannot define
+/// `default-features`.
+///
+/// Only `default-features`, `registry`, and `rename` need to be checked
+/// for currently. This is because `git` and its associated keys, `path`, and
+/// `version` should all be checked before this is called. `rename` is checked
+/// for as it turns into `package`.
+fn check_invalid_ws_keys(toml_key: &str, arg: &DepOp) -> CargoResult<()> {
+ fn err_msg(toml_key: &str, flag: &str, field: &str) -> String {
+ format!(
+ "cannot override workspace dependency with `{flag}`, \
+ either change `workspace.dependencies.{toml_key}.{field}` \
+ or define the dependency exclusively in the package's manifest"
+ )
+ }
+
+ if arg.default_features.is_some() {
+ anyhow::bail!(
+ "{}",
+ err_msg(toml_key, "--default-features", "default-features")
+ )
+ }
+ if arg.registry.is_some() {
+ anyhow::bail!("{}", err_msg(toml_key, "--registry", "registry"))
+ }
+ // rename is `package`
+ if arg.rename.is_some() {
+ anyhow::bail!("{}", err_msg(toml_key, "--rename", "package"))
+ }
+ Ok(())
+}
+
+/// Provide the existing dependency for the target table
+///
+/// If it doesn't exist in that table but does exist in another one, use that entry, since
+/// users most likely want the same version across all tables unless they are renaming.
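+///
+/// For example (illustrative only): if `foo = "1.2"` already appears under `[dependencies]`
+/// and the user runs `cargo add foo --dev`, the existing source is reused for
+/// `[dev-dependencies]` rather than re-resolving `foo` against the registry.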
+fn get_existing_dependency(
+ manifest: &LocalManifest,
+ dep_key: &str,
+ section: &DepTable,
+) -> CargoResult<Option<Dependency>> {
+ #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
+ enum Key {
+ Error,
+ Dev,
+ Build,
+ Normal,
+ Existing,
+ }
+
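+    // Sorting by this key and taking the last (greatest) entry gives the precedence order:
+    // an entry already in the requested section wins, then normal, build, and dev
+    // dependencies, with unparseable entries used only as a last resort.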
+ let mut possible: Vec<_> = manifest
+ .get_dependency_versions(dep_key)
+ .map(|(path, dep)| {
+ let key = if path == *section {
+ (Key::Existing, true)
+ } else if dep.is_err() {
+ (Key::Error, path.target().is_some())
+ } else {
+ let key = match path.kind() {
+ DepKind::Normal => Key::Normal,
+ DepKind::Build => Key::Build,
+ DepKind::Development => Key::Dev,
+ };
+ (key, path.target().is_some())
+ };
+ (key, dep)
+ })
+ .collect();
+ possible.sort_by_key(|(key, _)| *key);
+ let (key, dep) = if let Some(item) = possible.pop() {
+ item
+ } else {
+ return Ok(None);
+ };
+ let mut dep = dep?;
+
+ if key.0 != Key::Existing {
+ // When the dep comes from a different section, we only care about the source and not any
+ // of the other fields, like `features`
+ let unrelated = dep;
+ dep = Dependency::new(&unrelated.name);
+ dep.source = unrelated.source.clone();
+ dep.registry = unrelated.registry.clone();
+
+        // dev-dependencies do not need the version populated when a path is set, though we
+        // should preserve it if the user chose to populate it.
+ let version_required = unrelated.source().and_then(|s| s.as_registry()).is_some();
+ let version_optional_in_section = section.kind() == DepKind::Development;
+ if !version_required && version_optional_in_section {
+ dep = dep.clear_version();
+ }
+ }
+
+ Ok(Some(dep))
+}
+
+fn get_latest_dependency(
+ dependency: &Dependency,
+ _flag_allow_prerelease: bool,
+ config: &Config,
+ registry: &mut PackageRegistry<'_>,
+) -> CargoResult<Dependency> {
+ let query = dependency.query(config)?;
+ match query {
+ MaybeWorkspace::Workspace(_) => {
+ unreachable!("registry dependencies required, found a workspace dependency");
+ }
+ MaybeWorkspace::Other(query) => {
+ let possibilities = loop {
+ match registry.query_vec(&query, QueryKind::Fuzzy) {
+ std::task::Poll::Ready(res) => {
+ break res?;
+ }
+ std::task::Poll::Pending => registry.block_until_ready()?,
+ }
+ };
+ let latest = possibilities
+ .iter()
+ .max_by_key(|s| {
+                    // Fall back to a pre-release only if no stable release is available, by
+                    // sorting pre-releases as less so that `max_by_key` prefers stable versions.
+ let stable = s.version().pre.is_empty();
+ (stable, s.version())
+ })
+ .ok_or_else(|| {
+ anyhow::format_err!(
+ "the crate `{dependency}` could not be found in registry index."
+ )
+ })?;
+ let mut dep = Dependency::from(latest);
+ if let Some(reg_name) = dependency.registry.as_deref() {
+ dep = dep.set_registry(reg_name);
+ }
+ Ok(dep)
+ }
+ }
+}
+
+fn select_package(
+ dependency: &Dependency,
+ config: &Config,
+ registry: &mut PackageRegistry<'_>,
+) -> CargoResult<Dependency> {
+ let query = dependency.query(config)?;
+ match query {
+ MaybeWorkspace::Workspace(_) => {
+ unreachable!("path or git dependency expected, found workspace dependency");
+ }
+ MaybeWorkspace::Other(query) => {
+ let possibilities = loop {
+ // Exact to avoid returning all for path/git
+ match registry.query_vec(&query, QueryKind::Exact) {
+ std::task::Poll::Ready(res) => {
+ break res?;
+ }
+ std::task::Poll::Pending => registry.block_until_ready()?,
+ }
+ };
+ match possibilities.len() {
+ 0 => {
+ let source = dependency
+ .source()
+ .expect("source should be resolved before here");
+ anyhow::bail!("the crate `{dependency}` could not be found at `{source}`")
+ }
+ 1 => {
+ let mut dep = Dependency::from(&possibilities[0]);
+ if let Some(reg_name) = dependency.registry.as_deref() {
+ dep = dep.set_registry(reg_name);
+ }
+ Ok(dep)
+ }
+ _ => {
+ let source = dependency
+ .source()
+ .expect("source should be resolved before here");
+ anyhow::bail!(
+ "unexpectedly found multiple copies of crate `{dependency}` at `{source}`"
+ )
+ }
+ }
+ }
+ }
+}
+
+fn infer_package_for_git_source(
+ mut packages: Vec<Package>,
+ src: &dyn std::fmt::Display,
+) -> CargoResult<Package> {
+ let package = match packages.len() {
+ 0 => unreachable!(
+ "this function should only be called with packages from `GitSource::read_packages` \
+ and that call should error for us when there are no packages"
+ ),
+ 1 => packages.pop().expect("match ensured element is present"),
+ _ => {
+ let mut names: Vec<_> = packages
+ .iter()
+ .map(|p| p.name().as_str().to_owned())
+ .collect();
+ names.sort_unstable();
+ anyhow::bail!(
+ "multiple packages found at `{src}`:\n {}\nTo disambiguate, run `cargo add --git {src} <package>`",
+ names
+ .iter()
+ .map(|s| s.to_string())
+ .coalesce(|x, y| if x.len() + y.len() < 78 {
+ Ok(format!("{x}, {y}"))
+ } else {
+ Err((x, y))
+ })
+ .into_iter()
+ .format("\n "),
+ );
+ }
+ };
+ Ok(package)
+}
+
+fn populate_dependency(mut dependency: Dependency, arg: &DepOp) -> Dependency {
+ if let Some(registry) = &arg.registry {
+ if registry.is_empty() {
+ dependency.registry = None;
+ } else {
+ dependency.registry = Some(registry.to_owned());
+ }
+ }
+ if let Some(value) = arg.optional {
+ if value {
+ dependency.optional = Some(true);
+ } else {
+ dependency.optional = None;
+ }
+ }
+ if let Some(value) = arg.default_features {
+ if value {
+ dependency.default_features = None;
+ } else {
+ dependency.default_features = Some(false);
+ }
+ }
+ if let Some(value) = arg.features.as_ref() {
+ dependency = dependency.extend_features(value.iter().cloned());
+ }
+
+ if let Some(rename) = &arg.rename {
+ dependency = dependency.set_rename(rename);
+ }
+
+ dependency
+}
+
+/// Tracks presentation-layer information alongside the editable representation of a
+/// `[dependencies]` entry (`Dependency`).
+pub struct DependencyUI {
+    /// Editable representation of a `[dependencies]` entry
+ dep: Dependency,
+ /// The version of the crate that we pulled `available_features` from
+ available_version: Option<semver::Version>,
+    /// The widest set of features compatible with the `Dependency`'s version requirement
+ available_features: BTreeMap<String, Vec<String>>,
+}
+
+impl DependencyUI {
+ fn new(dep: Dependency) -> Self {
+ Self {
+ dep,
+ available_version: None,
+ available_features: Default::default(),
+ }
+ }
+
+ fn apply_summary(&mut self, summary: &Summary) {
+ self.available_version = Some(summary.version().clone());
+ self.available_features = summary
+ .features()
+ .iter()
+ .map(|(k, v)| {
+ (
+ k.as_str().to_owned(),
+ v.iter()
+ .filter_map(|v| match v {
+ FeatureValue::Feature(f) => Some(f.as_str().to_owned()),
+ FeatureValue::Dep { .. } | FeatureValue::DepFeature { .. } => None,
+ })
+ .collect::<Vec<_>>(),
+ )
+ })
+ .collect();
+ }
+
+ fn features(&self) -> (IndexSet<&str>, IndexSet<&str>) {
+ let mut activated: IndexSet<_> =
+ self.features.iter().flatten().map(|s| s.as_str()).collect();
+ if self.default_features().unwrap_or(true) {
+ activated.insert("default");
+ }
+ activated.extend(self.inherited_features.iter().flatten().map(|s| s.as_str()));
+ let mut walk: VecDeque<_> = activated.iter().cloned().collect();
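+        // Breadth-first expansion: each activated feature may enable further features of this
+        // crate (`dep:`/dep-feature entries were filtered out in `apply_summary`), so keep
+        // walking until the activated set stops growing.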
+ while let Some(next) = walk.pop_front() {
+ walk.extend(
+ self.available_features
+ .get(next)
+ .into_iter()
+ .flatten()
+ .map(|s| s.as_str())
+ .filter(|s| !activated.contains(s)),
+ );
+ activated.extend(
+ self.available_features
+ .get(next)
+ .into_iter()
+ .flatten()
+ .map(|s| s.as_str()),
+ );
+ }
+ activated.remove("default");
+ activated.sort();
+ let mut deactivated = self
+ .available_features
+ .keys()
+ .filter(|f| !activated.contains(f.as_str()) && *f != "default")
+ .map(|f| f.as_str())
+ .collect::<IndexSet<_>>();
+ deactivated.sort();
+ (activated, deactivated)
+ }
+}
+
+impl<'s> From<&'s Summary> for DependencyUI {
+ fn from(other: &'s Summary) -> Self {
+ let dep = Dependency::from(other);
+ let mut dep = Self::new(dep);
+ dep.apply_summary(other);
+ dep
+ }
+}
+
+impl std::fmt::Display for DependencyUI {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.dep.fmt(f)
+ }
+}
+
+impl std::ops::Deref for DependencyUI {
+ type Target = Dependency;
+
+ fn deref(&self) -> &Self::Target {
+ &self.dep
+ }
+}
+
+/// Lookup available features
+fn populate_available_features(
+ dependency: Dependency,
+ query: &crate::core::dependency::Dependency,
+ registry: &mut PackageRegistry<'_>,
+) -> CargoResult<DependencyUI> {
+ let mut dependency = DependencyUI::new(dependency);
+
+ if !dependency.available_features.is_empty() {
+ return Ok(dependency);
+ }
+
+ let possibilities = loop {
+ match registry.query_vec(&query, QueryKind::Fuzzy) {
+ std::task::Poll::Ready(res) => {
+ break res?;
+ }
+ std::task::Poll::Pending => registry.block_until_ready()?,
+ }
+ };
+ // Ensure widest feature flag compatibility by picking the earliest version that could show up
+ // in the lock file for a given version requirement.
+ let lowest_common_denominator = possibilities
+ .iter()
+ .min_by_key(|s| {
+            // Fall back to a pre-release only if no stable release is available, by sorting
+            // pre-releases as greater so that `min_by_key` prefers stable versions.
+ let is_pre = !s.version().pre.is_empty();
+ (is_pre, s.version())
+ })
+ .ok_or_else(|| {
+ anyhow::format_err!("the crate `{dependency}` could not be found in registry index.")
+ })?;
+ dependency.apply_summary(&lowest_common_denominator);
+
+ Ok(dependency)
+}
+
+fn print_action_msg(shell: &mut Shell, dep: &DependencyUI, section: &[String]) -> CargoResult<()> {
+ if matches!(shell.verbosity(), crate::core::shell::Verbosity::Quiet) {
+ return Ok(());
+ }
+
+ let mut message = String::new();
+ write!(message, "{}", dep.name)?;
+ match dep.source() {
+ Some(Source::Registry(src)) => {
+ if src.version.chars().next().unwrap_or('0').is_ascii_digit() {
+ write!(message, " v{}", src.version)?;
+ } else {
+ write!(message, " {}", src.version)?;
+ }
+ }
+ Some(Source::Path(_)) => {
+ write!(message, " (local)")?;
+ }
+ Some(Source::Git(_)) => {
+ write!(message, " (git)")?;
+ }
+ Some(Source::Workspace(_)) => {
+ write!(message, " (workspace)")?;
+ }
+ None => {}
+ }
+ write!(message, " to")?;
+ if dep.optional().unwrap_or(false) {
+ write!(message, " optional")?;
+ }
+ let section = if section.len() == 1 {
+ section[0].clone()
+ } else {
+ format!("{} for target `{}`", &section[2], &section[1])
+ };
+ write!(message, " {section}")?;
+ write!(message, ".")?;
+ shell.status("Adding", message)
+}
+
+fn print_dep_table_msg(shell: &mut Shell, dep: &DependencyUI) -> CargoResult<()> {
+ if matches!(shell.verbosity(), crate::core::shell::Verbosity::Quiet) {
+ return Ok(());
+ }
+ let (activated, deactivated) = dep.features();
+ if !activated.is_empty() || !deactivated.is_empty() {
+ let prefix = format!("{:>13}", " ");
+ let suffix = if let Some(version) = &dep.available_version {
+ let mut version = version.clone();
+ version.build = Default::default();
+ let version = version.to_string();
+ // Avoid displaying the version if it will visually look like the version req that we
+ // showed earlier
+ let version_req = dep
+ .version()
+ .and_then(|v| semver::VersionReq::parse(v).ok())
+ .and_then(|v| precise_version(&v));
+ if version_req.as_deref() != Some(version.as_str()) {
+ format!(" as of v{version}")
+ } else {
+ "".to_owned()
+ }
+ } else {
+ "".to_owned()
+ };
+ shell.write_stderr(
+ format_args!("{}Features{}:\n", prefix, suffix),
+ &ColorSpec::new(),
+ )?;
+ for feat in activated {
+ shell.write_stderr(&prefix, &ColorSpec::new())?;
+ shell.write_stderr('+', &ColorSpec::new().set_bold(true).set_fg(Some(Green)))?;
+ shell.write_stderr(format_args!(" {}\n", feat), &ColorSpec::new())?;
+ }
+ for feat in deactivated {
+ shell.write_stderr(&prefix, &ColorSpec::new())?;
+ shell.write_stderr('-', &ColorSpec::new().set_bold(true).set_fg(Some(Red)))?;
+ shell.write_stderr(format_args!(" {}\n", feat), &ColorSpec::new())?;
+ }
+ }
+
+ Ok(())
+}
+
+// Based on Iterator::is_sorted from nightly std; remove in favor of that when stabilized.
+fn is_sorted(mut it: impl Iterator<Item = impl PartialOrd>) -> bool {
+ let mut last = match it.next() {
+ Some(e) => e,
+ None => return true,
+ };
+
+ for curr in it {
+ if curr < last {
+ return false;
+ }
+ last = curr;
+ }
+
+ true
+}
+
+fn find_workspace_dep(toml_key: &str, root_manifest: &Path) -> CargoResult<Dependency> {
+ let manifest = LocalManifest::try_new(root_manifest)?;
+ let manifest = manifest
+ .data
+ .as_item()
+ .as_table_like()
+ .context("could not make `manifest.data` into a table")?;
+ let workspace = manifest
+ .get("workspace")
+ .context("could not find `workspace`")?
+ .as_table_like()
+ .context("could not make `manifest.data.workspace` into a table")?;
+ let dependencies = workspace
+ .get("dependencies")
+ .context("could not find `dependencies` table in `workspace`")?
+ .as_table_like()
+ .context("could not make `dependencies` into a table")?;
+ let dep_item = dependencies.get(toml_key).context(format!(
+ "could not find {} in `workspace.dependencies`",
+ toml_key
+ ))?;
+ Dependency::from_toml(root_manifest.parent().unwrap(), toml_key, dep_item)
+}
+
+/// Convert a `semver::VersionReq` into a rendered `semver::Version` if all fields are fully
+/// specified.
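+///
+/// Illustrative examples: `=1.2.3`, `~1.2.3`, and `^1.2.3` all render as `1.2.3`, while `1.2`
+/// yields `None` because the patch level is not specified.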
+fn precise_version(version_req: &semver::VersionReq) -> Option<String> {
+ version_req
+ .comparators
+ .iter()
+ .filter(|c| {
+ matches!(
+ c.op,
+ // Only ops we can determine a precise version from
+ semver::Op::Exact
+ | semver::Op::GreaterEq
+ | semver::Op::LessEq
+ | semver::Op::Tilde
+ | semver::Op::Caret
+ | semver::Op::Wildcard
+ )
+ })
+ .filter_map(|c| {
+ // Only do it when full precision is specified
+ c.minor.and_then(|minor| {
+ c.patch.map(|patch| semver::Version {
+ major: c.major,
+ minor,
+ patch,
+ pre: c.pre.clone(),
+ build: Default::default(),
+ })
+ })
+ })
+ .max()
+ .map(|v| v.to_string())
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_clean.rs b/src/tools/cargo/src/cargo/ops/cargo_clean.rs
new file mode 100644
index 000000000..b3e1bf483
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_clean.rs
@@ -0,0 +1,401 @@
+use crate::core::compiler::{CompileKind, CompileMode, Layout, RustcTargetData};
+use crate::core::profiles::Profiles;
+use crate::core::{PackageIdSpec, TargetKind, Workspace};
+use crate::ops;
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+use crate::util::lev_distance;
+use crate::util::{Config, Progress, ProgressStyle};
+
+use anyhow::Context as _;
+use cargo_util::paths;
+use std::fs;
+use std::path::Path;
+
+pub struct CleanOptions<'a> {
+ pub config: &'a Config,
+ /// A list of packages to clean. If empty, everything is cleaned.
+ pub spec: Vec<String>,
+    /// The target arch triples to clean; cleans for the host arch if empty
+ pub targets: Vec<String>,
+    /// Whether a build profile was explicitly specified on the command line
+ pub profile_specified: bool,
+    /// The build profile whose directory should be cleaned
+ pub requested_profile: InternedString,
+ /// Whether to just clean the doc directory
+ pub doc: bool,
+}
+
+/// Cleans the package's build artifacts.
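+///
+/// A minimal sketch of a full clean (assuming a `ws: Workspace` is already constructed; not a
+/// doctest, and error handling is elided):
+///
+/// ```ignore
+/// let opts = CleanOptions {
+///     config: ws.config(),
+///     spec: vec![],
+///     targets: vec![],
+///     profile_specified: false,
+///     requested_profile: InternedString::new("dev"),
+///     doc: false,
+/// };
+/// clean(&ws, &opts)?;
+/// ```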
+pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
+ let mut target_dir = ws.target_dir();
+ let config = ws.config();
+
+ // If the doc option is set, we just want to delete the doc directory.
+ if opts.doc {
+ target_dir = target_dir.join("doc");
+ return clean_entire_folder(&target_dir.into_path_unlocked(), config);
+ }
+
+ let profiles = Profiles::new(ws, opts.requested_profile)?;
+
+ if opts.profile_specified {
+ // After parsing profiles we know the dir-name of the profile, if a profile
+ // was passed from the command line. If so, delete only the directory of
+ // that profile.
+ let dir_name = profiles.get_dir_name();
+ target_dir = target_dir.join(dir_name);
+ }
+
+ // If we have a spec, then we need to delete some packages, otherwise, just
+ // remove the whole target directory and be done with it!
+ //
+ // Note that we don't bother grabbing a lock here as we're just going to
+ // blow it all away anyway.
+ if opts.spec.is_empty() {
+ return clean_entire_folder(&target_dir.into_path_unlocked(), config);
+ }
+
+ // Clean specific packages.
+ let requested_kinds = CompileKind::from_requested_targets(config, &opts.targets)?;
+ let target_data = RustcTargetData::new(ws, &requested_kinds)?;
+ let (pkg_set, resolve) = ops::resolve_ws(ws)?;
+ let prof_dir_name = profiles.get_dir_name();
+ let host_layout = Layout::new(ws, None, &prof_dir_name)?;
+ // Convert requested kinds to a Vec of layouts.
+ let target_layouts: Vec<(CompileKind, Layout)> = requested_kinds
+ .into_iter()
+ .filter_map(|kind| match kind {
+ CompileKind::Target(target) => match Layout::new(ws, Some(target), &prof_dir_name) {
+ Ok(layout) => Some(Ok((kind, layout))),
+ Err(e) => Some(Err(e)),
+ },
+ CompileKind::Host => None,
+ })
+ .collect::<CargoResult<_>>()?;
+ // A Vec of layouts. This is a little convoluted because there can only be
+ // one host_layout.
+ let layouts = if opts.targets.is_empty() {
+ vec![(CompileKind::Host, &host_layout)]
+ } else {
+ target_layouts
+ .iter()
+ .map(|(kind, layout)| (*kind, layout))
+ .collect()
+ };
+ // Create a Vec that also includes the host for things that need to clean both.
+ let layouts_with_host: Vec<(CompileKind, &Layout)> =
+ std::iter::once((CompileKind::Host, &host_layout))
+ .chain(layouts.iter().map(|(k, l)| (*k, *l)))
+ .collect();
+
+ // Cleaning individual rustdoc crates is currently not supported.
+ // For example, the search index would need to be rebuilt to fully
+ // remove it (otherwise you're left with lots of broken links).
+ // Doc tests produce no output.
+
+ // Get Packages for the specified specs.
+ let mut pkg_ids = Vec::new();
+ for spec_str in opts.spec.iter() {
+ // Translate the spec to a Package.
+ let spec = PackageIdSpec::parse(spec_str)?;
+ if spec.version().is_some() {
+ config.shell().warn(&format!(
+ "version qualifier in `-p {}` is ignored, \
+ cleaning all versions of `{}` found",
+ spec_str,
+ spec.name()
+ ))?;
+ }
+ if spec.url().is_some() {
+ config.shell().warn(&format!(
+ "url qualifier in `-p {}` ignored, \
+ cleaning all versions of `{}` found",
+ spec_str,
+ spec.name()
+ ))?;
+ }
+ let matches: Vec<_> = resolve.iter().filter(|id| spec.matches(*id)).collect();
+ if matches.is_empty() {
+ let mut suggestion = String::new();
+ suggestion.push_str(&lev_distance::closest_msg(
+ &spec.name(),
+ resolve.iter(),
+ |id| id.name().as_str(),
+ ));
+ anyhow::bail!(
+ "package ID specification `{}` did not match any packages{}",
+ spec,
+ suggestion
+ );
+ }
+ pkg_ids.extend(matches);
+ }
+ let packages = pkg_set.get_many(pkg_ids)?;
+
+ let mut progress = CleaningPackagesBar::new(config, packages.len());
+ for pkg in packages {
+ let pkg_dir = format!("{}-*", pkg.name());
+ progress.on_cleaning_package(&pkg.name())?;
+
+ // Clean fingerprints.
+ for (_, layout) in &layouts_with_host {
+ let dir = escape_glob_path(layout.fingerprint())?;
+ rm_rf_package_glob_containing_hash(
+ &pkg.name(),
+ &Path::new(&dir).join(&pkg_dir),
+ config,
+ &mut progress,
+ )?;
+ }
+
+ for target in pkg.targets() {
+ if target.is_custom_build() {
+ // Get both the build_script_build and the output directory.
+ for (_, layout) in &layouts_with_host {
+ let dir = escape_glob_path(layout.build())?;
+ rm_rf_package_glob_containing_hash(
+ &pkg.name(),
+ &Path::new(&dir).join(&pkg_dir),
+ config,
+ &mut progress,
+ )?;
+ }
+ continue;
+ }
+ let crate_name = target.crate_name();
+ for &mode in &[
+ CompileMode::Build,
+ CompileMode::Test,
+ CompileMode::Check { test: false },
+ ] {
+ for (compile_kind, layout) in &layouts {
+ let triple = target_data.short_name(compile_kind);
+
+ let (file_types, _unsupported) = target_data
+ .info(*compile_kind)
+ .rustc_outputs(mode, target.kind(), triple)?;
+ let (dir, uplift_dir) = match target.kind() {
+ TargetKind::ExampleBin | TargetKind::ExampleLib(..) => {
+ (layout.examples(), Some(layout.examples()))
+ }
+ // Tests/benchmarks are never uplifted.
+ TargetKind::Test | TargetKind::Bench => (layout.deps(), None),
+ _ => (layout.deps(), Some(layout.dest())),
+ };
+ for file_type in file_types {
+ // Some files include a hash in the filename, some don't.
+ let hashed_name = file_type.output_filename(target, Some("*"));
+ let unhashed_name = file_type.output_filename(target, None);
+ let dir_glob = escape_glob_path(dir)?;
+ let dir_glob = Path::new(&dir_glob);
+
+ rm_rf_glob(&dir_glob.join(&hashed_name), config, &mut progress)?;
+ rm_rf(&dir.join(&unhashed_name), config, &mut progress)?;
+ // Remove dep-info file generated by rustc. It is not tracked in
+ // file_types. It does not have a prefix.
+ let hashed_dep_info = dir_glob.join(format!("{}-*.d", crate_name));
+ rm_rf_glob(&hashed_dep_info, config, &mut progress)?;
+ let unhashed_dep_info = dir.join(format!("{}.d", crate_name));
+ rm_rf(&unhashed_dep_info, config, &mut progress)?;
+ // Remove split-debuginfo files generated by rustc.
+ let split_debuginfo_obj = dir_glob.join(format!("{}.*.o", crate_name));
+ rm_rf_glob(&split_debuginfo_obj, config, &mut progress)?;
+ let split_debuginfo_dwo = dir_glob.join(format!("{}.*.dwo", crate_name));
+ rm_rf_glob(&split_debuginfo_dwo, config, &mut progress)?;
+ let split_debuginfo_dwp = dir_glob.join(format!("{}.*.dwp", crate_name));
+ rm_rf_glob(&split_debuginfo_dwp, config, &mut progress)?;
+
+ // Remove the uplifted copy.
+ if let Some(uplift_dir) = uplift_dir {
+ let uplifted_path = uplift_dir.join(file_type.uplift_filename(target));
+ rm_rf(&uplifted_path, config, &mut progress)?;
+ // Dep-info generated by Cargo itself.
+ let dep_info = uplifted_path.with_extension("d");
+ rm_rf(&dep_info, config, &mut progress)?;
+ }
+ }
+ // TODO: what to do about build_script_build?
+ let dir = escape_glob_path(layout.incremental())?;
+ let incremental = Path::new(&dir).join(format!("{}-*", crate_name));
+ rm_rf_glob(&incremental, config, &mut progress)?;
+ }
+ }
+ }
+ }
+
+ Ok(())
+}
+
+fn escape_glob_path(pattern: &Path) -> CargoResult<String> {
+ let pattern = pattern
+ .to_str()
+ .ok_or_else(|| anyhow::anyhow!("expected utf-8 path"))?;
+ Ok(glob::Pattern::escape(pattern))
+}
+
+/// Glob-removes artifacts for the provided `package`.
+///
+/// Makes sure the artifact is for `package` and not another crate whose name is merely
+/// prefixed by `package`, by recovering the original name with the trailing hash (and any
+/// extension) stripped.
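+///
+/// For instance (names are illustrative), the glob `foo-*` in a fingerprint directory also
+/// matches entries for a crate named `foo-bar`; splitting each file name on its last `-` and
+/// comparing the prefix against `package` filters those out.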
+fn rm_rf_package_glob_containing_hash(
+ package: &str,
+ pattern: &Path,
+ config: &Config,
+ progress: &mut dyn CleaningProgressBar,
+) -> CargoResult<()> {
+ // TODO: Display utf8 warning to user? Or switch to globset?
+ let pattern = pattern
+ .to_str()
+ .ok_or_else(|| anyhow::anyhow!("expected utf-8 path"))?;
+ for path in glob::glob(pattern)? {
+ let path = path?;
+
+ let pkg_name = path
+ .file_name()
+ .and_then(std::ffi::OsStr::to_str)
+ .and_then(|artifact| artifact.rsplit_once('-'))
+ .ok_or_else(|| anyhow::anyhow!("expected utf-8 path"))?
+ .0;
+
+ if pkg_name != package {
+ continue;
+ }
+
+ rm_rf(&path, config, progress)?;
+ }
+ Ok(())
+}
+
+fn rm_rf_glob(
+ pattern: &Path,
+ config: &Config,
+ progress: &mut dyn CleaningProgressBar,
+) -> CargoResult<()> {
+ // TODO: Display utf8 warning to user? Or switch to globset?
+ let pattern = pattern
+ .to_str()
+ .ok_or_else(|| anyhow::anyhow!("expected utf-8 path"))?;
+ for path in glob::glob(pattern)? {
+ rm_rf(&path?, config, progress)?;
+ }
+ Ok(())
+}
+
+fn rm_rf(path: &Path, config: &Config, progress: &mut dyn CleaningProgressBar) -> CargoResult<()> {
+ if fs::symlink_metadata(path).is_err() {
+ return Ok(());
+ }
+
+ config
+ .shell()
+ .verbose(|shell| shell.status("Removing", path.display()))?;
+ progress.display_now()?;
+
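+    // `contents_first` yields children before their parent directories, so every directory is
+    // already empty by the time `remove_dir` is called on it.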
+ for entry in walkdir::WalkDir::new(path).contents_first(true) {
+ let entry = entry?;
+ progress.on_clean()?;
+ if entry.file_type().is_dir() {
+ paths::remove_dir(entry.path()).with_context(|| "could not remove build directory")?;
+ } else {
+ paths::remove_file(entry.path()).with_context(|| "failed to remove build artifact")?;
+ }
+ }
+
+ Ok(())
+}
+
+fn clean_entire_folder(path: &Path, config: &Config) -> CargoResult<()> {
+ let num_paths = walkdir::WalkDir::new(path).into_iter().count();
+ let mut progress = CleaningFolderBar::new(config, num_paths);
+ rm_rf(path, config, &mut progress)
+}
+
+trait CleaningProgressBar {
+ fn display_now(&mut self) -> CargoResult<()>;
+ fn on_clean(&mut self) -> CargoResult<()>;
+}
+
+struct CleaningFolderBar<'cfg> {
+ bar: Progress<'cfg>,
+ max: usize,
+ cur: usize,
+}
+
+impl<'cfg> CleaningFolderBar<'cfg> {
+ fn new(cfg: &'cfg Config, max: usize) -> Self {
+ Self {
+ bar: Progress::with_style("Cleaning", ProgressStyle::Percentage, cfg),
+ max,
+ cur: 0,
+ }
+ }
+
+ fn cur_progress(&self) -> usize {
+ std::cmp::min(self.cur, self.max)
+ }
+}
+
+impl<'cfg> CleaningProgressBar for CleaningFolderBar<'cfg> {
+ fn display_now(&mut self) -> CargoResult<()> {
+ self.bar.tick_now(self.cur_progress(), self.max, "")
+ }
+
+ fn on_clean(&mut self) -> CargoResult<()> {
+ self.cur += 1;
+ self.bar.tick(self.cur_progress(), self.max, "")
+ }
+}
+
+struct CleaningPackagesBar<'cfg> {
+ bar: Progress<'cfg>,
+ max: usize,
+ cur: usize,
+ num_files_folders_cleaned: usize,
+ package_being_cleaned: String,
+}
+
+impl<'cfg> CleaningPackagesBar<'cfg> {
+ fn new(cfg: &'cfg Config, max: usize) -> Self {
+ Self {
+ bar: Progress::with_style("Cleaning", ProgressStyle::Ratio, cfg),
+ max,
+ cur: 0,
+ num_files_folders_cleaned: 0,
+ package_being_cleaned: String::new(),
+ }
+ }
+
+ fn on_cleaning_package(&mut self, package: &str) -> CargoResult<()> {
+ self.cur += 1;
+ self.package_being_cleaned = String::from(package);
+ self.bar
+ .tick(self.cur_progress(), self.max, &self.format_message())
+ }
+
+ fn cur_progress(&self) -> usize {
+ std::cmp::min(self.cur, self.max)
+ }
+
+ fn format_message(&self) -> String {
+ format!(
+ ": {}, {} files/folders cleaned",
+ self.package_being_cleaned, self.num_files_folders_cleaned
+ )
+ }
+}
+
+impl<'cfg> CleaningProgressBar for CleaningPackagesBar<'cfg> {
+ fn display_now(&mut self) -> CargoResult<()> {
+ self.bar
+ .tick_now(self.cur_progress(), self.max, &self.format_message())
+ }
+
+ fn on_clean(&mut self) -> CargoResult<()> {
+ self.bar
+ .tick(self.cur_progress(), self.max, &self.format_message())?;
+ self.num_files_folders_cleaned += 1;
+ Ok(())
+ }
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_compile/compile_filter.rs b/src/tools/cargo/src/cargo/ops/cargo_compile/compile_filter.rs
new file mode 100644
index 000000000..d5e326dfe
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_compile/compile_filter.rs
@@ -0,0 +1,309 @@
+//! Filters and their rules to select which Cargo targets will be built.
+
+use crate::core::compiler::CompileMode;
+
+use crate::core::{Target, TargetKind};
+use crate::util::restricted_names::is_glob_pattern;
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+/// Indicates whether or not the library target gets included.
+pub enum LibRule {
+ /// Include the library, fail if not present
+ True,
+ /// Include the library if present
+ Default,
+ /// Exclude the library
+ False,
+}
+
+#[derive(Debug, Clone)]
+/// Indicates which Cargo targets will be selected to be built.
+pub enum FilterRule {
+ /// All included.
+ All,
+ /// Just a subset of Cargo targets based on names given.
+ Just(Vec<String>),
+}
+
+/// Filter to apply to the root package to select which Cargo targets will be built.
+/// (examples, bins, benches, tests, ...)
+///
+/// The actual filter process happens inside [`generate_root_units`].
+///
+/// Not to be confused with [`Packages`], which opts in packages to be built.
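+///
+/// As a rough illustration (the mapping shown is an assumption about typical CLI usage, not
+/// taken from this module): a plain `cargo build` corresponds to [`CompileFilter::Default`],
+/// while `cargo build --lib --bin foo` corresponds to [`CompileFilter::Only`] with
+/// `lib: LibRule::True` and `bins: FilterRule::Just(vec!["foo".to_string()])`.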
+///
+/// [`generate_root_units`]: super::UnitGenerator::generate_root_units
+/// [`Packages`]: crate::ops::Packages
+#[derive(Debug, Clone)]
+pub enum CompileFilter {
+ /// The default set of Cargo targets.
+ Default {
+ /// Flag whether targets can be safely skipped when required-features are not satisfied.
+ required_features_filterable: bool,
+ },
+ /// Only includes a subset of all Cargo targets.
+ Only {
+ /// Include all Cargo targets.
+ all_targets: bool,
+ lib: LibRule,
+ bins: FilterRule,
+ examples: FilterRule,
+ tests: FilterRule,
+ benches: FilterRule,
+ },
+}
+
+impl FilterRule {
+ pub fn new(targets: Vec<String>, all: bool) -> FilterRule {
+ if all {
+ FilterRule::All
+ } else {
+ FilterRule::Just(targets)
+ }
+ }
+
+ /// Creates a filter with no rule.
+ ///
+    /// In the current Cargo implementation, a filter without a rule implies that
+    /// Cargo will follow its default behaviour to filter targets.
+ pub fn none() -> FilterRule {
+ FilterRule::Just(Vec::new())
+ }
+
+ /// Checks if a target definition matches this filter rule.
+ fn matches(&self, target: &Target) -> bool {
+ match *self {
+ FilterRule::All => true,
+ FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()),
+ }
+ }
+
+    /// Checks if a filter is specific.
+    ///
+    /// Only filters without rules are considered not specific.
+ fn is_specific(&self) -> bool {
+ match *self {
+ FilterRule::All => true,
+ FilterRule::Just(ref targets) => !targets.is_empty(),
+ }
+ }
+
+ /// Checks if any specified target name contains glob patterns.
+ pub(crate) fn contains_glob_patterns(&self) -> bool {
+ match self {
+ FilterRule::All => false,
+ FilterRule::Just(targets) => targets.iter().any(is_glob_pattern),
+ }
+ }
+}
+
+impl CompileFilter {
+ /// Constructs a filter from raw command line arguments.
+ pub fn from_raw_arguments(
+ lib_only: bool,
+ bins: Vec<String>,
+ all_bins: bool,
+ tsts: Vec<String>,
+ all_tsts: bool,
+ exms: Vec<String>,
+ all_exms: bool,
+ bens: Vec<String>,
+ all_bens: bool,
+ all_targets: bool,
+ ) -> CompileFilter {
+ if all_targets {
+ return CompileFilter::new_all_targets();
+ }
+ let rule_lib = if lib_only {
+ LibRule::True
+ } else {
+ LibRule::False
+ };
+ let rule_bins = FilterRule::new(bins, all_bins);
+ let rule_tsts = FilterRule::new(tsts, all_tsts);
+ let rule_exms = FilterRule::new(exms, all_exms);
+ let rule_bens = FilterRule::new(bens, all_bens);
+
+ CompileFilter::new(rule_lib, rule_bins, rule_tsts, rule_exms, rule_bens)
+ }
+
+ /// Constructs a filter from underlying primitives.
+ pub fn new(
+ rule_lib: LibRule,
+ rule_bins: FilterRule,
+ rule_tsts: FilterRule,
+ rule_exms: FilterRule,
+ rule_bens: FilterRule,
+ ) -> CompileFilter {
+ if rule_lib == LibRule::True
+ || rule_bins.is_specific()
+ || rule_tsts.is_specific()
+ || rule_exms.is_specific()
+ || rule_bens.is_specific()
+ {
+ CompileFilter::Only {
+ all_targets: false,
+ lib: rule_lib,
+ bins: rule_bins,
+ examples: rule_exms,
+ benches: rule_bens,
+ tests: rule_tsts,
+ }
+ } else {
+ CompileFilter::Default {
+ required_features_filterable: true,
+ }
+ }
+ }
+
+ /// Constructs a filter that includes all targets.
+ pub fn new_all_targets() -> CompileFilter {
+ CompileFilter::Only {
+ all_targets: true,
+ lib: LibRule::Default,
+ bins: FilterRule::All,
+ examples: FilterRule::All,
+ benches: FilterRule::All,
+ tests: FilterRule::All,
+ }
+ }
+
+ /// Constructs a filter that includes all test targets.
+ ///
+    /// Unlike [`CompileFilter::Default`], this function only recognizes test
+    /// targets, which means cargo might compile all targets with the `tested`
+    /// flag on, whereas [`CompileFilter::Default`] may also include additional
+    /// example targets to ensure they can be compiled.
+ ///
+ /// Note that the actual behavior is subject to [`filter_default_targets`]
+ /// and [`generate_root_units`] though.
+ ///
+ /// [`generate_root_units`]: super::UnitGenerator::generate_root_units
+ /// [`filter_default_targets`]: super::UnitGenerator::filter_default_targets
+ pub fn all_test_targets() -> Self {
+ Self::Only {
+ all_targets: false,
+ lib: LibRule::Default,
+ bins: FilterRule::none(),
+ examples: FilterRule::none(),
+ tests: FilterRule::All,
+ benches: FilterRule::none(),
+ }
+ }
+
+ /// Constructs a filter that includes lib target only.
+ pub fn lib_only() -> Self {
+ Self::Only {
+ all_targets: false,
+ lib: LibRule::True,
+ bins: FilterRule::none(),
+ examples: FilterRule::none(),
+ tests: FilterRule::none(),
+ benches: FilterRule::none(),
+ }
+ }
+
+ /// Constructs a filter that includes the given binary. No more. No less.
+ pub fn single_bin(bin: String) -> Self {
+ Self::Only {
+ all_targets: false,
+ lib: LibRule::False,
+ bins: FilterRule::new(vec![bin], false),
+ examples: FilterRule::none(),
+ tests: FilterRule::none(),
+ benches: FilterRule::none(),
+ }
+ }
+
+ /// Indicates if Cargo needs to build any dev dependency.
+ pub fn need_dev_deps(&self, mode: CompileMode) -> bool {
+ match mode {
+ CompileMode::Test | CompileMode::Doctest | CompileMode::Bench => true,
+ CompileMode::Check { test: true } => true,
+ CompileMode::Build
+ | CompileMode::Doc { .. }
+ | CompileMode::Docscrape
+ | CompileMode::Check { test: false } => match *self {
+ CompileFilter::Default { .. } => false,
+ CompileFilter::Only {
+ ref examples,
+ ref tests,
+ ref benches,
+ ..
+ } => examples.is_specific() || tests.is_specific() || benches.is_specific(),
+ },
+ CompileMode::RunCustomBuild => panic!("Invalid mode"),
+ }
+ }
+
+    /// Selects targets for "cargo run". For the logic used to select targets for
+    /// other subcommands, see [`generate_root_units`] and [`filter_default_targets`].
+ ///
+ /// [`generate_root_units`]: super::UnitGenerator::generate_root_units
+ /// [`filter_default_targets`]: super::UnitGenerator::filter_default_targets
+ pub fn target_run(&self, target: &Target) -> bool {
+ match *self {
+ CompileFilter::Default { .. } => true,
+ CompileFilter::Only {
+ ref lib,
+ ref bins,
+ ref examples,
+ ref tests,
+ ref benches,
+ ..
+ } => {
+ let rule = match *target.kind() {
+ TargetKind::Bin => bins,
+ TargetKind::Test => tests,
+ TargetKind::Bench => benches,
+ TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples,
+ TargetKind::Lib(..) => {
+ return match *lib {
+ LibRule::True => true,
+ LibRule::Default => true,
+ LibRule::False => false,
+ };
+ }
+ TargetKind::CustomBuild => return false,
+ };
+ rule.matches(target)
+ }
+ }
+ }
+
+ pub fn is_specific(&self) -> bool {
+ match *self {
+ CompileFilter::Default { .. } => false,
+ CompileFilter::Only { .. } => true,
+ }
+ }
+
+ pub fn is_all_targets(&self) -> bool {
+ matches!(
+ *self,
+ CompileFilter::Only {
+ all_targets: true,
+ ..
+ }
+ )
+ }
+
+ /// Checks if any specified target name contains glob patterns.
+ pub(crate) fn contains_glob_patterns(&self) -> bool {
+ match self {
+ CompileFilter::Default { .. } => false,
+ CompileFilter::Only {
+ bins,
+ examples,
+ tests,
+ benches,
+ ..
+ } => {
+ bins.contains_glob_patterns()
+ || examples.contains_glob_patterns()
+ || tests.contains_glob_patterns()
+ || benches.contains_glob_patterns()
+ }
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs b/src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs
new file mode 100644
index 000000000..3b6043d4f
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs
@@ -0,0 +1,924 @@
+//! # The Cargo "compile" operation
+//!
+//! This module contains the entry point for starting the compilation process
+//! for commands like `build`, `test`, `doc`, `rustc`, etc.
+//!
+//! The [`compile`] function will do all the work to compile a workspace. A
+//! rough outline is:
+//!
+//! 1. Resolve the dependency graph (see [`ops::resolve`]).
+//! 2. Download any packages needed (see [`PackageSet`](crate::core::PackageSet)).
+//! 3. Generate a list of top-level "units" of work for the targets the user
+//! requested on the command-line. Each [`Unit`] corresponds to a compiler
+//! invocation. This is done in this module ([`UnitGenerator::generate_root_units`]).
+//! 4. Starting from the root [`Unit`]s, generate the [`UnitGraph`] by walking the dependency graph
+//! from the resolver. See also [`unit_dependencies`].
+//! 5. Construct the [`BuildContext`] with all of the information collected so
+//! far. This is the end of the "front end" of compilation.
+//! 6. Create a [`Context`] which coordinates the compilation process
+//! and will perform the following steps:
+//! 1. Prepare the `target` directory (see [`Layout`]).
+//! 2. Create a [`JobQueue`]. The queue checks the
+//! fingerprint of each `Unit` to determine if it should run or be
+//! skipped.
+//! 3. Execute the queue via [`drain_the_queue`]. Each leaf in the queue's dependency graph is
+//! executed, and then removed from the graph when finished. This repeats until the queue is
+//! empty. Note that this is the only point in cargo that currently uses threads.
+//! 7. The result of the compilation is stored in the [`Compilation`] struct. This can be used for
+//! various things, such as running tests after the compilation has finished.
+//!
+//! **Note**: "target" inside this module generally refers to ["Cargo Target"],
+//! which corresponds to an artifact that will be built in a package. It is not
+//! to be confused with a target-triple or target architecture.
+//!
+//! [`unit_dependencies`]: crate::core::compiler::unit_dependencies
+//! [`Layout`]: crate::core::compiler::Layout
+//! [`JobQueue`]: crate::core::compiler::job_queue
+//! [`drain_the_queue`]: crate::core::compiler::job_queue
+//! ["Cargo Target"]: https://doc.rust-lang.org/nightly/cargo/reference/cargo-targets.html
+
+use std::collections::{HashMap, HashSet};
+use std::hash::{Hash, Hasher};
+use std::sync::Arc;
+
+use crate::core::compiler::unit_dependencies::build_unit_dependencies;
+use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph};
+use crate::core::compiler::{standard_lib, CrateType, TargetInfo};
+use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
+use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit};
+use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
+use crate::core::profiles::Profiles;
+use crate::core::resolver::features::{self, CliFeatures, FeaturesFor};
+use crate::core::resolver::{HasDevUnits, Resolve};
+use crate::core::{PackageId, PackageSet, SourceId, TargetKind, Workspace};
+use crate::drop_println;
+use crate::ops;
+use crate::ops::resolve::WorkspaceResolve;
+use crate::util::config::Config;
+use crate::util::interning::InternedString;
+use crate::util::{profile, CargoResult, StableHasher};
+
+mod compile_filter;
+pub use compile_filter::{CompileFilter, FilterRule, LibRule};
+
+mod unit_generator;
+use unit_generator::UnitGenerator;
+
+mod packages;
+
+pub use packages::Packages;
+
+/// Contains information about how a package should be compiled.
+///
+/// Note on distinction between `CompileOptions` and [`BuildConfig`]:
+/// `BuildConfig` contains values that need to be retained after
+/// [`BuildContext`] is created. The other fields are no longer necessary. Think
+/// of it as `CompileOptions` are high-level settings requested on the
+/// command-line, and `BuildConfig` are low-level settings for actually
+/// driving `rustc`.
+#[derive(Debug, Clone)]
+pub struct CompileOptions {
+ /// Configuration information for a rustc build
+ pub build_config: BuildConfig,
+ /// Feature flags requested by the user.
+ pub cli_features: CliFeatures,
+ /// A set of packages to build.
+ pub spec: Packages,
+ /// Filter to apply to the root package to select which targets will be
+ /// built.
+ pub filter: CompileFilter,
+ /// Extra arguments to be passed to rustdoc (single target only)
+ pub target_rustdoc_args: Option<Vec<String>>,
+    /// The specified target will be compiled with all the available arguments;
+    /// note that this only accounts for the *final* invocation of rustc
+ pub target_rustc_args: Option<Vec<String>>,
+ /// Crate types to be passed to rustc (single target only)
+ pub target_rustc_crate_types: Option<Vec<String>>,
+    /// Whether the `--document-private-items` flag was specified and should
+ /// be forwarded to `rustdoc`.
+ pub rustdoc_document_private_items: bool,
+ /// Whether the build process should check the minimum Rust version
+ /// defined in the cargo metadata for a crate.
+ pub honor_rust_version: bool,
+}
+
+impl CompileOptions {
+ pub fn new(config: &Config, mode: CompileMode) -> CargoResult<CompileOptions> {
+ let jobs = None;
+ let keep_going = false;
+ Ok(CompileOptions {
+ build_config: BuildConfig::new(config, jobs, keep_going, &[], mode)?,
+ cli_features: CliFeatures::new_all(false),
+ spec: ops::Packages::Packages(Vec::new()),
+ filter: CompileFilter::Default {
+ required_features_filterable: false,
+ },
+ target_rustdoc_args: None,
+ target_rustc_args: None,
+ target_rustc_crate_types: None,
+ rustdoc_document_private_items: false,
+ honor_rust_version: true,
+ })
+ }
+}
+
+/// Compiles!
+///
+/// This uses the [`DefaultExecutor`]. To use a custom [`Executor`], see [`compile_with_exec`].
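+///
+/// A minimal usage sketch (assuming a `manifest_path` and `config` are already in hand; not a
+/// doctest, and error handling is elided):
+///
+/// ```ignore
+/// let ws = Workspace::new(&manifest_path, &config)?;
+/// let options = CompileOptions::new(&config, CompileMode::Build)?;
+/// let compilation = ops::compile(&ws, &options)?;
+/// ```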
+pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>> {
+ let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
+ compile_with_exec(ws, options, &exec)
+}
+
+/// Like [`compile`] but allows specifying a custom [`Executor`]
+/// that will be able to intercept build calls and add custom logic.
+///
+/// [`compile`] uses [`DefaultExecutor`] which just passes calls through.
+pub fn compile_with_exec<'a>(
+ ws: &Workspace<'a>,
+ options: &CompileOptions,
+ exec: &Arc<dyn Executor>,
+) -> CargoResult<Compilation<'a>> {
+ ws.emit_warnings()?;
+ compile_ws(ws, options, exec)
+}
+
+/// Like [`compile_with_exec`] but without warnings from manifest parsing.
+pub fn compile_ws<'a>(
+ ws: &Workspace<'a>,
+ options: &CompileOptions,
+ exec: &Arc<dyn Executor>,
+) -> CargoResult<Compilation<'a>> {
+ let interner = UnitInterner::new();
+ let bcx = create_bcx(ws, options, &interner)?;
+ if options.build_config.unit_graph {
+ unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.config())?;
+ return Compilation::new(&bcx);
+ }
+ let _p = profile::start("compiling");
+ let cx = Context::new(&bcx)?;
+ cx.compile(exec)
+}
+
+/// Executes `rustc --print <VALUE>`.
+///
+/// * `print_opt_value` is the VALUE passed through.
+pub fn print<'a>(
+ ws: &Workspace<'a>,
+ options: &CompileOptions,
+ print_opt_value: &str,
+) -> CargoResult<()> {
+ let CompileOptions {
+ ref build_config,
+ ref target_rustc_args,
+ ..
+ } = *options;
+ let config = ws.config();
+ let rustc = config.load_global_rustc(Some(ws))?;
+ for (index, kind) in build_config.requested_kinds.iter().enumerate() {
+ if index != 0 {
+ drop_println!(config);
+ }
+ let target_info = TargetInfo::new(config, &build_config.requested_kinds, &rustc, *kind)?;
+ let mut process = rustc.process();
+ process.args(&target_info.rustflags);
+ if let Some(args) = target_rustc_args {
+ process.args(args);
+ }
+ if let CompileKind::Target(t) = kind {
+ process.arg("--target").arg(t.short_name());
+ }
+ process.arg("--print").arg(print_opt_value);
+ process.exec()?;
+ }
+ Ok(())
+}
+
+/// Prepares all required information for the actual compilation.
+///
+/// For how it works and what data it collects,
+/// please see the [module-level documentation](self).
+pub fn create_bcx<'a, 'cfg>(
+ ws: &'a Workspace<'cfg>,
+ options: &'a CompileOptions,
+ interner: &'a UnitInterner,
+) -> CargoResult<BuildContext<'a, 'cfg>> {
+ let CompileOptions {
+ ref build_config,
+ ref spec,
+ ref cli_features,
+ ref filter,
+ ref target_rustdoc_args,
+ ref target_rustc_args,
+ ref target_rustc_crate_types,
+ rustdoc_document_private_items,
+ honor_rust_version,
+ } = *options;
+ let config = ws.config();
+
+ // Perform some pre-flight validation.
+ match build_config.mode {
+ CompileMode::Test
+ | CompileMode::Build
+ | CompileMode::Check { .. }
+ | CompileMode::Bench
+ | CompileMode::RunCustomBuild => {
+ if ws.config().get_env("RUST_FLAGS").is_ok() {
+ config.shell().warn(
+ "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?",
+ )?;
+ }
+ }
+ CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::Docscrape => {
+ if ws.config().get_env("RUSTDOC_FLAGS").is_ok() {
+ config.shell().warn(
+ "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?"
+ )?;
+ }
+ }
+ }
+ config.validate_term_config()?;
+
+ let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?;
+
+ let specs = spec.to_package_id_specs(ws)?;
+ let has_dev_units = {
+        // Rustdoc itself doesn't need dev-dependencies. But to scrape examples from packages in
+        // the workspace, we need to include dev-dependencies if any of those packages need them,
+        // so that those packages can be scraped.
+ let any_pkg_has_scrape_enabled = ws
+ .members_with_features(&specs, cli_features)?
+ .iter()
+ .any(|(pkg, _)| {
+ pkg.targets()
+ .iter()
+ .any(|target| target.is_example() && target.doc_scrape_examples().is_enabled())
+ });
+
+ if filter.need_dev_deps(build_config.mode)
+ || (build_config.mode.is_doc() && any_pkg_has_scrape_enabled)
+ {
+ HasDevUnits::Yes
+ } else {
+ HasDevUnits::No
+ }
+ };
+ let resolve = ops::resolve_ws_with_opts(
+ ws,
+ &target_data,
+ &build_config.requested_kinds,
+ cli_features,
+ &specs,
+ has_dev_units,
+ crate::core::resolver::features::ForceAllTargets::No,
+ )?;
+ let WorkspaceResolve {
+ mut pkg_set,
+ workspace_resolve,
+ targeted_resolve: resolve,
+ resolved_features,
+ } = resolve;
+
+ let std_resolve_features = if let Some(crates) = &config.cli_unstable().build_std {
+ let (std_package_set, std_resolve, std_features) =
+ standard_lib::resolve_std(ws, &target_data, &build_config, crates)?;
+ pkg_set.add_set(std_package_set);
+ Some((std_resolve, std_features))
+ } else {
+ None
+ };
+
+ // Find the packages in the resolver that the user wants to build (those
+ // passed in with `-p` or the defaults from the workspace), and convert
+ // Vec<PackageIdSpec> to a Vec<PackageId>.
+ let to_build_ids = resolve.specs_to_ids(&specs)?;
+ // Now get the `Package` for each `PackageId`. This may trigger a download
+ // if the user specified `-p` for a dependency that is not downloaded.
+ // Dependencies will be downloaded during build_unit_dependencies.
+ let mut to_builds = pkg_set.get_many(to_build_ids)?;
+
+ // The ordering here affects some error messages coming out of cargo, so
+ // let's be test and CLI friendly by always printing in the same order if
+ // there's an error.
+ to_builds.sort_by_key(|p| p.package_id());
+
+ for pkg in to_builds.iter() {
+ pkg.manifest().print_teapot(config);
+
+ if build_config.mode.is_any_test()
+ && !ws.is_member(pkg)
+ && pkg.dependencies().iter().any(|dep| !dep.is_transitive())
+ {
+ anyhow::bail!(
+ "package `{}` cannot be tested because it requires dev-dependencies \
+ and is not a member of the workspace",
+ pkg.name()
+ );
+ }
+ }
+
+ let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
+ (&Some(ref args), _) => (Some(args.clone()), "rustc"),
+ (_, &Some(ref args)) => (Some(args.clone()), "rustdoc"),
+ _ => (None, ""),
+ };
+
+ if extra_args.is_some() && to_builds.len() != 1 {
+ panic!(
+ "`{}` should not accept multiple `-p` flags",
+ extra_args_name
+ );
+ }
+
+ let profiles = Profiles::new(ws, build_config.requested_profile)?;
+ profiles.validate_packages(
+ ws.profiles(),
+ &mut config.shell(),
+ workspace_resolve.as_ref().unwrap_or(&resolve),
+ )?;
+
+ // If `--target` has not been specified, then the unit graph is built
+ // assuming `--target $HOST` was specified. See
+ // `rebuild_unit_graph_shared` for more on why this is done.
+ let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?);
+ let explicit_host_kinds: Vec<_> = build_config
+ .requested_kinds
+ .iter()
+ .map(|kind| match kind {
+ CompileKind::Host => explicit_host_kind,
+ CompileKind::Target(t) => CompileKind::Target(*t),
+ })
+ .collect();
+
+ // Passing `build_config.requested_kinds` instead of
+ // `explicit_host_kinds` here so that `generate_root_units` can do
+ // its own special handling of `CompileKind::Host`. It will
+ // internally replace the host kind by the `explicit_host_kind`
+ // before setting as a unit.
+ let generator = UnitGenerator {
+ ws,
+ packages: &to_builds,
+ filter,
+ requested_kinds: &build_config.requested_kinds,
+ explicit_host_kind,
+ mode: build_config.mode,
+ resolve: &resolve,
+ workspace_resolve: &workspace_resolve,
+ resolved_features: &resolved_features,
+ package_set: &pkg_set,
+ profiles: &profiles,
+ interner,
+ has_dev_units,
+ };
+ let mut units = generator.generate_root_units()?;
+
+ if let Some(args) = target_rustc_crate_types {
+ override_rustc_crate_types(&mut units, args, interner)?;
+ }
+
+ let should_scrape = build_config.mode.is_doc() && config.cli_unstable().rustdoc_scrape_examples;
+ let mut scrape_units = if should_scrape {
+ UnitGenerator {
+ mode: CompileMode::Docscrape,
+ ..generator
+ }
+ .generate_scrape_units(&units)?
+ } else {
+ Vec::new()
+ };
+
+ let std_roots = if let Some(crates) = standard_lib::std_crates(config, Some(&units)) {
+ let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
+ standard_lib::generate_std_roots(
+ &crates,
+ std_resolve,
+ std_features,
+ &explicit_host_kinds,
+ &pkg_set,
+ interner,
+ &profiles,
+ )?
+ } else {
+ Default::default()
+ };
+
+ let mut unit_graph = build_unit_dependencies(
+ ws,
+ &pkg_set,
+ &resolve,
+ &resolved_features,
+ std_resolve_features.as_ref(),
+ &units,
+ &scrape_units,
+ &std_roots,
+ build_config.mode,
+ &target_data,
+ &profiles,
+ interner,
+ )?;
+
+ // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain
+ // what heuristics to use in that case.
+ if build_config.mode == (CompileMode::Doc { deps: true }) {
+ remove_duplicate_doc(build_config, &units, &mut unit_graph);
+ }
+
+ let host_kind_requested = build_config
+ .requested_kinds
+ .iter()
+ .any(CompileKind::is_host);
+ let should_share_deps = host_kind_requested
+ || config.cli_unstable().bindeps
+ && unit_graph
+ .iter()
+ .any(|(unit, _)| unit.artifact_target_for_features.is_some());
+ if should_share_deps {
+ // Rebuild the unit graph, replacing the explicit host targets with
+ // CompileKind::Host, removing `artifact_target_for_features` and merging any dependencies
+ // shared with build and artifact dependencies.
+ (units, scrape_units, unit_graph) = rebuild_unit_graph_shared(
+ interner,
+ unit_graph,
+ &units,
+ &scrape_units,
+ host_kind_requested.then_some(explicit_host_kind),
+ );
+ }
+
+ let mut extra_compiler_args = HashMap::new();
+ if let Some(args) = extra_args {
+ if units.len() != 1 {
+ anyhow::bail!(
+ "extra arguments to `{}` can only be passed to one \
+ target, consider filtering\nthe package by passing, \
+ e.g., `--lib` or `--bin NAME` to specify a single target",
+ extra_args_name
+ );
+ }
+ extra_compiler_args.insert(units[0].clone(), args);
+ }
+
+ for unit in units
+ .iter()
+ .filter(|unit| unit.mode.is_doc() || unit.mode.is_doc_test())
+ .filter(|unit| rustdoc_document_private_items || unit.target.is_bin())
+ {
+ // Add `--document-private-items` rustdoc flag if requested or if
+ // the target is a binary. Binary crates get their private items
+ // documented by default.
+ let mut args = vec!["--document-private-items".into()];
+ if unit.target.is_bin() {
+ // This warning only makes sense if it's possible to document private items
+ // sometimes and ignore them at other times. But cargo consistently passes
+ // `--document-private-items`, so the warning isn't useful.
+ args.push("-Arustdoc::private-intra-doc-links".into());
+ }
+ extra_compiler_args
+ .entry(unit.clone())
+ .or_default()
+ .extend(args);
+ }
+
+ if honor_rust_version {
+ // Remove any pre-release identifiers for easier comparison
+ let current_version = &target_data.rustc.version;
+ let untagged_version = semver::Version::new(
+ current_version.major,
+ current_version.minor,
+ current_version.patch,
+ );
+
+ for unit in unit_graph.keys() {
+ let version = match unit.pkg.rust_version() {
+ Some(v) => v,
+ None => continue,
+ };
+
+ let req = semver::VersionReq::parse(version).unwrap();
+ if req.matches(&untagged_version) {
+ continue;
+ }
+
+ let guidance = if ws.is_ephemeral() {
+ if ws.ignore_lock() {
+ "Try re-running cargo install with `--locked`".to_string()
+ } else {
+ String::new()
+ }
+ } else if !unit.is_local() {
+ format!(
+ "Either upgrade to rustc {} or newer, or use\n\
+ cargo update -p {}@{} --precise ver\n\
+ where `ver` is the latest version of `{}` supporting rustc {}",
+ version,
+ unit.pkg.name(),
+ unit.pkg.version(),
+ unit.pkg.name(),
+ current_version,
+ )
+ } else {
+ String::new()
+ };
+
+ anyhow::bail!(
+ "package `{}` cannot be built because it requires rustc {} or newer, \
+ while the currently active rustc version is {}\n{}",
+ unit.pkg,
+ version,
+ current_version,
+ guidance,
+ );
+ }
+ }
+
+ let bcx = BuildContext::new(
+ ws,
+ pkg_set,
+ build_config,
+ profiles,
+ extra_compiler_args,
+ target_data,
+ units,
+ unit_graph,
+ scrape_units,
+ )?;
+
+ Ok(bcx)
+}
+
+/// This is used to rebuild the unit graph, sharing host dependencies if possible.
+///
+/// This will translate any unit's `CompileKind::Target(host)` to
+/// `CompileKind::Host` if `to_host` is not `None` and the kind is equal to `to_host`.
+/// This also handles generating the unit `dep_hash`, and merging shared units if possible.
+///
+/// This is necessary because if normal dependencies used `CompileKind::Host`,
+/// there would be no way to distinguish those units from build-dependency
+/// units or artifact dependency units.
+/// This can cause a problem if a shared normal/build/artifact dependency needs
+/// to link to another dependency whose features differ based on whether or
+/// not it is a normal, build or artifact dependency. If all units used
+/// `CompileKind::Host`, then they would end up being identical, causing a
+/// collision in the `UnitGraph`, and Cargo would end up randomly choosing one
+/// value or the other.
+///
+/// The solution is to keep normal, build and artifact dependencies separate when
+/// building the unit graph, and then run this second pass which will try to
+/// combine shared dependencies safely. By adding a hash of the dependencies
+/// to the `Unit`, this allows the `CompileKind` to be changed back to `Host`
+/// and `artifact_target_for_features` to be removed without fear of an unwanted
+/// collision for build or artifact dependencies.
+fn rebuild_unit_graph_shared(
+ interner: &UnitInterner,
+ unit_graph: UnitGraph,
+ roots: &[Unit],
+ scrape_units: &[Unit],
+ to_host: Option<CompileKind>,
+) -> (Vec<Unit>, Vec<Unit>, UnitGraph) {
+ let mut result = UnitGraph::new();
+ // Map of the old unit to the new unit, used to avoid recursing into units
+ // that have already been computed to improve performance.
+ let mut memo = HashMap::new();
+ let new_roots = roots
+ .iter()
+ .map(|root| {
+ traverse_and_share(
+ interner,
+ &mut memo,
+ &mut result,
+ &unit_graph,
+ root,
+ false,
+ to_host,
+ )
+ })
+ .collect();
+ // If no unit in the unit graph ended up having scrape units attached as dependencies,
+ // then they won't have been discovered in traverse_and_share and hence won't be in
+ // memo. So we filter out missing scrape units.
+ let new_scrape_units = scrape_units
+ .iter()
+ .map(|unit| memo.get(unit).unwrap().clone())
+ .collect();
+ (new_roots, new_scrape_units, result)
+}
+
+/// Recursive function for rebuilding the graph.
+///
+/// This walks `unit_graph`, starting at the given `unit`. It inserts the new
+/// units into `new_graph`, and returns a new updated version of the given
+/// unit (`dep_hash` is filled in, and `kind` switched if necessary).
+fn traverse_and_share(
+ interner: &UnitInterner,
+ memo: &mut HashMap<Unit, Unit>,
+ new_graph: &mut UnitGraph,
+ unit_graph: &UnitGraph,
+ unit: &Unit,
+ unit_is_for_host: bool,
+ to_host: Option<CompileKind>,
+) -> Unit {
+ if let Some(new_unit) = memo.get(unit) {
+ // Already computed, no need to recompute.
+ return new_unit.clone();
+ }
+ let mut dep_hash = StableHasher::new();
+ let new_deps: Vec<_> = unit_graph[unit]
+ .iter()
+ .map(|dep| {
+ let new_dep_unit = traverse_and_share(
+ interner,
+ memo,
+ new_graph,
+ unit_graph,
+ &dep.unit,
+ dep.unit_for.is_for_host(),
+ to_host,
+ );
+ new_dep_unit.hash(&mut dep_hash);
+ UnitDep {
+ unit: new_dep_unit,
+ ..dep.clone()
+ }
+ })
+ .collect();
+ // Here, we have recursively traversed this unit's dependencies, and hashed them: we can
+ // finalize the dep hash.
+ let new_dep_hash = dep_hash.finish();
+
+ // This is the key part of the sharing process: if the unit is a runtime dependency, whose
+ // target is the same as the host, we canonicalize the compile kind to `CompileKind::Host`.
+ // A possible host dependency counterpart to this unit would have that kind, and if such a unit
+ // exists in the current `unit_graph`, they will unify in the new unit graph map `new_graph`.
+ // The resulting unit graph will be optimized with less units, thanks to sharing these host
+ // dependencies.
+ let canonical_kind = match to_host {
+ Some(to_host) if to_host == unit.kind => CompileKind::Host,
+ _ => unit.kind,
+ };
+
+ let mut profile = unit.profile.clone();
+
+ // If this is a build dependency, and it's not shared with runtime dependencies, we can weaken
+ // its debuginfo level to optimize build times. We do nothing if it's an artifact dependency,
+ // as it and its debuginfo may end up embedded in the main program.
+ if unit_is_for_host
+ && to_host.is_some()
+ && profile.debuginfo.is_deferred()
+ && !unit.artifact.is_true()
+ {
+ // We create a "probe" test to see if a unit with the same explicit debuginfo level exists
+ // in the graph. This is the level we'd expect if it was set manually or the default value
+ // set by a profile for a runtime dependency: its canonical value.
+ let canonical_debuginfo = profile.debuginfo.finalize();
+ let mut canonical_profile = profile.clone();
+ canonical_profile.debuginfo = canonical_debuginfo;
+ let unit_probe = interner.intern(
+ &unit.pkg,
+ &unit.target,
+ canonical_profile,
+ to_host.unwrap(),
+ unit.mode,
+ unit.features.clone(),
+ unit.is_std,
+ unit.dep_hash,
+ unit.artifact,
+ unit.artifact_target_for_features,
+ );
+
+ // We can now turn the deferred value into its actual final value.
+ profile.debuginfo = if unit_graph.contains_key(&unit_probe) {
+ // The unit is present in both build time and runtime subgraphs: we canonicalize its
+ // level to the other unit's, thus ensuring reuse between the two to optimize build times.
+ canonical_debuginfo
+ } else {
+ // The unit is only present in the build time subgraph, we can weaken its debuginfo
+ // level to optimize build times.
+ canonical_debuginfo.weaken()
+ }
+ }
+
+ let new_unit = interner.intern(
+ &unit.pkg,
+ &unit.target,
+ profile,
+ canonical_kind,
+ unit.mode,
+ unit.features.clone(),
+ unit.is_std,
+ new_dep_hash,
+ unit.artifact,
+ // Since `dep_hash` is now filled in, there's no need to specify the artifact target
+ // for target-dependent feature resolution
+ None,
+ );
+ assert!(memo.insert(unit.clone(), new_unit.clone()).is_none());
+ new_graph.entry(new_unit.clone()).or_insert(new_deps);
+ new_unit
+}
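
The two functions above form a memoized, bottom-up rewrite: each dependency is rewritten first and hashed into its parent's `dep_hash`, and units whose kind matches the host are canonicalized so that equivalent build-time and run-time units intern to the same value and collapse in the new graph. A much-simplified, self-contained sketch of that pattern, using a hypothetical `Node` type and plain `std` maps rather than Cargo's interner:

    use std::collections::HashMap;

    #[derive(Clone, PartialEq, Eq, Hash, Debug)]
    struct Node {
        name: String,
        kind: String, // e.g. "host" or a target triple
    }

    /// Rewrites `graph` bottom-up, canonicalizing `kind` to "host" when it equals
    /// `host_triple`, and memoizing results so shared subgraphs are visited once.
    fn share(
        graph: &HashMap<Node, Vec<Node>>,
        node: &Node,
        host_triple: &str,
        memo: &mut HashMap<Node, Node>,
        new_graph: &mut HashMap<Node, Vec<Node>>,
    ) -> Node {
        if let Some(done) = memo.get(node) {
            return done.clone();
        }
        let deps: Vec<Node> = graph
            .get(node)
            .into_iter()
            .flatten()
            .map(|d| share(graph, d, host_triple, memo, new_graph))
            .collect();
        let kind = if node.kind == host_triple {
            "host".to_string()
        } else {
            node.kind.clone()
        };
        let new_node = Node { name: node.name.clone(), kind };
        memo.insert(node.clone(), new_node.clone());
        // Nodes that canonicalize to the same key collapse into a single entry here.
        new_graph.entry(new_node.clone()).or_insert(deps);
        new_node
    }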
+
+/// Removes duplicate CompileMode::Doc units that would cause problems with
+/// filename collisions.
+///
+/// Rustdoc only separates units by crate name in the file directory
+/// structure. If any two units with the same crate name exist, this would
+/// cause a filename collision, causing different rustdoc invocations to stomp
+/// on one another's files.
+///
+/// Unfortunately this does not remove all duplicates, as some of them are
+/// either user error, or difficult to remove. Cases that I can think of:
+///
+/// - Same target name in different packages. See the `collision_doc` test.
+/// - Different sources. See `collision_doc_sources` test.
+///
+/// Ideally this would not be necessary.
+fn remove_duplicate_doc(
+ build_config: &BuildConfig,
+ root_units: &[Unit],
+ unit_graph: &mut UnitGraph,
+) {
+ // First, create a mapping of crate_name -> Unit so we can see where the
+ // duplicates are.
+ let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
+ for unit in unit_graph.keys() {
+ if unit.mode.is_doc() {
+ all_docs
+ .entry(unit.target.crate_name())
+ .or_default()
+ .push(unit.clone());
+ }
+ }
+ // Keep track of units to remove so that they can be efficiently removed
+ // from the unit_deps.
+ let mut removed_units: HashSet<Unit> = HashSet::new();
+ let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
+ let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
+ .into_iter()
+ .partition(|unit| cb(unit) && !root_units.contains(unit));
+ for unit in to_remove {
+ log::debug!(
+ "removing duplicate doc due to {} for package {} target `{}`",
+ reason,
+ unit.pkg,
+ unit.target.name()
+ );
+ unit_graph.remove(&unit);
+ removed_units.insert(unit);
+ }
+ remaining_units
+ };
+ // Iterate over the duplicates and try to remove them from unit_graph.
+ for (_crate_name, mut units) in all_docs {
+ if units.len() == 1 {
+ continue;
+ }
+ // Prefer target over host if --target was not specified.
+ if build_config
+ .requested_kinds
+ .iter()
+ .all(CompileKind::is_host)
+ {
+ // Note these duplicates may not be real duplicates, since they
+ // might get merged in rebuild_unit_graph_shared. Either way, it
+ // shouldn't hurt to remove them early (although the report in the
+ // log might be confusing).
+ units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
+ if units.len() == 1 {
+ continue;
+ }
+ }
+ // Prefer newer versions over older.
+ let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec<Unit>> =
+ HashMap::new();
+ for unit in units {
+ let pkg_id = unit.pkg.package_id();
+ // Note, this does not detect duplicates from different sources.
+ source_map
+ .entry((pkg_id.name(), pkg_id.source_id(), unit.kind))
+ .or_default()
+ .push(unit);
+ }
+ let mut remaining_units = Vec::new();
+ for (_key, mut units) in source_map {
+ if units.len() > 1 {
+ units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap());
+ // Remove any entries with version < newest.
+ let newest_version = units.last().unwrap().pkg.version().clone();
+ let keep_units = remove(units, "older version", &|unit| {
+ unit.pkg.version() < &newest_version
+ });
+ remaining_units.extend(keep_units);
+ } else {
+ remaining_units.extend(units);
+ }
+ }
+ if remaining_units.len() == 1 {
+ continue;
+ }
+ // Are there other heuristics to remove duplicates that would make
+ // sense? Maybe prefer path sources over all others?
+ }
+ // Also remove units from the unit_deps so there aren't any dangling edges.
+ for unit_deps in unit_graph.values_mut() {
+ unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
+ }
+ // Remove any orphan units that were detached from the graph.
+ let mut visited = HashSet::new();
+ fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
+ if !visited.insert(unit.clone()) {
+ return;
+ }
+ for dep in &graph[unit] {
+ visit(&dep.unit, graph, visited);
+ }
+ }
+ for unit in root_units {
+ visit(unit, unit_graph, &mut visited);
+ }
+ unit_graph.retain(|unit, _| visited.contains(unit));
+}
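
Once duplicates are dropped, the remaining edges are filtered and anything no longer reachable from a root unit is pruned with the small DFS above. A standalone sketch of that final prune, with plain `String` nodes instead of Cargo's `Unit` (hypothetical `prune_unreachable` helper):

    use std::collections::{HashMap, HashSet};

    // Keeps only nodes reachable from `roots`, mirroring the DFS-based prune above.
    fn prune_unreachable(graph: &mut HashMap<String, Vec<String>>, roots: &[String]) {
        fn visit(node: &str, graph: &HashMap<String, Vec<String>>, seen: &mut HashSet<String>) {
            if !seen.insert(node.to_string()) {
                return;
            }
            for dep in graph.get(node).into_iter().flatten() {
                visit(dep, graph, seen);
            }
        }
        let mut seen = HashSet::new();
        for root in roots {
            visit(root, graph, &mut seen);
        }
        graph.retain(|node, _| seen.contains(node));
    }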
+
+/// Override crate types for given units.
+///
+/// This is primarily used by `cargo rustc --crate-type`.
+fn override_rustc_crate_types(
+ units: &mut [Unit],
+ args: &[String],
+ interner: &UnitInterner,
+) -> CargoResult<()> {
+ if units.len() != 1 {
+ anyhow::bail!(
+ "crate types to rustc can only be passed to one \
+ target, consider filtering\nthe package by passing, \
+ e.g., `--lib` or `--example` to specify a single target"
+ );
+ }
+
+ let unit = &units[0];
+ let override_unit = |f: fn(Vec<CrateType>) -> TargetKind| {
+ let crate_types = args.iter().map(|s| s.into()).collect();
+ let mut target = unit.target.clone();
+ target.set_kind(f(crate_types));
+ interner.intern(
+ &unit.pkg,
+ &target,
+ unit.profile.clone(),
+ unit.kind,
+ unit.mode,
+ unit.features.clone(),
+ unit.is_std,
+ unit.dep_hash,
+ unit.artifact,
+ unit.artifact_target_for_features,
+ )
+ };
+ units[0] = match unit.target.kind() {
+ TargetKind::Lib(_) => override_unit(TargetKind::Lib),
+ TargetKind::ExampleLib(_) => override_unit(TargetKind::ExampleLib),
+ _ => {
+ anyhow::bail!(
+ "crate types can only be specified for libraries and example libraries.\n\
+ Binaries, tests, and benchmarks are always the `bin` crate type"
+ );
+ }
+ };
+
+ Ok(())
+}
+
+/// Gets all of the features enabled for a package, plus its dependencies'
+/// features.
+///
+/// Dependencies are added as `dep_name/feat_name` because `required-features`
+/// wants to support that syntax.
+pub fn resolve_all_features(
+ resolve_with_overrides: &Resolve,
+ resolved_features: &features::ResolvedFeatures,
+ package_set: &PackageSet<'_>,
+ package_id: PackageId,
+) -> HashSet<String> {
+ let mut features: HashSet<String> = resolved_features
+ .activated_features(package_id, FeaturesFor::NormalOrDev)
+ .iter()
+ .map(|s| s.to_string())
+ .collect();
+
+ // Include features enabled for use by dependencies so targets can also use them with the
+ // required-features field when deciding whether to be built or skipped.
+ for (dep_id, deps) in resolve_with_overrides.deps(package_id) {
+ let is_proc_macro = package_set
+ .get_one(dep_id)
+ .expect("packages downloaded")
+ .proc_macro();
+ for dep in deps {
+ let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build());
+ for feature in resolved_features
+ .activated_features_unverified(dep_id, features_for)
+ .unwrap_or_default()
+ {
+ features.insert(format!("{}/{}", dep.name_in_toml(), feature));
+ }
+ }
+ }
+
+ features
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_compile/packages.rs b/src/tools/cargo/src/cargo/ops/cargo_compile/packages.rs
new file mode 100644
index 000000000..2d14d60a6
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_compile/packages.rs
@@ -0,0 +1,220 @@
+//! See [`Packages`].
+
+use std::collections::BTreeSet;
+
+use crate::core::Package;
+use crate::core::{PackageIdSpec, Workspace};
+use crate::util::restricted_names::is_glob_pattern;
+use crate::util::CargoResult;
+
+use anyhow::{bail, Context as _};
+
+/// Represents the selected packages that will be built.
+///
+/// Generally, it represents the combination of all `-p` flags. When working within
+/// a workspace, `--exclude` and `--workspace` flags also contribute to it.
+#[derive(PartialEq, Eq, Debug, Clone)]
+pub enum Packages {
+ /// Packages selected by default. Usually means no flag provided.
+ Default,
+ /// Opt in to all packages.
+ ///
+ /// At the time of writing, this only opts in all workspace members.
+ All,
+ /// Opt out of the packages passed in.
+ ///
+ /// At the time of writing, this only opts out workspace members.
+ OptOut(Vec<String>),
+ /// A sequence of hand-picked packages that will be built. Normally specified with the `-p` flag.
+ Packages(Vec<String>),
+}
+
+impl Packages {
+ /// Creates a `Packages` from flags which are generally equivalent to command line flags.
+ pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>) -> CargoResult<Self> {
+ Ok(match (all, exclude.len(), package.len()) {
+ (false, 0, 0) => Packages::Default,
+ (false, 0, _) => Packages::Packages(package),
+ (false, _, _) => anyhow::bail!("--exclude can only be used together with --workspace"),
+ (true, 0, _) => Packages::All,
+ (true, _, _) => Packages::OptOut(exclude),
+ })
+ }
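
The match above maps the flag combinations directly onto the variants; for example, inside a function returning `CargoResult` one would expect roughly the following (illustrative calls, not tests from the actual source):

    // cargo build -p foo
    let picked = Packages::from_flags(false, vec![], vec!["foo".to_string()])?;
    assert_eq!(picked, Packages::Packages(vec!["foo".to_string()]));

    // cargo build --workspace --exclude bar
    let excluded = Packages::from_flags(true, vec!["bar".to_string()], vec![])?;
    assert_eq!(excluded, Packages::OptOut(vec!["bar".to_string()]));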
+
+ /// Converts selected packages to [`PackageIdSpec`]s.
+ pub fn to_package_id_specs(&self, ws: &Workspace<'_>) -> CargoResult<Vec<PackageIdSpec>> {
+ let specs = match self {
+ Packages::All => ws
+ .members()
+ .map(Package::package_id)
+ .map(PackageIdSpec::from_package_id)
+ .collect(),
+ Packages::OptOut(opt_out) => {
+ let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?;
+ let specs = ws
+ .members()
+ .filter(|pkg| {
+ !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns)
+ })
+ .map(Package::package_id)
+ .map(PackageIdSpec::from_package_id)
+ .collect();
+ let warn = |e| ws.config().shell().warn(e);
+ emit_package_not_found(ws, names, true).or_else(warn)?;
+ emit_pattern_not_found(ws, patterns, true).or_else(warn)?;
+ specs
+ }
+ Packages::Packages(packages) if packages.is_empty() => {
+ vec![PackageIdSpec::from_package_id(ws.current()?.package_id())]
+ }
+ Packages::Packages(opt_in) => {
+ let (mut patterns, packages) = opt_patterns_and_names(opt_in)?;
+ let mut specs = packages
+ .iter()
+ .map(|p| PackageIdSpec::parse(p))
+ .collect::<CargoResult<Vec<_>>>()?;
+ if !patterns.is_empty() {
+ let matched_pkgs = ws
+ .members()
+ .filter(|pkg| match_patterns(pkg, &mut patterns))
+ .map(Package::package_id)
+ .map(PackageIdSpec::from_package_id);
+ specs.extend(matched_pkgs);
+ }
+ emit_pattern_not_found(ws, patterns, false)?;
+ specs
+ }
+ Packages::Default => ws
+ .default_members()
+ .map(Package::package_id)
+ .map(PackageIdSpec::from_package_id)
+ .collect(),
+ };
+ if specs.is_empty() {
+ if ws.is_virtual() {
+ bail!(
+ "manifest path `{}` contains no package: The manifest is virtual, \
+ and the workspace has no members.",
+ ws.root().display()
+ )
+ }
+ bail!("no packages to compile")
+ }
+ Ok(specs)
+ }
+
+ /// Gets a list of selected [`Package`]s.
+ pub fn get_packages<'ws>(&self, ws: &'ws Workspace<'_>) -> CargoResult<Vec<&'ws Package>> {
+ let packages: Vec<_> = match self {
+ Packages::Default => ws.default_members().collect(),
+ Packages::All => ws.members().collect(),
+ Packages::OptOut(opt_out) => {
+ let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?;
+ let packages = ws
+ .members()
+ .filter(|pkg| {
+ !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns)
+ })
+ .collect();
+ emit_package_not_found(ws, names, true)?;
+ emit_pattern_not_found(ws, patterns, true)?;
+ packages
+ }
+ Packages::Packages(opt_in) => {
+ let (mut patterns, mut names) = opt_patterns_and_names(opt_in)?;
+ let packages = ws
+ .members()
+ .filter(|pkg| {
+ names.remove(pkg.name().as_str()) || match_patterns(pkg, &mut patterns)
+ })
+ .collect();
+ emit_package_not_found(ws, names, false)?;
+ emit_pattern_not_found(ws, patterns, false)?;
+ packages
+ }
+ };
+ Ok(packages)
+ }
+
+ /// Returns whether or not the user needs to pass a `-p` flag to target a
+ /// specific package in the workspace.
+ pub fn needs_spec_flag(&self, ws: &Workspace<'_>) -> bool {
+ match self {
+ Packages::Default => ws.default_members().count() > 1,
+ Packages::All => ws.members().count() > 1,
+ Packages::Packages(_) => true,
+ Packages::OptOut(_) => true,
+ }
+ }
+}
+
+/// Emits "package not found" error.
+fn emit_package_not_found(
+ ws: &Workspace<'_>,
+ opt_names: BTreeSet<&str>,
+ opt_out: bool,
+) -> CargoResult<()> {
+ if !opt_names.is_empty() {
+ anyhow::bail!(
+ "{}package(s) `{}` not found in workspace `{}`",
+ if opt_out { "excluded " } else { "" },
+ opt_names.into_iter().collect::<Vec<_>>().join(", "),
+ ws.root().display(),
+ )
+ }
+ Ok(())
+}
+
+/// Emits "glob pattern not found" error.
+fn emit_pattern_not_found(
+ ws: &Workspace<'_>,
+ opt_patterns: Vec<(glob::Pattern, bool)>,
+ opt_out: bool,
+) -> CargoResult<()> {
+ let not_matched = opt_patterns
+ .iter()
+ .filter(|(_, matched)| !*matched)
+ .map(|(pat, _)| pat.as_str())
+ .collect::<Vec<_>>();
+ if !not_matched.is_empty() {
+ anyhow::bail!(
+ "{}package pattern(s) `{}` not found in workspace `{}`",
+ if opt_out { "excluded " } else { "" },
+ not_matched.join(", "),
+ ws.root().display(),
+ )
+ }
+ Ok(())
+}
+
+/// Given a list of opt-in or opt-out package selection strings, generates two
+/// collections that represent glob patterns and package names respectively.
+fn opt_patterns_and_names(
+ opt: &[String],
+) -> CargoResult<(Vec<(glob::Pattern, bool)>, BTreeSet<&str>)> {
+ let mut opt_patterns = Vec::new();
+ let mut opt_names = BTreeSet::new();
+ for x in opt.iter() {
+ if is_glob_pattern(x) {
+ opt_patterns.push((build_glob(x)?, false));
+ } else {
+ opt_names.insert(String::as_str(x));
+ }
+ }
+ Ok((opt_patterns, opt_names))
+}
+
+/// Checks whether a package matches any of a list of glob patterns generated
+/// from `opt_patterns_and_names`.
+fn match_patterns(pkg: &Package, patterns: &mut Vec<(glob::Pattern, bool)>) -> bool {
+ patterns.iter_mut().any(|(m, matched)| {
+ let is_matched = m.matches(pkg.name().as_str());
+ *matched |= is_matched;
+ is_matched
+ })
+}
+
+/// Builds a [`glob::Pattern`] with informative error context.
+pub fn build_glob(pat: &str) -> CargoResult<glob::Pattern> {
+ glob::Pattern::new(pat).with_context(|| format!("cannot build glob pattern from `{}`", pat))
+}
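
Together, `opt_patterns_and_names`, `match_patterns`, and `build_glob` split the `-p`/`--exclude` arguments into literal names and glob patterns, and record which patterns matched so unused ones can be reported. A self-contained sketch of the same idea against the `glob` crate, with a hypothetical `select` helper and a simplified stand-in for `is_glob_pattern`:

    use std::collections::BTreeSet;

    // Partitions selection strings into (pattern, matched-flag) pairs and literal
    // names, then returns the member names selected by either mechanism.
    fn select(args: &[&str], members: &[&str]) -> Result<Vec<String>, glob::PatternError> {
        let mut patterns: Vec<(glob::Pattern, bool)> = Vec::new();
        let mut names: BTreeSet<&str> = BTreeSet::new();
        for arg in args {
            if arg.contains(|c| matches!(c, '*' | '?' | '[' | ']')) {
                patterns.push((glob::Pattern::new(arg)?, false));
            } else {
                names.insert(arg);
            }
        }
        let selected = members
            .iter()
            .copied()
            .filter(|name| {
                names.contains(name)
                    || patterns.iter_mut().any(|(pat, matched)| {
                        let hit = pat.matches(name);
                        *matched |= hit;
                        hit
                    })
            })
            .map(str::to_string)
            .collect();
        // Patterns whose `matched` flag is still false would be reported as not found.
        Ok(selected)
    }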
diff --git a/src/tools/cargo/src/cargo/ops/cargo_compile/unit_generator.rs b/src/tools/cargo/src/cargo/ops/cargo_compile/unit_generator.rs
new file mode 100644
index 000000000..ce5a825fe
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_compile/unit_generator.rs
@@ -0,0 +1,714 @@
+use std::cell::RefCell;
+use std::collections::{HashMap, HashSet};
+use std::fmt::Write;
+
+use crate::core::compiler::rustdoc::RustdocScrapeExamples;
+use crate::core::compiler::unit_dependencies::IsArtifact;
+use crate::core::compiler::UnitInterner;
+use crate::core::compiler::{CompileKind, CompileMode, Unit};
+use crate::core::dependency::DepKind;
+use crate::core::profiles::{Profiles, UnitFor};
+use crate::core::resolver::features::{self, FeaturesFor};
+use crate::core::resolver::{HasDevUnits, Resolve};
+use crate::core::{FeatureValue, Package, PackageSet, Summary, Target};
+use crate::core::{TargetKind, Workspace};
+use crate::util::restricted_names::is_glob_pattern;
+use crate::util::{closest_msg, CargoResult};
+
+use super::compile_filter::{CompileFilter, FilterRule, LibRule};
+use super::packages::build_glob;
+
+/// A proposed target.
+///
+/// Proposed targets are later filtered into actual `Unit`s based on whether or
+/// not the target requires its features to be present.
+#[derive(Debug)]
+struct Proposal<'a> {
+ pkg: &'a Package,
+ target: &'a Target,
+ /// Indicates whether or not all required features *must* be present. If
+ /// false, and the features are not available, then it will be silently
+ /// skipped. Generally, targets specified by name (`--bin foo`) are
+ /// required, all others can be silently skipped if features are missing.
+ requires_features: bool,
+ mode: CompileMode,
+}
+
+/// The context needed for generating root units,
+/// i.e. units for the packages the user has requested to compile.
+///
+/// To generate a full [`UnitGraph`],
+/// generally you need to call [`generate_root_units`] first,
+/// and then provide the output to [`build_unit_dependencies`].
+///
+/// [`generate_root_units`]: UnitGenerator::generate_root_units
+/// [`build_unit_dependencies`]: crate::core::compiler::unit_dependencies::build_unit_dependencies
+/// [`UnitGraph`]: crate::core::compiler::unit_graph::UnitGraph
+pub(super) struct UnitGenerator<'a, 'cfg> {
+ pub ws: &'a Workspace<'cfg>,
+ pub packages: &'a [&'a Package],
+ pub filter: &'a CompileFilter,
+ pub requested_kinds: &'a [CompileKind],
+ pub explicit_host_kind: CompileKind,
+ pub mode: CompileMode,
+ pub resolve: &'a Resolve,
+ pub workspace_resolve: &'a Option<Resolve>,
+ pub resolved_features: &'a features::ResolvedFeatures,
+ pub package_set: &'a PackageSet<'cfg>,
+ pub profiles: &'a Profiles,
+ pub interner: &'a UnitInterner,
+ pub has_dev_units: HasDevUnits,
+}
+
+impl<'a> UnitGenerator<'a, '_> {
+ /// Helper for creating a list of `Unit` structures
+ fn new_units(
+ &self,
+ pkg: &Package,
+ target: &Target,
+ initial_target_mode: CompileMode,
+ ) -> Vec<Unit> {
+ // Custom build units are added in `build_unit_dependencies`.
+ assert!(!target.is_custom_build());
+ let target_mode = match initial_target_mode {
+ CompileMode::Test => {
+ if target.is_example() && !self.filter.is_specific() && !target.tested() {
+ // Examples are included as regular binaries to verify
+ // that they compile.
+ CompileMode::Build
+ } else {
+ CompileMode::Test
+ }
+ }
+ CompileMode::Build => match *target.kind() {
+ TargetKind::Test => CompileMode::Test,
+ TargetKind::Bench => CompileMode::Bench,
+ _ => CompileMode::Build,
+ },
+ // `CompileMode::Bench` is only used to inform `filter_default_targets`
+ // which command is being used (`cargo bench`). Afterwards, tests
+ // and benches are treated identically. Switching the mode allows
+ // de-duplication of units that are essentially identical. For
+ // example, `cargo build --all-targets --release` creates the units
+ // (lib profile:bench, mode:test) and (lib profile:bench, mode:bench)
+ // and since these are the same, we want them to be de-duplicated in
+ // `unit_dependencies`.
+ CompileMode::Bench => CompileMode::Test,
+ _ => initial_target_mode,
+ };
+
+ let is_local = pkg.package_id().source_id().is_path();
+
+ // No need to worry about build-dependencies, roots are never build dependencies.
+ let features_for = FeaturesFor::from_for_host(target.proc_macro());
+ let features = self
+ .resolved_features
+ .activated_features(pkg.package_id(), features_for);
+
+ // If `--target` has not been specified, then the unit
+ // graph is built almost like if `--target $HOST` was
+ // specified. See `rebuild_unit_graph_shared` for more on
+ // why this is done. However, if the package has its own
+ // `package.target` key, then this gets used instead of
+ // `$HOST`
+ let explicit_kinds = if let Some(k) = pkg.manifest().forced_kind() {
+ vec![k]
+ } else {
+ self.requested_kinds
+ .iter()
+ .map(|kind| match kind {
+ CompileKind::Host => pkg
+ .manifest()
+ .default_kind()
+ .unwrap_or(self.explicit_host_kind),
+ CompileKind::Target(t) => CompileKind::Target(*t),
+ })
+ .collect()
+ };
+
+ explicit_kinds
+ .into_iter()
+ .map(move |kind| {
+ let unit_for = if initial_target_mode.is_any_test() {
+ // NOTE: the `UnitFor` here is subtle. If you have a profile
+ // with `panic` set, the `panic` flag is cleared for
+ // tests/benchmarks and their dependencies. If this
+ // was `normal`, then the lib would get compiled three
+ // times (once with panic, once without, and once with
+ // `--test`).
+ //
+ // This would cause a problem for doc tests, which would fail
+ // because `rustdoc` would attempt to link with both libraries
+ // at the same time. Also, it's probably not important (or
+ // even desirable?) for rustdoc to link with a lib with
+ // `panic` set.
+ //
+ // As a consequence, Examples and Binaries get compiled
+ // without `panic` set. This probably isn't a bad deal.
+ //
+ // Forcing the lib to be compiled three times during `cargo
+ // test` is probably also not desirable.
+ UnitFor::new_test(self.ws.config(), kind)
+ } else if target.for_host() {
+ // Proc macro / plugin should not have `panic` set.
+ UnitFor::new_compiler(kind)
+ } else {
+ UnitFor::new_normal(kind)
+ };
+ let profile = self.profiles.get_profile(
+ pkg.package_id(),
+ self.ws.is_member(pkg),
+ is_local,
+ unit_for,
+ kind,
+ );
+ self.interner.intern(
+ pkg,
+ target,
+ profile,
+ kind.for_target(target),
+ target_mode,
+ features.clone(),
+ /*is_std*/ false,
+ /*dep_hash*/ 0,
+ IsArtifact::No,
+ None,
+ )
+ })
+ .collect()
+ }
+
+ /// Given a list of all targets for a package, returns only the targets that
+ /// are automatically included when the user doesn't specify any targets.
+ fn filter_default_targets<'b>(&self, targets: &'b [Target]) -> Vec<&'b Target> {
+ match self.mode {
+ CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(),
+ CompileMode::Test => targets
+ .iter()
+ .filter(|t| t.tested() || t.is_example())
+ .collect(),
+ CompileMode::Build | CompileMode::Check { .. } => targets
+ .iter()
+ .filter(|t| t.is_bin() || t.is_lib())
+ .collect(),
+ CompileMode::Doc { .. } => {
+ // `doc` does lib and bins (bin with same name as lib is skipped).
+ targets
+ .iter()
+ .filter(|t| {
+ t.documented()
+ && (!t.is_bin()
+ || !targets.iter().any(|l| l.is_lib() && l.name() == t.name()))
+ })
+ .collect()
+ }
+ CompileMode::Doctest | CompileMode::RunCustomBuild | CompileMode::Docscrape => {
+ panic!("Invalid mode {:?}", self.mode)
+ }
+ }
+ }
+
+ /// Filters the set of all possible targets based on the provided predicate.
+ fn filter_targets(
+ &self,
+ predicate: impl Fn(&Target) -> bool,
+ requires_features: bool,
+ mode: CompileMode,
+ ) -> Vec<Proposal<'a>> {
+ self.packages
+ .iter()
+ .flat_map(|pkg| {
+ pkg.targets()
+ .iter()
+ .filter(|t| predicate(t))
+ .map(|target| Proposal {
+ pkg,
+ target,
+ requires_features,
+ mode,
+ })
+ })
+ .collect()
+ }
+
+ /// Finds the targets for a specifically named target.
+ fn find_named_targets(
+ &self,
+ target_name: &str,
+ target_desc: &'static str,
+ is_expected_kind: fn(&Target) -> bool,
+ mode: CompileMode,
+ ) -> CargoResult<Vec<Proposal<'a>>> {
+ let is_glob = is_glob_pattern(target_name);
+ let proposals = if is_glob {
+ let pattern = build_glob(target_name)?;
+ let filter = |t: &Target| is_expected_kind(t) && pattern.matches(t.name());
+ self.filter_targets(filter, true, mode)
+ } else {
+ let filter = |t: &Target| t.name() == target_name && is_expected_kind(t);
+ self.filter_targets(filter, true, mode)
+ };
+
+ if proposals.is_empty() {
+ let targets = self
+ .packages
+ .iter()
+ .flat_map(|pkg| {
+ pkg.targets()
+ .iter()
+ .filter(|target| is_expected_kind(target))
+ })
+ .collect::<Vec<_>>();
+ let suggestion = closest_msg(target_name, targets.iter(), |t| t.name());
+ if !suggestion.is_empty() {
+ anyhow::bail!(
+ "no {} target {} `{}`{}",
+ target_desc,
+ if is_glob { "matches pattern" } else { "named" },
+ target_name,
+ suggestion
+ );
+ } else {
+ let mut msg = String::new();
+ writeln!(
+ msg,
+ "no {} target {} `{}`.",
+ target_desc,
+ if is_glob { "matches pattern" } else { "named" },
+ target_name,
+ )?;
+ if !targets.is_empty() {
+ writeln!(msg, "Available {} targets:", target_desc)?;
+ for target in targets {
+ writeln!(msg, " {}", target.name())?;
+ }
+ }
+ anyhow::bail!(msg);
+ }
+ }
+ Ok(proposals)
+ }
+
+ /// Returns a list of proposed targets based on command-line target selection flags.
+ fn list_rule_targets(
+ &self,
+ rule: &FilterRule,
+ target_desc: &'static str,
+ is_expected_kind: fn(&Target) -> bool,
+ mode: CompileMode,
+ ) -> CargoResult<Vec<Proposal<'a>>> {
+ let mut proposals = Vec::new();
+ match rule {
+ FilterRule::All => proposals.extend(self.filter_targets(is_expected_kind, false, mode)),
+ FilterRule::Just(names) => {
+ for name in names {
+ proposals.extend(self.find_named_targets(
+ name,
+ target_desc,
+ is_expected_kind,
+ mode,
+ )?);
+ }
+ }
+ }
+ Ok(proposals)
+ }
+
+ /// Creates a list of proposed targets given the context in `UnitGenerator`.
+ fn create_proposals(&self) -> CargoResult<Vec<Proposal<'_>>> {
+ let mut proposals: Vec<Proposal<'_>> = Vec::new();
+
+ match *self.filter {
+ CompileFilter::Default {
+ required_features_filterable,
+ } => {
+ for pkg in self.packages {
+ let default = self.filter_default_targets(pkg.targets());
+ proposals.extend(default.into_iter().map(|target| Proposal {
+ pkg,
+ target,
+ requires_features: !required_features_filterable,
+ mode: self.mode,
+ }));
+ if self.mode == CompileMode::Test {
+ if let Some(t) = pkg
+ .targets()
+ .iter()
+ .find(|t| t.is_lib() && t.doctested() && t.doctestable())
+ {
+ proposals.push(Proposal {
+ pkg,
+ target: t,
+ requires_features: false,
+ mode: CompileMode::Doctest,
+ });
+ }
+ }
+ }
+ }
+ CompileFilter::Only {
+ all_targets,
+ ref lib,
+ ref bins,
+ ref examples,
+ ref tests,
+ ref benches,
+ } => {
+ if *lib != LibRule::False {
+ let mut libs = Vec::new();
+ for proposal in self.filter_targets(Target::is_lib, false, self.mode) {
+ let Proposal { target, pkg, .. } = proposal;
+ if self.mode.is_doc_test() && !target.doctestable() {
+ let types = target.rustc_crate_types();
+ let types_str: Vec<&str> = types.iter().map(|t| t.as_str()).collect();
+ self.ws.config().shell().warn(format!(
+ "doc tests are not supported for crate type(s) `{}` in package `{}`",
+ types_str.join(", "),
+ pkg.name()
+ ))?;
+ } else {
+ libs.push(proposal)
+ }
+ }
+ if !all_targets && libs.is_empty() && *lib == LibRule::True {
+ let names = self
+ .packages
+ .iter()
+ .map(|pkg| pkg.name())
+ .collect::<Vec<_>>();
+ if names.len() == 1 {
+ anyhow::bail!("no library targets found in package `{}`", names[0]);
+ } else {
+ anyhow::bail!(
+ "no library targets found in packages: {}",
+ names.join(", ")
+ );
+ }
+ }
+ proposals.extend(libs);
+ }
+
+ // If `--tests` was specified, add all targets that would be
+ // generated by `cargo test`.
+ let test_filter = match tests {
+ FilterRule::All => Target::tested,
+ FilterRule::Just(_) => Target::is_test,
+ };
+ let test_mode = match self.mode {
+ CompileMode::Build => CompileMode::Test,
+ CompileMode::Check { .. } => CompileMode::Check { test: true },
+ _ => self.mode,
+ };
+ // If `--benches` was specified, add all targets that would be
+ // generated by `cargo bench`.
+ let bench_filter = match benches {
+ FilterRule::All => Target::benched,
+ FilterRule::Just(_) => Target::is_bench,
+ };
+ let bench_mode = match self.mode {
+ CompileMode::Build => CompileMode::Bench,
+ CompileMode::Check { .. } => CompileMode::Check { test: true },
+ _ => self.mode,
+ };
+
+ proposals.extend(self.list_rule_targets(bins, "bin", Target::is_bin, self.mode)?);
+ proposals.extend(self.list_rule_targets(
+ examples,
+ "example",
+ Target::is_example,
+ self.mode,
+ )?);
+ proposals.extend(self.list_rule_targets(tests, "test", test_filter, test_mode)?);
+ proposals.extend(self.list_rule_targets(
+ benches,
+ "bench",
+ bench_filter,
+ bench_mode,
+ )?);
+ }
+ }
+
+ Ok(proposals)
+ }
+
+ /// Proposes targets from which to scrape examples for documentation
+ fn create_docscrape_proposals(&self, doc_units: &[Unit]) -> CargoResult<Vec<Proposal<'a>>> {
+ // In general, the goal is to scrape examples from (a) whatever targets
+ // the user is documenting, and (b) Example targets. However, if the user
+ // is documenting a library with dev-dependencies, those dev-deps are not
+ // needed for the library, while dev-deps are needed for the examples.
+ //
+ // If scrape-examples caused `cargo doc` to start requiring dev-deps, this
+ // would be a breaking change to crates whose dev-deps don't compile.
+ // Therefore we ONLY want to scrape Example targets if either:
+ // (1) No package has dev-dependencies, so this is a moot issue, OR
+ // (2) The provided CompileFilter requires dev-dependencies anyway.
+ //
+ // The next two variables represent these two conditions.
+ let no_pkg_has_dev_deps = self.packages.iter().all(|pkg| {
+ pkg.summary()
+ .dependencies()
+ .iter()
+ .all(|dep| !matches!(dep.kind(), DepKind::Development))
+ });
+ let reqs_dev_deps = matches!(self.has_dev_units, HasDevUnits::Yes);
+ let safe_to_scrape_example_targets = no_pkg_has_dev_deps || reqs_dev_deps;
+
+ let pkgs_to_scrape = doc_units
+ .iter()
+ .filter(|unit| self.ws.unit_needs_doc_scrape(unit))
+ .map(|u| &u.pkg)
+ .collect::<HashSet<_>>();
+
+ let skipped_examples = RefCell::new(Vec::new());
+ let can_scrape = |target: &Target| {
+ match (target.doc_scrape_examples(), target.is_example()) {
+ // Targets configured by the user to not be scraped should never be scraped
+ (RustdocScrapeExamples::Disabled, _) => false,
+ // Targets configured by the user to be scraped should always be scraped
+ (RustdocScrapeExamples::Enabled, _) => true,
+ // Example targets with no configuration should be conditionally scraped if
+ // it's guaranteed not to break the build
+ (RustdocScrapeExamples::Unset, true) => {
+ if !safe_to_scrape_example_targets {
+ skipped_examples
+ .borrow_mut()
+ .push(target.name().to_string());
+ }
+ safe_to_scrape_example_targets
+ }
+ // All other targets are ignored for now. This may change in the future!
+ (RustdocScrapeExamples::Unset, false) => false,
+ }
+ };
+
+ let mut scrape_proposals = self.filter_targets(can_scrape, false, CompileMode::Docscrape);
+ scrape_proposals.retain(|proposal| pkgs_to_scrape.contains(proposal.pkg));
+
+ let skipped_examples = skipped_examples.into_inner();
+ if !skipped_examples.is_empty() {
+ let mut shell = self.ws.config().shell();
+ let example_str = skipped_examples.join(", ");
+ shell.warn(format!(
+ "\
+Rustdoc did not scrape the following examples because they require dev-dependencies: {example_str}
+ If you want Rustdoc to scrape these examples, then add `doc-scrape-examples = true`
+ to the [[example]] target configuration of at least one example."
+ ))?;
+ }
+
+ Ok(scrape_proposals)
+ }
+
+ /// Checks whether the unit list is empty while the user passed some combination of
+ /// `--tests`, `--examples`, `--benches`, or `--bins` that matched no targets.
+ /// In that case we emit a warning so the user knows the run was a no-op and that
+ /// their code remains unchecked even though cargo returned no errors.
+ fn unmatched_target_filters(&self, units: &[Unit]) -> CargoResult<()> {
+ let mut shell = self.ws.config().shell();
+ if let CompileFilter::Only {
+ all_targets,
+ lib: _,
+ ref bins,
+ ref examples,
+ ref tests,
+ ref benches,
+ } = *self.filter
+ {
+ if units.is_empty() {
+ let mut filters = String::new();
+ let mut miss_count = 0;
+
+ let mut append = |t: &FilterRule, s| {
+ if let FilterRule::All = *t {
+ miss_count += 1;
+ filters.push_str(s);
+ }
+ };
+
+ if all_targets {
+ filters.push_str(" `all-targets`");
+ } else {
+ append(bins, " `bins`,");
+ append(tests, " `tests`,");
+ append(examples, " `examples`,");
+ append(benches, " `benches`,");
+ filters.pop();
+ }
+
+ return shell.warn(format!(
+ "Target {}{} specified, but no targets matched. This is a no-op",
+ if miss_count > 1 { "filters" } else { "filter" },
+ filters,
+ ));
+ }
+ }
+
+ Ok(())
+ }
+
+ /// Warns if a target's required-features references a feature that doesn't exist.
+ ///
+ /// This is a warning because historically this was not validated, and it
+ /// would cause too much breakage to make it an error.
+ fn validate_required_features(
+ &self,
+ target_name: &str,
+ required_features: &[String],
+ summary: &Summary,
+ ) -> CargoResult<()> {
+ let resolve = match self.workspace_resolve {
+ None => return Ok(()),
+ Some(resolve) => resolve,
+ };
+
+ let mut shell = self.ws.config().shell();
+ for feature in required_features {
+ let fv = FeatureValue::new(feature.into());
+ match &fv {
+ FeatureValue::Feature(f) => {
+ if !summary.features().contains_key(f) {
+ shell.warn(format!(
+ "invalid feature `{}` in required-features of target `{}`: \
+ `{}` is not present in [features] section",
+ fv, target_name, fv
+ ))?;
+ }
+ }
+ FeatureValue::Dep { .. } => {
+ anyhow::bail!(
+ "invalid feature `{}` in required-features of target `{}`: \
+ `dep:` prefixed feature values are not allowed in required-features",
+ fv,
+ target_name
+ );
+ }
+ FeatureValue::DepFeature { weak: true, .. } => {
+ anyhow::bail!(
+ "invalid feature `{}` in required-features of target `{}`: \
+ optional dependency with `?` is not allowed in required-features",
+ fv,
+ target_name
+ );
+ }
+ // Handling of dependent_crate/dependent_crate_feature syntax
+ FeatureValue::DepFeature {
+ dep_name,
+ dep_feature,
+ weak: false,
+ } => {
+ match resolve.deps(summary.package_id()).find(|(_dep_id, deps)| {
+ deps.iter().any(|dep| dep.name_in_toml() == *dep_name)
+ }) {
+ Some((dep_id, _deps)) => {
+ let dep_summary = resolve.summary(dep_id);
+ if !dep_summary.features().contains_key(dep_feature)
+ && !dep_summary.dependencies().iter().any(|dep| {
+ dep.name_in_toml() == *dep_feature && dep.is_optional()
+ })
+ {
+ shell.warn(format!(
+ "invalid feature `{}` in required-features of target `{}`: \
+ feature `{}` does not exist in package `{}`",
+ fv, target_name, dep_feature, dep_id
+ ))?;
+ }
+ }
+ None => {
+ shell.warn(format!(
+ "invalid feature `{}` in required-features of target `{}`: \
+ dependency `{}` does not exist",
+ fv, target_name, dep_name
+ ))?;
+ }
+ }
+ }
+ }
+ }
+ Ok(())
+ }
+
+ /// Converts proposals to units based on each target's required features.
+ fn proposals_to_units(&self, proposals: Vec<Proposal<'_>>) -> CargoResult<Vec<Unit>> {
+ // Only include targets that are libraries or have all required
+ // features available.
+ //
+ // `features_map` is a map of &Package -> enabled_features
+ // It is computed by the set of enabled features for the package plus
+ // every enabled feature of every enabled dependency.
+ let mut features_map = HashMap::new();
+ // This needs to be a set to de-duplicate units. Due to the way the
+ // targets are filtered, it is possible to have duplicate proposals for
+ // the same thing.
+ let mut units = HashSet::new();
+ for Proposal {
+ pkg,
+ target,
+ requires_features,
+ mode,
+ } in proposals
+ {
+ let unavailable_features = match target.required_features() {
+ Some(rf) => {
+ self.validate_required_features(target.name(), rf, pkg.summary())?;
+
+ let features = features_map.entry(pkg).or_insert_with(|| {
+ super::resolve_all_features(
+ self.resolve,
+ self.resolved_features,
+ self.package_set,
+ pkg.package_id(),
+ )
+ });
+ rf.iter().filter(|f| !features.contains(*f)).collect()
+ }
+ None => Vec::new(),
+ };
+ if target.is_lib() || unavailable_features.is_empty() {
+ units.extend(self.new_units(pkg, target, mode));
+ } else if requires_features {
+ let required_features = target.required_features().unwrap();
+ let quoted_required_features: Vec<String> = required_features
+ .iter()
+ .map(|s| format!("`{}`", s))
+ .collect();
+ anyhow::bail!(
+ "target `{}` in package `{}` requires the features: {}\n\
+ Consider enabling them by passing, e.g., `--features=\"{}\"`",
+ target.name(),
+ pkg.name(),
+ quoted_required_features.join(", "),
+ required_features.join(" ")
+ );
+ }
+ // else, silently skip target.
+ }
+ let mut units: Vec<_> = units.into_iter().collect();
+ self.unmatched_target_filters(&units)?;
+
+ // Keep the roots in a consistent order, which helps with checking test output.
+ units.sort_unstable();
+ Ok(units)
+ }
+
+ /// Generates all the base units for the packages the user has requested to
+ /// compile. Dependencies for these units are computed later in [`unit_dependencies`].
+ ///
+ /// [`unit_dependencies`]: crate::core::compiler::unit_dependencies
+ pub fn generate_root_units(&self) -> CargoResult<Vec<Unit>> {
+ let proposals = self.create_proposals()?;
+ self.proposals_to_units(proposals)
+ }
+
+ /// Generates units specifically for doc-scraping.
+ ///
+ /// This requires a separate entrypoint from [`generate_root_units`] because it
+ /// takes the documented units as input.
+ ///
+ /// [`generate_root_units`]: Self::generate_root_units
+ pub fn generate_scrape_units(&self, doc_units: &[Unit]) -> CargoResult<Vec<Unit>> {
+ let scrape_proposals = self.create_docscrape_proposals(&doc_units)?;
+ let scrape_units = self.proposals_to_units(scrape_proposals)?;
+ Ok(scrape_units)
+ }
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_config.rs b/src/tools/cargo/src/cargo/ops/cargo_config.rs
new file mode 100644
index 000000000..2277bd6f8
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_config.rs
@@ -0,0 +1,310 @@
+//! Implementation of `cargo config` subcommand.
+
+use crate::util::config::{Config, ConfigKey, ConfigValue as CV, Definition};
+use crate::util::errors::CargoResult;
+use crate::{drop_eprintln, drop_println};
+use anyhow::{bail, format_err, Error};
+use serde_json::json;
+use std::borrow::Cow;
+use std::fmt;
+use std::str::FromStr;
+
+pub enum ConfigFormat {
+ Toml,
+ Json,
+ JsonValue,
+}
+
+impl ConfigFormat {
+ /// For clap.
+ pub const POSSIBLE_VALUES: [&'static str; 3] = ["toml", "json", "json-value"];
+}
+
+impl FromStr for ConfigFormat {
+ type Err = Error;
+ fn from_str(s: &str) -> CargoResult<Self> {
+ match s {
+ "toml" => Ok(ConfigFormat::Toml),
+ "json" => Ok(ConfigFormat::Json),
+ "json-value" => Ok(ConfigFormat::JsonValue),
+ f => bail!("unknown config format `{}`", f),
+ }
+ }
+}
+
+impl fmt::Display for ConfigFormat {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ ConfigFormat::Toml => write!(f, "toml"),
+ ConfigFormat::Json => write!(f, "json"),
+ ConfigFormat::JsonValue => write!(f, "json-value"),
+ }
+ }
+}
+
+/// Options for `cargo config get`.
+pub struct GetOptions<'a> {
+ pub key: Option<&'a str>,
+ pub format: ConfigFormat,
+ pub show_origin: bool,
+ pub merged: bool,
+}
+
+pub fn get(config: &Config, opts: &GetOptions<'_>) -> CargoResult<()> {
+ if opts.show_origin && !matches!(opts.format, ConfigFormat::Toml) {
+ bail!(
+ "the `{}` format does not support --show-origin, try the `toml` format instead",
+ opts.format
+ );
+ }
+ let key = match opts.key {
+ Some(key) => ConfigKey::from_str(key),
+ None => ConfigKey::new(),
+ };
+ if opts.merged {
+ let cv = config
+ .get_cv_with_env(&key)?
+ .ok_or_else(|| format_err!("config value `{}` is not set", key))?;
+ match opts.format {
+ ConfigFormat::Toml => print_toml(config, opts, &key, &cv),
+ ConfigFormat::Json => print_json(config, &key, &cv, true),
+ ConfigFormat::JsonValue => print_json(config, &key, &cv, false),
+ }
+ if let Some(env) = maybe_env(config, &key, &cv) {
+ match opts.format {
+ ConfigFormat::Toml => print_toml_env(config, &env),
+ ConfigFormat::Json | ConfigFormat::JsonValue => print_json_env(config, &env),
+ }
+ }
+ } else {
+ match &opts.format {
+ ConfigFormat::Toml => print_toml_unmerged(config, opts, &key)?,
+ format => bail!(
+ "the `{}` format does not support --merged=no, try the `toml` format instead",
+ format
+ ),
+ }
+ }
+ Ok(())
+}
+
+/// Checks for environment variables that might be used.
+fn maybe_env<'config>(
+ config: &'config Config,
+ key: &ConfigKey,
+ cv: &CV,
+) -> Option<Vec<(&'config str, &'config str)>> {
+ // Only tables need this extra check: fetching a table cannot pick up env
+ // values, whereas leaf entries load them properly on their own.
+ match cv {
+ CV::Table(_map, _def) => {}
+ _ => return None,
+ }
+ let mut env: Vec<_> = config
+ .env()
+ .filter(|(env_key, _val)| env_key.starts_with(&format!("{}_", key.as_env_key())))
+ .collect();
+ env.sort_by_key(|x| x.0);
+ if env.is_empty() {
+ None
+ } else {
+ Some(env)
+ }
+}
+
+fn print_toml(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey, cv: &CV) {
+ let origin = |def: &Definition| -> String {
+ if !opts.show_origin {
+ return "".to_string();
+ }
+ format!(" # {}", def)
+ };
+ match cv {
+ CV::Boolean(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)),
+ CV::Integer(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)),
+ CV::String(val, def) => drop_println!(
+ config,
+ "{} = {}{}",
+ key,
+ toml_edit::Value::from(val),
+ origin(def)
+ ),
+ CV::List(vals, _def) => {
+ if opts.show_origin {
+ drop_println!(config, "{} = [", key);
+ for (val, def) in vals {
+ drop_println!(
+ config,
+ " {}, # {}",
+ serde::Serialize::serialize(val, toml_edit::ser::ValueSerializer::new())
+ .unwrap(),
+ def
+ );
+ }
+ drop_println!(config, "]");
+ } else {
+ let vals: toml_edit::Array = vals.iter().map(|x| &x.0).collect();
+ drop_println!(config, "{} = {}", key, vals);
+ }
+ }
+ CV::Table(table, _def) => {
+ let mut key_vals: Vec<_> = table.iter().collect();
+ key_vals.sort_by(|a, b| a.0.cmp(b.0));
+ for (table_key, val) in key_vals {
+ let mut subkey = key.clone();
+ // push or push_sensitive shouldn't matter here, since this is
+ // not dealing with environment variables.
+ subkey.push(table_key);
+ print_toml(config, opts, &subkey, val);
+ }
+ }
+ }
+}
+
+fn print_toml_env(config: &Config, env: &[(&str, &str)]) {
+ drop_println!(
+ config,
+ "# The following environment variables may affect the loaded values."
+ );
+ for (env_key, env_value) in env {
+ let val = shell_escape::escape(Cow::Borrowed(env_value));
+ drop_println!(config, "# {}={}", env_key, val);
+ }
+}
+
+fn print_json_env(config: &Config, env: &[(&str, &str)]) {
+ drop_eprintln!(
+ config,
+ "note: The following environment variables may affect the loaded values."
+ );
+ for (env_key, env_value) in env {
+ let val = shell_escape::escape(Cow::Borrowed(env_value));
+ drop_eprintln!(config, "{}={}", env_key, val);
+ }
+}
+
+fn print_json(config: &Config, key: &ConfigKey, cv: &CV, include_key: bool) {
+ let json_value = if key.is_root() || !include_key {
+ cv_to_json(cv)
+ } else {
+ let mut parts: Vec<_> = key.parts().collect();
+ let last_part = parts.pop().unwrap();
+ let mut root_table = json!({});
+ // Create a JSON object with nested keys up to the value being displayed.
+ let mut table = &mut root_table;
+ for part in parts {
+ table[part] = json!({});
+ table = table.get_mut(part).unwrap();
+ }
+ table[last_part] = cv_to_json(cv);
+ root_table
+ };
+ drop_println!(config, "{}", serde_json::to_string(&json_value).unwrap());
+
+ // Helper for recursively converting a CV to JSON.
+ fn cv_to_json(cv: &CV) -> serde_json::Value {
+ match cv {
+ CV::Boolean(val, _def) => json!(val),
+ CV::Integer(val, _def) => json!(val),
+ CV::String(val, _def) => json!(val),
+ CV::List(vals, _def) => {
+ let jvals: Vec<_> = vals.iter().map(|(val, _def)| json!(val)).collect();
+ json!(jvals)
+ }
+ CV::Table(map, _def) => {
+ let mut table = json!({});
+ for (key, val) in map {
+ table[key] = cv_to_json(val);
+ }
+ table
+ }
+ }
+ }
+}
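
The non-root branch above rebuilds the key path as nested JSON objects so the printed value keeps its full location (e.g. `{"build":{"jobs":4}}`). A standalone sketch of that nesting step with `serde_json`, using a hypothetical `nest` helper:

    use serde_json::{json, Value};

    // Wraps `leaf` in nested objects, one level per key part, e.g.
    // nest(&["build", "jobs"], json!(4)) == json!({"build": {"jobs": 4}}).
    fn nest(parts: &[&str], leaf: Value) -> Value {
        let Some((last, init)) = parts.split_last() else { return leaf };
        let mut root = json!({});
        let mut cursor = &mut root;
        for part in init {
            cursor[*part] = json!({});
            cursor = cursor.get_mut(*part).unwrap();
        }
        cursor[*last] = leaf;
        root
    }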
+
+fn print_toml_unmerged(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey) -> CargoResult<()> {
+ let print_table = |cv: &CV| {
+ drop_println!(config, "# {}", cv.definition());
+ print_toml(config, opts, &ConfigKey::new(), cv);
+ drop_println!(config, "");
+ };
+ // This removes entries from the given CV so that all that remains is the
+ // given key. Returns false if no entries were found.
+ fn trim_cv(mut cv: &mut CV, key: &ConfigKey) -> CargoResult<bool> {
+ for (i, part) in key.parts().enumerate() {
+ match cv {
+ CV::Table(map, _def) => {
+ map.retain(|key, _value| key == part);
+ match map.get_mut(part) {
+ Some(val) => cv = val,
+ None => return Ok(false),
+ }
+ }
+ _ => {
+ let mut key_so_far = ConfigKey::new();
+ for part in key.parts().take(i) {
+ key_so_far.push(part);
+ }
+ bail!(
+ "expected table for configuration key `{}`, \
+ but found {} in {}",
+ key_so_far,
+ cv.desc(),
+ cv.definition()
+ )
+ }
+ }
+ }
+ Ok(match cv {
+ CV::Table(map, _def) => !map.is_empty(),
+ _ => true,
+ })
+ }
+
+ let mut cli_args = config.cli_args_as_table()?;
+ if trim_cv(&mut cli_args, key)? {
+ print_table(&cli_args);
+ }
+
+ // This slurps up some extra env vars that aren't technically part of the
+ // "config" (or are special-cased). I'm personally fine with just keeping
+ // them here, though it might be confusing. The vars I'm aware of:
+ //
+ // * CARGO
+ // * CARGO_HOME
+ // * CARGO_NAME
+ // * CARGO_EMAIL
+ // * CARGO_INCREMENTAL
+ // * CARGO_TARGET_DIR
+ // * CARGO_CACHE_RUSTC_INFO
+ //
+ // All of these except CARGO, CARGO_HOME, and CARGO_CACHE_RUSTC_INFO are
+ // actually part of the config, but they are special-cased in the code.
+ //
+ // TODO: It might be a good idea to teach the Config loader to support
+ // environment variable aliases so that these special cases are less
+ // special, and will just naturally get loaded as part of the config.
+ let mut env: Vec<_> = config
+ .env()
+ .filter(|(env_key, _val)| env_key.starts_with(key.as_env_key()))
+ .collect();
+ if !env.is_empty() {
+ env.sort_by_key(|x| x.0);
+ drop_println!(config, "# Environment variables");
+ for (key, value) in env {
+ // Displaying this in "shell" syntax instead of TOML, since that
+ // somehow makes more sense to me.
+ let val = shell_escape::escape(Cow::Borrowed(value));
+ drop_println!(config, "# {}={}", key, val);
+ }
+ drop_println!(config, "");
+ }
+
+ let unmerged = config.load_values_unmerged()?;
+ for mut cv in unmerged {
+ if trim_cv(&mut cv, key)? {
+ print_table(&cv);
+ }
+ }
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_doc.rs b/src/tools/cargo/src/cargo/ops/cargo_doc.rs
new file mode 100644
index 000000000..afa6ac327
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_doc.rs
@@ -0,0 +1,75 @@
+use crate::core::{Shell, Workspace};
+use crate::ops;
+use crate::util::config::{Config, PathAndArgs};
+use crate::util::CargoResult;
+use std::path::Path;
+use std::path::PathBuf;
+use std::process::Command;
+
+/// Strongly typed options for the `cargo doc` command.
+#[derive(Debug)]
+pub struct DocOptions {
+ /// Whether to attempt to open the browser after compiling the docs
+ pub open_result: bool,
+ /// Options to pass through to the compiler
+ pub compile_opts: ops::CompileOptions,
+}
+
+/// Main method for `cargo doc`.
+pub fn doc(ws: &Workspace<'_>, options: &DocOptions) -> CargoResult<()> {
+ let compilation = ops::compile(ws, &options.compile_opts)?;
+
+ if options.open_result {
+ let name = &compilation
+ .root_crate_names
+ .get(0)
+ .ok_or_else(|| anyhow::anyhow!("no crates with documentation"))?;
+ let kind = options.compile_opts.build_config.single_requested_kind()?;
+ let path = compilation.root_output[&kind]
+ .with_file_name("doc")
+ .join(&name)
+ .join("index.html");
+ if path.exists() {
+ let config_browser = {
+ let cfg: Option<PathAndArgs> = ws.config().get("doc.browser")?;
+ cfg.map(|path_args| (path_args.path.resolve_program(ws.config()), path_args.args))
+ };
+
+ let mut shell = ws.config().shell();
+ shell.status("Opening", path.display())?;
+ open_docs(&path, &mut shell, config_browser, ws.config())?;
+ }
+ }
+
+ Ok(())
+}
+
+fn open_docs(
+ path: &Path,
+ shell: &mut Shell,
+ config_browser: Option<(PathBuf, Vec<String>)>,
+ config: &Config,
+) -> CargoResult<()> {
+ let browser =
+ config_browser.or_else(|| Some((PathBuf::from(config.get_env_os("BROWSER")?), Vec::new())));
+
+ match browser {
+ Some((browser, initial_args)) => {
+ if let Err(e) = Command::new(&browser).args(initial_args).arg(path).status() {
+ shell.warn(format!(
+ "Couldn't open docs with {}: {}",
+ browser.to_string_lossy(),
+ e
+ ))?;
+ }
+ }
+ None => {
+ if let Err(e) = opener::open(&path) {
+ let e = e.into();
+ crate::display_warning_with_error("couldn't open docs", &e, shell);
+ }
+ }
+ };
+
+ Ok(())
+}
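
The lookup order above is: an explicit `doc.browser` configuration value, then the `BROWSER` environment variable, then the `opener` crate's platform default. A rough standalone approximation of that fallback chain using only `std` (hypothetical `open_in_browser`; the real code also threads Cargo's shell for warnings):

    use std::env;
    use std::path::Path;
    use std::process::Command;

    // Tries an explicitly configured browser first, then $BROWSER, then gives up
    // (where Cargo would instead fall back to the `opener` crate).
    fn open_in_browser(path: &Path, configured: Option<&str>) -> std::io::Result<()> {
        let browser = configured
            .map(str::to_string)
            .or_else(|| env::var("BROWSER").ok());
        match browser {
            Some(cmd) => Command::new(cmd).arg(path).status().map(|_| ()),
            None => {
                eprintln!("no browser configured; a platform opener would be used here");
                Ok(())
            }
        }
    }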
diff --git a/src/tools/cargo/src/cargo/ops/cargo_fetch.rs b/src/tools/cargo/src/cargo/ops/cargo_fetch.rs
new file mode 100644
index 000000000..bac3f0278
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_fetch.rs
@@ -0,0 +1,79 @@
+use crate::core::compiler::standard_lib;
+use crate::core::compiler::{BuildConfig, CompileMode, RustcTargetData};
+use crate::core::{PackageSet, Resolve, Workspace};
+use crate::ops;
+use crate::util::CargoResult;
+use crate::util::Config;
+use std::collections::HashSet;
+
+pub struct FetchOptions<'a> {
+ pub config: &'a Config,
+ /// The target arch triple to fetch dependencies for
+ pub targets: Vec<String>,
+}
+
+/// Executes `cargo fetch`.
+pub fn fetch<'a>(
+ ws: &Workspace<'a>,
+ options: &FetchOptions<'a>,
+) -> CargoResult<(Resolve, PackageSet<'a>)> {
+ ws.emit_warnings()?;
+ let (mut packages, resolve) = ops::resolve_ws(ws)?;
+
+ let jobs = Some(1);
+ let keep_going = false;
+ let config = ws.config();
+ let build_config = BuildConfig::new(
+ config,
+ jobs,
+ keep_going,
+ &options.targets,
+ CompileMode::Build,
+ )?;
+ let data = RustcTargetData::new(ws, &build_config.requested_kinds)?;
+ let mut fetched_packages = HashSet::new();
+ let mut deps_to_fetch = ws.members().map(|p| p.package_id()).collect::<Vec<_>>();
+ let mut to_download = Vec::new();
+
+ while let Some(id) = deps_to_fetch.pop() {
+ if !fetched_packages.insert(id) {
+ continue;
+ }
+
+ to_download.push(id);
+ let deps = resolve
+ .deps(id)
+ .filter(|&(_id, deps)| {
+ deps.iter().any(|d| {
+ // If no target was specified then all dependencies are
+ // fetched.
+ if options.targets.is_empty() {
+ return true;
+ }
+
+ // Otherwise we only download this dependency if any of the
+ // requested platforms would match this dependency. Note
+ // that this is a bit lossy because not all dependencies are
+ // always compiled for all platforms, but it should be
+ // "close enough" for now.
+ build_config
+ .requested_kinds
+ .iter()
+ .any(|kind| data.dep_platform_activated(d, *kind))
+ })
+ })
+ .map(|(id, _deps)| id);
+ deps_to_fetch.extend(deps);
+ }
+
+ // If -Zbuild-std was passed, download dependencies for the standard library.
+ // We don't know ahead of time what jobs we'll be running, so tell `std_crates` that.
+ if let Some(crates) = standard_lib::std_crates(config, None) {
+ let (std_package_set, _, _) = standard_lib::resolve_std(ws, &data, &build_config, &crates)?;
+ packages.add_set(std_package_set);
+ }
+
+ packages.get_many(to_download)?;
+
+ Ok((resolve, packages))
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs b/src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs
new file mode 100644
index 000000000..6267b08f5
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs
@@ -0,0 +1,257 @@
+use crate::core::registry::PackageRegistry;
+use crate::core::resolver::features::{CliFeatures, HasDevUnits};
+use crate::core::{PackageId, PackageIdSpec};
+use crate::core::{Resolve, SourceId, Workspace};
+use crate::ops;
+use crate::util::config::Config;
+use crate::util::CargoResult;
+use anyhow::Context;
+use log::debug;
+use std::collections::{BTreeMap, HashSet};
+use termcolor::Color::{self, Cyan, Green, Red, Yellow};
+
+pub struct UpdateOptions<'a> {
+ pub config: &'a Config,
+ pub to_update: Vec<String>,
+ pub precise: Option<&'a str>,
+ pub aggressive: bool,
+ pub dry_run: bool,
+ pub workspace: bool,
+}
+
+pub fn generate_lockfile(ws: &Workspace<'_>) -> CargoResult<()> {
+ let mut registry = PackageRegistry::new(ws.config())?;
+ let mut resolve = ops::resolve_with_previous(
+ &mut registry,
+ ws,
+ &CliFeatures::new_all(true),
+ HasDevUnits::Yes,
+ None,
+ None,
+ &[],
+ true,
+ )?;
+ ops::write_pkg_lockfile(ws, &mut resolve)?;
+ Ok(())
+}
+
+pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoResult<()> {
+ if opts.aggressive && opts.precise.is_some() {
+ anyhow::bail!("cannot specify both aggressive and precise simultaneously")
+ }
+
+ if ws.members().count() == 0 {
+ anyhow::bail!("you can't generate a lockfile for an empty workspace.")
+ }
+
+ // Updates often require a lot of modifications to the registry, so ensure
+ // that we're synchronized against other Cargos.
+ let _lock = ws.config().acquire_package_cache_lock()?;
+
+ let previous_resolve = match ops::load_pkg_lockfile(ws)? {
+ Some(resolve) => resolve,
+ None => {
+ match opts.precise {
+ None => return generate_lockfile(ws),
+
+ // Precise option specified, so calculate a previous_resolve required
+ // by precise package update later.
+ Some(_) => {
+ let mut registry = PackageRegistry::new(opts.config)?;
+ ops::resolve_with_previous(
+ &mut registry,
+ ws,
+ &CliFeatures::new_all(true),
+ HasDevUnits::Yes,
+ None,
+ None,
+ &[],
+ true,
+ )?
+ }
+ }
+ }
+ };
+ let mut registry = PackageRegistry::new(opts.config)?;
+ let mut to_avoid = HashSet::new();
+
+ if opts.to_update.is_empty() {
+ if !opts.workspace {
+ to_avoid.extend(previous_resolve.iter());
+ to_avoid.extend(previous_resolve.unused_patches());
+ }
+ } else {
+ let mut sources = Vec::new();
+ for name in opts.to_update.iter() {
+ let dep = previous_resolve.query(name)?;
+ if opts.aggressive {
+ fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new());
+ } else {
+ to_avoid.insert(dep);
+ sources.push(match opts.precise {
+ Some(precise) => {
+ // TODO: see comment in `resolve.rs` as well, but this
+ // seems like a pretty hokey reason to single out
+ // the registry as well.
+ let precise = if dep.source_id().is_registry() {
+ semver::Version::parse(precise).with_context(|| {
+ format!("invalid version format for precise version `{}`", precise)
+ })?;
+ format!("{}={}->{}", dep.name(), dep.version(), precise)
+ } else {
+ precise.to_string()
+ };
+ dep.source_id().with_precise(Some(precise))
+ }
+ None => dep.source_id().with_precise(None),
+ });
+ }
+ if let Ok(unused_id) =
+ PackageIdSpec::query_str(name, previous_resolve.unused_patches().iter().cloned())
+ {
+ to_avoid.insert(unused_id);
+ }
+ }
+
+ registry.add_sources(sources)?;
+ }
+
+ let mut resolve = ops::resolve_with_previous(
+ &mut registry,
+ ws,
+ &CliFeatures::new_all(true),
+ HasDevUnits::Yes,
+ Some(&previous_resolve),
+ Some(&to_avoid),
+ &[],
+ true,
+ )?;
+
+ // Summarize what is changing for the user.
+ let print_change = |status: &str, msg: String, color: Color| {
+ opts.config.shell().status_with_color(status, msg, color)
+ };
+ for (removed, added) in compare_dependency_graphs(&previous_resolve, &resolve) {
+ if removed.len() == 1 && added.len() == 1 {
+ let msg = if removed[0].source_id().is_git() {
+ format!(
+ "{} -> #{}",
+ removed[0],
+ &added[0].source_id().precise().unwrap()[..8]
+ )
+ } else {
+ format!("{} -> v{}", removed[0], added[0].version())
+ };
+
+ if removed[0].version() > added[0].version() {
+ print_change("Downgrading", msg, Yellow)?;
+ } else {
+ print_change("Updating", msg, Green)?;
+ }
+ } else {
+ for package in removed.iter() {
+ print_change("Removing", format!("{}", package), Red)?;
+ }
+ for package in added.iter() {
+ print_change("Adding", format!("{}", package), Cyan)?;
+ }
+ }
+ }
+ if opts.dry_run {
+ opts.config
+ .shell()
+ .warn("not updating lockfile due to dry run")?;
+ } else {
+ ops::write_pkg_lockfile(ws, &mut resolve)?;
+ }
+ return Ok(());
+
+ fn fill_with_deps<'a>(
+ resolve: &'a Resolve,
+ dep: PackageId,
+ set: &mut HashSet<PackageId>,
+ visited: &mut HashSet<PackageId>,
+ ) {
+ if !visited.insert(dep) {
+ return;
+ }
+ set.insert(dep);
+ for (dep, _) in resolve.deps_not_replaced(dep) {
+ fill_with_deps(resolve, dep, set, visited);
+ }
+ }
+
+ fn compare_dependency_graphs(
+ previous_resolve: &Resolve,
+ resolve: &Resolve,
+ ) -> Vec<(Vec<PackageId>, Vec<PackageId>)> {
+ fn key(dep: PackageId) -> (&'static str, SourceId) {
+ (dep.name().as_str(), dep.source_id())
+ }
+
+ // Removes all package IDs in `b` from `a`. Note that this is somewhat
+ // more complicated because the equality for source IDs does not take
+ // precise versions into account (e.g., git shas), but we want to take
+ // that into account here.
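+        //
+        // For example (revisions illustrative), a git package pinned at
+        // `#abc1234` in `a` and at `#def5678` in `b` compares equal as a
+        // `PackageId`, yet it should still be reported as a change here.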
+ fn vec_subtract(a: &[PackageId], b: &[PackageId]) -> Vec<PackageId> {
+ a.iter()
+ .filter(|a| {
+ // If this package ID is not found in `b`, then it's definitely
+ // in the subtracted set.
+ let i = match b.binary_search(a) {
+ Ok(i) => i,
+ Err(..) => return true,
+ };
+
+ // If we've found `a` in `b`, then we iterate over all instances
+ // (we know `b` is sorted) and see if they all have different
+ // precise versions. If so, then `a` isn't actually in `b` so
+ // we'll let it through.
+ //
+ // Note that we only check this for non-registry sources,
+ // however, as registries contain enough version information in
+ // the package ID to disambiguate.
+ if a.source_id().is_registry() {
+ return false;
+ }
+ b[i..]
+ .iter()
+ .take_while(|b| a == b)
+ .all(|b| a.source_id().precise() != b.source_id().precise())
+ })
+ .cloned()
+ .collect()
+ }
+
+ // Map `(package name, package source)` to `(removed versions, added versions)`.
+ let mut changes = BTreeMap::new();
+ let empty = (Vec::new(), Vec::new());
+ for dep in previous_resolve.iter() {
+ changes
+ .entry(key(dep))
+ .or_insert_with(|| empty.clone())
+ .0
+ .push(dep);
+ }
+ for dep in resolve.iter() {
+ changes
+ .entry(key(dep))
+ .or_insert_with(|| empty.clone())
+ .1
+ .push(dep);
+ }
+
+ for v in changes.values_mut() {
+ let (ref mut old, ref mut new) = *v;
+ old.sort();
+ new.sort();
+ let removed = vec_subtract(old, new);
+ let added = vec_subtract(new, old);
+ *old = removed;
+ *new = added;
+ }
+ debug!("{:#?}", changes);
+
+ changes.into_iter().map(|(_, v)| v).collect()
+ }
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_install.rs b/src/tools/cargo/src/cargo/ops/cargo_install.rs
new file mode 100644
index 000000000..53c3e72f7
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_install.rs
@@ -0,0 +1,911 @@
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+use std::{env, fs};
+
+use crate::core::compiler::{CompileKind, DefaultExecutor, Executor, UnitOutput};
+use crate::core::{Dependency, Edition, Package, PackageId, Source, SourceId, Target, Workspace};
+use crate::ops::{common_for_install_and_uninstall::*, FilterRule};
+use crate::ops::{CompileFilter, Packages};
+use crate::sources::{GitSource, PathSource, SourceConfigMap};
+use crate::util::errors::CargoResult;
+use crate::util::{Config, Filesystem, Rustc, ToSemver, VersionReqExt};
+use crate::{drop_println, ops};
+
+use anyhow::{bail, format_err, Context as _};
+use cargo_util::paths;
+use itertools::Itertools;
+use semver::VersionReq;
+use tempfile::Builder as TempFileBuilder;
+
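+/// RAII guard for the set of binaries copied into the install directory so
+/// far. If the install fails partway through, dropping the guard removes the
+/// already-copied binaries; calling `success` keeps them.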
+struct Transaction {
+ bins: Vec<PathBuf>,
+}
+
+impl Transaction {
+ fn success(mut self) {
+ self.bins.clear();
+ }
+}
+
+impl Drop for Transaction {
+ fn drop(&mut self) {
+ for bin in self.bins.iter() {
+ let _ = paths::remove_file(bin);
+ }
+ }
+}
+
+struct InstallablePackage<'cfg, 'a> {
+ config: &'cfg Config,
+ opts: ops::CompileOptions,
+ root: Filesystem,
+ source_id: SourceId,
+ vers: Option<&'a str>,
+ force: bool,
+ no_track: bool,
+
+ pkg: Package,
+ ws: Workspace<'cfg>,
+ rustc: Rustc,
+ target: String,
+}
+
+impl<'cfg, 'a> InstallablePackage<'cfg, 'a> {
+    /// Returns the package to install, or `None` if the package is already installed.
+ pub fn new(
+ config: &'cfg Config,
+ root: Filesystem,
+ map: SourceConfigMap<'_>,
+ krate: Option<&str>,
+ source_id: SourceId,
+ from_cwd: bool,
+ vers: Option<&'a str>,
+ original_opts: &'a ops::CompileOptions,
+ force: bool,
+ no_track: bool,
+ needs_update_if_source_is_index: bool,
+ ) -> CargoResult<Option<InstallablePackage<'cfg, 'a>>> {
+ if let Some(name) = krate {
+ if name == "." {
+ bail!(
+                "To install the binaries for the package in the current working \
+                directory, use `cargo install --path .`. \n\
+ Use `cargo build` if you want to simply build the package."
+ )
+ }
+ }
+
+ let dst = root.join("bin").into_path_unlocked();
+ let pkg = {
+ let dep = {
+ if let Some(krate) = krate {
+ let vers = if let Some(vers_flag) = vers {
+ Some(parse_semver_flag(vers_flag)?.to_string())
+ } else if source_id.is_registry() {
+                        // Avoid pre-release versions from crates.io
+ // unless explicitly asked for
+ Some(String::from("*"))
+ } else {
+ None
+ };
+ Some(Dependency::parse(krate, vers.as_deref(), source_id)?)
+ } else {
+ None
+ }
+ };
+
+ if source_id.is_git() {
+ let mut source = GitSource::new(source_id, config)?;
+ select_pkg(
+ &mut source,
+ dep,
+ |git: &mut GitSource<'_>| git.read_packages(),
+ config,
+ )?
+ } else if source_id.is_path() {
+ let mut src = path_source(source_id, config)?;
+ if !src.path().is_dir() {
+ bail!(
+ "`{}` is not a directory. \
+ --path must point to a directory containing a Cargo.toml file.",
+ src.path().display()
+ )
+ }
+ if !src.path().join("Cargo.toml").exists() {
+ if from_cwd {
+ bail!(
+ "`{}` is not a crate root; specify a crate to \
+ install from crates.io, or use --path or --git to \
+ specify an alternate source",
+ src.path().display()
+ );
+ } else if src.path().join("cargo.toml").exists() {
+ bail!(
+                        "`{}` does not contain a Cargo.toml file, but found cargo.toml, please try to rename it to Cargo.toml. \
+ --path must point to a directory containing a Cargo.toml file.",
+ src.path().display()
+ )
+ } else {
+ bail!(
+ "`{}` does not contain a Cargo.toml file. \
+ --path must point to a directory containing a Cargo.toml file.",
+ src.path().display()
+ )
+ }
+ }
+ select_pkg(
+ &mut src,
+ dep,
+ |path: &mut PathSource<'_>| path.read_packages(),
+ config,
+ )?
+ } else if let Some(dep) = dep {
+ let mut source = map.load(source_id, &HashSet::new())?;
+ if let Ok(Some(pkg)) = installed_exact_package(
+ dep.clone(),
+ &mut source,
+ config,
+ original_opts,
+ &root,
+ &dst,
+ force,
+ ) {
+ let msg = format!(
+ "package `{}` is already installed, use --force to override",
+ pkg
+ );
+ config.shell().status("Ignored", &msg)?;
+ return Ok(None);
+ }
+ select_dep_pkg(&mut source, dep, config, needs_update_if_source_is_index)?
+ } else {
+ bail!(
+ "must specify a crate to install from \
+ crates.io, or use --path or --git to \
+ specify alternate source"
+ )
+ }
+ };
+
+ // When we build this package, we want to build the *specified* package only,
+ // and avoid building e.g. workspace default-members instead. Do so by constructing
+ // specialized compile options specific to the identified package.
+ // See test `path_install_workspace_root_despite_default_members`.
+ let mut opts = original_opts.clone();
+ opts.spec = Packages::Packages(vec![pkg.name().to_string()]);
+
+ let (ws, rustc, target) = make_ws_rustc_target(config, &opts, &source_id, pkg.clone())?;
+        // If we're installing in --locked mode and there's no `Cargo.lock` published,
+        // i.e. the bin was published before https://github.com/rust-lang/cargo/pull/7026, warn.
+ if config.locked() && !ws.root().join("Cargo.lock").exists() {
+ config.shell().warn(format!(
+ "no Cargo.lock file published in {}",
+ pkg.to_string()
+ ))?;
+ }
+ let pkg = if source_id.is_git() {
+ // Don't use ws.current() in order to keep the package source as a git source so that
+ // install tracking uses the correct source.
+ pkg
+ } else {
+ ws.current()?.clone()
+ };
+
+ if from_cwd {
+ if pkg.manifest().edition() == Edition::Edition2015 {
+ config.shell().warn(
+ "Using `cargo install` to install the binaries for the \
+                 package in the current working directory is deprecated, \
+ use `cargo install --path .` instead. \
+ Use `cargo build` if you want to simply build the package.",
+ )?
+ } else {
+ bail!(
+ "Using `cargo install` to install the binaries for the \
+                    package in the current working directory is no longer supported, \
+ use `cargo install --path .` instead. \
+ Use `cargo build` if you want to simply build the package."
+ )
+ }
+ };
+
+ // For bare `cargo install` (no `--bin` or `--example`), check if there is
+ // *something* to install. Explicit `--bin` or `--example` flags will be
+ // checked at the start of `compile_ws`.
+ if !opts.filter.is_specific() && !pkg.targets().iter().any(|t| t.is_bin()) {
+ bail!(
+ "there is nothing to install in `{}`, because it has no binaries\n\
+ `cargo install` is only for installing programs, and can't be used with libraries.\n\
+ To use a library crate, add it as a dependency to a Cargo project with `cargo add`.",
+ pkg,
+ );
+ }
+
+ let ip = InstallablePackage {
+ config,
+ opts,
+ root,
+ source_id,
+ vers,
+ force,
+ no_track,
+
+ pkg,
+ ws,
+ rustc,
+ target,
+ };
+
+        // WARNING: no_track does not perform locking, so there is no protection
+        // against concurrent installs.
+ if no_track {
+ // Check for conflicts.
+ ip.no_track_duplicates(&dst)?;
+ } else if is_installed(
+ &ip.pkg, config, &ip.opts, &ip.rustc, &ip.target, &ip.root, &dst, force,
+ )? {
+ let msg = format!(
+ "package `{}` is already installed, use --force to override",
+ ip.pkg
+ );
+ config.shell().status("Ignored", &msg)?;
+ return Ok(None);
+ }
+
+ Ok(Some(ip))
+ }
+
+ fn no_track_duplicates(&self, dst: &Path) -> CargoResult<BTreeMap<String, Option<PackageId>>> {
+ // Helper for --no-track flag to make sure it doesn't overwrite anything.
+ let duplicates: BTreeMap<String, Option<PackageId>> =
+ exe_names(&self.pkg, &self.opts.filter)
+ .into_iter()
+ .filter(|name| dst.join(name).exists())
+ .map(|name| (name, None))
+ .collect();
+ if !self.force && !duplicates.is_empty() {
+ let mut msg: Vec<String> = duplicates
+ .iter()
+ .map(|(name, _)| {
+ format!(
+ "binary `{}` already exists in destination `{}`",
+ name,
+ dst.join(name).to_string_lossy()
+ )
+ })
+ .collect();
+ msg.push("Add --force to overwrite".to_string());
+ bail!("{}", msg.join("\n"));
+ }
+ Ok(duplicates)
+ }
+
+ fn install_one(mut self) -> CargoResult<bool> {
+ self.config.shell().status("Installing", &self.pkg)?;
+
+ let dst = self.root.join("bin").into_path_unlocked();
+
+ let mut td_opt = None;
+ let mut needs_cleanup = false;
+ if !self.source_id.is_path() {
+ let target_dir = if let Some(dir) = self.config.target_dir()? {
+ dir
+ } else if let Ok(td) = TempFileBuilder::new().prefix("cargo-install").tempdir() {
+ let p = td.path().to_owned();
+ td_opt = Some(td);
+ Filesystem::new(p)
+ } else {
+ needs_cleanup = true;
+ Filesystem::new(self.config.cwd().join("target-install"))
+ };
+ self.ws.set_target_dir(target_dir);
+ }
+
+ self.check_yanked_install()?;
+
+ let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
+ let compile = ops::compile_ws(&self.ws, &self.opts, &exec).with_context(|| {
+ if let Some(td) = td_opt.take() {
+ // preserve the temporary directory, so the user can inspect it
+ drop(td.into_path());
+ }
+
+ format!(
+ "failed to compile `{}`, intermediate artifacts can be \
+ found at `{}`",
+ self.pkg,
+ self.ws.target_dir().display()
+ )
+ })?;
+ let mut binaries: Vec<(&str, &Path)> = compile
+ .binaries
+ .iter()
+ .map(|UnitOutput { path, .. }| {
+ let name = path.file_name().unwrap();
+ if let Some(s) = name.to_str() {
+ Ok((s, path.as_ref()))
+ } else {
+                    bail!("binary name `{:?}` can't be serialized into a string", name)
+ }
+ })
+ .collect::<CargoResult<_>>()?;
+ if binaries.is_empty() {
+ // Cargo already warns the user if they use a target specifier that matches nothing,
+ // but we want to error if the user asked for a _particular_ binary to be installed,
+ // and we didn't end up installing it.
+ //
+ // NOTE: This _should_ be impossible to hit since --bin=does_not_exist will fail on
+ // target selection, and --bin=requires_a without --features=a will fail with "target
+ // .. requires the features ..". But rather than assume that's the case, we define the
+ // behavior for this fallback case as well.
+ if let CompileFilter::Only { bins, examples, .. } = &self.opts.filter {
+ let mut any_specific = false;
+ if let FilterRule::Just(ref v) = bins {
+ if !v.is_empty() {
+ any_specific = true;
+ }
+ }
+ if let FilterRule::Just(ref v) = examples {
+ if !v.is_empty() {
+ any_specific = true;
+ }
+ }
+ if any_specific {
+ bail!("no binaries are available for install using the selected features");
+ }
+ }
+
+ // If there _are_ binaries available, but none were selected given the current set of
+ // features, let the user know.
+ //
+ // Note that we know at this point that _if_ bins or examples is set to `::Just`,
+ // they're `::Just([])`, which is `FilterRule::none()`.
+ let binaries: Vec<_> = self
+ .pkg
+ .targets()
+ .iter()
+ .filter(|t| t.is_executable())
+ .collect();
+ if !binaries.is_empty() {
+ self.config
+ .shell()
+ .warn(make_warning_about_missing_features(&binaries))?;
+ }
+
+ return Ok(false);
+ }
+ // This is primarily to make testing easier.
+ binaries.sort_unstable();
+
+ let (tracker, duplicates) = if self.no_track {
+ (None, self.no_track_duplicates(&dst)?)
+ } else {
+ let tracker = InstallTracker::load(self.config, &self.root)?;
+ let (_freshness, duplicates) = tracker.check_upgrade(
+ &dst,
+ &self.pkg,
+ self.force,
+ &self.opts,
+ &self.target,
+ &self.rustc.verbose_version,
+ )?;
+ (Some(tracker), duplicates)
+ };
+
+ paths::create_dir_all(&dst)?;
+
+ // Copy all binaries to a temporary directory under `dst` first, catching
+ // some failure modes (e.g., out of space) before touching the existing
+ // binaries. This directory will get cleaned up via RAII.
+ let staging_dir = TempFileBuilder::new()
+ .prefix("cargo-install")
+ .tempdir_in(&dst)?;
+ for &(bin, src) in binaries.iter() {
+ let dst = staging_dir.path().join(bin);
+ // Try to move if `target_dir` is transient.
+ if !self.source_id.is_path() && fs::rename(src, &dst).is_ok() {
+ continue;
+ }
+ paths::copy(src, &dst)?;
+ }
+
+ let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries
+ .iter()
+ .map(|&(bin, _)| bin)
+ .partition(|&bin| duplicates.contains_key(bin));
+
+ let mut installed = Transaction { bins: Vec::new() };
+ let mut successful_bins = BTreeSet::new();
+
+ // Move the temporary copies into `dst` starting with new binaries.
+ for bin in to_install.iter() {
+ let src = staging_dir.path().join(bin);
+ let dst = dst.join(bin);
+ self.config.shell().status("Installing", dst.display())?;
+ fs::rename(&src, &dst).with_context(|| {
+ format!("failed to move `{}` to `{}`", src.display(), dst.display())
+ })?;
+ installed.bins.push(dst);
+ successful_bins.insert(bin.to_string());
+ }
+
+ // Repeat for binaries which replace existing ones but don't pop the error
+ // up until after updating metadata.
+ let replace_result = {
+ let mut try_install = || -> CargoResult<()> {
+ for &bin in to_replace.iter() {
+ let src = staging_dir.path().join(bin);
+ let dst = dst.join(bin);
+ self.config.shell().status("Replacing", dst.display())?;
+ fs::rename(&src, &dst).with_context(|| {
+ format!("failed to move `{}` to `{}`", src.display(), dst.display())
+ })?;
+ successful_bins.insert(bin.to_string());
+ }
+ Ok(())
+ };
+ try_install()
+ };
+
+ if let Some(mut tracker) = tracker {
+ tracker.mark_installed(
+ &self.pkg,
+ &successful_bins,
+ self.vers.map(|s| s.to_string()),
+ &self.opts,
+ &self.target,
+ &self.rustc.verbose_version,
+ );
+
+ if let Err(e) =
+ remove_orphaned_bins(&self.ws, &mut tracker, &duplicates, &self.pkg, &dst)
+ {
+ // Don't hard error on remove.
+ self.config
+ .shell()
+ .warn(format!("failed to remove orphan: {:?}", e))?;
+ }
+
+ match tracker.save() {
+ Err(err) => replace_result.with_context(|| err)?,
+ Ok(_) => replace_result?,
+ }
+ }
+
+ // Reaching here means all actions have succeeded. Clean up.
+ installed.success();
+ if needs_cleanup {
+ // Don't bother grabbing a lock as we're going to blow it all away
+ // anyway.
+ let target_dir = self.ws.target_dir().into_path_unlocked();
+ paths::remove_dir_all(&target_dir)?;
+ }
+
+ // Helper for creating status messages.
+ fn executables<T: AsRef<str>>(mut names: impl Iterator<Item = T> + Clone) -> String {
+ if names.clone().count() == 1 {
+ format!("(executable `{}`)", names.next().unwrap().as_ref())
+ } else {
+ format!(
+ "(executables {})",
+ names
+ .map(|b| format!("`{}`", b.as_ref()))
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ }
+ }
+
+ if duplicates.is_empty() {
+ self.config.shell().status(
+ "Installed",
+ format!(
+ "package `{}` {}",
+ self.pkg,
+ executables(successful_bins.iter())
+ ),
+ )?;
+ Ok(true)
+ } else {
+ if !to_install.is_empty() {
+ self.config.shell().status(
+ "Installed",
+ format!("package `{}` {}", self.pkg, executables(to_install.iter())),
+ )?;
+ }
+ // Invert the duplicate map.
+ let mut pkg_map = BTreeMap::new();
+ for (bin_name, opt_pkg_id) in &duplicates {
+ let key =
+ opt_pkg_id.map_or_else(|| "unknown".to_string(), |pkg_id| pkg_id.to_string());
+ pkg_map.entry(key).or_insert_with(Vec::new).push(bin_name);
+ }
+ for (pkg_descr, bin_names) in &pkg_map {
+ self.config.shell().status(
+ "Replaced",
+ format!(
+ "package `{}` with `{}` {}",
+ pkg_descr,
+ self.pkg,
+ executables(bin_names.iter())
+ ),
+ )?;
+ }
+ Ok(true)
+ }
+ }
+
+ fn check_yanked_install(&self) -> CargoResult<()> {
+ if self.ws.ignore_lock() || !self.ws.root().join("Cargo.lock").exists() {
+ return Ok(());
+ }
+ // It would be best if `source` could be passed in here to avoid a
+        // duplicate "Updating", but since `source` is taken by value, it
+        // wouldn't be available for `compile_ws`.
+ let (pkg_set, resolve) = ops::resolve_ws(&self.ws)?;
+ ops::check_yanked(
+ self.ws.config(),
+ &pkg_set,
+ &resolve,
+ "consider running without --locked",
+ )
+ }
+}
+
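+/// Builds the warning shown when every available binary requires features
+/// that were not enabled, listing at most a handful of the affected targets
+/// along with the features they need.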
+fn make_warning_about_missing_features(binaries: &[&Target]) -> String {
+ let max_targets_listed = 7;
+ let target_features_message = binaries
+ .iter()
+ .take(max_targets_listed)
+ .map(|b| {
+ let name = b.description_named();
+ let features = b
+ .required_features()
+ .unwrap_or(&Vec::new())
+ .iter()
+ .map(|f| format!("`{f}`"))
+ .join(", ");
+ format!(" {name} requires the features: {features}")
+ })
+ .join("\n");
+
+ let additional_bins_message = if binaries.len() > max_targets_listed {
+ format!(
+            "\n{} more targets also require features not enabled. See them in the Cargo.toml file.",
+ binaries.len() - max_targets_listed
+ )
+ } else {
+ "".into()
+ };
+
+ let example_features = binaries[0]
+ .required_features()
+ .map(|f| f.join(" "))
+ .unwrap_or_default();
+
+ format!(
+ "\
+none of the package's binaries are available for install using the selected features
+{target_features_message}{additional_bins_message}
+Consider enabling some of the needed features by passing, e.g., `--features=\"{example_features}\"`"
+ )
+}
+
+pub fn install(
+ config: &Config,
+ root: Option<&str>,
+ krates: Vec<(&str, Option<&str>)>,
+ source_id: SourceId,
+ from_cwd: bool,
+ opts: &ops::CompileOptions,
+ force: bool,
+ no_track: bool,
+) -> CargoResult<()> {
+ let root = resolve_root(root, config)?;
+ let dst = root.join("bin").into_path_unlocked();
+ let map = SourceConfigMap::new(config)?;
+
+ let (installed_anything, scheduled_error) = if krates.len() <= 1 {
+ let (krate, vers) = krates
+ .into_iter()
+ .next()
+ .map(|(k, v)| (Some(k), v))
+ .unwrap_or((None, None));
+ let installable_pkg = InstallablePackage::new(
+ config, root, map, krate, source_id, from_cwd, vers, opts, force, no_track, true,
+ )?;
+ let mut installed_anything = true;
+ if let Some(installable_pkg) = installable_pkg {
+ installed_anything = installable_pkg.install_one()?;
+ }
+ (installed_anything, false)
+ } else {
+ let mut succeeded = vec![];
+ let mut failed = vec![];
+        // Tracks whether or not the source (such as a registry or git repo) has been updated.
+ // This is used to avoid updating it multiple times when installing multiple crates.
+ let mut did_update = false;
+
+ let pkgs_to_install: Vec<_> = krates
+ .into_iter()
+ .filter_map(|(krate, vers)| {
+ let root = root.clone();
+ let map = map.clone();
+ match InstallablePackage::new(
+ config,
+ root,
+ map,
+ Some(krate),
+ source_id,
+ from_cwd,
+ vers,
+ opts,
+ force,
+ no_track,
+ !did_update,
+ ) {
+ Ok(Some(installable_pkg)) => {
+ did_update = true;
+ Some((krate, installable_pkg))
+ }
+ Ok(None) => {
+ // Already installed
+ succeeded.push(krate);
+ None
+ }
+ Err(e) => {
+ crate::display_error(&e, &mut config.shell());
+ failed.push(krate);
+ // We assume an update was performed if we got an error.
+ did_update = true;
+ None
+ }
+ }
+ })
+ .collect();
+
+ let install_results: Vec<_> = pkgs_to_install
+ .into_iter()
+ .map(|(krate, installable_pkg)| (krate, installable_pkg.install_one()))
+ .collect();
+
+ for (krate, result) in install_results {
+ match result {
+ Ok(installed) => {
+ if installed {
+ succeeded.push(krate);
+ }
+ }
+ Err(e) => {
+ crate::display_error(&e, &mut config.shell());
+ failed.push(krate);
+ }
+ }
+ }
+
+ let mut summary = vec![];
+ if !succeeded.is_empty() {
+ summary.push(format!("Successfully installed {}!", succeeded.join(", ")));
+ }
+ if !failed.is_empty() {
+ summary.push(format!(
+ "Failed to install {} (see error(s) above).",
+ failed.join(", ")
+ ));
+ }
+ if !succeeded.is_empty() || !failed.is_empty() {
+ config.shell().status("Summary", summary.join(" "))?;
+ }
+
+ (!succeeded.is_empty(), !failed.is_empty())
+ };
+
+ if installed_anything {
+ // Print a warning that if this directory isn't in PATH that they won't be
+ // able to run these commands.
+ let path = config.get_env_os("PATH").unwrap_or_default();
+ let dst_in_path = env::split_paths(&path).any(|path| path == dst);
+
+ if !dst_in_path {
+ config.shell().warn(&format!(
+ "be sure to add `{}` to your PATH to be \
+ able to run the installed binaries",
+ dst.display()
+ ))?;
+ }
+ }
+
+ if scheduled_error {
+ bail!("some crates failed to install");
+ }
+
+ Ok(())
+}
+
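+/// Consults the install tracker to determine whether `pkg` is already
+/// installed with the same settings (features, target, and compiler version),
+/// in which case the installation can be skipped.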
+fn is_installed(
+ pkg: &Package,
+ config: &Config,
+ opts: &ops::CompileOptions,
+ rustc: &Rustc,
+ target: &str,
+ root: &Filesystem,
+ dst: &Path,
+ force: bool,
+) -> CargoResult<bool> {
+ let tracker = InstallTracker::load(config, root)?;
+ let (freshness, _duplicates) =
+ tracker.check_upgrade(dst, pkg, force, opts, target, &rustc.verbose_version)?;
+ Ok(freshness.is_fresh())
+}
+
+/// Checks whether `vers` can only be satisfied by exactly one version of a package in a registry,
+/// and that version is already installed. If this is the case, we can skip interacting with the
+/// registry to check whether newer versions may be installable, as no other matching version can exist.
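+///
+/// For example (crate name and version illustrative), `cargo install foo --version =1.2.3`
+/// can skip querying the index entirely when `foo 1.2.3` is already installed.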
+fn installed_exact_package<T>(
+ dep: Dependency,
+ source: &mut T,
+ config: &Config,
+ opts: &ops::CompileOptions,
+ root: &Filesystem,
+ dst: &Path,
+ force: bool,
+) -> CargoResult<Option<Package>>
+where
+ T: Source,
+{
+ if !dep.version_req().is_exact() {
+ // If the version isn't exact, we may need to update the registry and look for a newer
+ // version - we can't know if the package is installed without doing so.
+ return Ok(None);
+ }
+ // Try getting the package from the registry without updating it, to avoid a potentially
+ // expensive network call in the case that the package is already installed.
+ // If this fails, the caller will possibly do an index update and try again, this is just a
+ // best-effort check to see if we can avoid hitting the network.
+ if let Ok(pkg) = select_dep_pkg(source, dep, config, false) {
+ let (_ws, rustc, target) =
+ make_ws_rustc_target(config, opts, &source.source_id(), pkg.clone())?;
+ if let Ok(true) = is_installed(&pkg, config, opts, &rustc, &target, root, dst, force) {
+ return Ok(Some(pkg));
+ }
+ }
+ Ok(None)
+}
+
+fn make_ws_rustc_target<'cfg>(
+ config: &'cfg Config,
+ opts: &ops::CompileOptions,
+ source_id: &SourceId,
+ pkg: Package,
+) -> CargoResult<(Workspace<'cfg>, Rustc, String)> {
+ let mut ws = if source_id.is_git() || source_id.is_path() {
+ Workspace::new(pkg.manifest_path(), config)?
+ } else {
+ Workspace::ephemeral(pkg, config, None, false)?
+ };
+ ws.set_ignore_lock(config.lock_update_allowed());
+ ws.set_require_optional_deps(false);
+
+ let rustc = config.load_global_rustc(Some(&ws))?;
+ let target = match &opts.build_config.single_requested_kind()? {
+ CompileKind::Host => rustc.host.as_str().to_owned(),
+ CompileKind::Target(target) => target.short_name().to_owned(),
+ };
+
+ Ok((ws, rustc, target))
+}
+
+/// Parses `x.y.z` as if it were `=x.y.z`, and gives CLI-specific error messages
+/// for invalid values.
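+///
+/// For example, `1.2.3` becomes the requirement `=1.2.3`, while values starting
+/// with an operator such as `^1.2` or `>=1.0, <2.0` are parsed as ordinary
+/// version requirements (values here are illustrative).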
+fn parse_semver_flag(v: &str) -> CargoResult<VersionReq> {
+ // If the version begins with character <, >, =, ^, ~ parse it as a
+ // version range, otherwise parse it as a specific version
+ let first = v
+ .chars()
+ .next()
+ .ok_or_else(|| format_err!("no version provided for the `--version` flag"))?;
+
+ let is_req = "<>=^~".contains(first) || v.contains('*');
+ if is_req {
+ match v.parse::<VersionReq>() {
+ Ok(v) => Ok(v),
+ Err(_) => bail!(
+ "the `--version` provided, `{}`, is \
+ not a valid semver version requirement\n\n\
+ Please have a look at \
+ https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html \
+ for the correct format",
+ v
+ ),
+ }
+ } else {
+ match v.to_semver() {
+ Ok(v) => Ok(VersionReq::exact(&v)),
+ Err(e) => {
+ let mut msg = format!(
+ "the `--version` provided, `{}`, is \
+ not a valid semver version: {}\n",
+ v, e
+ );
+
+ // If it is not a valid version but it is a valid version
+ // requirement, add a note to the warning
+ if v.parse::<VersionReq>().is_ok() {
+ msg.push_str(&format!(
+ "\nif you want to specify semver range, \
+ add an explicit qualifier, like ^{}",
+ v
+ ));
+ }
+ bail!(msg);
+ }
+ }
+ }
+}
+
+/// Display a list of installed binaries.
+pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> {
+ let root = resolve_root(dst, config)?;
+ let tracker = InstallTracker::load(config, &root)?;
+ for (k, v) in tracker.all_installed_bins() {
+ drop_println!(config, "{}:", k);
+ for bin in v {
+ drop_println!(config, " {}", bin);
+ }
+ }
+ Ok(())
+}
+
+/// Removes executables that are no longer part of a package that was
+/// previously installed.
+fn remove_orphaned_bins(
+ ws: &Workspace<'_>,
+ tracker: &mut InstallTracker,
+ duplicates: &BTreeMap<String, Option<PackageId>>,
+ pkg: &Package,
+ dst: &Path,
+) -> CargoResult<()> {
+ let filter = ops::CompileFilter::new_all_targets();
+ let all_self_names = exe_names(pkg, &filter);
+ let mut to_remove: HashMap<PackageId, BTreeSet<String>> = HashMap::new();
+ // For each package that we stomped on.
+ for other_pkg in duplicates.values().flatten() {
+ // Only for packages with the same name.
+ if other_pkg.name() == pkg.name() {
+ // Check what the old package had installed.
+ if let Some(installed) = tracker.installed_bins(*other_pkg) {
+ // If the old install has any names that no longer exist,
+ // add them to the list to remove.
+ for installed_name in installed {
+ if !all_self_names.contains(installed_name.as_str()) {
+ to_remove
+ .entry(*other_pkg)
+ .or_default()
+ .insert(installed_name.clone());
+ }
+ }
+ }
+ }
+ }
+
+ for (old_pkg, bins) in to_remove {
+ tracker.remove(old_pkg, &bins);
+ for bin in bins {
+ let full_path = dst.join(bin);
+ if full_path.exists() {
+ ws.config().shell().status(
+ "Removing",
+ format!(
+ "executable `{}` from previous version {}",
+ full_path.display(),
+ old_pkg
+ ),
+ )?;
+ paths::remove_file(&full_path)
+ .with_context(|| format!("failed to remove {:?}", full_path))?;
+ }
+ }
+ }
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_new.rs b/src/tools/cargo/src/cargo/ops/cargo_new.rs
new file mode 100644
index 000000000..caa1d2fa8
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_new.rs
@@ -0,0 +1,880 @@
+use crate::core::{Edition, Shell, Workspace};
+use crate::util::errors::CargoResult;
+use crate::util::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo};
+use crate::util::{restricted_names, Config};
+use anyhow::{anyhow, Context as _};
+use cargo_util::paths;
+use serde::de;
+use serde::Deserialize;
+use std::collections::BTreeMap;
+use std::ffi::OsStr;
+use std::io::{BufRead, BufReader, ErrorKind};
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+use std::{fmt, slice};
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum VersionControl {
+ Git,
+ Hg,
+ Pijul,
+ Fossil,
+ NoVcs,
+}
+
+impl FromStr for VersionControl {
+ type Err = anyhow::Error;
+
+ fn from_str(s: &str) -> Result<Self, anyhow::Error> {
+ match s {
+ "git" => Ok(VersionControl::Git),
+ "hg" => Ok(VersionControl::Hg),
+ "pijul" => Ok(VersionControl::Pijul),
+ "fossil" => Ok(VersionControl::Fossil),
+ "none" => Ok(VersionControl::NoVcs),
+ other => anyhow::bail!("unknown vcs specification: `{}`", other),
+ }
+ }
+}
+
+impl<'de> de::Deserialize<'de> for VersionControl {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ let s = String::deserialize(deserializer)?;
+ FromStr::from_str(&s).map_err(de::Error::custom)
+ }
+}
+
+#[derive(Debug)]
+pub struct NewOptions {
+ pub version_control: Option<VersionControl>,
+ pub kind: NewProjectKind,
+ pub auto_detect_kind: bool,
+ /// Absolute path to the directory for the new package
+ pub path: PathBuf,
+ pub name: Option<String>,
+ pub edition: Option<String>,
+ pub registry: Option<String>,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum NewProjectKind {
+ Bin,
+ Lib,
+}
+
+impl NewProjectKind {
+ fn is_bin(self) -> bool {
+ self == NewProjectKind::Bin
+ }
+}
+
+impl fmt::Display for NewProjectKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ NewProjectKind::Bin => "binary (application)",
+ NewProjectKind::Lib => "library",
+ }
+ .fmt(f)
+ }
+}
+
+struct SourceFileInformation {
+ relative_path: String,
+ target_name: String,
+ bin: bool,
+}
+
+struct MkOptions<'a> {
+ version_control: Option<VersionControl>,
+ path: &'a Path,
+ name: &'a str,
+ source_files: Vec<SourceFileInformation>,
+ bin: bool,
+ edition: Option<&'a str>,
+ registry: Option<&'a str>,
+}
+
+impl NewOptions {
+ pub fn new(
+ version_control: Option<VersionControl>,
+ bin: bool,
+ lib: bool,
+ path: PathBuf,
+ name: Option<String>,
+ edition: Option<String>,
+ registry: Option<String>,
+ ) -> CargoResult<NewOptions> {
+ let auto_detect_kind = !bin && !lib;
+
+ let kind = match (bin, lib) {
+ (true, true) => anyhow::bail!("can't specify both lib and binary outputs"),
+ (false, true) => NewProjectKind::Lib,
+ (_, false) => NewProjectKind::Bin,
+ };
+
+ let opts = NewOptions {
+ version_control,
+ kind,
+ auto_detect_kind,
+ path,
+ name,
+ edition,
+ registry,
+ };
+ Ok(opts)
+ }
+}
+
+#[derive(Deserialize)]
+struct CargoNewConfig {
+ #[deprecated = "cargo-new no longer supports adding the authors field"]
+ #[allow(dead_code)]
+ name: Option<String>,
+
+ #[deprecated = "cargo-new no longer supports adding the authors field"]
+ #[allow(dead_code)]
+ email: Option<String>,
+
+ #[serde(rename = "vcs")]
+ version_control: Option<VersionControl>,
+}
+
+fn get_name<'a>(path: &'a Path, opts: &'a NewOptions) -> CargoResult<&'a str> {
+ if let Some(ref name) = opts.name {
+ return Ok(name);
+ }
+
+ let file_name = path.file_name().ok_or_else(|| {
+ anyhow::format_err!(
+            "cannot auto-detect package name from path {:?}; use --name to override",
+ path.as_os_str()
+ )
+ })?;
+
+ file_name.to_str().ok_or_else(|| {
+ anyhow::format_err!(
+ "cannot create package with a non-unicode name: {:?}",
+ file_name
+ )
+ })
+}
+
+fn check_name(
+ name: &str,
+ show_name_help: bool,
+ has_bin: bool,
+ shell: &mut Shell,
+) -> CargoResult<()> {
+ // If --name is already used to override, no point in suggesting it
+ // again as a fix.
+ let name_help = if show_name_help {
+        "\nIf you need a package name to not match the directory name, consider using the --name flag."
+ } else {
+ ""
+ };
+ let bin_help = || {
+ let mut help = String::from(name_help);
+ if has_bin {
+ help.push_str(&format!(
+ "\n\
+ If you need a binary with the name \"{name}\", use a valid package \
+ name, and set the binary name to be different from the package. \
+ This can be done by setting the binary filename to `src/bin/{name}.rs` \
+                 or by changing the name in Cargo.toml with:\n\
+ \n \
+ [[bin]]\n \
+ name = \"{name}\"\n \
+ path = \"src/main.rs\"\n\
+ ",
+ name = name
+ ));
+ }
+ help
+ };
+ restricted_names::validate_package_name(name, "package name", &bin_help())?;
+
+ if restricted_names::is_keyword(name) {
+ anyhow::bail!(
+ "the name `{}` cannot be used as a package name, it is a Rust keyword{}",
+ name,
+ bin_help()
+ );
+ }
+ if restricted_names::is_conflicting_artifact_name(name) {
+ if has_bin {
+ anyhow::bail!(
+ "the name `{}` cannot be used as a package name, \
+ it conflicts with cargo's build directory names{}",
+ name,
+ name_help
+ );
+ } else {
+ shell.warn(format!(
+ "the name `{}` will not support binary \
+ executables with that name, \
+ it conflicts with cargo's build directory names",
+ name
+ ))?;
+ }
+ }
+ if name == "test" {
+ anyhow::bail!(
+ "the name `test` cannot be used as a package name, \
+ it conflicts with Rust's built-in test library{}",
+ bin_help()
+ );
+ }
+ if ["core", "std", "alloc", "proc_macro", "proc-macro"].contains(&name) {
+ shell.warn(format!(
+ "the name `{}` is part of Rust's standard library\n\
+ It is recommended to use a different name to avoid problems.{}",
+ name,
+ bin_help()
+ ))?;
+ }
+ if restricted_names::is_windows_reserved(name) {
+ if cfg!(windows) {
+ anyhow::bail!(
+ "cannot use name `{}`, it is a reserved Windows filename{}",
+ name,
+ name_help
+ );
+ } else {
+ shell.warn(format!(
+ "the name `{}` is a reserved Windows filename\n\
+ This package will not work on Windows platforms.",
+ name
+ ))?;
+ }
+ }
+ if restricted_names::is_non_ascii_name(name) {
+ shell.warn(format!(
+ "the name `{}` contains non-ASCII characters\n\
+ Non-ASCII crate names are not supported by Rust.",
+ name
+ ))?;
+ }
+
+ Ok(())
+}
+
+/// Checks if the path contains any invalid PATH env characters.
+fn check_path(path: &Path, shell: &mut Shell) -> CargoResult<()> {
+ // warn if the path contains characters that will break `env::join_paths`
+    if paths::join_paths(slice::from_ref(&OsStr::new(path)), "").is_err() {
+ let path = path.to_string_lossy();
+ shell.warn(format!(
+ "the path `{path}` contains invalid PATH characters (usually `:`, `;`, or `\"`)\n\
+ It is recommended to use a different name to avoid problems."
+ ))?;
+ }
+ Ok(())
+}
+
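+/// Scans the package directory for pre-existing source files (`src/main.rs`,
+/// `lib.rs`, `<name>.rs`, ...) and records, for each one found, whether it
+/// should become a binary or a library target.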
+fn detect_source_paths_and_types(
+ package_path: &Path,
+ package_name: &str,
+ detected_files: &mut Vec<SourceFileInformation>,
+) -> CargoResult<()> {
+ let path = package_path;
+ let name = package_name;
+
+ enum H {
+ Bin,
+ Lib,
+ Detect,
+ }
+
+ struct Test {
+ proposed_path: String,
+ handling: H,
+ }
+
+ let tests = vec![
+ Test {
+ proposed_path: "src/main.rs".to_string(),
+ handling: H::Bin,
+ },
+ Test {
+ proposed_path: "main.rs".to_string(),
+ handling: H::Bin,
+ },
+ Test {
+ proposed_path: format!("src/{}.rs", name),
+ handling: H::Detect,
+ },
+ Test {
+ proposed_path: format!("{}.rs", name),
+ handling: H::Detect,
+ },
+ Test {
+ proposed_path: "src/lib.rs".to_string(),
+ handling: H::Lib,
+ },
+ Test {
+ proposed_path: "lib.rs".to_string(),
+ handling: H::Lib,
+ },
+ ];
+
+ for i in tests {
+ let pp = i.proposed_path;
+
+ // path/pp does not exist or is not a file
+ if !path.join(&pp).is_file() {
+ continue;
+ }
+
+ let sfi = match i.handling {
+ H::Bin => SourceFileInformation {
+ relative_path: pp,
+ target_name: package_name.to_string(),
+ bin: true,
+ },
+ H::Lib => SourceFileInformation {
+ relative_path: pp,
+ target_name: package_name.to_string(),
+ bin: false,
+ },
+ H::Detect => {
+ let content = paths::read(&path.join(pp.clone()))?;
+ let isbin = content.contains("fn main");
+ SourceFileInformation {
+ relative_path: pp,
+ target_name: package_name.to_string(),
+ bin: isbin,
+ }
+ }
+ };
+ detected_files.push(sfi);
+ }
+
+ // Check for duplicate lib attempt
+
+ let mut previous_lib_relpath: Option<&str> = None;
+ let mut duplicates_checker: BTreeMap<&str, &SourceFileInformation> = BTreeMap::new();
+
+ for i in detected_files {
+ if i.bin {
+ if let Some(x) = BTreeMap::get::<str>(&duplicates_checker, i.target_name.as_ref()) {
+ anyhow::bail!(
+ "\
+multiple possible binary sources found:
+ {}
+ {}
+cannot automatically generate Cargo.toml as the main target would be ambiguous",
+ &x.relative_path,
+ &i.relative_path
+ );
+ }
+ duplicates_checker.insert(i.target_name.as_ref(), i);
+ } else {
+ if let Some(plp) = previous_lib_relpath {
+ anyhow::bail!(
+ "cannot have a package with \
+ multiple libraries, \
+ found both `{}` and `{}`",
+ plp,
+ i.relative_path
+ )
+ }
+ previous_lib_relpath = Some(&i.relative_path);
+ }
+ }
+
+ Ok(())
+}
+
+fn plan_new_source_file(bin: bool, package_name: String) -> SourceFileInformation {
+ if bin {
+ SourceFileInformation {
+ relative_path: "src/main.rs".to_string(),
+ target_name: package_name,
+ bin: true,
+ }
+ } else {
+ SourceFileInformation {
+ relative_path: "src/lib.rs".to_string(),
+ target_name: package_name,
+ bin: false,
+ }
+ }
+}
+
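+/// Picks between a binary and a library project: an explicit `--bin`/`--lib`
+/// flag always wins; otherwise the detected source files decide, defaulting
+/// to a binary when nothing was found.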
+fn calculate_new_project_kind(
+ requested_kind: NewProjectKind,
+ auto_detect_kind: bool,
+ found_files: &Vec<SourceFileInformation>,
+) -> NewProjectKind {
+ let bin_file = found_files.iter().find(|x| x.bin);
+
+ let kind_from_files = if !found_files.is_empty() && bin_file.is_none() {
+ NewProjectKind::Lib
+ } else {
+ NewProjectKind::Bin
+ };
+
+ if auto_detect_kind {
+ return kind_from_files;
+ }
+
+ requested_kind
+}
+
+pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> {
+ let path = &opts.path;
+ if path.exists() {
+ anyhow::bail!(
+ "destination `{}` already exists\n\n\
+ Use `cargo init` to initialize the directory",
+ path.display()
+ )
+ }
+
+ check_path(path, &mut config.shell())?;
+
+ let is_bin = opts.kind.is_bin();
+
+ let name = get_name(path, opts)?;
+ check_name(name, opts.name.is_none(), is_bin, &mut config.shell())?;
+
+ let mkopts = MkOptions {
+ version_control: opts.version_control,
+ path,
+ name,
+ source_files: vec![plan_new_source_file(opts.kind.is_bin(), name.to_string())],
+ bin: is_bin,
+ edition: opts.edition.as_deref(),
+ registry: opts.registry.as_deref(),
+ };
+
+ mk(config, &mkopts).with_context(|| {
+ format!(
+ "Failed to create package `{}` at `{}`",
+ name,
+ path.display()
+ )
+ })?;
+ Ok(())
+}
+
+pub fn init(opts: &NewOptions, config: &Config) -> CargoResult<NewProjectKind> {
+ // This is here just as a random location to exercise the internal error handling.
+ if config.get_env_os("__CARGO_TEST_INTERNAL_ERROR").is_some() {
+ return Err(crate::util::internal("internal error test"));
+ }
+
+ let path = &opts.path;
+
+ if path.join("Cargo.toml").exists() {
+ anyhow::bail!("`cargo init` cannot be run on existing Cargo packages")
+ }
+
+ check_path(path, &mut config.shell())?;
+
+ let name = get_name(path, opts)?;
+
+ let mut src_paths_types = vec![];
+
+ detect_source_paths_and_types(path, name, &mut src_paths_types)?;
+
+ let kind = calculate_new_project_kind(opts.kind, opts.auto_detect_kind, &src_paths_types);
+ let has_bin = kind.is_bin();
+
+ if src_paths_types.is_empty() {
+ src_paths_types.push(plan_new_source_file(has_bin, name.to_string()));
+ } else if src_paths_types.len() == 1 && !src_paths_types.iter().any(|x| x.bin == has_bin) {
+        // We've found the only file and it's not the type the user wants. Change the type and warn.
+ let file_type = if src_paths_types[0].bin {
+ NewProjectKind::Bin
+ } else {
+ NewProjectKind::Lib
+ };
+ config.shell().warn(format!(
+ "file `{}` seems to be a {} file",
+ src_paths_types[0].relative_path, file_type
+ ))?;
+ src_paths_types[0].bin = has_bin
+ } else if src_paths_types.len() > 1 && !has_bin {
+ // We have found both lib and bin files and the user would like us to treat both as libs
+ anyhow::bail!(
+ "cannot have a package with \
+ multiple libraries, \
+ found both `{}` and `{}`",
+ src_paths_types[0].relative_path,
+ src_paths_types[1].relative_path
+ )
+ }
+
+ check_name(name, opts.name.is_none(), has_bin, &mut config.shell())?;
+
+ let mut version_control = opts.version_control;
+
+    if version_control.is_none() {
+ let mut num_detected_vcses = 0;
+
+ if path.join(".git").exists() {
+ version_control = Some(VersionControl::Git);
+ num_detected_vcses += 1;
+ }
+
+ if path.join(".hg").exists() {
+ version_control = Some(VersionControl::Hg);
+ num_detected_vcses += 1;
+ }
+
+ if path.join(".pijul").exists() {
+ version_control = Some(VersionControl::Pijul);
+ num_detected_vcses += 1;
+ }
+
+ if path.join(".fossil").exists() {
+ version_control = Some(VersionControl::Fossil);
+ num_detected_vcses += 1;
+ }
+
+        // If none was detected, a Git repository may still be created by default later, as in `cargo new`.
+
+ if num_detected_vcses > 1 {
+ anyhow::bail!(
+ "more than one of .hg, .git, .pijul, .fossil configurations \
+ found and the ignore file can't be filled in as \
+                 a result; specify --vcs to override detection"
+ );
+ }
+ }
+
+ let mkopts = MkOptions {
+ version_control,
+ path,
+ name,
+ bin: has_bin,
+ source_files: src_paths_types,
+ edition: opts.edition.as_deref(),
+ registry: opts.registry.as_deref(),
+ };
+
+ mk(config, &mkopts).with_context(|| {
+ format!(
+ "Failed to create package `{}` at `{}`",
+ name,
+ path.display()
+ )
+ })?;
+ Ok(kind)
+}
+
+/// Collects the entries to be written to the VCS ignore file, in each
+/// supported format.
+struct IgnoreList {
+    /// Git-formatted entries.
+    ignore: Vec<String>,
+    /// Mercurial-formatted entries.
+ hg_ignore: Vec<String>,
+ /// Fossil-formatted entries.
+ fossil_ignore: Vec<String>,
+}
+
+impl IgnoreList {
+    /// Constructs a new, empty ignore list.
+ fn new() -> IgnoreList {
+ IgnoreList {
+ ignore: Vec::new(),
+ hg_ignore: Vec::new(),
+ fossil_ignore: Vec::new(),
+ }
+ }
+
+    /// Adds a new entry to the ignore list. The entry is passed in three
+    /// (possibly different) forms: one for Git-style ignore files, one for
+    /// Mercurial-style ignore files, and one for Fossil-style ignore files.
+ fn push(&mut self, ignore: &str, hg_ignore: &str, fossil_ignore: &str) {
+ self.ignore.push(ignore.to_string());
+ self.hg_ignore.push(hg_ignore.to_string());
+ self.fossil_ignore.push(fossil_ignore.to_string());
+ }
+
+ /// Return the correctly formatted content of the ignore file for the given
+ /// version control system as `String`.
+ fn format_new(&self, vcs: VersionControl) -> String {
+ let ignore_items = match vcs {
+ VersionControl::Hg => &self.hg_ignore,
+ VersionControl::Fossil => &self.fossil_ignore,
+ _ => &self.ignore,
+ };
+
+ ignore_items.join("\n") + "\n"
+ }
+
+    /// Formats the `IgnoreList` for appending to an ignore file that already
+    /// exists. It reads the contents of the given `BufRead` and comments out
+    /// (or, for Fossil, skips) any entries that are already present in the file.
+ fn format_existing<T: BufRead>(&self, existing: T, vcs: VersionControl) -> CargoResult<String> {
+ let mut existing_items = Vec::new();
+ for (i, item) in existing.lines().enumerate() {
+ match item {
+ Ok(s) => existing_items.push(s),
+ Err(err) => match err.kind() {
+ ErrorKind::InvalidData => {
+ return Err(anyhow!(
+ "Character at line {} is invalid. Cargo only supports UTF-8.",
+ i
+ ))
+ }
+ _ => return Err(anyhow!(err)),
+ },
+ }
+ }
+
+ let ignore_items = match vcs {
+ VersionControl::Hg => &self.hg_ignore,
+ VersionControl::Fossil => &self.fossil_ignore,
+ _ => &self.ignore,
+ };
+
+ let mut out = String::new();
+
+ // Fossil does not support `#` comments.
+ if vcs != VersionControl::Fossil {
+ out.push_str("\n\n# Added by cargo\n");
+ if ignore_items
+ .iter()
+ .any(|item| existing_items.contains(item))
+ {
+ out.push_str("#\n# already existing elements were commented out\n");
+ }
+ out.push('\n');
+ }
+
+ for item in ignore_items {
+ if existing_items.contains(item) {
+ if vcs == VersionControl::Fossil {
+ // Just merge for Fossil.
+ continue;
+ }
+ out.push('#');
+ }
+ out.push_str(item);
+ out.push('\n');
+ }
+
+ Ok(out)
+ }
+}
+
+/// Writes the ignore file to the given directory. If the ignore file for the
+/// given vcs system already exists, its content is read and duplicate ignore
+/// file entries are filtered out.
+fn write_ignore_file(base_path: &Path, list: &IgnoreList, vcs: VersionControl) -> CargoResult<()> {
+ // Fossil only supports project-level settings in a dedicated subdirectory.
+ if vcs == VersionControl::Fossil {
+ paths::create_dir_all(base_path.join(".fossil-settings"))?;
+ }
+
+ for fp_ignore in match vcs {
+ VersionControl::Git => vec![base_path.join(".gitignore")],
+ VersionControl::Hg => vec![base_path.join(".hgignore")],
+ VersionControl::Pijul => vec![base_path.join(".ignore")],
+ // Fossil has a cleaning functionality configured in a separate file.
+ VersionControl::Fossil => vec![
+ base_path.join(".fossil-settings/ignore-glob"),
+ base_path.join(".fossil-settings/clean-glob"),
+ ],
+ VersionControl::NoVcs => return Ok(()),
+ } {
+ let ignore: String = match paths::open(&fp_ignore) {
+ Err(err) => match err.downcast_ref::<std::io::Error>() {
+ Some(io_err) if io_err.kind() == ErrorKind::NotFound => list.format_new(vcs),
+ _ => return Err(err),
+ },
+ Ok(file) => list.format_existing(BufReader::new(file), vcs)?,
+ };
+
+ paths::append(&fp_ignore, ignore.as_bytes())?;
+ }
+
+ Ok(())
+}
+
+/// Initializes the correct VCS system based on the provided config.
+fn init_vcs(path: &Path, vcs: VersionControl, config: &Config) -> CargoResult<()> {
+ match vcs {
+ VersionControl::Git => {
+ if !path.join(".git").exists() {
+ // Temporary fix to work around bug in libgit2 when creating a
+ // directory in the root of a posix filesystem.
+ // See: https://github.com/libgit2/libgit2/issues/5130
+ paths::create_dir_all(path)?;
+ GitRepo::init(path, config.cwd())?;
+ }
+ }
+ VersionControl::Hg => {
+ if !path.join(".hg").exists() {
+ HgRepo::init(path, config.cwd())?;
+ }
+ }
+ VersionControl::Pijul => {
+ if !path.join(".pijul").exists() {
+ PijulRepo::init(path, config.cwd())?;
+ }
+ }
+ VersionControl::Fossil => {
+ if !path.join(".fossil").exists() {
+ FossilRepo::init(path, config.cwd())?;
+ }
+ }
+ VersionControl::NoVcs => {
+ paths::create_dir_all(path)?;
+ }
+ };
+
+ Ok(())
+}
+
+fn mk(config: &Config, opts: &MkOptions<'_>) -> CargoResult<()> {
+ let path = opts.path;
+ let name = opts.name;
+ let cfg = config.get::<CargoNewConfig>("cargo-new")?;
+
+    // Using the push method with multiple arguments ensures that the entries
+    // for all VCSes, whose ignore-file syntaxes are mutually incompatible,
+    // stay in sync.
+ let mut ignore = IgnoreList::new();
+ ignore.push("/target", "^target$", "target");
+ if !opts.bin {
+ ignore.push("/Cargo.lock", "^Cargo.lock$", "Cargo.lock");
+ }
+
+ let vcs = opts.version_control.unwrap_or_else(|| {
+ let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd());
+ match (cfg.version_control, in_existing_vcs) {
+ (None, false) => VersionControl::Git,
+ (Some(opt), false) => opt,
+ (_, true) => VersionControl::NoVcs,
+ }
+ });
+
+ init_vcs(path, vcs, config)?;
+ write_ignore_file(path, &ignore, vcs)?;
+
+ let mut cargotoml_path_specifier = String::new();
+
+ // Calculate what `[lib]` and `[[bin]]`s we need to append to `Cargo.toml`.
+
+ for i in &opts.source_files {
+ if i.bin {
+ if i.relative_path != "src/main.rs" {
+ cargotoml_path_specifier.push_str(&format!(
+ r#"
+[[bin]]
+name = "{}"
+path = {}
+"#,
+ i.target_name,
+ toml::Value::String(i.relative_path.clone())
+ ));
+ }
+ } else if i.relative_path != "src/lib.rs" {
+ cargotoml_path_specifier.push_str(&format!(
+ r#"
+[lib]
+name = "{}"
+path = {}
+"#,
+ i.target_name,
+ toml::Value::String(i.relative_path.clone())
+ ));
+ }
+ }
+
+ // Create `Cargo.toml` file with necessary `[lib]` and `[[bin]]` sections, if needed.
+
+ paths::write(
+ &path.join("Cargo.toml"),
+ format!(
+ r#"[package]
+name = "{}"
+version = "0.1.0"
+edition = {}
+{}
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+{}"#,
+ name,
+ match opts.edition {
+ Some(edition) => toml::Value::String(edition.to_string()),
+ None => toml::Value::String(Edition::LATEST_STABLE.to_string()),
+ },
+ match opts.registry {
+ Some(registry) => format!(
+ "publish = {}\n",
+ toml::Value::Array(vec!(toml::Value::String(registry.to_string())))
+ ),
+ None => "".to_string(),
+ },
+ cargotoml_path_specifier
+ )
+ .as_bytes(),
+ )?;
+
+ // Create all specified source files (with respective parent directories) if they don't exist.
+
+ for i in &opts.source_files {
+ let path_of_source_file = path.join(i.relative_path.clone());
+
+ if let Some(src_dir) = path_of_source_file.parent() {
+ paths::create_dir_all(src_dir)?;
+ }
+
+ let default_file_content: &[u8] = if i.bin {
+ b"\
+fn main() {
+ println!(\"Hello, world!\");
+}
+"
+ } else {
+ b"\
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
+"
+ };
+
+ if !path_of_source_file.is_file() {
+ paths::write(&path_of_source_file, default_file_content)?;
+
+ // Format the newly created source file
+ if let Err(e) = cargo_util::ProcessBuilder::new("rustfmt")
+ .arg(&path_of_source_file)
+ .exec_with_output()
+ {
+ log::warn!("failed to call rustfmt: {:#}", e);
+ }
+ }
+ }
+
+ if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) {
+ crate::display_warning_with_error(
+ "compiling this new package may not work due to invalid \
+ workspace configuration",
+ &e,
+ &mut config.shell(),
+ );
+ }
+
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_output_metadata.rs b/src/tools/cargo/src/cargo/ops/cargo_output_metadata.rs
new file mode 100644
index 000000000..9d52fa09a
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_output_metadata.rs
@@ -0,0 +1,344 @@
+use crate::core::compiler::artifact::match_artifacts_kind_with_targets;
+use crate::core::compiler::{CompileKind, RustcTargetData};
+use crate::core::dependency::DepKind;
+use crate::core::package::SerializedPackage;
+use crate::core::resolver::{features::CliFeatures, HasDevUnits, Resolve};
+use crate::core::{Package, PackageId, Workspace};
+use crate::ops::{self, Packages};
+use crate::util::interning::InternedString;
+use crate::util::CargoResult;
+use cargo_platform::Platform;
+use serde::Serialize;
+use std::collections::BTreeMap;
+use std::path::PathBuf;
+
+const VERSION: u32 = 1;
+
+pub struct OutputMetadataOptions {
+ pub cli_features: CliFeatures,
+ pub no_deps: bool,
+ pub version: u32,
+ pub filter_platforms: Vec<String>,
+}
+
+/// Loads the manifest, resolves the dependencies of the package to the concrete
+/// used versions - considering overrides - and writes all dependencies in a JSON
+/// format to stdout.
+pub fn output_metadata(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+ if opt.version != VERSION {
+ anyhow::bail!(
+ "metadata version {} not supported, only {} is currently supported",
+ opt.version,
+ VERSION
+ );
+ }
+ let (packages, resolve) = if opt.no_deps {
+ let packages = ws.members().map(|pkg| pkg.serialized()).collect();
+ (packages, None)
+ } else {
+ let (packages, resolve) = build_resolve_graph(ws, opt)?;
+ (packages, Some(resolve))
+ };
+
+ Ok(ExportInfo {
+ packages,
+ workspace_members: ws.members().map(|pkg| pkg.package_id()).collect(),
+ resolve,
+ target_directory: ws.target_dir().into_path_unlocked(),
+ version: VERSION,
+ workspace_root: ws.root().to_path_buf(),
+ metadata: ws.custom_metadata().cloned(),
+ })
+}
+
+/// This is the structure that is serialized and displayed to the user.
+///
+/// See cargo-metadata.adoc for detailed documentation of the format.
+#[derive(Serialize)]
+pub struct ExportInfo {
+ packages: Vec<SerializedPackage>,
+ workspace_members: Vec<PackageId>,
+ resolve: Option<MetadataResolve>,
+ target_directory: PathBuf,
+ version: u32,
+ workspace_root: PathBuf,
+ metadata: Option<toml::Value>,
+}
+
+#[derive(Serialize)]
+struct MetadataResolve {
+ nodes: Vec<MetadataResolveNode>,
+ root: Option<PackageId>,
+}
+
+#[derive(Serialize)]
+struct MetadataResolveNode {
+ id: PackageId,
+ dependencies: Vec<PackageId>,
+ deps: Vec<Dep>,
+ features: Vec<InternedString>,
+}
+
+#[derive(Serialize)]
+struct Dep {
+ // TODO(bindeps): after -Zbindeps gets stabilized,
+ // mark this field as deprecated in the help manual of cargo-metadata
+ name: InternedString,
+ pkg: PackageId,
+ dep_kinds: Vec<DepKindInfo>,
+}
+
+#[derive(Serialize, PartialEq, Eq, PartialOrd, Ord)]
+struct DepKindInfo {
+ kind: DepKind,
+ target: Option<Platform>,
+
+ // vvvvv The fields below are introduced for `-Z bindeps`.
+ /// What the manifest calls the crate.
+ ///
+ /// A renamed dependency will show the rename instead of original name.
+ // TODO(bindeps): Remove `Option` after -Zbindeps get stabilized.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ extern_name: Option<InternedString>,
+ /// Artifact's crate type, e.g. staticlib, cdylib, bin...
+ #[serde(skip_serializing_if = "Option::is_none")]
+ artifact: Option<&'static str>,
+ /// Equivalent to `{ target = "…" }` in an artifact dependency requirement.
+ ///
+ /// * If the target points to a custom target JSON file, the path will be absolute.
+ /// * If the target is a build assumed target `{ target = "target" }`, it will show as `<target>`.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ compile_target: Option<InternedString>,
+ /// Executable name for an artifact binary dependency.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ bin_name: Option<String>,
+ // ^^^^^ The fields above are introduced for `-Z bindeps`.
+}
+
+/// Builds the resolve graph as it will be displayed to the user.
+fn build_resolve_graph(
+ ws: &Workspace<'_>,
+ metadata_opts: &OutputMetadataOptions,
+) -> CargoResult<(Vec<SerializedPackage>, MetadataResolve)> {
+ // TODO: Without --filter-platform, features are being resolved for `host` only.
+ // How should this work?
+ let requested_kinds =
+ CompileKind::from_requested_targets(ws.config(), &metadata_opts.filter_platforms)?;
+ let target_data = RustcTargetData::new(ws, &requested_kinds)?;
+ // Resolve entire workspace.
+ let specs = Packages::All.to_package_id_specs(ws)?;
+ let force_all = if metadata_opts.filter_platforms.is_empty() {
+ crate::core::resolver::features::ForceAllTargets::Yes
+ } else {
+ crate::core::resolver::features::ForceAllTargets::No
+ };
+
+ // Note that even with --filter-platform we end up downloading host dependencies as well,
+ // as that is the behavior of download_accessible.
+ let ws_resolve = ops::resolve_ws_with_opts(
+ ws,
+ &target_data,
+ &requested_kinds,
+ &metadata_opts.cli_features,
+ &specs,
+ HasDevUnits::Yes,
+ force_all,
+ )?;
+
+ let package_map: BTreeMap<PackageId, Package> = ws_resolve
+ .pkg_set
+ .packages()
+ // This is a little lazy, but serde doesn't handle Rc fields very well.
+ .map(|pkg| (pkg.package_id(), Package::clone(pkg)))
+ .collect();
+
+ // Start from the workspace roots, and recurse through filling out the
+ // map, filtering targets as necessary.
+ let mut node_map = BTreeMap::new();
+ for member_pkg in ws.members() {
+ build_resolve_graph_r(
+ &mut node_map,
+ member_pkg.package_id(),
+ &ws_resolve.targeted_resolve,
+ &package_map,
+ &target_data,
+ &requested_kinds,
+ )?;
+ }
+ // Get a Vec of Packages.
+ let actual_packages = package_map
+ .into_iter()
+ .filter_map(|(pkg_id, pkg)| node_map.get(&pkg_id).map(|_| pkg))
+ .map(|pkg| pkg.serialized())
+ .collect();
+
+ let mr = MetadataResolve {
+ nodes: node_map.into_iter().map(|(_pkg_id, node)| node).collect(),
+ root: ws.current_opt().map(|pkg| pkg.package_id()),
+ };
+ Ok((actual_packages, mr))
+}
+
+fn build_resolve_graph_r(
+ node_map: &mut BTreeMap<PackageId, MetadataResolveNode>,
+ pkg_id: PackageId,
+ resolve: &Resolve,
+ package_map: &BTreeMap<PackageId, Package>,
+ target_data: &RustcTargetData<'_>,
+ requested_kinds: &[CompileKind],
+) -> CargoResult<()> {
+ if node_map.contains_key(&pkg_id) {
+ return Ok(());
+ }
+ // This normalizes the IDs so that they are consistent between the
+ // `packages` array and the `resolve` map. This is a bit of a hack to
+ // compensate for the fact that
+ // SourceKind::Git(GitReference::Branch("master")) is the same as
+ // SourceKind::Git(GitReference::DefaultBranch). We want IDs in the JSON
+ // to be opaque, and compare with basic string equality, so this will
+ // always prefer the style of ID in the Package instead of the resolver.
+ // Cargo generally only exposes PackageIds from the Package struct, and
+ // AFAIK this is the only place where the resolver variant is exposed.
+ //
+ // This diverges because the SourceIds created for Packages are built
+ // based on the Dependency declaration, but the SourceIds in the resolver
+ // are deserialized from Cargo.lock. Cargo.lock may have been generated by
+ // an older (or newer!) version of Cargo which uses a different style.
+ let normalize_id = |id| -> PackageId { *package_map.get_key_value(&id).unwrap().0 };
+ let features = resolve.features(pkg_id).to_vec();
+
+ let deps = {
+ let mut dep_metadatas = Vec::new();
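+ // If only the host kind was requested, keep every dependency edge;
+ // otherwise keep only edges that are activated for at least one of the
+ // requested platforms.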
+ let iter = resolve.deps(pkg_id).filter(|(_dep_id, deps)| {
+ if requested_kinds == [CompileKind::Host] {
+ true
+ } else {
+ requested_kinds.iter().any(|kind| {
+ deps.iter()
+ .any(|dep| target_data.dep_platform_activated(dep, *kind))
+ })
+ }
+ });
+ for (dep_id, deps) in iter {
+ let mut dep_kinds = Vec::new();
+
+ let targets = package_map[&dep_id].targets();
+
+ // Try to get the extern name for lib, or crate name for bins.
+ let extern_name = |target| {
+ resolve
+ .extern_crate_name_and_dep_name(pkg_id, dep_id, target)
+ .map(|(ext_crate_name, _)| ext_crate_name)
+ };
+
+ let lib_target = targets.iter().find(|t| t.is_lib());
+
+ for dep in deps.iter() {
+ if let Some(target) = lib_target {
+ // When we do have a library target, include it in deps if...
+ let included = match dep.artifact() {
+ // it is not an artifact dep at all
+ None => true,
+ // it is also an artifact dep with `{ …, lib = true }`
+ Some(a) if a.is_lib() => true,
+ _ => false,
+ };
+ // TODO(bindeps): Cargo shouldn't have the `extern_name` field
+ // if the user is not using `-Zbindeps`.
+ // Remove this condition after `-Zbindeps` gets stabilized.
+ let extern_name = if dep.artifact().is_some() {
+ Some(extern_name(target)?)
+ } else {
+ None
+ };
+ if included {
+ dep_kinds.push(DepKindInfo {
+ kind: dep.kind(),
+ target: dep.platform().cloned(),
+ extern_name,
+ artifact: None,
+ compile_target: None,
+ bin_name: None,
+ });
+ }
+ }
+
+ // No need to proceed if there is no artifact dependency.
+ let Some(artifact_requirements) = dep.artifact() else {
+ continue;
+ };
+
+ let compile_target = match artifact_requirements.target() {
+ Some(t) => t
+ .to_compile_target()
+ .map(|t| t.rustc_target())
+ // Given that Cargo doesn't know which target it should resolve to,
+ // when an artifact dep is specified with { target = "target" },
+ // keep it with a special "<target>" string,
+ .or_else(|| Some(InternedString::new("<target>"))),
+ None => None,
+ };
+
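+ // For an artifact dependency, emit one `DepKindInfo` per matched
+ // (artifact kind, target) pair, recording the artifact's crate type and,
+ // for binaries, the executable name.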
+ let target_set =
+ match_artifacts_kind_with_targets(dep, targets, pkg_id.name().as_str())?;
+ dep_kinds.reserve(target_set.len());
+ for (kind, target) in target_set.into_iter() {
+ dep_kinds.push(DepKindInfo {
+ kind: dep.kind(),
+ target: dep.platform().cloned(),
+ extern_name: extern_name(target).ok(),
+ artifact: Some(kind.crate_type()),
+ compile_target,
+ bin_name: target.is_bin().then(|| target.name().to_string()),
+ })
+ }
+ }
+
+ dep_kinds.sort();
+
+ let pkg = normalize_id(dep_id);
+
+ let dep = match (lib_target, dep_kinds.len()) {
+ (Some(target), _) => Dep {
+ name: extern_name(target)?,
+ pkg,
+ dep_kinds,
+ },
+ // No lib target exists, but artifact deps are present.
+ (None, 1..) => Dep {
+ name: InternedString::new(""),
+ pkg,
+ dep_kinds,
+ },
+ // No lib or artifact dep exists.
+ // Usually this means the parent depends on a non-lib bin crate.
+ (None, _) => continue,
+ };
+
+ dep_metadatas.push(dep)
+ }
+ dep_metadatas
+ };
+
+ let dumb_deps: Vec<PackageId> = deps.iter().map(|dep| dep.pkg).collect();
+ let to_visit = dumb_deps.clone();
+ let node = MetadataResolveNode {
+ id: normalize_id(pkg_id),
+ dependencies: dumb_deps,
+ deps,
+ features,
+ };
+ node_map.insert(pkg_id, node);
+ for dep_id in to_visit {
+ build_resolve_graph_r(
+ node_map,
+ dep_id,
+ resolve,
+ package_map,
+ target_data,
+ requested_kinds,
+ )?;
+ }
+
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_package.rs b/src/tools/cargo/src/cargo/ops/cargo_package.rs
new file mode 100644
index 000000000..35b213361
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_package.rs
@@ -0,0 +1,986 @@
+use std::collections::{BTreeSet, HashMap};
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::io::SeekFrom;
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
+use std::sync::Arc;
+use std::task::Poll;
+
+use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor};
+use crate::core::resolver::CliFeatures;
+use crate::core::{registry::PackageRegistry, resolver::HasDevUnits};
+use crate::core::{Feature, Shell, Verbosity, Workspace};
+use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
+use crate::sources::PathSource;
+use crate::util::errors::CargoResult;
+use crate::util::toml::TomlManifest;
+use crate::util::{self, human_readable_bytes, restricted_names, Config, FileLock};
+use crate::{drop_println, ops};
+use anyhow::Context as _;
+use cargo_util::paths;
+use flate2::read::GzDecoder;
+use flate2::{Compression, GzBuilder};
+use log::debug;
+use serde::Serialize;
+use tar::{Archive, Builder, EntryType, Header, HeaderMode};
+
+pub struct PackageOpts<'cfg> {
+ pub config: &'cfg Config,
+ pub list: bool,
+ pub check_metadata: bool,
+ pub allow_dirty: bool,
+ pub verify: bool,
+ pub jobs: Option<i32>,
+ pub keep_going: bool,
+ pub to_package: ops::Packages,
+ pub targets: Vec<String>,
+ pub cli_features: CliFeatures,
+}
+
+const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
+const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";
+
+struct ArchiveFile {
+ /// The relative path in the archive (not including the top-level package
+ /// name directory).
+ rel_path: PathBuf,
+ /// String variant of `rel_path`, for convenience.
+ rel_str: String,
+ /// The contents to add to the archive.
+ contents: FileContents,
+}
+
+enum FileContents {
+ /// Absolute path to the file on disk to add to the archive.
+ OnDisk(PathBuf),
+ /// Generates a file.
+ Generated(GeneratedFile),
+}
+
+enum GeneratedFile {
+ /// Generates `Cargo.toml` by rewriting the original.
+ Manifest,
+ /// Generates `Cargo.lock` in some cases (like if there is a binary).
+ Lockfile,
+ /// Adds a `.cargo_vcs_info.json` file if in a (clean) git repo.
+ VcsInfo(VcsInfo),
+}
+
+#[derive(Serialize)]
+struct VcsInfo {
+ git: GitVcsInfo,
+ /// Path to the package within the repo (empty string if at the root). Uses `/`, not `\`.
+ path_in_vcs: String,
+}
+
+#[derive(Serialize)]
+struct GitVcsInfo {
+ sha1: String,
+}
+
+pub fn package_one(
+ ws: &Workspace<'_>,
+ pkg: &Package,
+ opts: &PackageOpts<'_>,
+) -> CargoResult<Option<FileLock>> {
+ let config = ws.config();
+ let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config);
+ src.update()?;
+
+ if opts.check_metadata {
+ check_metadata(pkg, config)?;
+ }
+
+ if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
+ config.shell().warn(
+ "both package.include and package.exclude are specified; \
+ the exclude list will be ignored",
+ )?;
+ }
+ let src_files = src.list_files(pkg)?;
+
+ // Check (git) repository state, getting the current commit hash if not
+ // dirty.
+ let vcs_info = if !opts.allow_dirty {
+ // This will error if a dirty repo is found.
+ check_repo_state(pkg, &src_files, config)?
+ } else {
+ None
+ };
+
+ let ar_files = build_ar_list(ws, pkg, src_files, vcs_info)?;
+
+ let filecount = ar_files.len();
+
+ if opts.list {
+ for ar_file in ar_files {
+ drop_println!(config, "{}", ar_file.rel_str);
+ }
+
+ return Ok(None);
+ }
+
+ // Check that the package dependencies are safe to deploy.
+ for dep in pkg.dependencies() {
+ super::check_dep_has_version(dep, false)?;
+ }
+
+ let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
+ let dir = ws.target_dir().join("package");
+ let mut dst = {
+ let tmp = format!(".{}", filename);
+ dir.open_rw(&tmp, config, "package scratch space")?
+ };
+
+ // Package up and test a temporary tarball and only move it to the final
+ // location if it actually passes all our tests. Any previously existing
+ // tarball can be assumed to be corrupt or invalid, so we just blow it away if
+ // it exists.
+ config
+ .shell()
+ .status("Packaging", pkg.package_id().to_string())?;
+ dst.file().set_len(0)?;
+ let uncompressed_size = tar(ws, pkg, ar_files, dst.file(), &filename)
+ .with_context(|| "failed to prepare local package for uploading")?;
+ if opts.verify {
+ dst.seek(SeekFrom::Start(0))?;
+ run_verify(ws, pkg, &dst, opts).with_context(|| "failed to verify package tarball")?
+ }
+
+ dst.seek(SeekFrom::Start(0))?;
+ let src_path = dst.path();
+ let dst_path = dst.parent().join(&filename);
+ fs::rename(&src_path, &dst_path)
+ .with_context(|| "failed to move temporary tarball into final location")?;
+
+ let dst_metadata = dst
+ .file()
+ .metadata()
+ .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
+ let compressed_size = dst_metadata.len();
+
+ let uncompressed = human_readable_bytes(uncompressed_size);
+ let compressed = human_readable_bytes(compressed_size);
+
+ let message = format!(
+ "{} files, {:.1}{} ({:.1}{} compressed)",
+ filecount, uncompressed.0, uncompressed.1, compressed.0, compressed.1,
+ );
+ // It doesn't really matter if this fails.
+ drop(config.shell().status("Packaged", message));
+
+ return Ok(Some(dst));
+}
+
+pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Option<Vec<FileLock>>> {
+ let pkgs = ws.members_with_features(
+ &opts.to_package.to_package_id_specs(ws)?,
+ &opts.cli_features,
+ )?;
+
+ let mut dsts = Vec::with_capacity(pkgs.len());
+
+ if ws.root().join("Cargo.lock").exists() {
+ // Make sure the Cargo.lock is up-to-date and valid.
+ let _ = ops::resolve_ws(ws)?;
+ }
+ // If Cargo.lock does not exist, it will be generated by `build_lock`
+ // below, and will be validated during the verification step.
+
+ for (pkg, cli_features) in pkgs {
+ let result = package_one(
+ ws,
+ pkg,
+ &PackageOpts {
+ config: opts.config,
+ list: opts.list,
+ check_metadata: opts.check_metadata,
+ allow_dirty: opts.allow_dirty,
+ verify: opts.verify,
+ jobs: opts.jobs,
+ keep_going: opts.keep_going,
+ to_package: ops::Packages::Default,
+ targets: opts.targets.clone(),
+ cli_features,
+ },
+ )?;
+
+ if !opts.list {
+ dsts.push(result.unwrap());
+ }
+ }
+
+ if opts.list {
+ // We're just listing, so there's no file output
+ Ok(None)
+ } else {
+ Ok(Some(dsts))
+ }
+}
+
+/// Builds the list of files to archive.
+fn build_ar_list(
+ ws: &Workspace<'_>,
+ pkg: &Package,
+ src_files: Vec<PathBuf>,
+ vcs_info: Option<VcsInfo>,
+) -> CargoResult<Vec<ArchiveFile>> {
+ let mut result = Vec::new();
+ let root = pkg.root();
+ for src_file in src_files {
+ let rel_path = src_file.strip_prefix(&root)?.to_path_buf();
+ check_filename(&rel_path, &mut ws.config().shell())?;
+ let rel_str = rel_path
+ .to_str()
+ .ok_or_else(|| {
+ anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
+ })?
+ .to_string();
+ match rel_str.as_ref() {
+ "Cargo.toml" => {
+ result.push(ArchiveFile {
+ rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
+ rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
+ contents: FileContents::OnDisk(src_file),
+ });
+ result.push(ArchiveFile {
+ rel_path,
+ rel_str,
+ contents: FileContents::Generated(GeneratedFile::Manifest),
+ });
+ }
+ "Cargo.lock" => continue,
+ VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
+ "invalid inclusion of reserved file name {} in package source",
+ rel_str
+ ),
+ _ => {
+ result.push(ArchiveFile {
+ rel_path,
+ rel_str,
+ contents: FileContents::OnDisk(src_file),
+ });
+ }
+ }
+ }
+ if pkg.include_lockfile() {
+ result.push(ArchiveFile {
+ rel_path: PathBuf::from("Cargo.lock"),
+ rel_str: "Cargo.lock".to_string(),
+ contents: FileContents::Generated(GeneratedFile::Lockfile),
+ });
+ }
+ if let Some(vcs_info) = vcs_info {
+ result.push(ArchiveFile {
+ rel_path: PathBuf::from(VCS_INFO_FILE),
+ rel_str: VCS_INFO_FILE.to_string(),
+ contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
+ });
+ }
+ if let Some(license_file) = &pkg.manifest().metadata().license_file {
+ let license_path = Path::new(license_file);
+ let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
+ if abs_file_path.exists() {
+ check_for_file_and_add(
+ "license-file",
+ license_path,
+ abs_file_path,
+ pkg,
+ &mut result,
+ ws,
+ )?;
+ } else {
+ let rel_msg = if license_path.is_absolute() {
+ "".to_string()
+ } else {
+ format!(" (relative to `{}`)", pkg.root().display())
+ };
+ ws.config().shell().warn(&format!(
+ "license-file `{}` does not appear to exist{}.\n\
+ Please update the license-file setting in the manifest at `{}`\n\
+ This may become a hard error in the future.",
+ license_path.display(),
+ rel_msg,
+ pkg.manifest_path().display()
+ ))?;
+ }
+ }
+ if let Some(readme) = &pkg.manifest().metadata().readme {
+ let readme_path = Path::new(readme);
+ let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
+ if abs_file_path.exists() {
+ check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
+ }
+ }
+ result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));
+
+ Ok(result)
+}
+
+fn check_for_file_and_add(
+ label: &str,
+ file_path: &Path,
+ abs_file_path: PathBuf,
+ pkg: &Package,
+ result: &mut Vec<ArchiveFile>,
+ ws: &Workspace<'_>,
+) -> CargoResult<()> {
+ match abs_file_path.strip_prefix(&pkg.root()) {
+ Ok(rel_file_path) => {
+ if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
+ result.push(ArchiveFile {
+ rel_path: rel_file_path.to_path_buf(),
+ rel_str: rel_file_path
+ .to_str()
+ .expect("everything was utf8")
+ .to_string(),
+ contents: FileContents::OnDisk(abs_file_path),
+ })
+ }
+ }
+ Err(_) => {
+ // The file exists somewhere outside of the package.
+ let file_name = file_path.file_name().unwrap();
+ if result
+ .iter()
+ .any(|ar| ar.rel_path.file_name().unwrap() == file_name)
+ {
+ ws.config().shell().warn(&format!(
+ "{} `{}` appears to be a path outside of the package, \
+ but there is already a file named `{}` in the root of the package. \
+ The archived crate will contain the copy in the root of the package. \
+ Update the {} to point to the path relative \
+ to the root of the package to remove this warning.",
+ label,
+ file_path.display(),
+ file_name.to_str().unwrap(),
+ label,
+ ))?;
+ } else {
+ result.push(ArchiveFile {
+ rel_path: PathBuf::from(file_name),
+ rel_str: file_name.to_str().unwrap().to_string(),
+ contents: FileContents::OnDisk(abs_file_path),
+ })
+ }
+ }
+ }
+ Ok(())
+}
+
+/// Construct `Cargo.lock` for the package to be published.
+fn build_lock(ws: &Workspace<'_>, orig_pkg: &Package) -> CargoResult<String> {
+ let config = ws.config();
+ let orig_resolve = ops::load_pkg_lockfile(ws)?;
+
+ // Convert Package -> TomlManifest -> Manifest -> Package
+ let toml_manifest = Rc::new(
+ orig_pkg
+ .manifest()
+ .original()
+ .prepare_for_publish(ws, orig_pkg.root())?,
+ );
+ let package_root = orig_pkg.root();
+ let source_id = orig_pkg.package_id().source_id();
+ let (manifest, _nested_paths) =
+ TomlManifest::to_real_manifest(&toml_manifest, source_id, package_root, config)?;
+ let new_pkg = Package::new(manifest, orig_pkg.manifest_path());
+
+ // Regenerate Cargo.lock using the old one as a guide.
+ let tmp_ws = Workspace::ephemeral(new_pkg, ws.config(), None, true)?;
+ let mut tmp_reg = PackageRegistry::new(ws.config())?;
+ let mut new_resolve = ops::resolve_with_previous(
+ &mut tmp_reg,
+ &tmp_ws,
+ &CliFeatures::new_all(true),
+ HasDevUnits::Yes,
+ orig_resolve.as_ref(),
+ None,
+ &[],
+ true,
+ )?;
+ let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;
+
+ if let Some(orig_resolve) = orig_resolve {
+ compare_resolve(config, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
+ }
+ check_yanked(
+ config,
+ &pkg_set,
+ &new_resolve,
+ "consider updating to a version that is not yanked",
+ )?;
+
+ ops::resolve_to_string(&tmp_ws, &mut new_resolve)
+}
+
+// Checks that the package has some piece of metadata that a human can
+// use to tell what the package is about.
+fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
+ let md = pkg.manifest().metadata();
+
+ let mut missing = vec![];
+
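+ // For each `||` group below, if every field in the group is missing or
+ // empty, record each field name (with `_` replaced by `-`) as missing.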
+ macro_rules! lacking {
+ ($( $($field: ident)||* ),*) => {{
+ $(
+ if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
+ $(missing.push(stringify!($field).replace("_", "-"));)*
+ }
+ )*
+ }}
+ }
+ lacking!(
+ description,
+ license || license_file,
+ documentation || homepage || repository
+ );
+
+ if !missing.is_empty() {
+ let mut things = missing[..missing.len() - 1].join(", ");
+ // `things` will be empty if and only if its length is 1 (i.e., the only case
+ // to have no `or`).
+ if !things.is_empty() {
+ things.push_str(" or ");
+ }
+ things.push_str(missing.last().unwrap());
+
+ config.shell().warn(&format!(
+ "manifest has no {things}.\n\
+ See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
+ things = things
+ ))?
+ }
+
+ Ok(())
+}
+
+/// Checks if the package source is in a *git* DVCS repository. If *git*, and
+/// the source is *dirty* (e.g., has uncommitted changes) then `bail!` with an
+/// informative message. Otherwise return the sha1 hash of the current *HEAD*
+/// commit, or `None` if no repo is found.
+fn check_repo_state(
+ p: &Package,
+ src_files: &[PathBuf],
+ config: &Config,
+) -> CargoResult<Option<VcsInfo>> {
+ if let Ok(repo) = git2::Repository::discover(p.root()) {
+ if let Some(workdir) = repo.workdir() {
+ debug!("found a git repo at {:?}", workdir);
+ let path = p.manifest_path();
+ let path = path.strip_prefix(workdir).unwrap_or(path);
+ if let Ok(status) = repo.status_file(path) {
+ if (status & git2::Status::IGNORED).is_empty() {
+ debug!(
+ "found (git) Cargo.toml at {:?} in workdir {:?}",
+ path, workdir
+ );
+ let path_in_vcs = path
+ .parent()
+ .and_then(|p| p.to_str())
+ .unwrap_or("")
+ .replace("\\", "/");
+ return Ok(Some(VcsInfo {
+ git: git(p, src_files, &repo)?,
+ path_in_vcs,
+ }));
+ }
+ }
+ config.shell().verbose(|shell| {
+ shell.warn(format!(
+ "No (git) Cargo.toml found at `{}` in workdir `{}`",
+ path.display(),
+ workdir.display()
+ ))
+ })?;
+ }
+ } else {
+ config.shell().verbose(|shell| {
+ shell.warn(format!("No (git) VCS found for `{}`", p.root().display()))
+ })?;
+ }
+
+ // No VCS with a checked in `Cargo.toml` found, so we don't know if the
+ // directory is dirty or not, thus we have to assume that it's clean.
+ return Ok(None);
+
+ fn git(p: &Package, src_files: &[PathBuf], repo: &git2::Repository) -> CargoResult<GitVcsInfo> {
+ // This is a collection of any dirty or untracked files. This covers:
+ // - new/modified/deleted/renamed/type change (index or worktree)
+ // - untracked files (which are "new" worktree files)
+ // - ignored (in case the user has an `include` directive that
+ // conflicts with .gitignore).
+ let mut dirty_files = Vec::new();
+ collect_statuses(repo, &mut dirty_files)?;
+ // Include each submodule so that the error message can provide
+ // specifically *which* files in a submodule are modified.
+ status_submodules(repo, &mut dirty_files)?;
+
+ // Find the intersection of dirty in git, and the src_files that would
+ // be packaged. This is a lazy n^2 check, but seems fine with
+ // thousands of files.
+ let dirty_src_files: Vec<String> = src_files
+ .iter()
+ .filter(|src_file| dirty_files.iter().any(|path| src_file.starts_with(path)))
+ .map(|path| {
+ path.strip_prefix(p.root())
+ .unwrap_or(path)
+ .display()
+ .to_string()
+ })
+ .collect();
+ if dirty_src_files.is_empty() {
+ let rev_obj = repo.revparse_single("HEAD")?;
+ Ok(GitVcsInfo {
+ sha1: rev_obj.id().to_string(),
+ })
+ } else {
+ anyhow::bail!(
+ "{} files in the working directory contain changes that were \
+ not yet committed into git:\n\n{}\n\n\
+ to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag",
+ dirty_src_files.len(),
+ dirty_src_files.join("\n")
+ )
+ }
+ }
+
+ // Helper to collect dirty statuses for a single repo.
+ fn collect_statuses(
+ repo: &git2::Repository,
+ dirty_files: &mut Vec<PathBuf>,
+ ) -> CargoResult<()> {
+ let mut status_opts = git2::StatusOptions::new();
+ // Exclude submodules, as they are being handled manually by recursing
+ // into each one so that details about specific files can be
+ // retrieved.
+ status_opts
+ .exclude_submodules(true)
+ .include_ignored(true)
+ .include_untracked(true);
+ let repo_statuses = repo.statuses(Some(&mut status_opts)).with_context(|| {
+ format!(
+ "failed to retrieve git status from repo {}",
+ repo.path().display()
+ )
+ })?;
+ let workdir = repo.workdir().unwrap();
+ let this_dirty = repo_statuses.iter().filter_map(|entry| {
+ let path = entry.path().expect("valid utf-8 path");
+ if path.ends_with("Cargo.lock") && entry.status() == git2::Status::IGNORED {
+ // It is OK to include Cargo.lock even if it is ignored.
+ return None;
+ }
+ // Use an absolute path, so that comparing paths is easier
+ // (particularly with submodules).
+ Some(workdir.join(path))
+ });
+ dirty_files.extend(this_dirty);
+ Ok(())
+ }
+
+ // Helper to collect dirty statuses while recursing into submodules.
+ fn status_submodules(
+ repo: &git2::Repository,
+ dirty_files: &mut Vec<PathBuf>,
+ ) -> CargoResult<()> {
+ for submodule in repo.submodules()? {
+ // Ignore submodules that fail to open; they are probably not initialized.
+ // If their files are required, then the verification step should fail.
+ if let Ok(sub_repo) = submodule.open() {
+ status_submodules(&sub_repo, dirty_files)?;
+ collect_statuses(&sub_repo, dirty_files)?;
+ }
+ }
+ Ok(())
+ }
+}
+
+/// Compresses and packages a list of [`ArchiveFile`]s and writes them into the given file.
+///
+/// Returns the uncompressed size of the contents of the new archive file.
+fn tar(
+ ws: &Workspace<'_>,
+ pkg: &Package,
+ ar_files: Vec<ArchiveFile>,
+ dst: &File,
+ filename: &str,
+) -> CargoResult<u64> {
+ // Prepare the encoder and its header.
+ let filename = Path::new(filename);
+ let encoder = GzBuilder::new()
+ .filename(paths::path2bytes(filename)?)
+ .write(dst, Compression::best());
+
+ // Put all package files into a compressed archive.
+ let mut ar = Builder::new(encoder);
+ let config = ws.config();
+
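+ // All archive entries are placed under a `<name>-<version>/` prefix,
+ // matching the directory layout of an extracted crate.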
+ let base_name = format!("{}-{}", pkg.name(), pkg.version());
+ let base_path = Path::new(&base_name);
+
+ let mut uncompressed_size = 0;
+ for ar_file in ar_files {
+ let ArchiveFile {
+ rel_path,
+ rel_str,
+ contents,
+ } = ar_file;
+ let ar_path = base_path.join(&rel_path);
+ config
+ .shell()
+ .verbose(|shell| shell.status("Archiving", &rel_str))?;
+ let mut header = Header::new_gnu();
+ match contents {
+ FileContents::OnDisk(disk_path) => {
+ let mut file = File::open(&disk_path).with_context(|| {
+ format!("failed to open for archiving: `{}`", disk_path.display())
+ })?;
+ let metadata = file.metadata().with_context(|| {
+ format!("could not learn metadata for: `{}`", disk_path.display())
+ })?;
+ header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
+ header.set_cksum();
+ ar.append_data(&mut header, &ar_path, &mut file)
+ .with_context(|| {
+ format!("could not archive source file `{}`", disk_path.display())
+ })?;
+ uncompressed_size += metadata.len() as u64;
+ }
+ FileContents::Generated(generated_kind) => {
+ let contents = match generated_kind {
+ GeneratedFile::Manifest => pkg.to_registry_toml(ws)?,
+ GeneratedFile::Lockfile => build_lock(ws, pkg)?,
+ GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
+ };
+ header.set_entry_type(EntryType::file());
+ header.set_mode(0o644);
+ header.set_size(contents.len() as u64);
+ // use something nonzero to avoid rust-lang/cargo#9512
+ header.set_mtime(1);
+ header.set_cksum();
+ ar.append_data(&mut header, &ar_path, contents.as_bytes())
+ .with_context(|| format!("could not archive source file `{}`", rel_str))?;
+ uncompressed_size += contents.len() as u64;
+ }
+ }
+ }
+
+ let encoder = ar.into_inner()?;
+ encoder.finish()?;
+ Ok(uncompressed_size)
+}
+
+/// Generates warnings when the resolve in the packaged `Cargo.lock` differs from the original.
+fn compare_resolve(
+ config: &Config,
+ current_pkg: &Package,
+ orig_resolve: &Resolve,
+ new_resolve: &Resolve,
+) -> CargoResult<()> {
+ if config.shell().verbosity() != Verbosity::Verbose {
+ return Ok(());
+ }
+ let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
+ let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
+ let added = new_set.difference(&orig_set);
+ // Removed entries are ignored; they are only used to quickly find hints
+ // for why an entry changed.
+ let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
+ for pkg_id in added {
+ if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
+ // Skip the package that is being created, since its SourceId
+ // (directory) changes.
+ continue;
+ }
+ // Check for candidates where the source has changed (such as [patch]
+ // or a dependency with multiple sources like path/version).
+ let removed_candidates: Vec<&PackageId> = removed
+ .iter()
+ .filter(|orig_pkg_id| {
+ orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
+ })
+ .cloned()
+ .collect();
+ let extra = match removed_candidates.len() {
+ 0 => {
+ // This can happen if the original was out of date.
+ let previous_versions: Vec<&PackageId> = removed
+ .iter()
+ .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
+ .cloned()
+ .collect();
+ match previous_versions.len() {
+ 0 => String::new(),
+ 1 => format!(
+ ", previous version was `{}`",
+ previous_versions[0].version()
+ ),
+ _ => format!(
+ ", previous versions were: {}",
+ previous_versions
+ .iter()
+ .map(|pkg_id| format!("`{}`", pkg_id.version()))
+ .collect::<Vec<_>>()
+ .join(", ")
+ ),
+ }
+ }
+ 1 => {
+ // This can happen for multi-sourced dependencies like
+ // `{path="...", version="..."}` or `[patch]` replacement.
+ // `[replace]` is not captured in Cargo.lock.
+ format!(
+ ", was originally sourced from `{}`",
+ removed_candidates[0].source_id()
+ )
+ }
+ _ => {
+ // I don't know if there is a way to actually trigger this,
+ // but handle it just in case.
+ let comma_list = removed_candidates
+ .iter()
+ .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
+ .collect::<Vec<_>>()
+ .join(", ");
+ format!(
+ ", was originally sourced from one of these sources: {}",
+ comma_list
+ )
+ }
+ };
+ let msg = format!(
+ "package `{}` added to the packaged Cargo.lock file{}",
+ pkg_id, extra
+ );
+ config.shell().note(msg)?;
+ }
+ Ok(())
+}
+
+pub fn check_yanked(
+ config: &Config,
+ pkg_set: &PackageSet<'_>,
+ resolve: &Resolve,
+ hint: &str,
+) -> CargoResult<()> {
+ // Checking the yanked status involves taking a look at the registry and
+ // maybe updating files, so be sure to lock it here.
+ let _lock = config.acquire_package_cache_lock()?;
+
+ let mut sources = pkg_set.sources_mut();
+ let mut pending: Vec<PackageId> = resolve.iter().collect();
+ let mut results = Vec::new();
+ for (_id, source) in sources.sources_mut() {
+ source.invalidate_cache();
+ }
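+ // Poll the yanked status of every remaining package, blocking the sources
+ // between passes until all of the queries are ready.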
+ while !pending.is_empty() {
+ pending.retain(|pkg_id| {
+ if let Some(source) = sources.get_mut(pkg_id.source_id()) {
+ match source.is_yanked(*pkg_id) {
+ Poll::Ready(result) => results.push((*pkg_id, result)),
+ Poll::Pending => return true,
+ }
+ }
+ false
+ });
+ for (_id, source) in sources.sources_mut() {
+ source.block_until_ready()?;
+ }
+ }
+
+ for (pkg_id, is_yanked) in results {
+ if is_yanked? {
+ config.shell().warn(format!(
+ "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
+ pkg_id,
+ pkg_id.source_id().display_registry_name(),
+ hint
+ ))?;
+ }
+ }
+ Ok(())
+}
+
+fn run_verify(
+ ws: &Workspace<'_>,
+ pkg: &Package,
+ tar: &FileLock,
+ opts: &PackageOpts<'_>,
+) -> CargoResult<()> {
+ let config = ws.config();
+
+ config.shell().status("Verifying", pkg)?;
+
+ let f = GzDecoder::new(tar.file());
+ let dst = tar
+ .parent()
+ .join(&format!("{}-{}", pkg.name(), pkg.version()));
+ if dst.exists() {
+ paths::remove_dir_all(&dst)?;
+ }
+ let mut archive = Archive::new(f);
+ // We don't need to set the Modified Time, as it's not relevant to verification
+ // and it errors on filesystems that don't support setting a modified timestamp
+ archive.set_preserve_mtime(false);
+ archive.unpack(dst.parent().unwrap())?;
+
+ // Manufacture an ephemeral workspace to ensure that even if the top-level
+ // package has a workspace we can still build our new crate.
+ let id = SourceId::for_path(&dst)?;
+ let mut src = PathSource::new(&dst, id, ws.config());
+ let new_pkg = src.root_package()?;
+ let pkg_fingerprint = hash_all(&dst)?;
+ let ws = Workspace::ephemeral(new_pkg, config, None, true)?;
+
+ let rustc_args = if pkg
+ .manifest()
+ .unstable_features()
+ .require(Feature::public_dependency())
+ .is_ok()
+ {
+ // FIXME: Turn this on at some point in the future
+ //Some(vec!["-D exported_private_dependencies".to_string()])
+ Some(vec![])
+ } else {
+ None
+ };
+
+ let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
+ ops::compile_with_exec(
+ &ws,
+ &ops::CompileOptions {
+ build_config: BuildConfig::new(
+ config,
+ opts.jobs,
+ opts.keep_going,
+ &opts.targets,
+ CompileMode::Build,
+ )?,
+ cli_features: opts.cli_features.clone(),
+ spec: ops::Packages::Packages(Vec::new()),
+ filter: ops::CompileFilter::Default {
+ required_features_filterable: true,
+ },
+ target_rustdoc_args: None,
+ target_rustc_args: rustc_args,
+ target_rustc_crate_types: None,
+ rustdoc_document_private_items: false,
+ honor_rust_version: true,
+ },
+ &exec,
+ )?;
+
+ // Check that `build.rs` didn't modify any files in the `src` directory.
+ let ws_fingerprint = hash_all(&dst)?;
+ if pkg_fingerprint != ws_fingerprint {
+ let changes = report_hash_difference(&pkg_fingerprint, &ws_fingerprint);
+ anyhow::bail!(
+ "Source directory was modified by build.rs during cargo publish. \
+ Build scripts should not modify anything outside of OUT_DIR.\n\
+ {}\n\n\
+ To proceed despite this, pass the `--no-verify` flag.",
+ changes
+ )
+ }
+
+ Ok(())
+}
+
+fn hash_all(path: &Path) -> CargoResult<HashMap<PathBuf, u64>> {
+ fn wrap(path: &Path) -> CargoResult<HashMap<PathBuf, u64>> {
+ let mut result = HashMap::new();
+ let walker = walkdir::WalkDir::new(path).into_iter();
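+ // Skip the top-level `target` directory; build output is not part of the
+ // packaged sources being fingerprinted.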
+ for entry in walker.filter_entry(|e| !(e.depth() == 1 && e.file_name() == "target")) {
+ let entry = entry?;
+ let file_type = entry.file_type();
+ if file_type.is_file() {
+ let file = File::open(entry.path())?;
+ let hash = util::hex::hash_u64_file(&file)?;
+ result.insert(entry.path().to_path_buf(), hash);
+ } else if file_type.is_symlink() {
+ let hash = util::hex::hash_u64(&fs::read_link(entry.path())?);
+ result.insert(entry.path().to_path_buf(), hash);
+ } else if file_type.is_dir() {
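+ // Directories get a constant hash; adding or removing one still changes
+ // the set of fingerprinted paths.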
+ let hash = util::hex::hash_u64(&());
+ result.insert(entry.path().to_path_buf(), hash);
+ }
+ }
+ Ok(result)
+ }
+ let result = wrap(path).with_context(|| format!("failed to verify output at {:?}", path))?;
+ Ok(result)
+}
+
+fn report_hash_difference(orig: &HashMap<PathBuf, u64>, after: &HashMap<PathBuf, u64>) -> String {
+ let mut changed = Vec::new();
+ let mut removed = Vec::new();
+ for (key, value) in orig {
+ match after.get(key) {
+ Some(after_value) => {
+ if value != after_value {
+ changed.push(key.to_string_lossy());
+ }
+ }
+ None => removed.push(key.to_string_lossy()),
+ }
+ }
+ let mut added: Vec<_> = after
+ .keys()
+ .filter(|key| !orig.contains_key(*key))
+ .map(|key| key.to_string_lossy())
+ .collect();
+ let mut result = Vec::new();
+ if !changed.is_empty() {
+ changed.sort_unstable();
+ result.push(format!("Changed: {}", changed.join("\n\t")));
+ }
+ if !added.is_empty() {
+ added.sort_unstable();
+ result.push(format!("Added: {}", added.join("\n\t")));
+ }
+ if !removed.is_empty() {
+ removed.sort_unstable();
+ result.push(format!("Removed: {}", removed.join("\n\t")));
+ }
+ assert!(!result.is_empty(), "unexpected empty change detection");
+ result.join("\n")
+}
+
+// It can often be the case that files of a particular name on one platform
+// can't actually be created on another platform. For example, files with
+// colons in the name are allowed on Unix but not on Windows.
+//
+// To help out in situations like this, issue a warning about weird filenames
+// when packaging as a "heads up" that something may not work on other platforms.
+fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
+ let name = match file.file_name() {
+ Some(name) => name,
+ None => return Ok(()),
+ };
+ let name = match name.to_str() {
+ Some(name) => name,
+ None => anyhow::bail!(
+ "path does not have a unicode filename which may not unpack \
+ on all platforms: {}",
+ file.display()
+ ),
+ };
+ let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
+ if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
+ anyhow::bail!(
+ "cannot package a filename with a special character `{}`: {}",
+ c,
+ file.display()
+ )
+ }
+ if restricted_names::is_windows_reserved_path(file) {
+ shell.warn(format!(
+ "file {} is a reserved Windows filename, \
+ it will not work on Windows platforms",
+ file.display()
+ ))?;
+ }
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_pkgid.rs b/src/tools/cargo/src/cargo/ops/cargo_pkgid.rs
new file mode 100644
index 000000000..eeed6ac02
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_pkgid.rs
@@ -0,0 +1,16 @@
+use crate::core::{PackageIdSpec, Workspace};
+use crate::ops;
+use crate::util::CargoResult;
+
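+/// Resolves a package ID spec (e.g. a package name, optionally qualified with
+/// a version) against the workspace's `Cargo.lock`, or returns the current
+/// package's ID when no spec is given.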
+pub fn pkgid(ws: &Workspace<'_>, spec: Option<&str>) -> CargoResult<PackageIdSpec> {
+ let resolve = match ops::load_pkg_lockfile(ws)? {
+ Some(resolve) => resolve,
+ None => anyhow::bail!("a Cargo.lock must exist for this command"),
+ };
+
+ let pkgid = match spec {
+ Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?,
+ None => ws.current()?.package_id(),
+ };
+ Ok(PackageIdSpec::from_package_id(pkgid))
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_read_manifest.rs b/src/tools/cargo/src/cargo/ops/cargo_read_manifest.rs
new file mode 100644
index 000000000..2dfe90086
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_read_manifest.rs
@@ -0,0 +1,234 @@
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+
+use crate::core::{EitherManifest, Package, PackageId, SourceId};
+use crate::util::errors::CargoResult;
+use crate::util::important_paths::find_project_manifest_exact;
+use crate::util::toml::read_manifest;
+use crate::util::Config;
+use cargo_util::paths;
+use log::{info, trace};
+
+pub fn read_package(
+ path: &Path,
+ source_id: SourceId,
+ config: &Config,
+) -> CargoResult<(Package, Vec<PathBuf>)> {
+ trace!(
+ "read_package; path={}; source-id={}",
+ path.display(),
+ source_id
+ );
+ let (manifest, nested) = read_manifest(path, source_id, config)?;
+ let manifest = match manifest {
+ EitherManifest::Real(manifest) => manifest,
+ EitherManifest::Virtual(..) => anyhow::bail!(
+ "found a virtual manifest at `{}` instead of a package \
+ manifest",
+ path.display()
+ ),
+ };
+
+ Ok((Package::new(manifest, path), nested))
+}
+
+pub fn read_packages(
+ path: &Path,
+ source_id: SourceId,
+ config: &Config,
+) -> CargoResult<Vec<Package>> {
+ let mut all_packages = HashMap::new();
+ let mut visited = HashSet::<PathBuf>::new();
+ let mut errors = Vec::<anyhow::Error>::new();
+
+ trace!(
+ "looking for root package: {}, source_id={}",
+ path.display(),
+ source_id
+ );
+
+ walk(path, &mut |dir| {
+ trace!("looking for child package: {}", dir.display());
+
+ // Don't recurse into hidden/dot directories unless we're at the toplevel
+ if dir != path {
+ let name = dir.file_name().and_then(|s| s.to_str());
+ if name.map(|s| s.starts_with('.')) == Some(true) {
+ return Ok(false);
+ }
+
+ // Don't automatically discover packages across git submodules
+ if dir.join(".git").exists() {
+ return Ok(false);
+ }
+ }
+
+ // Don't ever look at target directories
+ if dir.file_name().and_then(|s| s.to_str()) == Some("target")
+ && has_manifest(dir.parent().unwrap())
+ {
+ return Ok(false);
+ }
+
+ if has_manifest(dir) {
+ read_nested_packages(
+ dir,
+ &mut all_packages,
+ source_id,
+ config,
+ &mut visited,
+ &mut errors,
+ )?;
+ }
+ Ok(true)
+ })?;
+
+ if all_packages.is_empty() {
+ match errors.pop() {
+ Some(err) => Err(err),
+ None => {
+ if find_project_manifest_exact(path, "cargo.toml").is_ok() {
+ Err(anyhow::format_err!(
+ "Could not find Cargo.toml in `{}`, but found cargo.toml please try to rename it to Cargo.toml",
+ path.display()
+ ))
+ } else {
+ Err(anyhow::format_err!(
+ "Could not find Cargo.toml in `{}`",
+ path.display()
+ ))
+ }
+ }
+ }
+ } else {
+ Ok(all_packages.into_iter().map(|(_, v)| v).collect())
+ }
+}
+
+fn walk(path: &Path, callback: &mut dyn FnMut(&Path) -> CargoResult<bool>) -> CargoResult<()> {
+ if !callback(path)? {
+ trace!("not processing {}", path.display());
+ return Ok(());
+ }
+
+ // Ignore any permission denied errors because temporary directories
+ // can often have some weird permissions on them.
+ let dirs = match fs::read_dir(path) {
+ Ok(dirs) => dirs,
+ Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => return Ok(()),
+ Err(e) => {
+ let cx = format!("failed to read directory `{}`", path.display());
+ let e = anyhow::Error::from(e);
+ return Err(e.context(cx));
+ }
+ };
+ for dir in dirs {
+ let dir = dir?;
+ if dir.file_type()?.is_dir() {
+ walk(&dir.path(), callback)?;
+ }
+ }
+ Ok(())
+}
+
+fn has_manifest(path: &Path) -> bool {
+ find_project_manifest_exact(path, "Cargo.toml").is_ok()
+}
+
+fn read_nested_packages(
+ path: &Path,
+ all_packages: &mut HashMap<PackageId, Package>,
+ source_id: SourceId,
+ config: &Config,
+ visited: &mut HashSet<PathBuf>,
+ errors: &mut Vec<anyhow::Error>,
+) -> CargoResult<()> {
+ if !visited.insert(path.to_path_buf()) {
+ return Ok(());
+ }
+
+ let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?;
+
+ let (manifest, nested) = match read_manifest(&manifest_path, source_id, config) {
+ Err(err) => {
+ // Ignore malformed manifests found on git repositories.
+ //
+ // Git sources try to find and read all manifests in the repository,
+ // but since it's not possible to exclude folders from this search,
+ // it's safer to ignore malformed manifests than to fail the whole
+ // operation.
+ //
+ // TODO: Add a way to exclude folders?
+ info!(
+ "skipping malformed package found at `{}`",
+ path.to_string_lossy()
+ );
+ errors.push(err.into());
+ return Ok(());
+ }
+ Ok(tuple) => tuple,
+ };
+
+ let manifest = match manifest {
+ EitherManifest::Real(manifest) => manifest,
+ EitherManifest::Virtual(..) => return Ok(()),
+ };
+ let pkg = Package::new(manifest, &manifest_path);
+
+ let pkg_id = pkg.package_id();
+ use std::collections::hash_map::Entry;
+ match all_packages.entry(pkg_id) {
+ Entry::Vacant(v) => {
+ v.insert(pkg);
+ }
+ Entry::Occupied(_) => {
+ // A package with `publish = false` isn't intended to be seen by
+ // users, so hide the duplicate-package warning in that case; the
+ // user is unlikely to care about it.
+ if pkg.publish().is_none() {
+ let _ = config.shell().warn(format!(
+ "skipping duplicate package `{}` found at `{}`",
+ pkg.name(),
+ path.display()
+ ));
+ }
+ }
+ }
+
+ // Registry sources are not allowed to have `path=` dependencies because
+ // they're all translated to actual registry dependencies.
+ //
+ // We normalize the path here to ensure that we don't infinitely walk around
+ // looking for crates. By normalizing we ensure that we visit this crate at
+ // most once.
+ //
+ // TODO: filesystem/symlink implications?
+ if !source_id.is_registry() {
+ for p in nested.iter() {
+ let path = paths::normalize_path(&path.join(p));
+ let result =
+ read_nested_packages(&path, all_packages, source_id, config, visited, errors);
+ // Ignore broken manifests found on git repositories.
+ //
+ // A well formed manifest might still fail to load due to reasons
+ // like referring to a "path" that requires an extra build step.
+ //
+ // See https://github.com/rust-lang/cargo/issues/6822.
+ if let Err(err) = result {
+ if source_id.is_git() {
+ info!(
+ "skipping nested package found at `{}`: {:?}",
+ path.display(),
+ &err,
+ );
+ errors.push(err);
+ } else {
+ return Err(err);
+ }
+ }
+ }
+ }
+
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_remove.rs b/src/tools/cargo/src/cargo/ops/cargo_remove.rs
new file mode 100644
index 000000000..4866caedd
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_remove.rs
@@ -0,0 +1,65 @@
+//! Core of cargo-remove command
+
+use crate::core::Package;
+use crate::util::toml_mut::manifest::DepTable;
+use crate::util::toml_mut::manifest::LocalManifest;
+use crate::CargoResult;
+use crate::Config;
+
+/// Options for removing dependencies from a `Cargo.toml` manifest file.
+#[derive(Debug)]
+pub struct RemoveOptions<'a> {
+ /// Configuration information for Cargo operations
+ pub config: &'a Config,
+ /// Package to remove dependencies from
+ pub spec: &'a Package,
+ /// Dependencies to remove
+ pub dependencies: Vec<String>,
+ /// Which dependency section to remove these from
+ pub section: DepTable,
+ /// Whether or not to actually write the manifest
+ pub dry_run: bool,
+}
+
+/// Remove dependencies from a manifest
+pub fn remove(options: &RemoveOptions<'_>) -> CargoResult<()> {
+ let dep_table = options
+ .section
+ .to_table()
+ .into_iter()
+ .map(String::from)
+ .collect::<Vec<_>>();
+
+ let manifest_path = options.spec.manifest_path().to_path_buf();
+ let mut manifest = LocalManifest::try_new(&manifest_path)?;
+
+ for dep in &options.dependencies {
+ let section = if dep_table.len() >= 3 {
+ format!("{} for target `{}`", &dep_table[2], &dep_table[1])
+ } else {
+ dep_table[0].clone()
+ };
+ options
+ .config
+ .shell()
+ .status("Removing", format!("{dep} from {section}"))?;
+
+ manifest.remove_from_table(&dep_table, dep)?;
+
+ // Now that we have removed the crate, if that was the last reference to that
+ // crate, then we need to drop any explicitly activated features on
+ // that crate.
+ manifest.gc_dep(dep);
+ }
+
+ if options.dry_run {
+ options
+ .config
+ .shell()
+ .warn("aborting remove due to dry run")?;
+ } else {
+ manifest.write()?;
+ }
+
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_run.rs b/src/tools/cargo/src/cargo/ops/cargo_run.rs
new file mode 100644
index 000000000..53916715a
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_run.rs
@@ -0,0 +1,106 @@
+use std::ffi::OsString;
+use std::iter;
+use std::path::Path;
+
+use crate::core::compiler::UnitOutput;
+use crate::core::{TargetKind, Workspace};
+use crate::ops;
+use crate::util::CargoResult;
+
+pub fn run(
+ ws: &Workspace<'_>,
+ options: &ops::CompileOptions,
+ args: &[OsString],
+) -> CargoResult<()> {
+ let config = ws.config();
+
+ if options.filter.contains_glob_patterns() {
+ anyhow::bail!("`cargo run` does not support glob patterns on target selection")
+ }
+
+ // We compute the `bins` here *just for diagnosis*. The actual set of
+ // packages to be run is determined by the `ops::compile` call below.
+ let packages = options.spec.get_packages(ws)?;
+ let bins: Vec<_> = packages
+ .into_iter()
+ .flat_map(|pkg| {
+ iter::repeat(pkg).zip(pkg.manifest().targets().iter().filter(|target| {
+ !target.is_lib()
+ && !target.is_custom_build()
+ && if !options.filter.is_specific() {
+ target.is_bin()
+ } else {
+ options.filter.target_run(target)
+ }
+ }))
+ })
+ .collect();
+
+ if bins.is_empty() {
+ if !options.filter.is_specific() {
+ anyhow::bail!("a bin target must be available for `cargo run`")
+ } else {
+ // This will be verified in `cargo_compile`.
+ }
+ }
+
+ if bins.len() == 1 {
+ let target = bins[0].1;
+ if let TargetKind::ExampleLib(..) = target.kind() {
+ anyhow::bail!(
+ "example target `{}` is a library and cannot be executed",
+ target.name()
+ )
+ }
+ }
+
+ if bins.len() > 1 {
+ if !options.filter.is_specific() {
+ let mut names: Vec<&str> = bins
+ .into_iter()
+ .map(|(_pkg, target)| target.name())
+ .collect();
+ names.sort();
+ anyhow::bail!(
+ "`cargo run` could not determine which binary to run. \
+ Use the `--bin` option to specify a binary, \
+ or the `default-run` manifest key.\n\
+ available binaries: {}",
+ names.join(", ")
+ )
+ } else {
+ anyhow::bail!(
+ "`cargo run` can run at most one executable, but \
+ multiple were specified"
+ )
+ }
+ }
+
+ // `cargo run` is only compatible with one `--target` flag at most
+ options.build_config.single_requested_kind()?;
+
+ let compile = ops::compile(ws, options)?;
+ assert_eq!(compile.binaries.len(), 1);
+ let UnitOutput {
+ unit,
+ path,
+ script_meta,
+ } = &compile.binaries[0];
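+ // Use a path relative to the current directory when possible, prefixing a
+ // bare file name with `./` so it still reads as a path.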
+ let exe = match path.strip_prefix(config.cwd()) {
+ Ok(path) if path.file_name() == Some(path.as_os_str()) => Path::new(".").join(path),
+ Ok(path) => path.to_path_buf(),
+ Err(_) => path.to_path_buf(),
+ };
+ let pkg = bins[0].0;
+ let mut process = compile.target_process(exe, unit.kind, pkg, *script_meta)?;
+
+ // Sets the working directory of the child process to the current working
+ // directory of the parent process.
+ // Overrides the default working directory of the `ProcessBuilder` returned
+ // by `compile.target_process` (the package's root directory)
+ process.args(args).cwd(config.cwd());
+
+ config.shell().status("Running", process.to_string())?;
+
+ process.exec_replace()
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_test.rs b/src/tools/cargo/src/cargo/ops/cargo_test.rs
new file mode 100644
index 000000000..b7e61982d
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_test.rs
@@ -0,0 +1,439 @@
+use crate::core::compiler::{Compilation, CompileKind, Doctest, Metadata, Unit, UnitOutput};
+use crate::core::shell::Verbosity;
+use crate::core::{TargetKind, Workspace};
+use crate::ops;
+use crate::util::errors::CargoResult;
+use crate::util::{add_path_args, CliError, CliResult, Config};
+use anyhow::format_err;
+use cargo_util::{ProcessBuilder, ProcessError};
+use std::ffi::OsString;
+use std::fmt::Write;
+use std::path::{Path, PathBuf};
+
+pub struct TestOptions {
+ pub compile_opts: ops::CompileOptions,
+ pub no_run: bool,
+ pub no_fail_fast: bool,
+}
+
+/// The kind of test.
+///
+/// This is needed because `Unit` does not track whether or not something is a
+/// benchmark.
+#[derive(Copy, Clone)]
+enum TestKind {
+ Test,
+ Bench,
+ Doctest,
+}
+
+/// A unit that failed to run.
+struct UnitTestError {
+ unit: Unit,
+ kind: TestKind,
+}
+
+impl UnitTestError {
+ /// Returns the CLI args needed to target this unit.
+ fn cli_args(&self, ws: &Workspace<'_>, opts: &ops::CompileOptions) -> String {
+ let mut args = if opts.spec.needs_spec_flag(ws) {
+ format!("-p {} ", self.unit.pkg.name())
+ } else {
+ String::new()
+ };
+ let mut add = |which| write!(args, "--{which} {}", self.unit.target.name()).unwrap();
+
+ match self.kind {
+ TestKind::Test | TestKind::Bench => match self.unit.target.kind() {
+ TargetKind::Lib(_) => args.push_str("--lib"),
+ TargetKind::Bin => add("bin"),
+ TargetKind::Test => add("test"),
+ TargetKind::Bench => add("bench"),
+ TargetKind::ExampleLib(_) | TargetKind::ExampleBin => add("example"),
+ TargetKind::CustomBuild => panic!("unexpected CustomBuild kind"),
+ },
+ TestKind::Doctest => args.push_str("--doc"),
+ }
+ args
+ }
+}
+
+/// Compiles and runs tests.
+///
+/// On error, the returned [`CliError`] will have the appropriate process exit
+/// code that Cargo should use.
+pub fn run_tests(ws: &Workspace<'_>, options: &TestOptions, test_args: &[&str]) -> CliResult {
+ let compilation = compile_tests(ws, options)?;
+
+ if options.no_run {
+ if !options.compile_opts.build_config.emit_json() {
+ display_no_run_information(ws, test_args, &compilation, "unittests")?;
+ }
+ return Ok(());
+ }
+ let mut errors = run_unit_tests(ws, options, test_args, &compilation, TestKind::Test)?;
+
+ let doctest_errors = run_doc_tests(ws, options, test_args, &compilation)?;
+ errors.extend(doctest_errors);
+ no_fail_fast_err(ws, &options.compile_opts, &errors)
+}
+
+/// Compiles and runs benchmarks.
+///
+/// On error, the returned [`CliError`] will have the appropriate process exit
+/// code that Cargo should use.
+pub fn run_benches(ws: &Workspace<'_>, options: &TestOptions, args: &[&str]) -> CliResult {
+ let compilation = compile_tests(ws, options)?;
+
+ if options.no_run {
+ if !options.compile_opts.build_config.emit_json() {
+ display_no_run_information(ws, args, &compilation, "benches")?;
+ }
+ return Ok(());
+ }
+
+ let mut args = args.to_vec();
+ args.push("--bench");
+
+ let errors = run_unit_tests(ws, options, &args, &compilation, TestKind::Bench)?;
+ no_fail_fast_err(ws, &options.compile_opts, &errors)
+}
+
+fn compile_tests<'a>(ws: &Workspace<'a>, options: &TestOptions) -> CargoResult<Compilation<'a>> {
+ let mut compilation = ops::compile(ws, &options.compile_opts)?;
+ compilation.tests.sort();
+ Ok(compilation)
+}
+
+/// Runs the unit and integration tests of a package.
+///
+/// Returns a `Vec` of tests that failed when `--no-fail-fast` is used.
+/// If `--no-fail-fast` is *not* used, then this returns an `Err`.
+fn run_unit_tests(
+ ws: &Workspace<'_>,
+ options: &TestOptions,
+ test_args: &[&str],
+ compilation: &Compilation<'_>,
+ test_kind: TestKind,
+) -> Result<Vec<UnitTestError>, CliError> {
+ let config = ws.config();
+ let cwd = config.cwd();
+ let mut errors = Vec::new();
+
+ for UnitOutput {
+ unit,
+ path,
+ script_meta,
+ } in compilation.tests.iter()
+ {
+ let (exe_display, cmd) = cmd_builds(
+ config,
+ cwd,
+ unit,
+ path,
+ script_meta,
+ test_args,
+ compilation,
+ "unittests",
+ )?;
+ config
+ .shell()
+ .concise(|shell| shell.status("Running", &exe_display))?;
+ config
+ .shell()
+ .verbose(|shell| shell.status("Running", &cmd))?;
+
+ if let Err(e) = cmd.exec() {
+ let code = fail_fast_code(&e);
+ let unit_err = UnitTestError {
+ unit: unit.clone(),
+ kind: test_kind,
+ };
+ report_test_error(ws, &options.compile_opts, &unit_err, e);
+ errors.push(unit_err);
+ if !options.no_fail_fast {
+ return Err(CliError::code(code));
+ }
+ }
+ }
+ Ok(errors)
+}
+
+/// Runs doc tests.
+///
+/// Returns a `Vec` of tests that failed when `--no-fail-fast` is used.
+/// If `--no-fail-fast` is *not* used, then this returns an `Err`.
+fn run_doc_tests(
+ ws: &Workspace<'_>,
+ options: &TestOptions,
+ test_args: &[&str],
+ compilation: &Compilation<'_>,
+) -> Result<Vec<UnitTestError>, CliError> {
+ let config = ws.config();
+ let mut errors = Vec::new();
+ let doctest_xcompile = config.cli_unstable().doctest_xcompile;
+ let doctest_in_workspace = config.cli_unstable().doctest_in_workspace;
+
+ for doctest_info in &compilation.to_doc_test {
+ let Doctest {
+ args,
+ unstable_opts,
+ unit,
+ linker,
+ script_meta,
+ env,
+ } = doctest_info;
+
+ if !doctest_xcompile {
+ match unit.kind {
+ CompileKind::Host => {}
+ CompileKind::Target(target) => {
+ if target.short_name() != compilation.host {
+ // Skip doctests, -Zdoctest-xcompile not enabled.
+ config.shell().verbose(|shell| {
+ shell.note(format!(
+ "skipping doctests for {} ({}), \
+ cross-compilation doctests are not yet supported\n\
+ See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile \
+ for more information.",
+ unit.pkg,
+ unit.target.description_named()
+ ))
+ })?;
+ continue;
+ }
+ }
+ }
+ }
+
+ config.shell().status("Doc-tests", unit.target.name())?;
+ let mut p = compilation.rustdoc_process(unit, *script_meta)?;
+
+ for (var, value) in env {
+ p.env(var, value);
+ }
+ p.arg("--crate-name").arg(&unit.target.crate_name());
+ p.arg("--test");
+
+ if doctest_in_workspace {
+ add_path_args(ws, unit, &mut p);
+ // FIXME(swatinem): remove the `unstable-options` once rustdoc stabilizes the `test-run-directory` option
+ p.arg("-Z").arg("unstable-options");
+ p.arg("--test-run-directory")
+ .arg(unit.pkg.root().to_path_buf());
+ } else {
+ p.arg(unit.target.src_path().path().unwrap());
+ }
+
+ if let CompileKind::Target(target) = unit.kind {
+ // use `rustc_target()` to properly handle JSON target paths
+ p.arg("--target").arg(target.rustc_target());
+ }
+
+ if doctest_xcompile {
+ p.arg("-Zunstable-options");
+ p.arg("--enable-per-target-ignores");
+ if let Some((runtool, runtool_args)) = compilation.target_runner(unit.kind) {
+ p.arg("--runtool").arg(runtool);
+ for arg in runtool_args {
+ p.arg("--runtool-arg").arg(arg);
+ }
+ }
+ if let Some(linker) = linker {
+ let mut joined = OsString::from("linker=");
+ joined.push(linker);
+ p.arg("-C").arg(joined);
+ }
+ }
+
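+ // Add `-L dependency=` search paths for both the target and host dependency
+ // output directories so rustdoc can link the doctests against built deps.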
+ for &rust_dep in &[
+ &compilation.deps_output[&unit.kind],
+ &compilation.deps_output[&CompileKind::Host],
+ ] {
+ let mut arg = OsString::from("dependency=");
+ arg.push(rust_dep);
+ p.arg("-L").arg(arg);
+ }
+
+ for native_dep in compilation.native_dirs.iter() {
+ p.arg("-L").arg(native_dep);
+ }
+
+ for arg in test_args {
+ p.arg("--test-args").arg(arg);
+ }
+
+ if config.shell().verbosity() == Verbosity::Quiet {
+ p.arg("--test-args").arg("--quiet");
+ }
+
+ p.args(args);
+
+ if *unstable_opts {
+ p.arg("-Zunstable-options");
+ }
+
+ config
+ .shell()
+ .verbose(|shell| shell.status("Running", p.to_string()))?;
+ if let Err(e) = p.exec() {
+ let code = fail_fast_code(&e);
+ let unit_err = UnitTestError {
+ unit: unit.clone(),
+ kind: TestKind::Doctest,
+ };
+ report_test_error(ws, &options.compile_opts, &unit_err, e);
+ errors.push(unit_err);
+ if !options.no_fail_fast {
+ return Err(CliError::code(code));
+ }
+ }
+ }
+ Ok(errors)
+}
+
+/// Displays human-readable descriptions of the test executables.
+///
+/// This is used when `cargo test --no-run` is used.
+fn display_no_run_information(
+ ws: &Workspace<'_>,
+ test_args: &[&str],
+ compilation: &Compilation<'_>,
+ exec_type: &str,
+) -> CargoResult<()> {
+ let config = ws.config();
+ let cwd = config.cwd();
+ for UnitOutput {
+ unit,
+ path,
+ script_meta,
+ } in compilation.tests.iter()
+ {
+ let (exe_display, cmd) = cmd_builds(
+ config,
+ cwd,
+ unit,
+ path,
+ script_meta,
+ test_args,
+ compilation,
+ exec_type,
+ )?;
+ config
+ .shell()
+ .concise(|shell| shell.status("Executable", &exe_display))?;
+ config
+ .shell()
+ .verbose(|shell| shell.status("Executable", &cmd))?;
+ }
+
+ return Ok(());
+}
+
+/// Creates a [`ProcessBuilder`] for executing a single test.
+///
+/// Returns a tuple `(exe_display, process)` where `exe_display` is a string
+/// to display that describes the executable path in a human-readable form.
+/// `process` is the `ProcessBuilder` to use for executing the test.
+fn cmd_builds(
+ config: &Config,
+ cwd: &Path,
+ unit: &Unit,
+ path: &PathBuf,
+ script_meta: &Option<Metadata>,
+ test_args: &[&str],
+ compilation: &Compilation<'_>,
+ exec_type: &str,
+) -> CargoResult<(String, ProcessBuilder)> {
+ let test_path = unit.target.src_path().path().unwrap();
+ let short_test_path = test_path
+ .strip_prefix(unit.pkg.root())
+ .unwrap_or(test_path)
+ .display();
+
+ let exe_display = match unit.target.kind() {
+ TargetKind::Test | TargetKind::Bench => format!(
+ "{} ({})",
+ short_test_path,
+ path.strip_prefix(cwd).unwrap_or(path).display()
+ ),
+ _ => format!(
+ "{} {} ({})",
+ exec_type,
+ short_test_path,
+ path.strip_prefix(cwd).unwrap_or(path).display()
+ ),
+ };
+
+ let mut cmd = compilation.target_process(path, unit.kind, &unit.pkg, *script_meta)?;
+ cmd.args(test_args);
+ if unit.target.harness() && config.shell().verbosity() == Verbosity::Quiet {
+ cmd.arg("--quiet");
+ }
+
+ Ok((exe_display, cmd))
+}
+
+/// Returns the error code to use when *not* using `--no-fail-fast`.
+///
+/// Cargo will return the error code from the test process itself. If some
+/// other error happened (like a failure to launch the process), then it will
+/// return a standard 101 error code.
+///
+/// When using `--no-fail-fast`, Cargo always uses the 101 exit code (since
+/// there may not be just one process to report).
+fn fail_fast_code(error: &anyhow::Error) -> i32 {
+ if let Some(proc_err) = error.downcast_ref::<ProcessError>() {
+ if let Some(code) = proc_err.code {
+ return code;
+ }
+ }
+ 101
+}
+
+/// Returns the `CliError` when using `--no-fail-fast` and there is at least
+/// one error.
+fn no_fail_fast_err(
+ ws: &Workspace<'_>,
+ opts: &ops::CompileOptions,
+ errors: &[UnitTestError],
+) -> CliResult {
+ // TODO: This could be improved by combining the flags on a single line when feasible.
+ let args: Vec<_> = errors
+ .iter()
+ .map(|unit_err| format!(" `{}`", unit_err.cli_args(ws, opts)))
+ .collect();
+ let message = match errors.len() {
+ 0 => return Ok(()),
+ 1 => format!("1 target failed:\n{}", args.join("\n")),
+ n => format!("{n} targets failed:\n{}", args.join("\n")),
+ };
+ Err(anyhow::Error::msg(message).into())
+}
+
+/// Displays an error on the console about a test failure.
+fn report_test_error(
+ ws: &Workspace<'_>,
+ opts: &ops::CompileOptions,
+ unit_err: &UnitTestError,
+ test_error: anyhow::Error,
+) {
+ let which = match unit_err.kind {
+ TestKind::Test => "test failed",
+ TestKind::Bench => "bench failed",
+ TestKind::Doctest => "doctest failed",
+ };
+
+ let mut err = format_err!("{}, to rerun pass `{}`", which, unit_err.cli_args(ws, opts));
+ // Don't show "process didn't exit successfully" for simple errors.
+ // libtest exits with 101 for normal errors.
+ let is_simple = test_error
+ .downcast_ref::<ProcessError>()
+ .and_then(|proc_err| proc_err.code)
+ .map_or(false, |code| code == 101);
+ if !is_simple {
+ err = test_error.context(err);
+ }
+
+ crate::display_error(&err, &mut ws.config().shell());
+}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_uninstall.rs b/src/tools/cargo/src/cargo/ops/cargo_uninstall.rs
new file mode 100644
index 000000000..355154418
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/cargo_uninstall.rs
@@ -0,0 +1,155 @@
+use crate::core::PackageId;
+use crate::core::{PackageIdSpec, SourceId};
+use crate::ops::common_for_install_and_uninstall::*;
+use crate::sources::PathSource;
+use crate::util::errors::CargoResult;
+use crate::util::Config;
+use crate::util::Filesystem;
+use anyhow::bail;
+use cargo_util::paths;
+use std::collections::BTreeSet;
+use std::env;
+
+pub fn uninstall(
+ root: Option<&str>,
+ specs: Vec<&str>,
+ bins: &[String],
+ config: &Config,
+) -> CargoResult<()> {
+ if specs.len() > 1 && !bins.is_empty() {
+ bail!("A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant.");
+ }
+
+ let root = resolve_root(root, config)?;
+ let scheduled_error = if specs.len() == 1 {
+ uninstall_one(&root, specs[0], bins, config)?;
+ false
+ } else if specs.is_empty() {
+ uninstall_cwd(&root, bins, config)?;
+ false
+ } else {
+ let mut succeeded = vec![];
+ let mut failed = vec![];
+ for spec in specs {
+ let root = root.clone();
+ match uninstall_one(&root, spec, bins, config) {
+ Ok(()) => succeeded.push(spec),
+ Err(e) => {
+ crate::display_error(&e, &mut config.shell());
+ failed.push(spec)
+ }
+ }
+ }
+
+ let mut summary = vec![];
+ if !succeeded.is_empty() {
+ summary.push(format!(
+ "Successfully uninstalled {}!",
+ succeeded.join(", ")
+ ));
+ }
+ if !failed.is_empty() {
+ summary.push(format!(
+ "Failed to uninstall {} (see error(s) above).",
+ failed.join(", ")
+ ));
+ }
+
+ if !succeeded.is_empty() || !failed.is_empty() {
+ config.shell().status("Summary", summary.join(" "))?;
+ }
+
+ !failed.is_empty()
+ };
+
+ if scheduled_error {
+ bail!("some packages failed to uninstall");
+ }
+
+ Ok(())
+}
+
+pub fn uninstall_one(
+ root: &Filesystem,
+ spec: &str,
+ bins: &[String],
+ config: &Config,
+) -> CargoResult<()> {
+ let tracker = InstallTracker::load(config, root)?;
+ let all_pkgs = tracker.all_installed_bins().map(|(pkg_id, _set)| *pkg_id);
+ let pkgid = PackageIdSpec::query_str(spec, all_pkgs)?;
+ uninstall_pkgid(root, tracker, pkgid, bins, config)
+}
+
+fn uninstall_cwd(root: &Filesystem, bins: &[String], config: &Config) -> CargoResult<()> {
+ let tracker = InstallTracker::load(config, root)?;
+ let source_id = SourceId::for_path(config.cwd())?;
+ let mut src = path_source(source_id, config)?;
+ let pkg = select_pkg(
+ &mut src,
+ None,
+ |path: &mut PathSource<'_>| path.read_packages(),
+ config,
+ )?;
+ let pkgid = pkg.package_id();
+ uninstall_pkgid(root, tracker, pkgid, bins, config)
+}
+
+fn uninstall_pkgid(
+ root: &Filesystem,
+ mut tracker: InstallTracker,
+ pkgid: PackageId,
+ bins: &[String],
+ config: &Config,
+) -> CargoResult<()> {
+ let mut to_remove = Vec::new();
+ let installed = match tracker.installed_bins(pkgid) {
+ Some(bins) => bins.clone(),
+ None => bail!("package `{}` is not installed", pkgid),
+ };
+
+ let dst = root.join("bin").into_path_unlocked();
+ for bin in &installed {
+ let bin = dst.join(bin);
+ if !bin.exists() {
+ bail!(
+ "corrupt metadata, `{}` does not exist when it should",
+ bin.display()
+ )
+ }
+ }
+
+ let bins = bins
+ .iter()
+ .map(|s| {
+ if s.ends_with(env::consts::EXE_SUFFIX) {
+ s.to_string()
+ } else {
+ format!("{}{}", s, env::consts::EXE_SUFFIX)
+ }
+ })
+ .collect::<BTreeSet<_>>();
+
+ for bin in bins.iter() {
+ if !installed.contains(bin) {
+ bail!("binary `{}` not installed as part of `{}`", bin, pkgid)
+ }
+ }
+
+ if bins.is_empty() {
+ to_remove.extend(installed.iter().map(|b| dst.join(b)));
+ tracker.remove(pkgid, &installed);
+ } else {
+ for bin in bins.iter() {
+ to_remove.push(dst.join(bin));
+ }
+ tracker.remove(pkgid, &bins);
+ }
+ tracker.save()?;
+ for bin in to_remove {
+ config.shell().status("Removing", bin.display())?;
+ paths::remove_file(bin)?;
+ }
+
+ Ok(())
+}
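As an aside on the `--bin` name handling above: a standalone sketch of the `EXE_SUFFIX` normalization, assuming made-up binary names (on Windows the suffix is `.exe`, elsewhere it is empty):

    use std::collections::BTreeSet;
    use std::env;

    fn main() {
        let requested = vec!["ripgrep".to_string(), "rg.exe".to_string()];
        let normalized: BTreeSet<String> = requested
            .into_iter()
            .map(|s| {
                if s.ends_with(env::consts::EXE_SUFFIX) {
                    s
                } else {
                    format!("{}{}", s, env::consts::EXE_SUFFIX)
                }
            })
            .collect();
        // On Windows: {"rg.exe", "ripgrep.exe"}; on Unix: {"rg.exe", "ripgrep"}.
        println!("{:?}", normalized);
    }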
diff --git a/src/tools/cargo/src/cargo/ops/common_for_install_and_uninstall.rs b/src/tools/cargo/src/cargo/ops/common_for_install_and_uninstall.rs
new file mode 100644
index 000000000..f33847d57
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/common_for_install_and_uninstall.rs
@@ -0,0 +1,717 @@
+use std::collections::{btree_map, BTreeMap, BTreeSet};
+use std::env;
+use std::io::prelude::*;
+use std::io::SeekFrom;
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
+use std::task::Poll;
+
+use anyhow::{bail, format_err, Context as _};
+use ops::FilterRule;
+use serde::{Deserialize, Serialize};
+
+use crate::core::compiler::{DirtyReason, Freshness};
+use crate::core::Target;
+use crate::core::{Dependency, FeatureValue, Package, PackageId, QueryKind, Source, SourceId};
+use crate::ops::{self, CompileFilter, CompileOptions};
+use crate::sources::PathSource;
+use crate::util::errors::CargoResult;
+use crate::util::Config;
+use crate::util::{FileLock, Filesystem};
+
+/// On-disk tracking for which package installed which binary.
+///
+/// v1 is an older style; v2 is a newer style that tracks more information and
+/// is both backwards and forwards compatible. Cargo keeps both files in sync,
+/// updating both v1 and v2 at the same time. Additionally, if it detects
+/// changes in v1 that are not in v2 (such as when an older version of Cargo
+/// is used), it will automatically propagate those changes to v2.
+///
+/// This maintains a filesystem lock, preventing other instances of Cargo from
+/// modifying at the same time. Drop the value to unlock.
+///
+/// The intent is to retain v1 for a longish transition period, after which it
+/// can be removed.
+pub struct InstallTracker {
+ v1: CrateListingV1,
+ v2: CrateListingV2,
+ v1_lock: FileLock,
+ v2_lock: FileLock,
+}
+
+/// Tracking information for the set of installed packages.
+#[derive(Default, Deserialize, Serialize)]
+struct CrateListingV2 {
+ /// Map of every installed package.
+ installs: BTreeMap<PackageId, InstallInfo>,
+ /// Forwards compatibility. Unknown keys from future versions of Cargo
+ /// will be stored here and retained when the file is saved.
+ #[serde(flatten)]
+ other: BTreeMap<String, serde_json::Value>,
+}
+
+/// Tracking information for the installation of a single package.
+///
+/// This tracks the settings that were used when the package was installed.
+/// Future attempts to install the same package will check these settings to
+/// determine if it needs to be rebuilt/reinstalled. If nothing has changed,
+/// then Cargo will inform the user that it is "up to date".
+///
+/// This is only used for the v2 format.
+#[derive(Debug, Deserialize, Serialize)]
+struct InstallInfo {
+ /// Version requested via `--version`.
+ /// None if `--version` was not specified. Currently unused, but may be
+ /// used in the future.
+ version_req: Option<String>,
+ /// Set of binary names installed.
+ bins: BTreeSet<String>,
+ /// Set of features explicitly enabled.
+ features: BTreeSet<String>,
+ all_features: bool,
+ no_default_features: bool,
+ /// Either "debug" or "release".
+ profile: String,
+ /// The installation target.
+ /// Either the host or the value specified in `--target`.
+ /// None if unknown (when loading from v1).
+ target: Option<String>,
+ /// Output of `rustc -V`.
+ /// None if unknown (when loading from v1).
+ /// Currently unused, but may be used in the future.
+ rustc: Option<String>,
+ /// Forwards compatibility.
+ #[serde(flatten)]
+ other: BTreeMap<String, serde_json::Value>,
+}
+
+/// Tracking information for the set of installed packages.
+#[derive(Default, Deserialize, Serialize)]
+pub struct CrateListingV1 {
+ /// Map of installed package id to the set of binary names for that package.
+ v1: BTreeMap<PackageId, BTreeSet<String>>,
+}
+
+impl InstallTracker {
+ /// Create an InstallTracker from information on disk.
+ pub fn load(config: &Config, root: &Filesystem) -> CargoResult<InstallTracker> {
+ let v1_lock = root.open_rw(Path::new(".crates.toml"), config, "crate metadata")?;
+ let v2_lock = root.open_rw(Path::new(".crates2.json"), config, "crate metadata")?;
+
+ let v1 = (|| -> CargoResult<_> {
+ let mut contents = String::new();
+ v1_lock.file().read_to_string(&mut contents)?;
+ if contents.is_empty() {
+ Ok(CrateListingV1::default())
+ } else {
+ Ok(toml::from_str(&contents).with_context(|| "invalid TOML found for metadata")?)
+ }
+ })()
+ .with_context(|| {
+ format!(
+ "failed to parse crate metadata at `{}`",
+ v1_lock.path().to_string_lossy()
+ )
+ })?;
+
+ let v2 = (|| -> CargoResult<_> {
+ let mut contents = String::new();
+ v2_lock.file().read_to_string(&mut contents)?;
+ let mut v2 = if contents.is_empty() {
+ CrateListingV2::default()
+ } else {
+ serde_json::from_str(&contents)
+ .with_context(|| "invalid JSON found for metadata")?
+ };
+ v2.sync_v1(&v1);
+ Ok(v2)
+ })()
+ .with_context(|| {
+ format!(
+ "failed to parse crate metadata at `{}`",
+ v2_lock.path().to_string_lossy()
+ )
+ })?;
+
+ Ok(InstallTracker {
+ v1,
+ v2,
+ v1_lock,
+ v2_lock,
+ })
+ }
+
+ /// Checks if the given package should be built, and checks if executables
+ /// already exist in the destination directory.
+ ///
+ /// Returns a tuple `(freshness, map)`. `freshness` indicates if the
+ /// package should be built (`Dirty`) or if it is already up-to-date
+ /// (`Fresh`) and should be skipped. The map maps each binary name to the
+ /// `PackageId` that installed it (`None` if not known).
+ ///
+ /// If there are no duplicates, then it will be considered `Dirty` (i.e.,
+ /// it is OK to build/install).
+ ///
+ /// `force=true` will always be considered `Dirty` (i.e., it will always
+ /// be rebuilt/reinstalled).
+ ///
+ /// Returns an error if there is a duplicate and `--force` is not used.
+ pub fn check_upgrade(
+ &self,
+ dst: &Path,
+ pkg: &Package,
+ force: bool,
+ opts: &CompileOptions,
+ target: &str,
+ _rustc: &str,
+ ) -> CargoResult<(Freshness, BTreeMap<String, Option<PackageId>>)> {
+ let exes = exe_names(pkg, &opts.filter);
+ // Check if any tracked exes are already installed.
+ let duplicates = self.find_duplicates(dst, &exes);
+ if force || duplicates.is_empty() {
+ return Ok((Freshness::Dirty(Some(DirtyReason::Forced)), duplicates));
+ }
+ // Check if all duplicates come from packages of the same name. If
+ // there are duplicates from other packages, then --force will be
+ // required.
+ //
+ // There may be multiple matching duplicates if different versions of
+ // the same package installed different binaries.
+ //
+ // This does not check the source_id in order to allow the user to
+ // switch between different sources. For example, installing from git,
+ // and then switching to the official crates.io release or vice-versa.
+ // If the source_id were included, then the user would get possibly
+ // confusing errors like "package `foo 1.0.0` is already installed",
+ // and it may not be obvious that the change of source is why it fails.
+ let matching_duplicates: Vec<PackageId> = duplicates
+ .values()
+ .filter_map(|v| match v {
+ Some(dupe_pkg_id) if dupe_pkg_id.name() == pkg.name() => Some(*dupe_pkg_id),
+ _ => None,
+ })
+ .collect();
+
+ // If both sets are the same length, that means all duplicates come
+ // from packages with the same name.
+ if matching_duplicates.len() == duplicates.len() {
+ // Determine if it is dirty or fresh.
+ let source_id = pkg.package_id().source_id();
+ if source_id.is_path() {
+ // `cargo install --path ...` is always rebuilt.
+ return Ok((Freshness::Dirty(Some(DirtyReason::Forced)), duplicates));
+ }
+ let is_up_to_date = |dupe_pkg_id| {
+ let info = self
+ .v2
+ .installs
+ .get(dupe_pkg_id)
+ .expect("dupes must be in sync");
+ let precise_equal = if source_id.is_git() {
+ // Git sources must have the exact same hash to be
+ // considered "fresh".
+ dupe_pkg_id.source_id().precise() == source_id.precise()
+ } else {
+ true
+ };
+
+ dupe_pkg_id.version() == pkg.version()
+ && dupe_pkg_id.source_id() == source_id
+ && precise_equal
+ && info.is_up_to_date(opts, target, &exes)
+ };
+ if matching_duplicates.iter().all(is_up_to_date) {
+ Ok((Freshness::Fresh, duplicates))
+ } else {
+ Ok((Freshness::Dirty(Some(DirtyReason::Forced)), duplicates))
+ }
+ } else {
+ // Format the error message.
+ let mut msg = String::new();
+ for (bin, p) in duplicates.iter() {
+ msg.push_str(&format!("binary `{}` already exists in destination", bin));
+ if let Some(p) = p.as_ref() {
+ msg.push_str(&format!(" as part of `{}`\n", p));
+ } else {
+ msg.push('\n');
+ }
+ }
+ msg.push_str("Add --force to overwrite");
+ bail!("{}", msg);
+ }
+ }
+
+ /// Check if any executables are already installed.
+ ///
+ /// Returns a map of duplicates: the key is the executable name and the
+ /// value is the `PackageId` that is already installed. The `PackageId` is
+ /// `None` if it is an untracked executable.
+ fn find_duplicates(
+ &self,
+ dst: &Path,
+ exes: &BTreeSet<String>,
+ ) -> BTreeMap<String, Option<PackageId>> {
+ exes.iter()
+ .filter_map(|name| {
+ if !dst.join(&name).exists() {
+ None
+ } else {
+ let p = self.v2.package_for_bin(name);
+ Some((name.clone(), p))
+ }
+ })
+ .collect()
+ }
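For illustration only, a standalone sketch of the duplicate check with the install tracking replaced by a plain map (the directory and binary names are invented):

    use std::collections::{BTreeMap, BTreeSet};
    use std::path::Path;

    fn find_duplicates(
        dst: &Path,
        exes: &BTreeSet<String>,
        tracked: &BTreeMap<String, String>, // bin name -> owning package
    ) -> BTreeMap<String, Option<String>> {
        exes.iter()
            .filter_map(|name| {
                if !dst.join(name).exists() {
                    None
                } else {
                    Some((name.clone(), tracked.get(name).cloned()))
                }
            })
            .collect()
    }

    fn main() {
        let exes: BTreeSet<String> = vec!["rg".to_string()].into_iter().collect();
        let tracked = BTreeMap::new();
        // With a destination directory that holds none of the requested
        // executables, nothing is reported as a duplicate.
        let dupes = find_duplicates(Path::new("/nonexistent"), &exes, &tracked);
        assert!(dupes.is_empty());
    }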
+
+ /// Mark that a package was installed.
+ pub fn mark_installed(
+ &mut self,
+ package: &Package,
+ bins: &BTreeSet<String>,
+ version_req: Option<String>,
+ opts: &CompileOptions,
+ target: &str,
+ rustc: &str,
+ ) {
+ self.v2
+ .mark_installed(package, bins, version_req, opts, target, rustc);
+ self.v1.mark_installed(package, bins);
+ }
+
+ /// Save tracking information to disk.
+ pub fn save(&self) -> CargoResult<()> {
+ self.v1.save(&self.v1_lock).with_context(|| {
+ format!(
+ "failed to write crate metadata at `{}`",
+ self.v1_lock.path().to_string_lossy()
+ )
+ })?;
+
+ self.v2.save(&self.v2_lock).with_context(|| {
+ format!(
+ "failed to write crate metadata at `{}`",
+ self.v2_lock.path().to_string_lossy()
+ )
+ })?;
+ Ok(())
+ }
+
+ /// Iterator of all installed binaries.
+ /// Items are `(pkg_id, bins)` where `bins` is the set of binaries that
+ /// package installed.
+ pub fn all_installed_bins(&self) -> impl Iterator<Item = (&PackageId, &BTreeSet<String>)> {
+ self.v1.v1.iter()
+ }
+
+ /// Set of binaries installed by a particular package.
+ /// Returns None if the package is not installed.
+ pub fn installed_bins(&self, pkg_id: PackageId) -> Option<&BTreeSet<String>> {
+ self.v1.v1.get(&pkg_id)
+ }
+
+ /// Remove a package from the tracker.
+ pub fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet<String>) {
+ self.v1.remove(pkg_id, bins);
+ self.v2.remove(pkg_id, bins);
+ }
+}
+
+impl CrateListingV1 {
+ fn mark_installed(&mut self, pkg: &Package, bins: &BTreeSet<String>) {
+ // Remove bins from any other packages.
+ for other_bins in self.v1.values_mut() {
+ for bin in bins {
+ other_bins.remove(bin);
+ }
+ }
+ // Remove entries where `bins` is empty.
+ let to_remove = self
+ .v1
+ .iter()
+ .filter_map(|(&p, set)| if set.is_empty() { Some(p) } else { None })
+ .collect::<Vec<_>>();
+ for p in to_remove.iter() {
+ self.v1.remove(p);
+ }
+ // Add these bins.
+ self.v1
+ .entry(pkg.package_id())
+ .or_insert_with(BTreeSet::new)
+ .append(&mut bins.clone());
+ }
+
+ fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet<String>) {
+ let mut installed = match self.v1.entry(pkg_id) {
+ btree_map::Entry::Occupied(e) => e,
+ btree_map::Entry::Vacant(..) => panic!("v1 unexpected missing `{}`", pkg_id),
+ };
+
+ for bin in bins {
+ installed.get_mut().remove(bin);
+ }
+ if installed.get().is_empty() {
+ installed.remove();
+ }
+ }
+
+ fn save(&self, lock: &FileLock) -> CargoResult<()> {
+ let mut file = lock.file();
+ file.seek(SeekFrom::Start(0))?;
+ file.set_len(0)?;
+ let data = toml::to_string_pretty(self)?;
+ file.write_all(data.as_bytes())?;
+ Ok(())
+ }
+}
+
+impl CrateListingV2 {
+ /// Incorporates any changes from v1 into self.
+ /// This handles the initial upgrade to v2, *and* the case where v2 is in
+ /// use, a v1 update is made (e.g. by an older Cargo), and then v2 is used
+ /// again, i.e., `cargo +new install foo ; cargo +old install bar ; cargo +new install bar`.
+ /// For now, v1 is the source of truth, so its values are trusted over v2.
+ fn sync_v1(&mut self, v1: &CrateListingV1) {
+ // Make the `bins` entries the same.
+ for (pkg_id, bins) in &v1.v1 {
+ self.installs
+ .entry(*pkg_id)
+ .and_modify(|info| info.bins = bins.clone())
+ .or_insert_with(|| InstallInfo::from_v1(bins));
+ }
+ // Remove any packages that aren't present in v1.
+ let to_remove: Vec<_> = self
+ .installs
+ .keys()
+ .filter(|pkg_id| !v1.v1.contains_key(pkg_id))
+ .cloned()
+ .collect();
+ for pkg_id in to_remove {
+ self.installs.remove(&pkg_id);
+ }
+ }
+
+ fn package_for_bin(&self, bin_name: &str) -> Option<PackageId> {
+ self.installs
+ .iter()
+ .find(|(_, info)| info.bins.contains(bin_name))
+ .map(|(pkg_id, _)| *pkg_id)
+ }
+
+ fn mark_installed(
+ &mut self,
+ pkg: &Package,
+ bins: &BTreeSet<String>,
+ version_req: Option<String>,
+ opts: &CompileOptions,
+ target: &str,
+ rustc: &str,
+ ) {
+ // Remove bins from any other packages.
+ for info in &mut self.installs.values_mut() {
+ for bin in bins {
+ info.bins.remove(bin);
+ }
+ }
+ // Remove entries where `bins` is empty.
+ let to_remove = self
+ .installs
+ .iter()
+ .filter_map(|(&p, info)| if info.bins.is_empty() { Some(p) } else { None })
+ .collect::<Vec<_>>();
+ for p in to_remove.iter() {
+ self.installs.remove(p);
+ }
+ // Add these bins.
+ if let Some(info) = self.installs.get_mut(&pkg.package_id()) {
+ info.bins.append(&mut bins.clone());
+ info.version_req = version_req;
+ info.features = feature_set(&opts.cli_features.features);
+ info.all_features = opts.cli_features.all_features;
+ info.no_default_features = !opts.cli_features.uses_default_features;
+ info.profile = opts.build_config.requested_profile.to_string();
+ info.target = Some(target.to_string());
+ info.rustc = Some(rustc.to_string());
+ } else {
+ self.installs.insert(
+ pkg.package_id(),
+ InstallInfo {
+ version_req,
+ bins: bins.clone(),
+ features: feature_set(&opts.cli_features.features),
+ all_features: opts.cli_features.all_features,
+ no_default_features: !opts.cli_features.uses_default_features,
+ profile: opts.build_config.requested_profile.to_string(),
+ target: Some(target.to_string()),
+ rustc: Some(rustc.to_string()),
+ other: BTreeMap::new(),
+ },
+ );
+ }
+ }
+
+ fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet<String>) {
+ let mut info_entry = match self.installs.entry(pkg_id) {
+ btree_map::Entry::Occupied(e) => e,
+ btree_map::Entry::Vacant(..) => panic!("v2 unexpected missing `{}`", pkg_id),
+ };
+
+ for bin in bins {
+ info_entry.get_mut().bins.remove(bin);
+ }
+ if info_entry.get().bins.is_empty() {
+ info_entry.remove();
+ }
+ }
+
+ fn save(&self, lock: &FileLock) -> CargoResult<()> {
+ let mut file = lock.file();
+ file.seek(SeekFrom::Start(0))?;
+ file.set_len(0)?;
+ let data = serde_json::to_string(self)?;
+ file.write_all(data.as_bytes())?;
+ Ok(())
+ }
+}
+
+impl InstallInfo {
+ fn from_v1(set: &BTreeSet<String>) -> InstallInfo {
+ InstallInfo {
+ version_req: None,
+ bins: set.clone(),
+ features: BTreeSet::new(),
+ all_features: false,
+ no_default_features: false,
+ profile: "release".to_string(),
+ target: None,
+ rustc: None,
+ other: BTreeMap::new(),
+ }
+ }
+
+ /// Determine if this installation is "up to date", or if it needs to be reinstalled.
+ ///
+ /// This does not do Package/Source/Version checking.
+ fn is_up_to_date(&self, opts: &CompileOptions, target: &str, exes: &BTreeSet<String>) -> bool {
+ self.features == feature_set(&opts.cli_features.features)
+ && self.all_features == opts.cli_features.all_features
+ && self.no_default_features != opts.cli_features.uses_default_features
+ && self.profile.as_str() == opts.build_config.requested_profile.as_str()
+ && (self.target.is_none() || self.target.as_deref() == Some(target))
+ && &self.bins == exes
+ }
+}
+
+/// Determines the root directory where installation is done.
+pub fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult<Filesystem> {
+ let config_root = config.get_path("install.root")?;
+ Ok(flag
+ .map(PathBuf::from)
+ .or_else(|| config.get_env_os("CARGO_INSTALL_ROOT").map(PathBuf::from))
+ .or_else(move || config_root.map(|v| v.val))
+ .map(Filesystem::new)
+ .unwrap_or_else(|| config.home().clone()))
+}
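A standalone sketch of the same fallback chain over plain strings; the real function returns a `Filesystem` and reads the Cargo configuration, and the paths below are invented:

    fn resolve_root(
        flag: Option<&str>,       // --root
        env_var: Option<&str>,    // CARGO_INSTALL_ROOT
        config_val: Option<&str>, // install.root
        home: &str,               // Cargo home
    ) -> String {
        flag.map(str::to_string)
            .or_else(|| env_var.map(str::to_string))
            .or_else(|| config_val.map(str::to_string))
            .unwrap_or_else(|| home.to_string())
    }

    fn main() {
        // The command-line flag wins over the environment, config, and home.
        assert_eq!(
            resolve_root(Some("/opt/tools"), Some("/env"), Some("/cfg"), "/home/user/.cargo"),
            "/opt/tools"
        );
        // With nothing else set, fall back to the Cargo home directory.
        assert_eq!(
            resolve_root(None, None, None, "/home/user/.cargo"),
            "/home/user/.cargo"
        );
    }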
+
+/// Determines the `PathSource` from a `SourceId`.
+pub fn path_source(source_id: SourceId, config: &Config) -> CargoResult<PathSource<'_>> {
+ let path = source_id
+ .url()
+ .to_file_path()
+ .map_err(|()| format_err!("path sources must have a valid path"))?;
+ Ok(PathSource::new(&path, source_id, config))
+}
+
+/// Gets a Package based on command-line requirements.
+pub fn select_dep_pkg<T>(
+ source: &mut T,
+ dep: Dependency,
+ config: &Config,
+ needs_update: bool,
+) -> CargoResult<Package>
+where
+ T: Source,
+{
+ // This operation may involve updating some sources or making a few queries
+ // which may involve frobbing caches; as a result, make sure we synchronize
+ // with other global Cargos.
+ let _lock = config.acquire_package_cache_lock()?;
+
+ if needs_update {
+ source.invalidate_cache();
+ }
+
+ let deps = loop {
+ match source.query_vec(&dep, QueryKind::Exact)? {
+ Poll::Ready(deps) => break deps,
+ Poll::Pending => source.block_until_ready()?,
+ }
+ };
+ match deps.iter().map(|p| p.package_id()).max() {
+ Some(pkgid) => {
+ let pkg = Box::new(source).download_now(pkgid, config)?;
+ Ok(pkg)
+ }
+ None => {
+ let is_yanked: bool = if dep.version_req().is_exact() {
+ let version: String = dep.version_req().to_string();
+ if let Ok(pkg_id) =
+ PackageId::new(dep.package_name(), &version[1..], source.source_id())
+ {
+ source.invalidate_cache();
+ loop {
+ match source.is_yanked(pkg_id) {
+ Poll::Ready(Ok(is_yanked)) => break is_yanked,
+ Poll::Ready(Err(_)) => break false,
+ Poll::Pending => source.block_until_ready()?,
+ }
+ }
+ } else {
+ false
+ }
+ } else {
+ false
+ };
+ if is_yanked {
+ bail!(
+ "cannot install package `{}`, it has been yanked from {}",
+ dep.package_name(),
+ source.source_id()
+ )
+ } else {
+ bail!(
+ "could not find `{}` in {} with version `{}`",
+ dep.package_name(),
+ source.source_id(),
+ dep.version_req(),
+ )
+ }
+ }
+ }
+}
+
+pub fn select_pkg<T, F>(
+ source: &mut T,
+ dep: Option<Dependency>,
+ mut list_all: F,
+ config: &Config,
+) -> CargoResult<Package>
+where
+ T: Source,
+ F: FnMut(&mut T) -> CargoResult<Vec<Package>>,
+{
+ // This operation may involve updating some sources or making a few queries
+ // which may involve frobbing caches; as a result, make sure we synchronize
+ // with other global Cargos.
+ let _lock = config.acquire_package_cache_lock()?;
+
+ source.invalidate_cache();
+
+ return if let Some(dep) = dep {
+ select_dep_pkg(source, dep, config, false)
+ } else {
+ let candidates = list_all(source)?;
+ let binaries = candidates
+ .iter()
+ .filter(|cand| cand.targets().iter().filter(|t| t.is_bin()).count() > 0);
+ let examples = candidates
+ .iter()
+ .filter(|cand| cand.targets().iter().filter(|t| t.is_example()).count() > 0);
+ let git_url = source.source_id().url().to_string();
+ let pkg = match one(binaries, |v| multi_err("binaries", &git_url, v))? {
+ Some(p) => p,
+ None => match one(examples, |v| multi_err("examples", &git_url, v))? {
+ Some(p) => p,
+ None => bail!(
+ "no packages found with binaries or \
+ examples"
+ ),
+ },
+ };
+ Ok(pkg.clone())
+ };
+
+ fn multi_err(kind: &str, git_url: &str, mut pkgs: Vec<&Package>) -> String {
+ pkgs.sort_unstable_by_key(|a| a.name());
+ let first_pkg = pkgs[0];
+ format!(
+ "multiple packages with {} found: {}. When installing a git repository, \
+ cargo will always search the entire repo for any Cargo.toml.\n\
+ Please specify a package, e.g. `cargo install --git {} {}`.",
+ kind,
+ pkgs.iter()
+ .map(|p| p.name().as_str())
+ .collect::<Vec<_>>()
+ .join(", "),
+ git_url,
+ first_pkg.name()
+ )
+ }
+}
+
+/// Gets one element from the iterator.
+/// Returns `None` if the iterator is empty.
+/// Returns an error if there is more than one item in the iterator.
+fn one<I, F>(mut i: I, f: F) -> CargoResult<Option<I::Item>>
+where
+ I: Iterator,
+ F: FnOnce(Vec<I::Item>) -> String,
+{
+ match (i.next(), i.next()) {
+ (Some(i1), Some(i2)) => {
+ let mut v = vec![i1, i2];
+ v.extend(i);
+ Err(format_err!("{}", f(v)))
+ }
+ (Some(i), None) => Ok(Some(i)),
+ (None, _) => Ok(None),
+ }
+}
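For illustration, a standalone copy of the `one` logic over plain integers, returning `Result<_, String>` instead of `CargoResult`, showing the three cases the doc comment describes:

    fn one<I, F>(mut i: I, f: F) -> Result<Option<I::Item>, String>
    where
        I: Iterator,
        F: FnOnce(Vec<I::Item>) -> String,
    {
        match (i.next(), i.next()) {
            (Some(i1), Some(i2)) => {
                let mut v = vec![i1, i2];
                v.extend(i);
                Err(f(v))
            }
            (Some(i), None) => Ok(Some(i)),
            (None, _) => Ok(None),
        }
    }

    fn main() {
        // Empty iterator: no item, no error.
        assert_eq!(one(std::iter::empty::<i32>(), |_| String::new()), Ok(None));
        // Exactly one item: return it.
        assert_eq!(one(vec![1].into_iter(), |_| String::new()), Ok(Some(1)));
        // More than one item: the closure formats the error message.
        assert_eq!(
            one(vec![1, 2, 3].into_iter(), |v| format!("{} candidates", v.len())),
            Err("3 candidates".to_string())
        );
    }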
+
+/// Helper to convert features to a BTreeSet.
+fn feature_set(features: &Rc<BTreeSet<FeatureValue>>) -> BTreeSet<String> {
+ features.iter().map(|s| s.to_string()).collect()
+}
+
+/// Helper to get the executable names from a filter.
+pub fn exe_names(pkg: &Package, filter: &ops::CompileFilter) -> BTreeSet<String> {
+ let to_exe = |name| format!("{}{}", name, env::consts::EXE_SUFFIX);
+ match filter {
+ CompileFilter::Default { .. } => pkg
+ .targets()
+ .iter()
+ .filter(|t| t.is_bin())
+ .map(|t| to_exe(t.name()))
+ .collect(),
+ CompileFilter::Only {
+ all_targets: true, ..
+ } => pkg
+ .targets()
+ .iter()
+ .filter(|target| target.is_executable())
+ .map(|target| to_exe(target.name()))
+ .collect(),
+ CompileFilter::Only {
+ ref bins,
+ ref examples,
+ ..
+ } => {
+ let collect = |rule: &_, f: fn(&Target) -> _| match rule {
+ FilterRule::All => pkg
+ .targets()
+ .iter()
+ .filter(|t| f(t))
+ .map(|t| t.name().into())
+ .collect(),
+ FilterRule::Just(targets) => targets.clone(),
+ };
+ let all_bins = collect(bins, Target::is_bin);
+ let all_examples = collect(examples, Target::is_exe_example);
+
+ all_bins
+ .iter()
+ .chain(all_examples.iter())
+ .map(|name| to_exe(name))
+ .collect()
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/ops/fix.rs b/src/tools/cargo/src/cargo/ops/fix.rs
new file mode 100644
index 000000000..be24967f8
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/fix.rs
@@ -0,0 +1,1021 @@
+//! High-level overview of how `fix` works:
+//!
+//! The main goal is to run `cargo check` to get rustc to emit JSON
+//! diagnostics with suggested fixes that can be applied to the files on the
+//! filesystem, and validate that those changes didn't break anything.
+//!
+//! Cargo begins by launching a `LockServer` thread in the background to
+//! listen for network connections to coordinate locking when multiple targets
+//! are built simultaneously. It ensures that only one fix runs at a time
+//! (via a single global lock; see `rustfix_crate`).
+//!
+//! The `RustfixDiagnosticServer` is launched in a background thread (in
+//! `JobQueue`) to listen for network connections to coordinate displaying
+//! messages to the user on the console (so that multiple processes don't try
+//! to print at the same time).
+//!
+//! Cargo begins a normal `cargo check` operation with itself set as a proxy
+//! for rustc by setting `primary_unit_rustc` in the build config. When
+//! cargo launches rustc to check a crate, it is actually launching itself.
+//! The `FIX_ENV_INTERNAL` environment variable is set so that cargo knows it is in
+//! fix-proxy-mode.
+//!
+//! Each proxied cargo-as-rustc detects it is in fix-proxy-mode (via `FIX_ENV_INTERNAL`
+//! environment variable in `main`) and does the following:
+//!
+//! - Acquires a lock from the `LockServer` running in the master cargo process.
+//! - Launches the real rustc (`rustfix_and_fix`), looking at the JSON output
+//! for suggested fixes.
+//! - Uses the `rustfix` crate to apply the suggestions to the files on the
+//! file system.
+//! - If rustfix fails to apply some suggestions (for example, because they
+//! overlap) but at least some succeed, it retries the previous two steps up
+//! to 4 times, as long as some suggestions keep succeeding.
+//! - Assuming there's at least one suggestion applied, and the suggestions
+//! applied cleanly, rustc is run again to verify the suggestions didn't
+//! break anything. The change will be backed out if it fails (unless
+//! `--broken-code` is used).
+//! - If there are any warnings or errors, rustc will be run one last time to
+//! show them to the user.
+
+use std::collections::{BTreeSet, HashMap, HashSet};
+use std::ffi::OsString;
+use std::path::{Path, PathBuf};
+use std::process::{self, ExitStatus};
+use std::{env, fs, str};
+
+use anyhow::{bail, Context as _};
+use cargo_util::{exit_status_to_string, is_simple_exit_code, paths, ProcessBuilder};
+use log::{debug, trace, warn};
+use rustfix::diagnostics::Diagnostic;
+use rustfix::{self, CodeFix};
+use semver::Version;
+
+use crate::core::compiler::RustcTargetData;
+use crate::core::resolver::features::{DiffMap, FeatureOpts, FeatureResolver, FeaturesFor};
+use crate::core::resolver::{HasDevUnits, Resolve, ResolveBehavior};
+use crate::core::{Edition, MaybePackage, PackageId, Workspace};
+use crate::ops::resolve::WorkspaceResolve;
+use crate::ops::{self, CompileOptions};
+use crate::util::diagnostic_server::{Message, RustfixDiagnosticServer};
+use crate::util::errors::CargoResult;
+use crate::util::Config;
+use crate::util::{existing_vcs_repo, LockServer, LockServerClient};
+use crate::{drop_eprint, drop_eprintln};
+
+/// **Internal only.**
+/// Indicates Cargo is in fix-proxy-mode if present.
+/// Its value is the socket address of the [`LockServer`] being used.
+/// See the [module-level documentation](mod@super::fix) for more.
+const FIX_ENV_INTERNAL: &str = "__CARGO_FIX_PLZ";
+/// **Internal only.**
+/// For passing [`FixOptions::broken_code`] through to cargo running in proxy mode.
+const BROKEN_CODE_ENV_INTERNAL: &str = "__CARGO_FIX_BROKEN_CODE";
+/// **Internal only.**
+/// For passing [`FixOptions::edition`] through to cargo running in proxy mode.
+const EDITION_ENV_INTERNAL: &str = "__CARGO_FIX_EDITION";
+/// **Internal only.**
+/// For passing [`FixOptions::idioms`] through to cargo running in proxy mode.
+const IDIOMS_ENV_INTERNAL: &str = "__CARGO_FIX_IDIOMS";
+
+pub struct FixOptions {
+ pub edition: bool,
+ pub idioms: bool,
+ pub compile_opts: CompileOptions,
+ pub allow_dirty: bool,
+ pub allow_no_vcs: bool,
+ pub allow_staged: bool,
+ pub broken_code: bool,
+}
+
+pub fn fix(ws: &Workspace<'_>, opts: &mut FixOptions) -> CargoResult<()> {
+ check_version_control(ws.config(), opts)?;
+ if opts.edition {
+ check_resolver_change(ws, opts)?;
+ }
+
+ // Spin up our lock server, which our subprocesses will use to synchronize fixes.
+ let lock_server = LockServer::new()?;
+ let mut wrapper = ProcessBuilder::new(env::current_exe()?);
+ wrapper.env(FIX_ENV_INTERNAL, lock_server.addr().to_string());
+ let _started = lock_server.start()?;
+
+ opts.compile_opts.build_config.force_rebuild = true;
+
+ if opts.broken_code {
+ wrapper.env(BROKEN_CODE_ENV_INTERNAL, "1");
+ }
+
+ if opts.edition {
+ wrapper.env(EDITION_ENV_INTERNAL, "1");
+ }
+ if opts.idioms {
+ wrapper.env(IDIOMS_ENV_INTERNAL, "1");
+ }
+
+ *opts
+ .compile_opts
+ .build_config
+ .rustfix_diagnostic_server
+ .borrow_mut() = Some(RustfixDiagnosticServer::new()?);
+
+ if let Some(server) = opts
+ .compile_opts
+ .build_config
+ .rustfix_diagnostic_server
+ .borrow()
+ .as_ref()
+ {
+ server.configure(&mut wrapper);
+ }
+
+ let rustc = ws.config().load_global_rustc(Some(ws))?;
+ wrapper.arg(&rustc.path);
+ // This is calling rustc in cargo fix-proxy-mode, so it also needs to retry.
+ // The argfile handling is located in `FixArgs::from_args`.
+ wrapper.retry_with_argfile(true);
+
+ // Primary crates are compiled using a cargo subprocess to do the extra work of applying
+ // fixes and repeating the build until there are no more changes to be applied.
+ opts.compile_opts.build_config.primary_unit_rustc = Some(wrapper);
+
+ ops::compile(ws, &opts.compile_opts)?;
+ Ok(())
+}
+
+fn check_version_control(config: &Config, opts: &FixOptions) -> CargoResult<()> {
+ if opts.allow_no_vcs {
+ return Ok(());
+ }
+ if !existing_vcs_repo(config.cwd(), config.cwd()) {
+ bail!(
+ "no VCS found for this package and `cargo fix` can potentially \
+ perform destructive changes; if you'd like to suppress this \
+ error pass `--allow-no-vcs`"
+ )
+ }
+
+ if opts.allow_dirty && opts.allow_staged {
+ return Ok(());
+ }
+
+ let mut dirty_files = Vec::new();
+ let mut staged_files = Vec::new();
+ if let Ok(repo) = git2::Repository::discover(config.cwd()) {
+ let mut repo_opts = git2::StatusOptions::new();
+ repo_opts.include_ignored(false);
+ repo_opts.include_untracked(true);
+ for status in repo.statuses(Some(&mut repo_opts))?.iter() {
+ if let Some(path) = status.path() {
+ match status.status() {
+ git2::Status::CURRENT => (),
+ git2::Status::INDEX_NEW
+ | git2::Status::INDEX_MODIFIED
+ | git2::Status::INDEX_DELETED
+ | git2::Status::INDEX_RENAMED
+ | git2::Status::INDEX_TYPECHANGE => {
+ if !opts.allow_staged {
+ staged_files.push(path.to_string())
+ }
+ }
+ _ => {
+ if !opts.allow_dirty {
+ dirty_files.push(path.to_string())
+ }
+ }
+ };
+ }
+ }
+ }
+
+ if dirty_files.is_empty() && staged_files.is_empty() {
+ return Ok(());
+ }
+
+ let mut files_list = String::new();
+ for file in dirty_files {
+ files_list.push_str(" * ");
+ files_list.push_str(&file);
+ files_list.push_str(" (dirty)\n");
+ }
+ for file in staged_files {
+ files_list.push_str(" * ");
+ files_list.push_str(&file);
+ files_list.push_str(" (staged)\n");
+ }
+
+ bail!(
+ "the working directory of this package has uncommitted changes, and \
+ `cargo fix` can potentially perform destructive changes; if you'd \
+ like to suppress this error pass `--allow-dirty`, `--allow-staged`, \
+ or commit the changes to these files:\n\
+ \n\
+ {}\n\
+ ",
+ files_list
+ );
+}
+
+fn check_resolver_change(ws: &Workspace<'_>, opts: &FixOptions) -> CargoResult<()> {
+ let root = ws.root_maybe();
+ match root {
+ MaybePackage::Package(root_pkg) => {
+ if root_pkg.manifest().resolve_behavior().is_some() {
+ // If explicitly specified by the user, no need to check.
+ return Ok(());
+ }
+ // Only trigger if updating the root package from 2018.
+ let pkgs = opts.compile_opts.spec.get_packages(ws)?;
+ if !pkgs.iter().any(|&pkg| pkg == root_pkg) {
+ // The root is not being migrated.
+ return Ok(());
+ }
+ if root_pkg.manifest().edition() != Edition::Edition2018 {
+ // V1 to V2 only happens on 2018 to 2021.
+ return Ok(());
+ }
+ }
+ MaybePackage::Virtual(_vm) => {
+ // Virtual workspaces don't have a global edition to set (yet).
+ return Ok(());
+ }
+ }
+ // 2018 without `resolver` set must be V1
+ assert_eq!(ws.resolve_behavior(), ResolveBehavior::V1);
+ let specs = opts.compile_opts.spec.to_package_id_specs(ws)?;
+ let target_data = RustcTargetData::new(ws, &opts.compile_opts.build_config.requested_kinds)?;
+ let resolve_differences = |has_dev_units| -> CargoResult<(WorkspaceResolve<'_>, DiffMap)> {
+ let ws_resolve = ops::resolve_ws_with_opts(
+ ws,
+ &target_data,
+ &opts.compile_opts.build_config.requested_kinds,
+ &opts.compile_opts.cli_features,
+ &specs,
+ has_dev_units,
+ crate::core::resolver::features::ForceAllTargets::No,
+ )?;
+
+ let feature_opts = FeatureOpts::new_behavior(ResolveBehavior::V2, has_dev_units);
+ let v2_features = FeatureResolver::resolve(
+ ws,
+ &target_data,
+ &ws_resolve.targeted_resolve,
+ &ws_resolve.pkg_set,
+ &opts.compile_opts.cli_features,
+ &specs,
+ &opts.compile_opts.build_config.requested_kinds,
+ feature_opts,
+ )?;
+
+ let diffs = v2_features.compare_legacy(&ws_resolve.resolved_features);
+ Ok((ws_resolve, diffs))
+ };
+ let (_, without_dev_diffs) = resolve_differences(HasDevUnits::No)?;
+ let (ws_resolve, mut with_dev_diffs) = resolve_differences(HasDevUnits::Yes)?;
+ if without_dev_diffs.is_empty() && with_dev_diffs.is_empty() {
+ // Nothing is different, nothing to report.
+ return Ok(());
+ }
+ // Only display unique changes with dev-dependencies.
+ with_dev_diffs.retain(|k, vals| without_dev_diffs.get(k) != Some(vals));
+ let config = ws.config();
+ config.shell().note(
+ "Switching to Edition 2021 will enable the use of the version 2 feature resolver in Cargo.",
+ )?;
+ drop_eprintln!(
+ config,
+ "This may cause some dependencies to be built with fewer features enabled than previously."
+ );
+ drop_eprintln!(
+ config,
+ "More information about the resolver changes may be found \
+ at https://doc.rust-lang.org/nightly/edition-guide/rust-2021/default-cargo-resolver.html"
+ );
+ drop_eprintln!(
+ config,
+ "When building the following dependencies, \
+ the given features will no longer be used:\n"
+ );
+ let show_diffs = |differences: DiffMap| {
+ for ((pkg_id, features_for), removed) in differences {
+ drop_eprint!(config, " {}", pkg_id);
+ if let FeaturesFor::HostDep = features_for {
+ drop_eprint!(config, " (as host dependency)");
+ }
+ drop_eprint!(config, " removed features: ");
+ let joined: Vec<_> = removed.iter().map(|s| s.as_str()).collect();
+ drop_eprintln!(config, "{}", joined.join(", "));
+ }
+ drop_eprint!(config, "\n");
+ };
+ if !without_dev_diffs.is_empty() {
+ show_diffs(without_dev_diffs);
+ }
+ if !with_dev_diffs.is_empty() {
+ drop_eprintln!(
+ config,
+ "The following differences only apply when building with dev-dependencies:\n"
+ );
+ show_diffs(with_dev_diffs);
+ }
+ report_maybe_diesel(config, &ws_resolve.targeted_resolve)?;
+ Ok(())
+}
+
+fn report_maybe_diesel(config: &Config, resolve: &Resolve) -> CargoResult<()> {
+ fn is_broken_diesel(pid: PackageId) -> bool {
+ pid.name() == "diesel" && pid.version() < &Version::new(1, 4, 8)
+ }
+
+ fn is_broken_diesel_migration(pid: PackageId) -> bool {
+ pid.name() == "diesel_migrations" && pid.version().major <= 1
+ }
+
+ if resolve.iter().any(is_broken_diesel) && resolve.iter().any(is_broken_diesel_migration) {
+ config.shell().note(
+ "\
+This project appears to use both diesel and diesel_migrations. These packages have
+a known issue where the build may fail due to the version 2 resolver preventing
+feature unification between those two packages. Please update to at least diesel 1.4.8
+to prevent this issue from happening.
+",
+ )?;
+ }
+ Ok(())
+}
+
+/// Provides the lock address when running in proxy mode.
+///
+/// Returns `None` if `fix` is not being run (not in proxy mode). Returns
+/// `Some(...)` if in `fix` proxy mode.
+pub fn fix_get_proxy_lock_addr() -> Option<String> {
+ // ALLOWED: For the internal mechanism of `cargo fix` only.
+ // Shouldn't be set directly by anyone.
+ #[allow(clippy::disallowed_methods)]
+ env::var(FIX_ENV_INTERNAL).ok()
+}
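A rough standalone sketch of the detection pattern; the variable name is the one defined above, but the dispatch shown here is invented for illustration (the real hand-off happens in cargo's binary entry point):

    use std::env;

    fn main() {
        // Presence of the variable means "we are the rustc proxy spawned by
        // `cargo fix`"; its value is the lock server's socket address.
        match env::var("__CARGO_FIX_PLZ").ok() {
            Some(lock_addr) => println!("fix-proxy-mode, lock server at {lock_addr}"),
            None => println!("normal cargo invocation"),
        }
    }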
+
+/// Entry point for `cargo` running as a proxy for `rustc`.
+///
+/// This is called every time `cargo` is run to check if it is in proxy mode.
+///
+/// If there are warnings or errors, this does not return,
+/// and the process exits with the corresponding `rustc` exit code.
+///
+/// See [`fix_get_proxy_lock_addr`]
+pub fn fix_exec_rustc(config: &Config, lock_addr: &str) -> CargoResult<()> {
+ let args = FixArgs::get()?;
+ trace!("cargo-fix as rustc got file {:?}", args.file);
+
+ let workspace_rustc = config
+ .get_env("RUSTC_WORKSPACE_WRAPPER")
+ .map(PathBuf::from)
+ .ok();
+ let mut rustc = ProcessBuilder::new(&args.rustc).wrapped(workspace_rustc.as_ref());
+ rustc.retry_with_argfile(true);
+ rustc.env_remove(FIX_ENV_INTERNAL);
+ args.apply(&mut rustc);
+
+ trace!("start rustfixing {:?}", args.file);
+ let json_error_rustc = {
+ let mut cmd = rustc.clone();
+ cmd.arg("--error-format=json");
+ cmd
+ };
+ let fixes = rustfix_crate(&lock_addr, &json_error_rustc, &args.file, &args, config)?;
+
+ // Ok now we have our final goal of testing out the changes that we applied.
+ // If these changes went awry and actually started to cause the crate to
+ // *stop* compiling then we want to back them out and continue to print
+ // warnings to the user.
+ //
+ // If we didn't actually make any changes then we can immediately execute the
+ // new rustc, and otherwise we capture the output to hide it in the scenario
+ // that we have to back it all out.
+ if !fixes.files.is_empty() {
+ debug!("calling rustc for final verification: {json_error_rustc}");
+ let output = json_error_rustc.output()?;
+
+ if output.status.success() {
+ for (path, file) in fixes.files.iter() {
+ Message::Fixed {
+ file: path.clone(),
+ fixes: file.fixes_applied,
+ }
+ .post(config)?;
+ }
+ }
+
+ // If we succeeded then we'll want to commit to the changes we made, if
+ // any. If stderr is empty then there's no need for the final exec at
+ // the end, we just bail out here.
+ if output.status.success() && output.stderr.is_empty() {
+ return Ok(());
+ }
+
+ // Otherwise, if our rustc just failed, then that means that we broke the
+ // user's code with our changes. Back out everything and fall through
+ // below to recompile again.
+ if !output.status.success() {
+ if config.get_env_os(BROKEN_CODE_ENV_INTERNAL).is_none() {
+ for (path, file) in fixes.files.iter() {
+ debug!("reverting {:?} due to errors", path);
+ paths::write(path, &file.original_code)?;
+ }
+ }
+
+ let krate = {
+ let mut iter = json_error_rustc.get_args();
+ let mut krate = None;
+ while let Some(arg) = iter.next() {
+ if arg == "--crate-name" {
+ krate = iter.next().and_then(|s| s.to_owned().into_string().ok());
+ }
+ }
+ krate
+ };
+ log_failed_fix(config, krate, &output.stderr, output.status)?;
+ }
+ }
+
+ // This final fall-through handles multiple cases:
+ // - If the fix failed, show the original warnings and suggestions.
+ // - If `--broken-code`, show the error messages.
+ // - If the fix succeeded, show any remaining warnings.
+ for arg in args.format_args {
+ // Add any json/error format arguments that Cargo wants. This allows
+ // things like colored output to work correctly.
+ rustc.arg(arg);
+ }
+ debug!("calling rustc to display remaining diagnostics: {rustc}");
+ exit_with(rustc.status()?);
+}
+
+#[derive(Default)]
+struct FixedCrate {
+ files: HashMap<String, FixedFile>,
+}
+
+struct FixedFile {
+ errors_applying_fixes: Vec<String>,
+ fixes_applied: u32,
+ original_code: String,
+}
+
+/// Attempts to apply fixes to a single crate.
+///
+/// This runs `rustc` (possibly multiple times) to gather suggestions from the
+/// compiler and applies them to the files on disk.
+fn rustfix_crate(
+ lock_addr: &str,
+ rustc: &ProcessBuilder,
+ filename: &Path,
+ args: &FixArgs,
+ config: &Config,
+) -> CargoResult<FixedCrate> {
+ if !args.can_run_rustfix(config)? {
+ // This fix should not be run. Skipping...
+ return Ok(FixedCrate::default());
+ }
+
+ // First up, we want to make sure that each crate is only checked by one
+ // process at a time. If two invocations concurrently check a crate then
+ // it's likely to corrupt it.
+ //
+ // Historically this used per-source-file locking, then per-package
+ // locking. It now uses a single, global lock as some users do things like
+ // #[path] or include!() of shared files between packages. Serializing
+ // makes it slower, but is the only safe way to prevent concurrent
+ // modification.
+ let _lock = LockServerClient::lock(&lock_addr.parse()?, "global")?;
+
+ // Next up, this is a bit suspicious, but we *iteratively* execute rustc and
+ // collect suggestions to feed to rustfix. Once we hit our limit of times to
+ // execute rustc or we appear to be reaching a fixed point, we stop running
+ // rustc.
+ //
+ // This is currently done to handle code like:
+ //
+ // ::foo::<::Bar>();
+ //
+ // where there are two fixes to happen here: `crate::foo::<crate::Bar>()`.
+ // The spans for these two suggestions are overlapping and it's difficult in
+ // the compiler to **not** have overlapping spans here. As a result, a naive
+ // implementation would feed the two compiler suggestions for the above fix
+ // into `rustfix`, but one would be rejected because it overlaps with the
+ // other.
+ //
+ // In this case though, both suggestions are valid and can be automatically
+ // applied! To handle this case we execute rustc multiple times, collecting
+ // fixes each time we do so. Along the way we discard any suggestions that
+ // failed to apply, assuming that they can be fixed the next time we run
+ // rustc.
+ //
+ // Naturally, we want a few protections in place here though to avoid looping
+ // forever or otherwise losing data. To that end we have a few termination
+ // conditions:
+ //
+ // * Do this whole process a fixed number of times. In theory we probably
+ // need an infinite number of times to apply fixes, but we're not gonna
+ // sit around waiting for that.
+ // * If it looks like a fix genuinely can't be applied, we need to bail out.
+ // Detect this when a fix fails to get applied *and* no suggestions were
+ // successfully applied to the same file. In that case it looks like we
+ // definitely can't make progress, so bail out.
+ let mut fixes = FixedCrate::default();
+ let mut last_fix_counts = HashMap::new();
+ let iterations = config
+ .get_env("CARGO_FIX_MAX_RETRIES")
+ .ok()
+ .and_then(|n| n.parse().ok())
+ .unwrap_or(4);
+ for _ in 0..iterations {
+ last_fix_counts.clear();
+ for (path, file) in fixes.files.iter_mut() {
+ last_fix_counts.insert(path.clone(), file.fixes_applied);
+ // We'll generate new errors below.
+ file.errors_applying_fixes.clear();
+ }
+ rustfix_and_fix(&mut fixes, rustc, filename, config)?;
+ let mut progress_yet_to_be_made = false;
+ for (path, file) in fixes.files.iter_mut() {
+ if file.errors_applying_fixes.is_empty() {
+ continue;
+ }
+ // If anything was successfully fixed *and* there's at least one
+ // error, then assume the error was spurious and we'll try again on
+ // the next iteration.
+ if file.fixes_applied != *last_fix_counts.get(path).unwrap_or(&0) {
+ progress_yet_to_be_made = true;
+ }
+ }
+ if !progress_yet_to_be_made {
+ break;
+ }
+ }
+
+ // Any errors still remaining at this point need to be reported as probably
+ // bugs in Cargo and/or rustfix.
+ for (path, file) in fixes.files.iter_mut() {
+ for error in file.errors_applying_fixes.drain(..) {
+ Message::ReplaceFailed {
+ file: path.clone(),
+ message: error,
+ }
+ .post(config)?;
+ }
+ }
+
+ Ok(fixes)
+}
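As a small aside, a standalone sketch of how the iteration limit used above is derived; the default of 4 and the `CARGO_FIX_MAX_RETRIES` variable come from the code, the rest is illustrative:

    use std::env;

    fn main() {
        let iterations: u32 = env::var("CARGO_FIX_MAX_RETRIES")
            .ok()
            .and_then(|n| n.parse().ok())
            .unwrap_or(4);
        println!("will rerun rustc/rustfix at most {iterations} times");
    }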
+
+/// Executes `rustc` to apply one round of suggestions to the crate in question.
+///
+/// This will fill in the `fixes` map with original code, suggestions applied,
+/// and any errors encountered while fixing files.
+fn rustfix_and_fix(
+ fixes: &mut FixedCrate,
+ rustc: &ProcessBuilder,
+ filename: &Path,
+ config: &Config,
+) -> CargoResult<()> {
+ // If not empty, filter by these lints.
+ // TODO: implement a way to specify this.
+ let only = HashSet::new();
+
+ debug!("calling rustc to collect suggestions and validate previous fixes: {rustc}");
+ let output = rustc.output()?;
+
+ // If rustc didn't succeed for whatever reason then we're very likely to be
+ // looking at otherwise broken code. Let's not make things accidentally
+ // worse by applying fixes where a bug could cause *more* broken code.
+ // Instead, punt upwards which will reexec rustc over the original code,
+ // displaying pretty versions of the diagnostics we just read out.
+ if !output.status.success() && config.get_env_os(BROKEN_CODE_ENV_INTERNAL).is_none() {
+ debug!(
+ "rustfixing `{:?}` failed, rustc exited with {:?}",
+ filename,
+ output.status.code()
+ );
+ return Ok(());
+ }
+
+ let fix_mode = config
+ .get_env_os("__CARGO_FIX_YOLO")
+ .map(|_| rustfix::Filter::Everything)
+ .unwrap_or(rustfix::Filter::MachineApplicableOnly);
+
+ // Sift through the output of the compiler to look for JSON messages
+ // indicating fixes that we can apply.
+ let stderr = str::from_utf8(&output.stderr).context("failed to parse rustc stderr as UTF-8")?;
+
+ let suggestions = stderr
+ .lines()
+ .filter(|x| !x.is_empty())
+ .inspect(|y| trace!("line: {}", y))
+ // Parse each line of stderr, ignoring errors, as they may not all be JSON.
+ .filter_map(|line| serde_json::from_str::<Diagnostic>(line).ok())
+ // From each diagnostic, try to extract suggestions from rustc.
+ .filter_map(|diag| rustfix::collect_suggestions(&diag, &only, fix_mode));
+
+ // Collect suggestions by file so we can apply them one at a time later.
+ let mut file_map = HashMap::new();
+ let mut num_suggestion = 0;
+ // It's safe since we won't read any content under the home dir.
+ let home_path = config.home().as_path_unlocked();
+ for suggestion in suggestions {
+ trace!("suggestion");
+ // Make sure we've got a file associated with this suggestion and all
+ // snippets point to the same file. Right now it's not clear what
+ // we would do with multiple files.
+ let file_names = suggestion
+ .solutions
+ .iter()
+ .flat_map(|s| s.replacements.iter())
+ .map(|r| &r.snippet.file_name);
+
+ let file_name = if let Some(file_name) = file_names.clone().next() {
+ file_name.clone()
+ } else {
+ trace!("rejecting as it has no solutions {:?}", suggestion);
+ continue;
+ };
+
+ // Do not write into registry cache. See rust-lang/cargo#9857.
+ if Path::new(&file_name).starts_with(home_path) {
+ continue;
+ }
+
+ if !file_names.clone().all(|f| f == &file_name) {
+ trace!("rejecting as it changes multiple files: {:?}", suggestion);
+ continue;
+ }
+
+ trace!("adding suggestion for {:?}: {:?}", file_name, suggestion);
+ file_map
+ .entry(file_name)
+ .or_insert_with(Vec::new)
+ .push(suggestion);
+ num_suggestion += 1;
+ }
+
+ debug!(
+ "collected {} suggestions for `{}`",
+ num_suggestion,
+ filename.display(),
+ );
+
+ for (file, suggestions) in file_map {
+ // Attempt to read the source code for this file. If this fails then
+ // that'd be pretty surprising, so log a message and otherwise keep
+ // going.
+ let code = match paths::read(file.as_ref()) {
+ Ok(s) => s,
+ Err(e) => {
+ warn!("failed to read `{}`: {}", file, e);
+ continue;
+ }
+ };
+ let num_suggestions = suggestions.len();
+ debug!("applying {} fixes to {}", num_suggestions, file);
+
+ // If this file isn't already tracked then we just read the original
+ // code, so save it. If the file is already tracked then the original
+ // code doesn't need to be updated as we've just read an interim state
+ // with some fixes applied but perhaps not all.
+ let fixed_file = fixes
+ .files
+ .entry(file.clone())
+ .or_insert_with(|| FixedFile {
+ errors_applying_fixes: Vec::new(),
+ fixes_applied: 0,
+ original_code: code.clone(),
+ });
+ let mut fixed = CodeFix::new(&code);
+
+ // As mentioned above in `rustfix_crate`, we don't immediately warn
+ // about suggestions that fail to apply here, and instead we save them
+ // off for later processing.
+ for suggestion in suggestions.iter().rev() {
+ match fixed.apply(suggestion) {
+ Ok(()) => fixed_file.fixes_applied += 1,
+ Err(e) => fixed_file.errors_applying_fixes.push(e.to_string()),
+ }
+ }
+ let new_code = fixed.finish()?;
+ paths::write(&file, new_code)?;
+ }
+
+ Ok(())
+}
+
+fn exit_with(status: ExitStatus) -> ! {
+ #[cfg(unix)]
+ {
+ use std::io::Write;
+ use std::os::unix::prelude::*;
+ if let Some(signal) = status.signal() {
+ drop(writeln!(
+ std::io::stderr().lock(),
+ "child failed with signal `{}`",
+ signal
+ ));
+ process::exit(2);
+ }
+ }
+ process::exit(status.code().unwrap_or(3));
+}
+
+fn log_failed_fix(
+ config: &Config,
+ krate: Option<String>,
+ stderr: &[u8],
+ status: ExitStatus,
+) -> CargoResult<()> {
+ let stderr = str::from_utf8(stderr).context("failed to parse rustc stderr as UTF-8")?;
+
+ let diagnostics = stderr
+ .lines()
+ .filter(|x| !x.is_empty())
+ .filter_map(|line| serde_json::from_str::<Diagnostic>(line).ok());
+ let mut files = BTreeSet::new();
+ let mut errors = Vec::new();
+ for diagnostic in diagnostics {
+ errors.push(diagnostic.rendered.unwrap_or(diagnostic.message));
+ for span in diagnostic.spans.into_iter() {
+ files.insert(span.file_name);
+ }
+ }
+ // Include any abnormal messages (like an ICE or whatever).
+ errors.extend(
+ stderr
+ .lines()
+ .filter(|x| !x.starts_with('{'))
+ .map(|x| x.to_string()),
+ );
+
+ let files = files.into_iter().collect();
+ let abnormal_exit = if status.code().map_or(false, is_simple_exit_code) {
+ None
+ } else {
+ Some(exit_status_to_string(status))
+ };
+ Message::FixFailed {
+ files,
+ krate,
+ errors,
+ abnormal_exit,
+ }
+ .post(config)?;
+
+ Ok(())
+}
+
+/// Various command-line options and settings used when `cargo` is running as
+/// a proxy for `rustc` during the fix operation.
+struct FixArgs {
+ /// This is the `.rs` file that is being fixed.
+ file: PathBuf,
+ /// If `--edition` is used to migrate to the next edition, this is the
+ /// edition we are migrating towards.
+ prepare_for_edition: Option<Edition>,
+ /// `true` if `--edition-idioms` is enabled.
+ idioms: bool,
+ /// The current edition.
+ ///
+ /// `None` if on 2015.
+ enabled_edition: Option<Edition>,
+ /// Other command-line arguments not reflected by other fields in
+ /// `FixArgs`.
+ other: Vec<OsString>,
+ /// Path to the `rustc` executable.
+ rustc: PathBuf,
+ /// Console output flags (`--error-format`, `--json`, etc.).
+ ///
+ /// The normal fix procedure always uses `--json`, so it overrides what
+ /// Cargo normally passes when applying fixes. When displaying warnings or
+ /// errors, it will use these flags.
+ format_args: Vec<String>,
+}
+
+impl FixArgs {
+ fn get() -> CargoResult<FixArgs> {
+ Self::from_args(env::args_os())
+ }
+
+ // This is a separate function so that we can use it in tests.
+ fn from_args(argv: impl IntoIterator<Item = OsString>) -> CargoResult<Self> {
+ let mut argv = argv.into_iter();
+ let mut rustc = argv
+ .nth(1)
+ .map(PathBuf::from)
+ .ok_or_else(|| anyhow::anyhow!("expected rustc or `@path` as first argument"))?;
+ let mut file = None;
+ let mut enabled_edition = None;
+ let mut other = Vec::new();
+ let mut format_args = Vec::new();
+
+ let mut handle_arg = |arg: OsString| -> CargoResult<()> {
+ let path = PathBuf::from(arg);
+ if path.extension().and_then(|s| s.to_str()) == Some("rs") && path.exists() {
+ file = Some(path);
+ return Ok(());
+ }
+ if let Some(s) = path.to_str() {
+ if let Some(edition) = s.strip_prefix("--edition=") {
+ enabled_edition = Some(edition.parse()?);
+ return Ok(());
+ }
+ if s.starts_with("--error-format=") || s.starts_with("--json=") {
+ // Cargo may add error-format in some cases, but `cargo
+ // fix` wants to add its own.
+ format_args.push(s.to_string());
+ return Ok(());
+ }
+ }
+ other.push(path.into());
+ Ok(())
+ };
+
+ if let Some(argfile_path) = rustc.to_str().unwrap_or_default().strip_prefix("@") {
+ // Because cargo in fix-proxy-mode might hit the command line size limit,
+ // cargo fix needs to handle the `@path` argfile for this special case.
+ if argv.next().is_some() {
+ bail!("argfile `@path` cannot be combined with other arguments");
+ }
+ let contents = fs::read_to_string(argfile_path)
+ .with_context(|| format!("failed to read argfile at `{argfile_path}`"))?;
+ let mut iter = contents.lines().map(OsString::from);
+ rustc = iter
+ .next()
+ .map(PathBuf::from)
+ .ok_or_else(|| anyhow::anyhow!("expected rustc as first argument"))?;
+ for arg in iter {
+ handle_arg(arg)?;
+ }
+ } else {
+ for arg in argv {
+ handle_arg(arg)?;
+ }
+ }
+
+ let file = file.ok_or_else(|| anyhow::anyhow!("could not find .rs file in rustc args"))?;
+ // ALLOWED: For the internal mechanism of `cargo fix` only.
+ // Shouldn't be set directly by anyone.
+ #[allow(clippy::disallowed_methods)]
+ let idioms = env::var(IDIOMS_ENV_INTERNAL).is_ok();
+
+ // ALLOWED: For the internal mechanism of `cargo fix` only.
+ // Shouldn't be set directly by anyone.
+ #[allow(clippy::disallowed_methods)]
+ let prepare_for_edition = env::var(EDITION_ENV_INTERNAL).ok().map(|_| {
+ enabled_edition
+ .unwrap_or(Edition::Edition2015)
+ .saturating_next()
+ });
+
+ Ok(FixArgs {
+ file,
+ prepare_for_edition,
+ idioms,
+ enabled_edition,
+ other,
+ rustc,
+ format_args,
+ })
+ }
+
+ fn apply(&self, cmd: &mut ProcessBuilder) {
+ cmd.arg(&self.file);
+ cmd.args(&self.other);
+ if self.prepare_for_edition.is_some() {
+ // When migrating an edition, we don't want to fix other lints as
+ // they can sometimes add suggestions that fail to apply, causing
+ // the entire migration to fail. But those lints aren't needed to
+ // migrate.
+ cmd.arg("--cap-lints=allow");
+ } else {
+ // This allows `cargo fix` to work even if the crate has #[deny(warnings)].
+ cmd.arg("--cap-lints=warn");
+ }
+ if let Some(edition) = self.enabled_edition {
+ cmd.arg("--edition").arg(edition.to_string());
+ if self.idioms && edition.supports_idiom_lint() {
+ cmd.arg(format!("-Wrust-{}-idioms", edition));
+ }
+ }
+
+ if let Some(edition) = self.prepare_for_edition {
+ if edition.supports_compat_lint() {
+ cmd.arg("--force-warn")
+ .arg(format!("rust-{}-compatibility", edition));
+ }
+ }
+ }
+
+ /// Validates the edition, and sends a message indicating what is being
+ /// done. Returns a flag indicating whether this fix should be run.
+ fn can_run_rustfix(&self, config: &Config) -> CargoResult<bool> {
+ let to_edition = match self.prepare_for_edition {
+ Some(s) => s,
+ None => {
+ return Message::Fixing {
+ file: self.file.display().to_string(),
+ }
+ .post(config)
+ .and(Ok(true));
+ }
+ };
+ // Unfortunately determining which cargo targets are being built
+ // isn't easy, and each target can be a different edition. The
+ // cargo-as-rustc fix wrapper doesn't know anything about the
+ // workspace, so it can't check for the `cargo-features` unstable
+ // opt-in. As a compromise, this just restricts to the nightly
+ // toolchain.
+ //
+ // Unfortunately this results in a pretty poor error message when
+ // multiple jobs run in parallel (the error appears multiple
+ // times). Hopefully this doesn't happen often in practice.
+ if !to_edition.is_stable() && !config.nightly_features_allowed {
+ let message = format!(
+ "`{file}` is on the latest edition, but trying to \
+ migrate to edition {to_edition}.\n\
+ Edition {to_edition} is unstable and not allowed in \
+ this release, consider trying the nightly release channel.",
+ file = self.file.display(),
+ to_edition = to_edition
+ );
+ return Message::EditionAlreadyEnabled {
+ message,
+ edition: to_edition.previous().unwrap(),
+ }
+ .post(config)
+ .and(Ok(false)); // Do not run rustfix for this edition.
+ }
+ let from_edition = self.enabled_edition.unwrap_or(Edition::Edition2015);
+ if from_edition == to_edition {
+ let message = format!(
+ "`{}` is already on the latest edition ({}), \
+ unable to migrate further",
+ self.file.display(),
+ to_edition
+ );
+ Message::EditionAlreadyEnabled {
+ message,
+ edition: to_edition,
+ }
+ .post(config)
+ } else {
+ Message::Migrating {
+ file: self.file.display().to_string(),
+ from_edition,
+ to_edition,
+ }
+ .post(config)
+ }
+ .and(Ok(true))
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::FixArgs;
+ use std::ffi::OsString;
+ use std::io::Write as _;
+ use std::path::PathBuf;
+
+ #[test]
+ fn get_fix_args_from_argfile() {
+ let mut temp = tempfile::Builder::new().tempfile().unwrap();
+ let main_rs = tempfile::Builder::new().suffix(".rs").tempfile().unwrap();
+
+ let content = format!("/path/to/rustc\n{}\nfoobar\n", main_rs.path().display());
+ temp.write_all(content.as_bytes()).unwrap();
+
+ let argfile = format!("@{}", temp.path().display());
+ let args = ["cargo", &argfile];
+ let fix_args = FixArgs::from_args(args.map(|x| x.into())).unwrap();
+ assert_eq!(fix_args.rustc, PathBuf::from("/path/to/rustc"));
+ assert_eq!(fix_args.file, main_rs.path());
+ assert_eq!(fix_args.other, vec![OsString::from("foobar")]);
+ }
+
+ #[test]
+ fn get_fix_args_from_argfile_with_extra_arg() {
+ let mut temp = tempfile::Builder::new().tempfile().unwrap();
+ let main_rs = tempfile::Builder::new().suffix(".rs").tempfile().unwrap();
+
+ let content = format!("/path/to/rustc\n{}\nfoobar\n", main_rs.path().display());
+ temp.write_all(content.as_bytes()).unwrap();
+
+ let argfile = format!("@{}", temp.path().display());
+ let args = ["cargo", &argfile, "boo!"];
+ match FixArgs::from_args(args.map(|x| x.into())) {
+ Err(e) => assert_eq!(
+ e.to_string(),
+ "argfile `@path` cannot be combined with other arguments"
+ ),
+ Ok(_) => panic!("should fail"),
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/ops/lockfile.rs b/src/tools/cargo/src/cargo/ops/lockfile.rs
new file mode 100644
index 000000000..e11e492af
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/lockfile.rs
@@ -0,0 +1,229 @@
+use std::io::prelude::*;
+
+use crate::core::{resolver, Resolve, ResolveVersion, Workspace};
+use crate::util::errors::CargoResult;
+use crate::util::toml as cargo_toml;
+use crate::util::Filesystem;
+
+use anyhow::Context as _;
+
+pub fn load_pkg_lockfile(ws: &Workspace<'_>) -> CargoResult<Option<Resolve>> {
+ if !ws.root().join("Cargo.lock").exists() {
+ return Ok(None);
+ }
+
+ let root = Filesystem::new(ws.root().to_path_buf());
+ let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?;
+
+ let mut s = String::new();
+ f.read_to_string(&mut s)
+ .with_context(|| format!("failed to read file: {}", f.path().display()))?;
+
+ let resolve = (|| -> CargoResult<Option<Resolve>> {
+ let resolve: toml::Table = cargo_toml::parse_document(&s, f.path(), ws.config())?;
+ let v: resolver::EncodableResolve = resolve.try_into()?;
+ Ok(Some(v.into_resolve(&s, ws)?))
+ })()
+ .with_context(|| format!("failed to parse lock file at: {}", f.path().display()))?;
+ Ok(resolve)
+}
+
+/// Generate a TOML `String` of `Cargo.lock` from a `Resolve`.
+pub fn resolve_to_string(ws: &Workspace<'_>, resolve: &mut Resolve) -> CargoResult<String> {
+ let (_orig, out, _ws_root) = resolve_to_string_orig(ws, resolve);
+ Ok(out)
+}
+
+pub fn write_pkg_lockfile(ws: &Workspace<'_>, resolve: &mut Resolve) -> CargoResult<()> {
+ let (orig, mut out, ws_root) = resolve_to_string_orig(ws, resolve);
+
+ // If the lock file contents haven't changed, don't rewrite it. This is
+ // helpful on read-only filesystems.
+ if let Some(orig) = &orig {
+ if are_equal_lockfiles(orig, &out, ws) {
+ return Ok(());
+ }
+ }
+
+ if !ws.config().lock_update_allowed() {
+ let flag = if ws.config().locked() {
+ "--locked"
+ } else {
+ "--frozen"
+ };
+ anyhow::bail!(
+ "the lock file {} needs to be updated but {} was passed to prevent this\n\
+ If you want to try to generate the lock file without accessing the network, \
+ remove the {} flag and use --offline instead.",
+ ws.root().to_path_buf().join("Cargo.lock").display(),
+ flag,
+ flag
+ );
+ }
+
+ // While we're updating the lock file anyway, go ahead and update its
+ // encoding to whatever the latest default is. That way we can slowly roll
+ // out lock file format updates as lock files are already being rewritten,
+ // and changes which don't touch dependencies won't spuriously update the
+ // lock file.
+ if resolve.version() < ResolveVersion::default() {
+ resolve.set_version(ResolveVersion::default());
+ out = serialize_resolve(resolve, orig.as_deref());
+ }
+
+ // If we get here, the lock file needs updating, so write it out.
+ ws_root
+ .open_rw("Cargo.lock", ws.config(), "Cargo.lock file")
+ .and_then(|mut f| {
+ f.file().set_len(0)?;
+ f.write_all(out.as_bytes())?;
+ Ok(())
+ })
+ .with_context(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?;
+ Ok(())
+}
+
+fn resolve_to_string_orig(
+ ws: &Workspace<'_>,
+ resolve: &mut Resolve,
+) -> (Option<String>, String, Filesystem) {
+ // Load the original lock file if it exists.
+ let ws_root = Filesystem::new(ws.root().to_path_buf());
+ let orig = ws_root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file");
+ let orig = orig.and_then(|mut f| {
+ let mut s = String::new();
+ f.read_to_string(&mut s)?;
+ Ok(s)
+ });
+ let out = serialize_resolve(resolve, orig.as_deref().ok());
+ (orig.ok(), out, ws_root)
+}
+
+fn serialize_resolve(resolve: &Resolve, orig: Option<&str>) -> String {
+ let toml = toml::Table::try_from(resolve).unwrap();
+
+ let mut out = String::new();
+
+ // At the start of the file we notify the reader that the file is generated.
+ // Specifically Phabricator ignores files containing "@generated", so we use that.
+ let marker_line = "# This file is automatically @generated by Cargo.";
+ let extra_line = "# It is not intended for manual editing.";
+ out.push_str(marker_line);
+ out.push('\n');
+ out.push_str(extra_line);
+ out.push('\n');
+ // and preserve any other top comments
+ if let Some(orig) = orig {
+ let mut comments = orig.lines().take_while(|line| line.starts_with('#'));
+ if let Some(first) = comments.next() {
+ if first != marker_line {
+ out.push_str(first);
+ out.push('\n');
+ }
+ if let Some(second) = comments.next() {
+ if second != extra_line {
+ out.push_str(second);
+ out.push('\n');
+ }
+ for line in comments {
+ out.push_str(line);
+ out.push('\n');
+ }
+ }
+ }
+ }
+
+ if let Some(version) = toml.get("version") {
+ out.push_str(&format!("version = {}\n\n", version));
+ }
+
+ let deps = toml["package"].as_array().unwrap();
+ for dep in deps {
+ let dep = dep.as_table().unwrap();
+
+ out.push_str("[[package]]\n");
+ emit_package(dep, &mut out);
+ }
+
+ if let Some(patch) = toml.get("patch") {
+ let list = patch["unused"].as_array().unwrap();
+ for entry in list {
+ out.push_str("[[patch.unused]]\n");
+ emit_package(entry.as_table().unwrap(), &mut out);
+ out.push('\n');
+ }
+ }
+
+ if let Some(meta) = toml.get("metadata") {
+ // 1. We need to ensure we print the entire tree, not just the direct members of `metadata`
+ // (which is all `toml_edit::Table::to_string` would show)
+ // 2. We need to ensure all child tables have the `metadata.` prefix
+ let meta_table = meta
+ .as_table()
+ .expect("validation ensures this is a table")
+ .clone();
+ let mut meta_doc = toml::Table::new();
+ meta_doc.insert("metadata".to_owned(), toml::Value::Table(meta_table));
+
+ out.push_str(&meta_doc.to_string());
+ }
+
+ // Historical versions of Cargo in the old format accidentally left trailing
+ // blank newlines at the end of files, so we just leave that as-is. For all
+ // encodings going forward, though, we want to be sure that our encoded lock
+ // file doesn't contain any trailing newlines so trim out the extra if
+ // necessary.
+ if resolve.version() >= ResolveVersion::V2 {
+ while out.ends_with("\n\n") {
+ out.pop();
+ }
+ }
+ out
+}
+
+fn are_equal_lockfiles(orig: &str, current: &str, ws: &Workspace<'_>) -> bool {
+ // If we want to try and avoid updating the lock file, parse both and
+ // compare them; since this is somewhat expensive, don't do it in the
+ // common case where we can update lock files.
+ if !ws.config().lock_update_allowed() {
+ let res: CargoResult<bool> = (|| {
+ let old: resolver::EncodableResolve = toml::from_str(orig)?;
+ let new: resolver::EncodableResolve = toml::from_str(current)?;
+ Ok(old.into_resolve(orig, ws)? == new.into_resolve(current, ws)?)
+ })();
+ if let Ok(true) = res {
+ return true;
+ }
+ }
+
+ orig.lines().eq(current.lines())
+}
+
+fn emit_package(dep: &toml::Table, out: &mut String) {
+ out.push_str(&format!("name = {}\n", &dep["name"]));
+ out.push_str(&format!("version = {}\n", &dep["version"]));
+
+ if dep.contains_key("source") {
+ out.push_str(&format!("source = {}\n", &dep["source"]));
+ }
+ if dep.contains_key("checksum") {
+ out.push_str(&format!("checksum = {}\n", &dep["checksum"]));
+ }
+
+ if let Some(s) = dep.get("dependencies") {
+ let slice = s.as_array().unwrap();
+
+ if !slice.is_empty() {
+ out.push_str("dependencies = [\n");
+
+ for child in slice.iter() {
+ out.push_str(&format!(" {},\n", child));
+ }
+
+ out.push_str("]\n");
+ }
+ out.push('\n');
+ } else if dep.contains_key("replace") {
+ out.push_str(&format!("replace = {}\n\n", &dep["replace"]));
+ }
+}
diff --git a/src/tools/cargo/src/cargo/ops/mod.rs b/src/tools/cargo/src/cargo/ops/mod.rs
new file mode 100644
index 000000000..4b6aea991
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/mod.rs
@@ -0,0 +1,90 @@
+use crate::sources::CRATES_IO_DOMAIN;
+
+pub use self::cargo_clean::{clean, CleanOptions};
+pub use self::cargo_compile::{
+ compile, compile_with_exec, compile_ws, create_bcx, print, resolve_all_features, CompileOptions,
+};
+pub use self::cargo_compile::{CompileFilter, FilterRule, LibRule, Packages};
+pub use self::cargo_doc::{doc, DocOptions};
+pub use self::cargo_fetch::{fetch, FetchOptions};
+pub use self::cargo_generate_lockfile::generate_lockfile;
+pub use self::cargo_generate_lockfile::update_lockfile;
+pub use self::cargo_generate_lockfile::UpdateOptions;
+pub use self::cargo_install::{install, install_list};
+pub use self::cargo_new::{init, new, NewOptions, NewProjectKind, VersionControl};
+pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions};
+pub use self::cargo_package::{check_yanked, package, package_one, PackageOpts};
+pub use self::cargo_pkgid::pkgid;
+pub use self::cargo_read_manifest::{read_package, read_packages};
+pub use self::cargo_run::run;
+pub use self::cargo_test::{run_benches, run_tests, TestOptions};
+pub use self::cargo_uninstall::uninstall;
+pub use self::fix::{fix, fix_exec_rustc, fix_get_proxy_lock_addr, FixOptions};
+pub use self::lockfile::{load_pkg_lockfile, resolve_to_string, write_pkg_lockfile};
+pub use self::registry::HttpTimeout;
+pub use self::registry::{configure_http_handle, http_handle, http_handle_and_timeout};
+pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts};
+pub use self::registry::{needs_custom_http_transport, registry_login, registry_logout, search};
+pub use self::registry::{publish, RegistryCredentialConfig};
+pub use self::resolve::{
+ add_overrides, get_resolved_packages, resolve_with_previous, resolve_ws, resolve_ws_with_opts,
+ WorkspaceResolve,
+};
+pub use self::vendor::{vendor, VendorOptions};
+
+pub mod cargo_add;
+mod cargo_clean;
+pub(crate) mod cargo_compile;
+pub mod cargo_config;
+mod cargo_doc;
+mod cargo_fetch;
+mod cargo_generate_lockfile;
+mod cargo_install;
+mod cargo_new;
+mod cargo_output_metadata;
+mod cargo_package;
+mod cargo_pkgid;
+mod cargo_read_manifest;
+pub mod cargo_remove;
+mod cargo_run;
+mod cargo_test;
+mod cargo_uninstall;
+mod common_for_install_and_uninstall;
+mod fix;
+pub(crate) mod lockfile;
+pub(crate) mod registry;
+pub(crate) mod resolve;
+pub mod tree;
+mod vendor;
+
+/// Returns `true` if the dependency is either a git or path dependency, `false` otherwise.
+/// Errors if a git/path dependency is transitive but has no version specified (i.e. no
+/// registry source to fall back to).
+/// This check is performed on dependencies before publishing or packaging.
+fn check_dep_has_version(dep: &crate::core::Dependency, publish: bool) -> crate::CargoResult<bool> {
+ let which = if dep.source_id().is_path() {
+ "path"
+ } else if dep.source_id().is_git() {
+ "git"
+ } else {
+ return Ok(false);
+ };
+
+ if !dep.specified_req() && dep.is_transitive() {
+ let dep_version_source = dep.registry_id().map_or_else(
+ || CRATES_IO_DOMAIN.to_string(),
+ |registry_id| registry_id.display_registry_name(),
+ );
+ anyhow::bail!(
+ "all dependencies must have a version specified when {}.\n\
+ dependency `{}` does not specify a version\n\
+ Note: The {} dependency will use the version from {},\n\
+ the `{}` specification will be removed from the dependency declaration.",
+ if publish { "publishing" } else { "packaging" },
+ dep.package_name(),
+ if publish { "published" } else { "packaged" },
+ dep_version_source,
+ which,
+ )
+ }
+ Ok(true)
+}
diff --git a/src/tools/cargo/src/cargo/ops/registry.rs b/src/tools/cargo/src/cargo/ops/registry.rs
new file mode 100644
index 000000000..e04f7ba2c
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/registry.rs
@@ -0,0 +1,1250 @@
+use std::cmp;
+use std::collections::{BTreeMap, HashSet};
+use std::fs::File;
+use std::io::{self, BufRead};
+use std::iter::repeat;
+use std::path::PathBuf;
+use std::str;
+use std::task::Poll;
+use std::time::Duration;
+
+use anyhow::{anyhow, bail, format_err, Context as _};
+use cargo_util::paths;
+use crates_io::{self, NewCrate, NewCrateDependency, Registry};
+use curl::easy::{Easy, InfoType, SslOpt, SslVersion};
+use log::{log, Level};
+use pasetors::keys::{AsymmetricKeyPair, Generate};
+use pasetors::paserk::FormatAsPaserk;
+use termcolor::Color::Green;
+use termcolor::ColorSpec;
+use url::Url;
+
+use crate::core::dependency::DepKind;
+use crate::core::dependency::Dependency;
+use crate::core::manifest::ManifestMetadata;
+use crate::core::resolver::CliFeatures;
+use crate::core::source::Source;
+use crate::core::QueryKind;
+use crate::core::{Package, SourceId, Workspace};
+use crate::ops;
+use crate::ops::Packages;
+use crate::sources::{RegistrySource, SourceConfigMap, CRATES_IO_DOMAIN, CRATES_IO_REGISTRY};
+use crate::util::auth::{
+ paserk_public_from_paserk_secret, Secret, {self, AuthorizationError},
+};
+use crate::util::config::{Config, SslVersionConfig, SslVersionConfigRange};
+use crate::util::errors::CargoResult;
+use crate::util::important_paths::find_root_manifest_for_wd;
+use crate::util::{truncate_with_ellipsis, IntoUrl};
+use crate::util::{Progress, ProgressStyle};
+use crate::{drop_print, drop_println, version};
+
+/// Registry settings loaded from config files.
+///
+/// This is loaded based on the `--registry` flag and the config settings.
+#[derive(Debug, PartialEq)]
+pub enum RegistryCredentialConfig {
+ None,
+ /// The authentication token.
+ Token(Secret<String>),
+ /// Process used for fetching a token.
+ Process((PathBuf, Vec<String>)),
+ /// Secret Key and subject for Asymmetric tokens.
+ AsymmetricKey((Secret<String>, Option<String>)),
+}
+
+impl RegistryCredentialConfig {
+ /// Returns `true` if the credential is [`None`].
+ ///
+ /// [`None`]: Self::None
+ pub fn is_none(&self) -> bool {
+ matches!(self, Self::None)
+ }
+ /// Returns `true` if the credential is [`Token`].
+ ///
+ /// [`Token`]: Self::Token
+ pub fn is_token(&self) -> bool {
+ matches!(self, Self::Token(..))
+ }
+ /// Returns `true` if the credential is [`AsymmetricKey`].
+ ///
+ /// [`AsymmetricKey`]: RegistryCredentialConfig::AsymmetricKey
+ pub fn is_asymmetric_key(&self) -> bool {
+ matches!(self, Self::AsymmetricKey(..))
+ }
+ pub fn as_token(&self) -> Option<Secret<&str>> {
+ if let Self::Token(v) = self {
+ Some(v.as_deref())
+ } else {
+ None
+ }
+ }
+ pub fn as_process(&self) -> Option<&(PathBuf, Vec<String>)> {
+ if let Self::Process(v) = self {
+ Some(v)
+ } else {
+ None
+ }
+ }
+ pub fn as_asymmetric_key(&self) -> Option<&(Secret<String>, Option<String>)> {
+ if let Self::AsymmetricKey(v) = self {
+ Some(v)
+ } else {
+ None
+ }
+ }
+}
+
+pub struct PublishOpts<'cfg> {
+ pub config: &'cfg Config,
+ pub token: Option<Secret<String>>,
+ pub index: Option<String>,
+ pub verify: bool,
+ pub allow_dirty: bool,
+ pub jobs: Option<i32>,
+ pub keep_going: bool,
+ pub to_publish: ops::Packages,
+ pub targets: Vec<String>,
+ pub dry_run: bool,
+ pub registry: Option<String>,
+ pub cli_features: CliFeatures,
+}
+
+pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
+ let specs = opts.to_publish.to_package_id_specs(ws)?;
+ if specs.len() > 1 {
+ bail!("the `-p` argument must be specified to select a single package to publish")
+ }
+ if Packages::Default == opts.to_publish && ws.is_virtual() {
+ bail!("the `-p` argument must be specified in the root of a virtual workspace")
+ }
+ let member_ids = ws.members().map(|p| p.package_id());
+ // Check that the spec matches exactly one member.
+ specs[0].query(member_ids)?;
+ let mut pkgs = ws.members_with_features(&specs, &opts.cli_features)?;
+ // In `members_with_features_old`, it will add the "current" package (determined by the cwd),
+ // so we need to filter down to the packages matching the given specs.
+ pkgs = pkgs
+ .into_iter()
+ .filter(|(m, _)| specs.iter().any(|spec| spec.matches(m.package_id())))
+ .collect();
+ // Double check. This should be safe in theory, unless the logic above has changed.
+ assert_eq!(pkgs.len(), 1);
+
+ let (pkg, cli_features) = pkgs.pop().unwrap();
+
+ let mut publish_registry = opts.registry.clone();
+ if let Some(ref allowed_registries) = *pkg.publish() {
+ if publish_registry.is_none() && allowed_registries.len() == 1 {
+ // If there is only one allowed registry, push to that one directly,
+ // even though there is no registry specified in the command.
+ let default_registry = &allowed_registries[0];
+ if default_registry != CRATES_IO_REGISTRY {
+ // Don't change the registry for crates.io and don't warn the user.
+ // crates.io will be defaulted even without this.
+ opts.config.shell().note(&format!(
+ "Found `{}` as only allowed registry. Publishing to it automatically.",
+ default_registry
+ ))?;
+ publish_registry = Some(default_registry.clone());
+ }
+ }
+
+ let reg_name = publish_registry
+ .clone()
+ .unwrap_or_else(|| CRATES_IO_REGISTRY.to_string());
+ if allowed_registries.is_empty() {
+ bail!(
+ "`{}` cannot be published.\n\
+ `package.publish` is set to `false` or an empty list in Cargo.toml and prevents publishing.",
+ pkg.name(),
+ );
+ } else if !allowed_registries.contains(&reg_name) {
+ bail!(
+ "`{}` cannot be published.\n\
+ The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
+ pkg.name(),
+ reg_name
+ );
+ }
+ }
+ // This is only used to confirm that we can create a token before we build the package.
+ // This causes the credential provider to be called an extra time, but keeps the same order of errors.
+ let ver = pkg.version().to_string();
+ let mutation = auth::Mutation::PrePublish;
+
+ let (mut registry, reg_ids) = registry(
+ opts.config,
+ opts.token.as_ref().map(Secret::as_deref),
+ opts.index.as_deref(),
+ publish_registry.as_deref(),
+ true,
+ Some(mutation).filter(|_| !opts.dry_run),
+ )?;
+ verify_dependencies(pkg, &registry, reg_ids.original)?;
+
+ // Prepare a tarball, with a non-suppressible warning if metadata
+ // is missing since this is being put online.
+ let tarball = ops::package_one(
+ ws,
+ pkg,
+ &ops::PackageOpts {
+ config: opts.config,
+ verify: opts.verify,
+ list: false,
+ check_metadata: true,
+ allow_dirty: opts.allow_dirty,
+ to_package: ops::Packages::Default,
+ targets: opts.targets.clone(),
+ jobs: opts.jobs,
+ keep_going: opts.keep_going,
+ cli_features,
+ },
+ )?
+ .unwrap();
+
+ if !opts.dry_run {
+ let hash = cargo_util::Sha256::new()
+ .update_file(tarball.file())?
+ .finish_hex();
+ let mutation = Some(auth::Mutation::Publish {
+ name: pkg.name().as_str(),
+ vers: &ver,
+ cksum: &hash,
+ });
+ registry.set_token(Some(auth::auth_token(
+ &opts.config,
+ &reg_ids.original,
+ None,
+ mutation,
+ )?));
+ }
+
+ opts.config
+ .shell()
+ .status("Uploading", pkg.package_id().to_string())?;
+ transmit(
+ opts.config,
+ pkg,
+ tarball.file(),
+ &mut registry,
+ reg_ids.original,
+ opts.dry_run,
+ )?;
+ if !opts.dry_run {
+ const DEFAULT_TIMEOUT: u64 = 60;
+ let timeout = if opts.config.cli_unstable().publish_timeout {
+ let timeout: Option<u64> = opts.config.get("publish.timeout")?;
+ timeout.unwrap_or(DEFAULT_TIMEOUT)
+ } else {
+ DEFAULT_TIMEOUT
+ };
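+ // A timeout of 0 disables waiting for the published crate to appear in the index.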
+ if 0 < timeout {
+ let timeout = std::time::Duration::from_secs(timeout);
+ wait_for_publish(opts.config, reg_ids.original, pkg, timeout)?;
+ }
+ }
+
+ Ok(())
+}
+
+fn verify_dependencies(
+ pkg: &Package,
+ registry: &Registry,
+ registry_src: SourceId,
+) -> CargoResult<()> {
+ for dep in pkg.dependencies().iter() {
+ if super::check_dep_has_version(dep, true)? {
+ continue;
+ }
+ // TomlManifest::prepare_for_publish will rewrite the dependency
+ // to be just the `version` field.
+ if dep.source_id() != registry_src {
+ if !dep.source_id().is_registry() {
+ // Consider making SourceId::kind a public type that we can
+ // exhaustively match on. Using match can help ensure that
+ // every kind is properly handled.
+ panic!("unexpected source kind for dependency {:?}", dep);
+ }
+ // Block requests to send to crates.io with alt-registry deps.
+ // This extra hostname check is mostly to assist with testing,
+ // but also prevents someone using `--index` to specify
+ // something that points to crates.io.
+ if registry_src.is_crates_io() || registry.host_is_crates_io() {
+ bail!("crates cannot be published to crates.io with dependencies sourced from other\n\
+ registries. `{}` needs to be published to crates.io before publishing this crate.\n\
+ (crate `{}` is pulled from {})",
+ dep.package_name(),
+ dep.package_name(),
+ dep.source_id());
+ }
+ }
+ }
+ Ok(())
+}
+
+fn transmit(
+ config: &Config,
+ pkg: &Package,
+ tarball: &File,
+ registry: &mut Registry,
+ registry_id: SourceId,
+ dry_run: bool,
+) -> CargoResult<()> {
+ let deps = pkg
+ .dependencies()
+ .iter()
+ .filter(|dep| {
+ // Skip dev-dependencies without a version.
+ dep.is_transitive() || dep.specified_req()
+ })
+ .map(|dep| {
+ // If the dependency is from a different registry, then include the
+ // registry in the dependency.
+ let dep_registry_id = match dep.registry_id() {
+ Some(id) => id,
+ None => SourceId::crates_io(config)?,
+ };
+ // In the index and Web API, None means "from the same registry"
+ // whereas in Cargo.toml, it means "from crates.io".
+ let dep_registry = if dep_registry_id != registry_id {
+ Some(dep_registry_id.url().to_string())
+ } else {
+ None
+ };
+
+ Ok(NewCrateDependency {
+ optional: dep.is_optional(),
+ default_features: dep.uses_default_features(),
+ name: dep.package_name().to_string(),
+ features: dep.features().iter().map(|s| s.to_string()).collect(),
+ version_req: dep.version_req().to_string(),
+ target: dep.platform().map(|s| s.to_string()),
+ kind: match dep.kind() {
+ DepKind::Normal => "normal",
+ DepKind::Build => "build",
+ DepKind::Development => "dev",
+ }
+ .to_string(),
+ registry: dep_registry,
+ explicit_name_in_toml: dep.explicit_name_in_toml().map(|s| s.to_string()),
+ })
+ })
+ .collect::<CargoResult<Vec<NewCrateDependency>>>()?;
+ let manifest = pkg.manifest();
+ let ManifestMetadata {
+ ref authors,
+ ref description,
+ ref homepage,
+ ref documentation,
+ ref keywords,
+ ref readme,
+ ref repository,
+ ref license,
+ ref license_file,
+ ref categories,
+ ref badges,
+ ref links,
+ } = *manifest.metadata();
+ let readme_content = readme
+ .as_ref()
+ .map(|readme| {
+ paths::read(&pkg.root().join(readme))
+ .with_context(|| format!("failed to read `readme` file for package `{}`", pkg))
+ })
+ .transpose()?;
+ if let Some(ref file) = *license_file {
+ if !pkg.root().join(file).exists() {
+ bail!("the license file `{}` does not exist", file)
+ }
+ }
+
+ // Do not upload if performing a dry run
+ if dry_run {
+ config.shell().warn("aborting upload due to dry run")?;
+ return Ok(());
+ }
+
+ let string_features = match manifest.original().features() {
+ Some(features) => features
+ .iter()
+ .map(|(feat, values)| {
+ (
+ feat.to_string(),
+ values.iter().map(|fv| fv.to_string()).collect(),
+ )
+ })
+ .collect::<BTreeMap<String, Vec<String>>>(),
+ None => BTreeMap::new(),
+ };
+
+ let warnings = registry
+ .publish(
+ &NewCrate {
+ name: pkg.name().to_string(),
+ vers: pkg.version().to_string(),
+ deps,
+ features: string_features,
+ authors: authors.clone(),
+ description: description.clone(),
+ homepage: homepage.clone(),
+ documentation: documentation.clone(),
+ keywords: keywords.clone(),
+ categories: categories.clone(),
+ readme: readme_content,
+ readme_file: readme.clone(),
+ repository: repository.clone(),
+ license: license.clone(),
+ license_file: license_file.clone(),
+ badges: badges.clone(),
+ links: links.clone(),
+ },
+ tarball,
+ )
+ .with_context(|| format!("failed to publish to registry at {}", registry.host()))?;
+
+ if !warnings.invalid_categories.is_empty() {
+ let msg = format!(
+ "the following are not valid category slugs and were \
+ ignored: {}. Please see https://crates.io/category_slugs \
+ for the list of all category slugs. \
+ ",
+ warnings.invalid_categories.join(", ")
+ );
+ config.shell().warn(&msg)?;
+ }
+
+ if !warnings.invalid_badges.is_empty() {
+ let msg = format!(
+ "the following are not valid badges and were ignored: {}. \
+ Either the badge type specified is unknown or a required \
+ attribute is missing. Please see \
+ https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata \
+ for valid badge types and their required attributes.",
+ warnings.invalid_badges.join(", ")
+ );
+ config.shell().warn(&msg)?;
+ }
+
+ if !warnings.other.is_empty() {
+ for msg in warnings.other {
+ config.shell().warn(&msg)?;
+ }
+ }
+
+ Ok(())
+}
+
+fn wait_for_publish(
+ config: &Config,
+ registry_src: SourceId,
+ pkg: &Package,
+ timeout: std::time::Duration,
+) -> CargoResult<()> {
+ let version_req = format!("={}", pkg.version());
+ let mut source = SourceConfigMap::empty(config)?.load(registry_src, &HashSet::new())?;
+ // Disable the source's built-in progress bars. Repeatedly showing a bunch
+ // of independent progress bars can be a little confusing. There is an
+ // overall progress bar managed here.
+ source.set_quiet(true);
+ let source_description = source.source_id().to_string();
+ let query = Dependency::parse(pkg.name(), Some(&version_req), registry_src)?;
+
+ let now = std::time::Instant::now();
+ let sleep_time = std::time::Duration::from_secs(1);
+ let max = timeout.as_secs() as usize;
+ // The short description does not include the registry name.
+ let short_pkg_description = format!("{} v{}", pkg.name(), pkg.version());
+ config.shell().status(
+ "Uploaded",
+ format!("{short_pkg_description} to {source_description}"),
+ )?;
+ config.shell().note(format!(
+ "Waiting for `{short_pkg_description}` to be available at {source_description}.\n\
+ You may press ctrl-c to skip waiting; the crate should be available shortly."
+ ))?;
+ let mut progress = Progress::with_style("Waiting", ProgressStyle::Ratio, config);
+ progress.tick_now(0, max, "")?;
+ let is_available = loop {
+ {
+ let _lock = config.acquire_package_cache_lock()?;
+ // Force re-fetching the source
+ //
+ // As pulling from a git source is expensive, we track when we've done it within the
+ // process to only do it once, but we are one of the rare cases that needs to do it
+ // multiple times
+ config
+ .updated_sources()
+ .remove(&source.replaced_source_id());
+ source.invalidate_cache();
+ let summaries = loop {
+ // Exact to avoid returning all for path/git
+ match source.query_vec(&query, QueryKind::Exact) {
+ std::task::Poll::Ready(res) => {
+ break res?;
+ }
+ std::task::Poll::Pending => source.block_until_ready()?,
+ }
+ };
+ if !summaries.is_empty() {
+ break true;
+ }
+ }
+
+ let elapsed = now.elapsed();
+ if timeout < elapsed {
+ config.shell().warn(format!(
+ "timed out waiting for `{short_pkg_description}` to be available in {source_description}",
+ ))?;
+ config.shell().note(
+ "The registry may have a backlog that is delaying making the \
+ crate available. The crate should be available soon.",
+ )?;
+ break false;
+ }
+
+ progress.tick_now(elapsed.as_secs() as usize, max, "")?;
+ std::thread::sleep(sleep_time);
+ };
+ if is_available {
+ config.shell().status(
+ "Published",
+ format!("{short_pkg_description} at {source_description}"),
+ )?;
+ }
+
+ Ok(())
+}
+
+/// Returns the `Registry` and the registry `SourceId`s, based on command-line and config settings.
+///
+/// * `token_from_cmdline`: The token from the command-line. If not set, uses the token
+/// from the config.
+/// * `index`: The index URL from the command-line.
+/// * `registry`: The registry name from the command-line. If neither
+/// `registry` nor `index` is set, then uses `crates-io`.
+/// * `force_update`: If `true`, forces the index to be updated.
+/// * `token_required`: If `Some`, an authentication token is required for the given mutation.
+fn registry(
+ config: &Config,
+ token_from_cmdline: Option<Secret<&str>>,
+ index: Option<&str>,
+ registry: Option<&str>,
+ force_update: bool,
+ token_required: Option<auth::Mutation<'_>>,
+) -> CargoResult<(Registry, RegistrySourceIds)> {
+ let source_ids = get_source_id(config, index, registry)?;
+
+ if token_required.is_some() && index.is_some() && token_from_cmdline.is_none() {
+ bail!("command-line argument --index requires --token to be specified");
+ }
+ if let Some(token) = token_from_cmdline {
+ auth::cache_token(config, &source_ids.original, token);
+ }
+
+ let cfg = {
+ let _lock = config.acquire_package_cache_lock()?;
+ let mut src = RegistrySource::remote(source_ids.replacement, &HashSet::new(), config)?;
+ // Only update the index if `force_update` is set.
+ if force_update {
+ src.invalidate_cache()
+ }
+ let cfg = loop {
+ match src.config()? {
+ Poll::Pending => src
+ .block_until_ready()
+ .with_context(|| format!("failed to update {}", source_ids.replacement))?,
+ Poll::Ready(cfg) => break cfg,
+ }
+ };
+ cfg.expect("remote registries must have config")
+ };
+ let api_host = cfg
+ .api
+ .ok_or_else(|| format_err!("{} does not support API commands", source_ids.replacement))?;
+ let token = if token_required.is_some() || cfg.auth_required {
+ Some(auth::auth_token(
+ config,
+ &source_ids.original,
+ None,
+ token_required,
+ )?)
+ } else {
+ None
+ };
+ let handle = http_handle(config)?;
+ Ok((
+ Registry::new_handle(api_host, token, handle, cfg.auth_required),
+ source_ids,
+ ))
+}
+
+/// Creates a new HTTP handle with appropriate global configuration for cargo.
+pub fn http_handle(config: &Config) -> CargoResult<Easy> {
+ let (mut handle, timeout) = http_handle_and_timeout(config)?;
+ timeout.configure(&mut handle)?;
+ Ok(handle)
+}
+
+pub fn http_handle_and_timeout(config: &Config) -> CargoResult<(Easy, HttpTimeout)> {
+ if config.frozen() {
+ bail!(
+ "attempting to make an HTTP request, but --frozen was \
+ specified"
+ )
+ }
+ if config.offline() {
+ bail!(
+ "attempting to make an HTTP request, but --offline was \
+ specified"
+ )
+ }
+
+ // The timeout option for libcurl by default times out the entire transfer,
+ // but we probably don't want this. Instead we only set timeouts for the
+ // connect phase as well as a "low speed" timeout so if we don't receive
+ // many bytes in a large-ish period of time then we time out.
+ let mut handle = Easy::new();
+ let timeout = configure_http_handle(config, &mut handle)?;
+ Ok((handle, timeout))
+}
+
+pub fn needs_custom_http_transport(config: &Config) -> CargoResult<bool> {
+ Ok(http_proxy_exists(config)?
+ || *config.http_config()? != Default::default()
+ || config.get_env_os("HTTP_TIMEOUT").is_some())
+}
+
+/// Configure a libcurl http handle with the default options for Cargo
+pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<HttpTimeout> {
+ let http = config.http_config()?;
+ if let Some(proxy) = http_proxy(config)? {
+ handle.proxy(&proxy)?;
+ }
+ if let Some(cainfo) = &http.cainfo {
+ let cainfo = cainfo.resolve_path(config);
+ handle.cainfo(&cainfo)?;
+ }
+ if let Some(check) = http.check_revoke {
+ handle.ssl_options(SslOpt::new().no_revoke(!check))?;
+ }
+
+ if let Some(user_agent) = &http.user_agent {
+ handle.useragent(user_agent)?;
+ } else {
+ handle.useragent(&format!("cargo {}", version()))?;
+ }
+
+ fn to_ssl_version(s: &str) -> CargoResult<SslVersion> {
+ let version = match s {
+ "default" => SslVersion::Default,
+ "tlsv1" => SslVersion::Tlsv1,
+ "tlsv1.0" => SslVersion::Tlsv10,
+ "tlsv1.1" => SslVersion::Tlsv11,
+ "tlsv1.2" => SslVersion::Tlsv12,
+ "tlsv1.3" => SslVersion::Tlsv13,
+ _ => bail!(
+ "Invalid ssl version `{s}`,\
+ choose from 'default', 'tlsv1', 'tlsv1.0', 'tlsv1.1', 'tlsv1.2', 'tlsv1.3'."
+ ),
+ };
+ Ok(version)
+ }
+
+ // Empty string accept encoding expands to the encodings supported by the current libcurl.
+ handle.accept_encoding("")?;
+ if let Some(ssl_version) = &http.ssl_version {
+ match ssl_version {
+ SslVersionConfig::Single(s) => {
+ let version = to_ssl_version(s.as_str())?;
+ handle.ssl_version(version)?;
+ }
+ SslVersionConfig::Range(SslVersionConfigRange { min, max }) => {
+ let min_version = min
+ .as_ref()
+ .map_or(Ok(SslVersion::Default), |s| to_ssl_version(s))?;
+ let max_version = max
+ .as_ref()
+ .map_or(Ok(SslVersion::Default), |s| to_ssl_version(s))?;
+ handle.ssl_min_max_version(min_version, max_version)?;
+ }
+ }
+ } else if cfg!(windows) {
+ // This is a temporary workaround for some bugs with libcurl and
+ // schannel and TLS 1.3.
+ //
+ // Our libcurl on Windows is usually built with schannel.
+ // On Windows 11 (or Windows Server 2022), libcurl recently (late
+ // 2022) gained support for TLS 1.3 with schannel, and it now defaults
+ // to 1.3. Unfortunately there have been some bugs with this.
+ // https://github.com/curl/curl/issues/9431 is the most recent. Once
+ // that has been fixed, and some time has passed where we can be more
+ // confident that the 1.3 support won't cause issues, this can be
+ // removed.
+ //
+ // Windows 10 is unaffected. libcurl does not support TLS 1.3 on
+ // Windows 10. (Windows 10 sorta had support, but it required enabling
+ // an advanced option in the registry which was buggy, and libcurl
+ // does runtime checks to prevent it.)
+ handle.ssl_min_max_version(SslVersion::Default, SslVersion::Tlsv12)?;
+ }
+
+ if let Some(true) = http.debug {
+ handle.verbose(true)?;
+ log::debug!("{:#?}", curl::Version::get());
+ handle.debug_function(|kind, data| {
+ let (prefix, level) = match kind {
+ InfoType::Text => ("*", Level::Debug),
+ InfoType::HeaderIn => ("<", Level::Debug),
+ InfoType::HeaderOut => (">", Level::Debug),
+ InfoType::DataIn => ("{", Level::Trace),
+ InfoType::DataOut => ("}", Level::Trace),
+ InfoType::SslDataIn | InfoType::SslDataOut => return,
+ _ => return,
+ };
+ match str::from_utf8(data) {
+ Ok(s) => {
+ for mut line in s.lines() {
+ if line.starts_with("Authorization:") {
+ line = "Authorization: [REDACTED]";
+ } else if line[..line.len().min(10)].eq_ignore_ascii_case("set-cookie") {
+ line = "set-cookie: [REDACTED]";
+ }
+ log!(level, "http-debug: {} {}", prefix, line);
+ }
+ }
+ Err(_) => {
+ log!(
+ level,
+ "http-debug: {} ({} bytes of data)",
+ prefix,
+ data.len()
+ );
+ }
+ }
+ })?;
+ }
+
+ HttpTimeout::new(config)
+}
+
+#[must_use]
+pub struct HttpTimeout {
+ pub dur: Duration,
+ pub low_speed_limit: u32,
+}
+
+impl HttpTimeout {
+ pub fn new(config: &Config) -> CargoResult<HttpTimeout> {
+ let http_config = config.http_config()?;
+ let low_speed_limit = http_config.low_speed_limit.unwrap_or(10);
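+ // Prefer the `http.timeout` config value, then the `HTTP_TIMEOUT` environment variable; default to 30 seconds.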
+ let seconds = http_config
+ .timeout
+ .or_else(|| {
+ config
+ .get_env("HTTP_TIMEOUT")
+ .ok()
+ .and_then(|s| s.parse().ok())
+ })
+ .unwrap_or(30);
+ Ok(HttpTimeout {
+ dur: Duration::new(seconds, 0),
+ low_speed_limit,
+ })
+ }
+
+ pub fn configure(&self, handle: &mut Easy) -> CargoResult<()> {
+ // The timeout option for libcurl by default times out the entire
+ // transfer, but we probably don't want this. Instead we only set
+ // timeouts for the connect phase as well as a "low speed" timeout so
+ // if we don't receive many bytes in a large-ish period of time then we
+ // time out.
+ handle.connect_timeout(self.dur)?;
+ handle.low_speed_time(self.dur)?;
+ handle.low_speed_limit(self.low_speed_limit)?;
+ Ok(())
+ }
+}
+
+/// Finds an explicit HTTP proxy if one is available.
+///
+/// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified
+/// via environment variables are picked up by libcurl.
+fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
+ let http = config.http_config()?;
+ if let Some(s) = &http.proxy {
+ return Ok(Some(s.clone()));
+ }
+ if let Ok(cfg) = git2::Config::open_default() {
+ if let Ok(s) = cfg.get_string("http.proxy") {
+ return Ok(Some(s));
+ }
+ }
+ Ok(None)
+}
+
+/// Determine if an http proxy exists.
+///
+/// Checks the following for existence, in order:
+///
+/// * cargo's `http.proxy`
+/// * git's `http.proxy`
+/// * `http_proxy` env var
+/// * `HTTP_PROXY` env var
+/// * `https_proxy` env var
+/// * `HTTPS_PROXY` env var
+fn http_proxy_exists(config: &Config) -> CargoResult<bool> {
+ if http_proxy(config)?.is_some() {
+ Ok(true)
+ } else {
+ Ok(["http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"]
+ .iter()
+ .any(|v| config.get_env(v).is_ok()))
+ }
+}
+
+pub fn registry_login(
+ config: &Config,
+ token: Option<Secret<&str>>,
+ reg: Option<&str>,
+ generate_keypair: bool,
+ secret_key_required: bool,
+ key_subject: Option<&str>,
+) -> CargoResult<()> {
+ let source_ids = get_source_id(config, None, reg)?;
+ let reg_cfg = auth::registry_credential_config(config, &source_ids.original)?;
+
+ let login_url = match registry(config, token.clone(), None, reg, false, None) {
+ Ok((registry, _)) => Some(format!("{}/me", registry.host())),
+ Err(e) if e.is::<AuthorizationError>() => e
+ .downcast::<AuthorizationError>()
+ .unwrap()
+ .login_url
+ .map(|u| u.to_string()),
+ Err(e) => return Err(e),
+ };
+ let new_token;
+ if generate_keypair || secret_key_required || key_subject.is_some() {
+ if !config.cli_unstable().registry_auth {
+ let flag = if generate_keypair {
+ "generate-keypair"
+ } else if secret_key_required {
+ "secret-key"
+ } else if key_subject.is_some() {
+ "key-subject"
+ } else {
+ unreachable!("how did we get here");
+ };
+ bail!(
+ "the `{flag}` flag is unstable, pass `-Z registry-auth` to enable it\n\
+ See https://github.com/rust-lang/cargo/issues/10519 for more \
+ information about the `{flag}` flag."
+ );
+ }
+ assert!(token.is_none());
+ // we are dealing with asymmetric tokens
+ let (old_secret_key, old_key_subject) = match &reg_cfg {
+ RegistryCredentialConfig::AsymmetricKey((old_secret_key, old_key_subject)) => {
+ (Some(old_secret_key), old_key_subject.clone())
+ }
+ _ => (None, None),
+ };
+ let secret_key: Secret<String>;
+ if generate_keypair {
+ assert!(!secret_key_required);
+ let kp = AsymmetricKeyPair::<pasetors::version3::V3>::generate().unwrap();
+ secret_key = Secret::default().map(|mut key| {
+ FormatAsPaserk::fmt(&kp.secret, &mut key).unwrap();
+ key
+ });
+ } else if secret_key_required {
+ assert!(!generate_keypair);
+ drop_println!(config, "please paste the API secret key below");
+ secret_key = Secret::default()
+ .map(|mut line| {
+ let input = io::stdin();
+ input
+ .lock()
+ .read_line(&mut line)
+ .with_context(|| "failed to read stdin")
+ .map(|_| line.trim().to_string())
+ })
+ .transpose()?;
+ } else {
+ secret_key = old_secret_key
+ .cloned()
+ .ok_or_else(|| anyhow!("need a secret_key to set a key_subject"))?;
+ }
+ if let Some(p) = paserk_public_from_paserk_secret(secret_key.as_deref()) {
+ drop_println!(config, "{}", &p);
+ } else {
+ bail!("not a validly formatted PASERK secret key");
+ }
+ new_token = RegistryCredentialConfig::AsymmetricKey((
+ secret_key,
+ match key_subject {
+ Some(key_subject) => Some(key_subject.to_string()),
+ None => old_key_subject,
+ },
+ ));
+ } else {
+ new_token = RegistryCredentialConfig::Token(match token {
+ Some(token) => token.owned(),
+ None => {
+ if let Some(login_url) = login_url {
+ drop_println!(
+ config,
+ "please paste the token found on {} below",
+ login_url
+ )
+ } else {
+ drop_println!(
+ config,
+ "please paste the token for {} below",
+ source_ids.original.display_registry_name()
+ )
+ }
+
+ let mut line = String::new();
+ let input = io::stdin();
+ input
+ .lock()
+ .read_line(&mut line)
+ .with_context(|| "failed to read stdin")?;
+ // Automatically remove `cargo login` from an inputted token to
+ // allow direct pastes from `registry.host()`/me.
+ Secret::from(line.replace("cargo login", "").trim().to_string())
+ }
+ });
+
+ if let Some(tok) = new_token.as_token() {
+ crates_io::check_token(tok.as_ref().expose())?;
+ }
+ }
+ if &reg_cfg == &new_token {
+ config.shell().status("Login", "already logged in")?;
+ return Ok(());
+ }
+
+ auth::login(config, &source_ids.original, new_token)?;
+
+ config.shell().status(
+ "Login",
+ format!("token for `{}` saved", reg.unwrap_or(CRATES_IO_DOMAIN)),
+ )?;
+ Ok(())
+}
+
+pub fn registry_logout(config: &Config, reg: Option<&str>) -> CargoResult<()> {
+ let source_ids = get_source_id(config, None, reg)?;
+ let reg_cfg = auth::registry_credential_config(config, &source_ids.original)?;
+ let reg_name = source_ids.original.display_registry_name();
+ if reg_cfg.is_none() {
+ config.shell().status(
+ "Logout",
+ format!("not currently logged in to `{}`", reg_name),
+ )?;
+ return Ok(());
+ }
+ auth::logout(config, &source_ids.original)?;
+ config.shell().status(
+ "Logout",
+ format!(
+ "token for `{}` has been removed from local storage",
+ reg_name
+ ),
+ )?;
+ let location = if source_ids.original.is_crates_io() {
+ "<https://crates.io/me>".to_string()
+ } else {
+ // The URL for the source requires network access to load the config.
+ // That could be a fairly heavy operation to perform just to provide a
+ // help message, so for now this just provides some generic text.
+ // Perhaps in the future this could have an API to fetch the config if
+ // it is cached, but avoid network access otherwise?
+ format!("the `{reg_name}` website")
+ };
+ config.shell().note(format!(
+ "This does not revoke the token on the registry server.\n \
+ If you need to revoke the token, visit {location} and follow the instructions there."
+ ))?;
+ Ok(())
+}
+
+pub struct OwnersOptions {
+ pub krate: Option<String>,
+ pub token: Option<Secret<String>>,
+ pub index: Option<String>,
+ pub to_add: Option<Vec<String>>,
+ pub to_remove: Option<Vec<String>>,
+ pub list: bool,
+ pub registry: Option<String>,
+}
+
+pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
+ let name = match opts.krate {
+ Some(ref name) => name.clone(),
+ None => {
+ let manifest_path = find_root_manifest_for_wd(config.cwd())?;
+ let ws = Workspace::new(&manifest_path, config)?;
+ ws.current()?.package_id().name().to_string()
+ }
+ };
+
+ let mutation = auth::Mutation::Owners { name: &name };
+
+ let (mut registry, _) = registry(
+ config,
+ opts.token.as_ref().map(Secret::as_deref),
+ opts.index.as_deref(),
+ opts.registry.as_deref(),
+ true,
+ Some(mutation),
+ )?;
+
+ if let Some(ref v) = opts.to_add {
+ let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
+ let msg = registry.add_owners(&name, &v).with_context(|| {
+ format!(
+ "failed to invite owners to crate `{}` on registry at {}",
+ name,
+ registry.host()
+ )
+ })?;
+
+ config.shell().status("Owner", msg)?;
+ }
+
+ if let Some(ref v) = opts.to_remove {
+ let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
+ config
+ .shell()
+ .status("Owner", format!("removing {:?} from crate {}", v, name))?;
+ registry.remove_owners(&name, &v).with_context(|| {
+ format!(
+ "failed to remove owners from crate `{}` on registry at {}",
+ name,
+ registry.host()
+ )
+ })?;
+ }
+
+ if opts.list {
+ let owners = registry.list_owners(&name).with_context(|| {
+ format!(
+ "failed to list owners of crate `{}` on registry at {}",
+ name,
+ registry.host()
+ )
+ })?;
+ for owner in owners.iter() {
+ drop_print!(config, "{}", owner.login);
+ match (owner.name.as_ref(), owner.email.as_ref()) {
+ (Some(name), Some(email)) => drop_println!(config, " ({} <{}>)", name, email),
+ (Some(s), None) | (None, Some(s)) => drop_println!(config, " ({})", s),
+ (None, None) => drop_println!(config),
+ }
+ }
+ }
+
+ Ok(())
+}
+
+pub fn yank(
+ config: &Config,
+ krate: Option<String>,
+ version: Option<String>,
+ token: Option<Secret<String>>,
+ index: Option<String>,
+ undo: bool,
+ reg: Option<String>,
+) -> CargoResult<()> {
+ let name = match krate {
+ Some(name) => name,
+ None => {
+ let manifest_path = find_root_manifest_for_wd(config.cwd())?;
+ let ws = Workspace::new(&manifest_path, config)?;
+ ws.current()?.package_id().name().to_string()
+ }
+ };
+ let version = match version {
+ Some(v) => v,
+ None => bail!("a version must be specified to yank"),
+ };
+
+ let message = if undo {
+ auth::Mutation::Unyank {
+ name: &name,
+ vers: &version,
+ }
+ } else {
+ auth::Mutation::Yank {
+ name: &name,
+ vers: &version,
+ }
+ };
+
+ let (mut registry, _) = registry(
+ config,
+ token.as_ref().map(Secret::as_deref),
+ index.as_deref(),
+ reg.as_deref(),
+ true,
+ Some(message),
+ )?;
+
+ let package_spec = format!("{}@{}", name, version);
+ if undo {
+ config.shell().status("Unyank", package_spec)?;
+ registry.unyank(&name, &version).with_context(|| {
+ format!(
+ "failed to undo a yank from the registry at {}",
+ registry.host()
+ )
+ })?;
+ } else {
+ config.shell().status("Yank", package_spec)?;
+ registry
+ .yank(&name, &version)
+ .with_context(|| format!("failed to yank from the registry at {}", registry.host()))?;
+ }
+
+ Ok(())
+}
+
+/// Gets the SourceId for an index or registry setting.
+///
+/// The `index` and `reg` values are from the command-line or config settings.
+/// If both are None, and no source-replacement is configured, returns the source for crates.io.
+/// If both are None, and source replacement is configured, returns an error.
+///
+/// The source for crates.io may be GitHub, index.crates.io, or a test-only registry depending
+/// on configuration.
+///
+/// If `reg` is set, source replacement is not followed.
+///
+/// The return value is a pair of `SourceId`s: The first may be a built-in replacement of
+/// crates.io (such as index.crates.io), while the second is always the original source.
+fn get_source_id(
+ config: &Config,
+ index: Option<&str>,
+ reg: Option<&str>,
+) -> CargoResult<RegistrySourceIds> {
+ let sid = match (reg, index) {
+ (None, None) => SourceId::crates_io(config)?,
+ (_, Some(i)) => SourceId::for_registry(&i.into_url()?)?,
+ (Some(r), None) => SourceId::alt_registry(config, r)?,
+ };
+ // Load source replacements that are built-in to Cargo.
+ let builtin_replacement_sid = SourceConfigMap::empty(config)?
+ .load(sid, &HashSet::new())?
+ .replaced_source_id();
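+ // Also load the user's configured source replacement (if any) so it can be detected below.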
+ let replacement_sid = SourceConfigMap::new(config)?
+ .load(sid, &HashSet::new())?
+ .replaced_source_id();
+ if reg.is_none() && index.is_none() && replacement_sid != builtin_replacement_sid {
+ // Neither --registry nor --index was passed and the user has configured source-replacement.
+ if let Some(replacement_name) = replacement_sid.alt_registry_key() {
+ bail!("crates-io is replaced with remote registry {replacement_name};\ninclude `--registry {replacement_name}` or `--registry crates-io`");
+ } else {
+ bail!("crates-io is replaced with non-remote-registry source {replacement_sid};\ninclude `--registry crates-io` to use crates.io");
+ }
+ } else {
+ Ok(RegistrySourceIds {
+ original: sid,
+ replacement: builtin_replacement_sid,
+ })
+ }
+}
+
+struct RegistrySourceIds {
+ /// Use when looking up the auth token, or writing out `Cargo.lock`
+ original: SourceId,
+ /// Use when interacting with the source (querying / publishing, etc.)
+ ///
+ /// The source for crates.io may be replaced by a built-in source for accessing crates.io with
+ /// the sparse protocol, or a source for the testing framework (when the replace_crates_io
+ /// function is used)
+ ///
+ /// User-defined source replacement is not applied.
+ replacement: SourceId,
+}
+
+pub fn search(
+ query: &str,
+ config: &Config,
+ index: Option<String>,
+ limit: u32,
+ reg: Option<String>,
+) -> CargoResult<()> {
+ let (mut registry, source_ids) =
+ registry(config, None, index.as_deref(), reg.as_deref(), false, None)?;
+ let (crates, total_crates) = registry.search(query, limit).with_context(|| {
+ format!(
+ "failed to retrieve search results from the registry at {}",
+ registry.host()
+ )
+ })?;
+
+ let names = crates
+ .iter()
+ .map(|krate| format!("{} = \"{}\"", krate.name, krate.max_version))
+ .collect::<Vec<String>>();
+
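+ // Align descriptions in a column after the longest `name = "version"` entry, keeping at
+ // least 80 characters for each (truncated) description.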
+ let description_margin = names.iter().map(|s| s.len() + 4).max().unwrap_or_default();
+
+ let description_length = cmp::max(80, 128 - description_margin);
+
+ let descriptions = crates.iter().map(|krate| {
+ krate
+ .description
+ .as_ref()
+ .map(|desc| truncate_with_ellipsis(&desc.replace("\n", " "), description_length))
+ });
+
+ for (name, description) in names.into_iter().zip(descriptions) {
+ let line = match description {
+ Some(desc) => {
+ let space = repeat(' ')
+ .take(description_margin - name.len())
+ .collect::<String>();
+ name + &space + "# " + &desc
+ }
+ None => name,
+ };
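+ // Print the line, highlighting each occurrence of the search query in bold green.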
+ let mut fragments = line.split(query).peekable();
+ while let Some(fragment) = fragments.next() {
+ let _ = config.shell().write_stdout(fragment, &ColorSpec::new());
+ if fragments.peek().is_some() {
+ let _ = config
+ .shell()
+ .write_stdout(query, &ColorSpec::new().set_bold(true).set_fg(Some(Green)));
+ }
+ }
+ let _ = config.shell().write_stdout("\n", &ColorSpec::new());
+ }
+
+ let search_max_limit = 100;
+ if total_crates > limit && limit < search_max_limit {
+ let _ = config.shell().write_stdout(
+ format_args!(
+ "... and {} crates more (use --limit N to see more)\n",
+ total_crates - limit
+ ),
+ &ColorSpec::new(),
+ );
+ } else if total_crates > limit && limit >= search_max_limit {
+ let extra = if source_ids.original.is_crates_io() {
+ let url = Url::parse_with_params("https://crates.io/search", &[("q", query)])?;
+ format!(" (go to {url} to see more)")
+ } else {
+ String::new()
+ };
+ let _ = config.shell().write_stdout(
+ format_args!("... and {} crates more{}\n", total_crates - limit, extra),
+ &ColorSpec::new(),
+ );
+ }
+
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/ops/resolve.rs b/src/tools/cargo/src/cargo/ops/resolve.rs
new file mode 100644
index 000000000..ea5eded4a
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/resolve.rs
@@ -0,0 +1,866 @@
+//! High-level APIs for executing the resolver.
+//!
+//! This module provides functions for running the resolver given a workspace, including loading
+//! the `Cargo.lock` file and checking if it needs updating.
+//!
+//! There are roughly 3 main functions:
+//!
+//! - [`resolve_ws`]: A simple, high-level function with no options.
+//! - [`resolve_ws_with_opts`]: A medium-level function with options like
+//! user-provided features. This is the most appropriate function to use in
+//! most cases.
+//! - [`resolve_with_previous`]: A low-level function for running the resolver,
+//! providing the most power and flexibility.
+//!
+//! ### Data Structures
+//!
+//! - [`Workspace`]:
+//! Usually created by [`crate::util::command_prelude::ArgMatchesExt::workspace`], which discovers the root of the
+//! workspace and loads all the workspace members as [`Package`] objects
+//! - [`Package`]
+//! Corresponds to a `Cargo.toml` manifest (deserialized as [`Manifest`]) and its associated files.
+//! - [`Target`]s are crates such as the library, binaries, integration test, or examples.
+//! They are what is actually compiled by `rustc`.
+//! Each `Target` defines a crate root, like `src/lib.rs` or `examples/foo.rs`.
+//! - [`PackageId`] --- A unique identifier for a package.
+//! - [`PackageRegistry`]:
+//! The primary interface for how the dependency
+//! resolver finds packages. It contains the `SourceMap`, and handles things
+//! like the `[patch]` table. The dependency resolver
+//! sends a query to the `PackageRegistry` to "get me all packages that match
+//! this dependency declaration". The `Registry` trait provides a generic interface
+//! to the `PackageRegistry`, but this is only used for providing an alternate
+//! implementation of the `PackageRegistry` for testing.
+//! - [`SourceMap`]: Map of all available sources.
+//! - [`Source`]: An abstraction for something that can fetch packages (a remote
+//! registry, a git repo, the local filesystem, etc.). Check out the [source
+//! implementations] for all the details about registries, indexes, git
+//! dependencies, etc.
+//! * [`SourceId`]: A unique identifier for a source.
+//! - [`Summary`]: A subset of a [`Manifest`], and is essentially
+//! the information that can be found in a registry index. Queries against the
+//! `PackageRegistry` yield a `Summary`. The resolver uses the summary
+//! information to build the dependency graph.
+//! - [`PackageSet`] --- Contains all of the `Package` objects. This works with the
+//! [`Downloads`] struct to coordinate downloading packages. It has a reference
+//! to the `SourceMap` to get the `Source` objects which tell the `Downloads`
+//! struct which URLs to fetch.
+//!
+//! [`Package`]: crate::core::package
+//! [`Target`]: crate::core::Target
+//! [`Manifest`]: crate::core::Manifest
+//! [`Source`]: crate::core::Source
+//! [`SourceMap`]: crate::core::SourceMap
+//! [`PackageRegistry`]: crate::core::registry::PackageRegistry
+//! [source implementations]: crate::sources
+//! [`Downloads`]: crate::core::package::Downloads
+
+use crate::core::compiler::{CompileKind, RustcTargetData};
+use crate::core::registry::{LockedPatchDependency, PackageRegistry};
+use crate::core::resolver::features::{
+ CliFeatures, FeatureOpts, FeatureResolver, ForceAllTargets, RequestedFeatures, ResolvedFeatures,
+};
+use crate::core::resolver::{
+ self, HasDevUnits, Resolve, ResolveOpts, ResolveVersion, VersionPreferences,
+};
+use crate::core::summary::Summary;
+use crate::core::Feature;
+use crate::core::{GitReference, PackageId, PackageIdSpec, PackageSet, SourceId, Workspace};
+use crate::ops;
+use crate::sources::PathSource;
+use crate::util::errors::CargoResult;
+use crate::util::{profile, CanonicalUrl};
+use anyhow::Context as _;
+use log::{debug, trace};
+use std::collections::{HashMap, HashSet};
+
+/// Result for `resolve_ws_with_opts`.
+pub struct WorkspaceResolve<'cfg> {
+ /// Packages to be downloaded.
+ pub pkg_set: PackageSet<'cfg>,
+ /// The resolve for the entire workspace.
+ ///
+ /// This may be `None` for things like `cargo install` and `-Zavoid-dev-deps`.
+ /// This does not include `paths` overrides.
+ pub workspace_resolve: Option<Resolve>,
+ /// The narrowed resolve, with the specific features enabled, and only the
+ /// given package specs requested.
+ pub targeted_resolve: Resolve,
+ /// The features activated per package.
+ pub resolved_features: ResolvedFeatures,
+}
+
+const UNUSED_PATCH_WARNING: &str = "\
+Check that the patched package version and available features are compatible
+with the dependency requirements. If the patch has a different version from
+what is locked in the Cargo.lock file, run `cargo update` to use the new
+version. This may also occur with an optional dependency that is not enabled.";
+
+/// Resolves all dependencies for the workspace using the previous
+/// lock file as a guide if present.
+///
+/// This function will also write the result of resolution as a new lock file
+/// (unless it is an ephemeral workspace such as `cargo install` or `cargo
+/// package`).
+///
+/// This is a simple interface used by commands like `clean`, `fetch`, and
+/// `package`, which don't specify any options or features.
+pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> {
+ let mut registry = PackageRegistry::new(ws.config())?;
+ let resolve = resolve_with_registry(ws, &mut registry)?;
+ let packages = get_resolved_packages(&resolve, registry)?;
+ Ok((packages, resolve))
+}
+
+/// Resolves dependencies for some packages of the workspace,
+/// taking into account `paths` overrides and activated features.
+///
+/// This function will also write the result of resolution as a new lock file
+/// (unless `Workspace::require_optional_deps` is false, as is the case for
+/// `cargo install` or `-Z avoid-dev-deps`, or the workspace is ephemeral, as
+/// with `cargo install` or `cargo package`).
+///
+/// `specs` may be empty, which indicates it should resolve all workspace
+/// members. In this case, `opts.all_features` must be `true`.
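+///
+/// A rough usage sketch (not a doctest; mirrors how `cargo tree` drives this,
+/// with the target data, specs, and CLI features prepared by the caller):
+///
+/// ```ignore
+/// let ws_resolve = ops::resolve_ws_with_opts(
+///     ws,
+///     &target_data,
+///     &requested_kinds,
+///     &cli_features,
+///     &specs,
+///     HasDevUnits::No,
+///     ForceAllTargets::No,
+/// )?;
+/// // The narrowed resolve and activated features describe what to build;
+/// // the downloaded packages come from `pkg_set`.
+/// let packages: Vec<_> = ws_resolve.pkg_set.packages().collect();
+/// ```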
+pub fn resolve_ws_with_opts<'cfg>(
+ ws: &Workspace<'cfg>,
+ target_data: &RustcTargetData<'cfg>,
+ requested_targets: &[CompileKind],
+ cli_features: &CliFeatures,
+ specs: &[PackageIdSpec],
+ has_dev_units: HasDevUnits,
+ force_all_targets: ForceAllTargets,
+) -> CargoResult<WorkspaceResolve<'cfg>> {
+ let mut registry = PackageRegistry::new(ws.config())?;
+ let mut add_patches = true;
+ let resolve = if ws.ignore_lock() {
+ None
+ } else if ws.require_optional_deps() {
+ // First, resolve the root_package's *listed* dependencies, as well as
+ // download and update all remotes and such.
+ let resolve = resolve_with_registry(ws, &mut registry)?;
+ // No need to add patches again, `resolve_with_registry` has done it.
+ add_patches = false;
+
+ // Second, resolve with precisely what we're doing. Filter out
+ // transitive dependencies if necessary, specify features, handle
+ // overrides, etc.
+ let _p = profile::start("resolving with overrides...");
+
+ add_overrides(&mut registry, ws)?;
+
+ for &(ref replace_spec, ref dep) in ws.root_replace() {
+ if !resolve
+ .iter()
+ .any(|r| replace_spec.matches(r) && !dep.matches_id(r))
+ {
+ ws.config()
+ .shell()
+ .warn(format!("package replacement is not used: {}", replace_spec))?
+ }
+
+ if dep.features().len() != 0 || !dep.uses_default_features() {
+ ws.config()
+ .shell()
+ .warn(format!(
+ "replacement for `{}` uses the features mechanism. \
+ default-features and features will not take effect because the replacement dependency does not support this mechanism",
+ dep.package_name()
+ ))?
+ }
+ }
+
+ Some(resolve)
+ } else {
+ ops::load_pkg_lockfile(ws)?
+ };
+
+ let resolved_with_overrides = resolve_with_previous(
+ &mut registry,
+ ws,
+ cli_features,
+ has_dev_units,
+ resolve.as_ref(),
+ None,
+ specs,
+ add_patches,
+ )?;
+
+ let pkg_set = get_resolved_packages(&resolved_with_overrides, registry)?;
+
+ let member_ids = ws
+ .members_with_features(specs, cli_features)?
+ .into_iter()
+ .map(|(p, _fts)| p.package_id())
+ .collect::<Vec<_>>();
+ pkg_set.download_accessible(
+ &resolved_with_overrides,
+ &member_ids,
+ has_dev_units,
+ requested_targets,
+ target_data,
+ force_all_targets,
+ )?;
+
+ let feature_opts = FeatureOpts::new(ws, has_dev_units, force_all_targets)?;
+ let resolved_features = FeatureResolver::resolve(
+ ws,
+ target_data,
+ &resolved_with_overrides,
+ &pkg_set,
+ cli_features,
+ specs,
+ requested_targets,
+ feature_opts,
+ )?;
+
+ pkg_set.warn_no_lib_packages_and_artifact_libs_overlapping_deps(
+ ws,
+ &resolved_with_overrides,
+ &member_ids,
+ has_dev_units,
+ requested_targets,
+ target_data,
+ force_all_targets,
+ )?;
+
+ Ok(WorkspaceResolve {
+ pkg_set,
+ workspace_resolve: resolve,
+ targeted_resolve: resolved_with_overrides,
+ resolved_features,
+ })
+}
+
+fn resolve_with_registry<'cfg>(
+ ws: &Workspace<'cfg>,
+ registry: &mut PackageRegistry<'cfg>,
+) -> CargoResult<Resolve> {
+ let prev = ops::load_pkg_lockfile(ws)?;
+ let mut resolve = resolve_with_previous(
+ registry,
+ ws,
+ &CliFeatures::new_all(true),
+ HasDevUnits::Yes,
+ prev.as_ref(),
+ None,
+ &[],
+ true,
+ )?;
+
+ if !ws.is_ephemeral() && ws.require_optional_deps() {
+ ops::write_pkg_lockfile(ws, &mut resolve)?;
+ }
+ Ok(resolve)
+}
+
+/// Resolves all dependencies for a package using an optional previous instance
+/// of resolve to guide the resolution process.
+///
+/// This also takes an optional hash set, `to_avoid`, which is a list of package
+/// IDs that should be avoided when consulting the previous instance of resolve
+/// (often used in conjunction with updates).
+///
+/// The previous resolve normally comes from a lock file. This function does not
+/// read or write lock files from the filesystem.
+///
+/// `specs` may be empty, which indicates it should resolve all workspace
+/// members. In this case, `opts.all_features` must be `true`.
+///
+/// If `register_patches` is true, then entries from the `[patch]` table in
+/// the manifest will be added to the given `PackageRegistry`.
+pub fn resolve_with_previous<'cfg>(
+ registry: &mut PackageRegistry<'cfg>,
+ ws: &Workspace<'cfg>,
+ cli_features: &CliFeatures,
+ has_dev_units: HasDevUnits,
+ previous: Option<&Resolve>,
+ to_avoid: Option<&HashSet<PackageId>>,
+ specs: &[PackageIdSpec],
+ register_patches: bool,
+) -> CargoResult<Resolve> {
+ // We only want one Cargo at a time resolving a crate graph since this can
+ // involve a lot of frobbing of the global caches.
+ let _lock = ws.config().acquire_package_cache_lock()?;
+
+ // Here we place an artificial limitation that all non-registry sources
+ // cannot be locked at more than one revision. This means that if a Git
+ // repository provides more than one package, they must all be updated in
+ // step when any of them are updated.
+ //
+ // TODO: this seems like a hokey reason to single out the registry as being
+ // different.
+ let to_avoid_sources: HashSet<SourceId> = to_avoid
+ .map(|set| {
+ set.iter()
+ .map(|p| p.source_id())
+ .filter(|s| !s.is_registry())
+ .collect()
+ })
+ .unwrap_or_default();
+
+ let pre_patch_keep = |p: &PackageId| {
+ !to_avoid_sources.contains(&p.source_id())
+ && match to_avoid {
+ Some(set) => !set.contains(p),
+ None => true,
+ }
+ };
+
+ // While registering patches, we will record preferences for particular versions
+ // of various packages.
+ let mut version_prefs = VersionPreferences::default();
+
+ // This is a set of PackageIds of `[patch]` entries, and some related locked PackageIds, for
+ // which locking should be avoided (but which will still be preferred when searching
+ // dependencies, via the preferences recorded in `version_prefs` above).
+ let mut avoid_patch_ids = HashSet::new();
+
+ if register_patches {
+ for (url, patches) in ws.root_patch()?.iter() {
+ for patch in patches {
+ version_prefs.prefer_dependency(patch.clone());
+ }
+ let previous = match previous {
+ Some(r) => r,
+ None => {
+ let patches: Vec<_> = patches.iter().map(|p| (p, None)).collect();
+ let unlock_ids = registry.patch(url, &patches)?;
+ // Since nothing is locked, this should not return anything.
+ assert!(unlock_ids.is_empty());
+ continue;
+ }
+ };
+
+ // This is a list of pairs where the first element of the pair is
+ // the raw `Dependency` which matches what's listed in `Cargo.toml`.
+ // The second element is, if present, the "locked" version of
+ // the `Dependency` as well as the `PackageId` that it previously
+ // resolved to. This second element is calculated by looking at the
+ // previous resolve graph, which is primarily what's done here to
+ // build the `registrations` list.
+ let mut registrations = Vec::new();
+ for dep in patches {
+ let candidates = || {
+ previous
+ .iter()
+ .chain(previous.unused_patches().iter().cloned())
+ .filter(&pre_patch_keep)
+ };
+
+ let lock = match candidates().find(|id| dep.matches_id(*id)) {
+ // If we found an exactly matching candidate in our list of
+ // candidates, then that's the one to use.
+ Some(package_id) => {
+ let mut locked_dep = dep.clone();
+ locked_dep.lock_to(package_id);
+ Some(LockedPatchDependency {
+ dependency: locked_dep,
+ package_id,
+ alt_package_id: None,
+ })
+ }
+ None => {
+ // If the candidate does not have a matching source id
+ // then we may still have a lock candidate. If we're
+ // loading a v2-encoded resolve graph and `dep` is a
+ // git dep with `branch = 'master'`, then this should
+ // also match candidates without `branch = 'master'`
+ // (which is now treated separately in Cargo).
+ //
+ // In this scenario we try to convert candidates located
+ // in the resolve graph to explicitly having the
+ // `master` branch (if they otherwise point to
+ // `DefaultBranch`). If this works and our `dep`
+ // matches that then this is something we'll lock to.
+ match candidates().find(|&id| {
+ match master_branch_git_source(id, previous) {
+ Some(id) => dep.matches_id(id),
+ None => false,
+ }
+ }) {
+ Some(id_using_default) => {
+ let id_using_master = id_using_default.with_source_id(
+ dep.source_id().with_precise(
+ id_using_default
+ .source_id()
+ .precise()
+ .map(|s| s.to_string()),
+ ),
+ );
+
+ let mut locked_dep = dep.clone();
+ locked_dep.lock_to(id_using_master);
+ Some(LockedPatchDependency {
+ dependency: locked_dep,
+ package_id: id_using_master,
+ // Note that this is where the magic
+ // happens, where the resolve graph
+ // probably has locks pointing to
+ // DefaultBranch sources, and by including
+ // this here those will get transparently
+ // rewritten to Branch("master") which we
+ // have a lock entry for.
+ alt_package_id: Some(id_using_default),
+ })
+ }
+
+ // No locked candidate was found
+ None => None,
+ }
+ }
+ };
+
+ registrations.push((dep, lock));
+ }
+
+ let canonical = CanonicalUrl::new(url)?;
+ for (orig_patch, unlock_id) in registry.patch(url, &registrations)? {
+ // Avoid the locked patch ID.
+ avoid_patch_ids.insert(unlock_id);
+ // Also avoid the thing it is patching.
+ avoid_patch_ids.extend(previous.iter().filter(|id| {
+ orig_patch.matches_ignoring_source(*id)
+ && *id.source_id().canonical_url() == canonical
+ }));
+ }
+ }
+ }
+ debug!("avoid_patch_ids={:?}", avoid_patch_ids);
+
+ let keep = |p: &PackageId| pre_patch_keep(p) && !avoid_patch_ids.contains(p);
+
+ let dev_deps = ws.require_optional_deps() || has_dev_units == HasDevUnits::Yes;
+
+ if let Some(r) = previous {
+ trace!("previous: {:?}", r);
+
+ // In the case where a previous instance of resolve is available, we
+ // want to lock as many packages as possible to the previous version
+ // without disturbing the graph structure.
+ register_previous_locks(ws, registry, r, &keep, dev_deps);
+
+ // Prefer to use anything in the previous lock file, aka we want to have conservative updates.
+ for id in r.iter().filter(keep) {
+ debug!("attempting to prefer {}", id);
+ version_prefs.prefer_package_id(id);
+ }
+ }
+
+ if register_patches {
+ registry.lock_patches();
+ }
+
+ // Some packages are already loaded when setting up a workspace. This
+ // makes it so anything that was already loaded will not be loaded again.
+ // Without this there were cases where members would be parsed multiple times.
+ ws.preload(registry);
+
+ // In case any members were not already loaded or the Workspace is_ephemeral.
+ for member in ws.members() {
+ registry.add_sources(Some(member.package_id().source_id()))?;
+ }
+
+ let summaries: Vec<(Summary, ResolveOpts)> = ws
+ .members_with_features(specs, cli_features)?
+ .into_iter()
+ .map(|(member, features)| {
+ let summary = registry.lock(member.summary().clone());
+ (
+ summary,
+ ResolveOpts {
+ dev_deps,
+ features: RequestedFeatures::CliFeatures(features),
+ },
+ )
+ })
+ .collect();
+
+ let root_replace = ws.root_replace();
+
+ let replace = match previous {
+ Some(r) => root_replace
+ .iter()
+ .map(|&(ref spec, ref dep)| {
+ for (&key, &val) in r.replacements().iter() {
+ if spec.matches(key) && dep.matches_id(val) && keep(&val) {
+ let mut dep = dep.clone();
+ dep.lock_to(val);
+ return (spec.clone(), dep);
+ }
+ }
+ (spec.clone(), dep.clone())
+ })
+ .collect::<Vec<_>>(),
+ None => root_replace.to_vec(),
+ };
+
+ ws.preload(registry);
+ let mut resolved = resolver::resolve(
+ &summaries,
+ &replace,
+ registry,
+ &version_prefs,
+ Some(ws.config()),
+ ws.unstable_features()
+ .require(Feature::public_dependency())
+ .is_ok(),
+ )?;
+ let patches: Vec<_> = registry
+ .patches()
+ .values()
+ .flat_map(|v| v.iter().cloned())
+ .collect();
+ resolved.register_used_patches(&patches[..]);
+
+ if register_patches && !resolved.unused_patches().is_empty() {
+ emit_warnings_of_unused_patches(ws, &resolved, registry)?;
+ }
+
+ if let Some(previous) = previous {
+ resolved.merge_from(previous)?;
+ }
+ Ok(resolved)
+}
+
+/// Read the `paths` configuration variable to discover all path overrides that
+/// have been configured.
+pub fn add_overrides<'a>(
+ registry: &mut PackageRegistry<'a>,
+ ws: &Workspace<'a>,
+) -> CargoResult<()> {
+ let config = ws.config();
+ let paths = match config.get_list("paths")? {
+ Some(list) => list,
+ None => return Ok(()),
+ };
+
+ let paths = paths.val.iter().map(|(s, def)| {
+ // The path listed next to the string is the config file in which the
+ // key was located, so we want to pop off the `.cargo/config` component
+ // to get the directory containing the `.cargo` folder.
+ (def.root(config).join(s), def)
+ });
+
+ for (path, definition) in paths {
+ let id = SourceId::for_path(&path)?;
+ let mut source = PathSource::new_recursive(&path, id, ws.config());
+ source.update().with_context(|| {
+ format!(
+ "failed to update path override `{}` \
+ (defined in `{}`)",
+ path.display(),
+ definition
+ )
+ })?;
+ registry.add_override(Box::new(source));
+ }
+ Ok(())
+}
+
+pub fn get_resolved_packages<'cfg>(
+ resolve: &Resolve,
+ registry: PackageRegistry<'cfg>,
+) -> CargoResult<PackageSet<'cfg>> {
+ let ids: Vec<PackageId> = resolve.iter().collect();
+ registry.get(&ids)
+}
+
+/// In this function we're responsible for informing the `registry` of all
+/// locked dependencies from the previous lock file we had, `resolve`.
+///
+/// This gets particularly tricky for a couple of reasons. The first is that we
+/// want all updates to be conservative, so we actually want to take the
+/// `resolve` into account (and avoid unnecessary registry updates and such).
+/// The second, however, is that we want to be resilient to updates of
+/// manifests. For example if a dependency is added or a version is changed we
+/// want to make sure that we properly re-resolve (conservatively) instead of
+/// providing an opaque error.
+///
+/// The logic here is somewhat subtle, but there should be more comments below to
+/// clarify things.
+///
+/// Note that this function, at the time of this writing, is basically the
+/// entire fix for issue #4127.
+fn register_previous_locks(
+ ws: &Workspace<'_>,
+ registry: &mut PackageRegistry<'_>,
+ resolve: &Resolve,
+ keep: &dyn Fn(&PackageId) -> bool,
+ dev_deps: bool,
+) {
+ let path_pkg = |id: SourceId| {
+ if !id.is_path() {
+ return None;
+ }
+ if let Ok(path) = id.url().to_file_path() {
+ if let Ok(pkg) = ws.load(&path.join("Cargo.toml")) {
+ return Some(pkg);
+ }
+ }
+ None
+ };
+
+ // Ok so we've been passed in a `keep` function which basically says "if I
+ // return `true` then this package wasn't listed for an update on the command
+ // line". That is, if we run `cargo update -p foo` then `keep(bar)` will return
+ // `true`, whereas `keep(foo)` will return `false` (roughly speaking).
+ //
+ // This isn't actually quite what we want, however. Instead we want to
+ // further refine this `keep` function with *all transitive dependencies* of
+ // the packages we're not keeping. For example, consider a case like this:
+ //
+ // * There's a crate `log`.
+ // * There's a crate `serde` which depends on `log`.
+ //
+ // Let's say we then run `cargo update -p serde`. This may *also* want to
+ // update the `log` dependency as our newer version of `serde` may have a
+ // new minimum version required for `log`. Now this isn't always guaranteed
+ // to work. What'll happen here is we *won't* lock the `log` dependency nor
+ // the `log` crate itself, but we will inform the registry "please prefer
+ // this version of `log`". That way if our newer version of serde works with
+ // the older version of `log`, we conservatively won't update `log`. If,
+ // however, nothing else in the dependency graph depends on `log` and the
+ // newer version of `serde` requires a new version of `log` it'll get pulled
+ // in (as we didn't accidentally lock it to an old version).
+ //
+ // Additionally, here we process all path dependencies listed in the previous
+ // resolve. Not only can their dependencies change, but the version of the
+ // package itself can change as well. If this ends up happening
+ // then we want to make sure we don't lock a package ID node that doesn't
+ // actually exist. Note that we don't do transitive visits of all the
+ // package's dependencies here as that'll be covered below to poison those
+ // if they changed.
+ let mut avoid_locking = HashSet::new();
+ registry.add_to_yanked_whitelist(resolve.iter().filter(keep));
+ for node in resolve.iter() {
+ if !keep(&node) {
+ add_deps(resolve, node, &mut avoid_locking);
+ } else if let Some(pkg) = path_pkg(node.source_id()) {
+ if pkg.package_id() != node {
+ avoid_locking.insert(node);
+ }
+ }
+ }
+
+ // Ok, but the above loop isn't the entire story! Updates to the dependency
+ // graph can come from two locations, the `cargo update` command or
+ // manifests themselves. For example a manifest on the filesystem may
+ // have been updated to have an updated version requirement on `serde`. In
+ // this case both `keep(serde)` and `keep(log)` return `true` (the `keep`
+ // that's an argument to this function). We, however, don't want to keep
+ // either of those! Otherwise we'll get obscure resolve errors about locked
+ // versions.
+ //
+ // To solve this problem we iterate over all packages with path sources
+ // (aka ones with manifests that are changing) and take a look at all of
+ // their dependencies. If any dependency does not match something in the
+ // previous lock file, then we're guaranteed that the main resolver will
+ // update the source of this dependency no matter what. Knowing this we
+ // poison all packages from the same source, forcing them all to get
+ // updated.
+ //
+ // This may seem like a heavy hammer, and it is! It means that if you change
+ // anything from crates.io then all of crates.io becomes unlocked. Note,
+ // however, that we still want conservative updates. This currently happens
+ // because the first candidate the resolver picks is the previously locked
+ // version, and only if that fails to activate do we move on and try
+ // a different version (giving the guise of conservative updates).
+ //
+ // For example let's say we had `serde = "0.1"` written in our manifest.
+ // When we later edit this to `serde = "0.1.3"` we don't want to lock serde
+ // at its old version, 0.1.1. Instead we want to allow it to update to
+ // `0.1.3` and update its own dependencies (like above). To do this *all
+ // crates from crates.io* are not locked (aka added to `avoid_locking`).
+ // For dependencies like `log`, their previous version in the lock file will
+ // come up first, before newer versions, if newer versions are available.
+ let mut path_deps = ws.members().cloned().collect::<Vec<_>>();
+ let mut visited = HashSet::new();
+ while let Some(member) = path_deps.pop() {
+ if !visited.insert(member.package_id()) {
+ continue;
+ }
+ let is_ws_member = ws.is_member(&member);
+ for dep in member.dependencies() {
+ // If this dependency didn't match anything special then we may want
+ // to poison the source as it may have been added. If this path
+ // dependency is **not** a workspace member, however, and it's an
+ // optional/non-transitive dependency, then it won't necessarily
+ // be in our lock file. If this shows up then we avoid poisoning
+ // this source as otherwise we'd repeatedly update the registry.
+ //
+ // TODO: this breaks adding an optional dependency in a
+ // non-workspace member and then simultaneously editing the
+ // dependency on that crate to enable the feature. For now,
+ // this bug is better than the always-updating registry though.
+ if !is_ws_member && (dep.is_optional() || !dep.is_transitive()) {
+ continue;
+ }
+
+ // If dev-dependencies aren't being resolved, skip them.
+ if !dep.is_transitive() && !dev_deps {
+ continue;
+ }
+
+ // If this is a path dependency, then try to push it onto our
+ // worklist.
+ if let Some(pkg) = path_pkg(dep.source_id()) {
+ path_deps.push(pkg);
+ continue;
+ }
+
+ // If we match *anything* in the dependency graph then we consider
+ // ourselves all ok, and assume that we'll resolve to that.
+ if resolve.iter().any(|id| dep.matches_ignoring_source(id)) {
+ continue;
+ }
+
+ // Ok if nothing matches, then we poison the source of these
+ // dependencies and the previous lock file.
+ debug!(
+ "poisoning {} because {} looks like it changed {}",
+ dep.source_id(),
+ member.package_id(),
+ dep.package_name()
+ );
+ for id in resolve
+ .iter()
+ .filter(|id| id.source_id() == dep.source_id())
+ {
+ add_deps(resolve, id, &mut avoid_locking);
+ }
+ }
+ }
+
+ // Alright now that we've got our new, fresh, shiny, and refined `keep`
+ // function let's put it to action. Take a look at the previous lock file,
+ // filter everything by this callback, and then shove everything else into
+ // the registry as a locked dependency.
+ let keep = |id: &PackageId| keep(id) && !avoid_locking.contains(id);
+
+ registry.clear_lock();
+ for node in resolve.iter().filter(keep) {
+ let deps = resolve
+ .deps_not_replaced(node)
+ .map(|p| p.0)
+ .filter(keep)
+ .collect::<Vec<_>>();
+
+ // In the v2 lockfile format and prior the `branch=master` dependency
+ // directive was serialized the same way as the no-branch-listed
+ // directive. Nowadays in Cargo, however, these two directives are
+ // considered distinct and are no longer represented the same way. To
+ // maintain compatibility with older lock files we register locked nodes
+ // for *both* the master branch and the default branch.
+ //
+ // Note that this is only applicable for loading older resolves now at
+ // this point. All new lock files are encoded as v3-or-later, so this is
+ // just compat for loading an old lock file successfully.
+ if let Some(node) = master_branch_git_source(node, resolve) {
+ registry.register_lock(node, deps.clone());
+ }
+
+ registry.register_lock(node, deps);
+ }
+
+ /// Recursively add `node` and all its transitive dependencies to `set`.
+ fn add_deps(resolve: &Resolve, node: PackageId, set: &mut HashSet<PackageId>) {
+ if !set.insert(node) {
+ return;
+ }
+ debug!("ignoring any lock pointing directly at {}", node);
+ for (dep, _) in resolve.deps_not_replaced(node) {
+ add_deps(resolve, dep, set);
+ }
+ }
+}
+
+fn master_branch_git_source(id: PackageId, resolve: &Resolve) -> Option<PackageId> {
+ if resolve.version() <= ResolveVersion::V2 {
+ let source = id.source_id();
+ if let Some(GitReference::DefaultBranch) = source.git_reference() {
+ let new_source =
+ SourceId::for_git(source.url(), GitReference::Branch("master".to_string()))
+ .unwrap()
+ .with_precise(source.precise().map(|s| s.to_string()));
+ return Some(id.with_source_id(new_source));
+ }
+ }
+ None
+}
+
+/// Emits warnings of unused patches case by case.
+///
+/// This function does its best to provide more targeted and helpful messages
+/// (such as showing close candidates that failed to match). However, that's
+/// not terribly easy to do, so we just show a general help message if we cannot.
+fn emit_warnings_of_unused_patches(
+ ws: &Workspace<'_>,
+ resolve: &Resolve,
+ registry: &PackageRegistry<'_>,
+) -> CargoResult<()> {
+ const MESSAGE: &str = "was not used in the crate graph.";
+
+ // Map each patch's package ID to the source URLs being patched.
+ let mut patch_pkgid_to_urls = HashMap::new();
+ for (url, summaries) in registry.patches().iter() {
+ for summary in summaries.iter() {
+ patch_pkgid_to_urls
+ .entry(summary.package_id())
+ .or_insert_with(HashSet::new)
+ .insert(url);
+ }
+ }
+
+ // pkg name -> all source IDs under the same pkg name
+ let mut source_ids_grouped_by_pkg_name = HashMap::new();
+ for pkgid in resolve.iter() {
+ source_ids_grouped_by_pkg_name
+ .entry(pkgid.name())
+ .or_insert_with(HashSet::new)
+ .insert(pkgid.source_id());
+ }
+
+ let mut unemitted_unused_patches = Vec::new();
+ for unused in resolve.unused_patches().iter() {
+ // Show alternative source URLs if the source URL being patched
+ // cannot be found in the crate graph.
+ match (
+ source_ids_grouped_by_pkg_name.get(&unused.name()),
+ patch_pkgid_to_urls.get(unused),
+ ) {
+ (Some(ids), Some(patched_urls))
+ if ids
+ .iter()
+ .all(|id| !patched_urls.contains(id.canonical_url())) =>
+ {
+ use std::fmt::Write;
+ let mut msg = String::new();
+ writeln!(msg, "Patch `{}` {}", unused, MESSAGE)?;
+ write!(
+ msg,
+ "Perhaps you misspelled the source URL being patched.\n\
+ Possible URLs for `[patch.<URL>]`:",
+ )?;
+ for id in ids.iter() {
+ write!(msg, "\n {}", id.display_registry_name())?;
+ }
+ ws.config().shell().warn(msg)?;
+ }
+ _ => unemitted_unused_patches.push(unused),
+ }
+ }
+
+ // Show general help message.
+ if !unemitted_unused_patches.is_empty() {
+ let warnings: Vec<_> = unemitted_unused_patches
+ .iter()
+ .map(|pkgid| format!("Patch `{}` {}", pkgid, MESSAGE))
+ .collect();
+ ws.config()
+ .shell()
+ .warn(format!("{}\n{}", warnings.join("\n"), UNUSED_PATCH_WARNING))?;
+ }
+
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/ops/tree/format/mod.rs b/src/tools/cargo/src/cargo/ops/tree/format/mod.rs
new file mode 100644
index 000000000..d0b55b74d
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/tree/format/mod.rs
@@ -0,0 +1,135 @@
+use std::fmt;
+
+use anyhow::{bail, Error};
+
+use self::parse::{Parser, RawChunk};
+use super::{Graph, Node};
+
+mod parse;
+
+enum Chunk {
+ Raw(String),
+ Package,
+ License,
+ Repository,
+ Features,
+ LibName,
+}
+
+pub struct Pattern(Vec<Chunk>);
+
+impl Pattern {
+ pub fn new(format: &str) -> Result<Pattern, Error> {
+ let mut chunks = vec![];
+
+ for raw in Parser::new(format) {
+ let chunk = match raw {
+ RawChunk::Text(text) => Chunk::Raw(text.to_owned()),
+ RawChunk::Argument("p") => Chunk::Package,
+ RawChunk::Argument("l") => Chunk::License,
+ RawChunk::Argument("r") => Chunk::Repository,
+ RawChunk::Argument("f") => Chunk::Features,
+ RawChunk::Argument("lib") => Chunk::LibName,
+ RawChunk::Argument(a) => {
+ bail!("unsupported pattern `{}`", a);
+ }
+ RawChunk::Error(err) => bail!("{}", err),
+ };
+ chunks.push(chunk);
+ }
+
+ Ok(Pattern(chunks))
+ }
+
+ pub fn display<'a>(&'a self, graph: &'a Graph<'a>, node_index: usize) -> Display<'a> {
+ Display {
+ pattern: self,
+ graph,
+ node_index,
+ }
+ }
+}
+
+pub struct Display<'a> {
+ pattern: &'a Pattern,
+ graph: &'a Graph<'a>,
+ node_index: usize,
+}
+
+impl<'a> fmt::Display for Display<'a> {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let node = self.graph.node(self.node_index);
+ match node {
+ Node::Package {
+ package_id,
+ features,
+ ..
+ } => {
+ let package = self.graph.package_for_id(*package_id);
+ for chunk in &self.pattern.0 {
+ match chunk {
+ Chunk::Raw(s) => fmt.write_str(s)?,
+ Chunk::Package => {
+ let proc_macro_suffix = if package.proc_macro() {
+ " (proc-macro)"
+ } else {
+ ""
+ };
+ write!(
+ fmt,
+ "{} v{}{}",
+ package.name(),
+ package.version(),
+ proc_macro_suffix
+ )?;
+
+ let source_id = package.package_id().source_id();
+ if !source_id.is_crates_io() {
+ write!(fmt, " ({})", source_id)?;
+ }
+ }
+ Chunk::License => {
+ if let Some(license) = &package.manifest().metadata().license {
+ write!(fmt, "{}", license)?;
+ }
+ }
+ Chunk::Repository => {
+ if let Some(repository) = &package.manifest().metadata().repository {
+ write!(fmt, "{}", repository)?;
+ }
+ }
+ Chunk::Features => {
+ write!(fmt, "{}", features.join(","))?;
+ }
+ Chunk::LibName => {
+ if let Some(target) = package
+ .manifest()
+ .targets()
+ .iter()
+ .find(|target| target.is_lib())
+ {
+ write!(fmt, "{}", target.crate_name())?;
+ }
+ }
+ }
+ }
+ }
+ Node::Feature { name, node_index } => {
+ let for_node = self.graph.node(*node_index);
+ match for_node {
+ Node::Package { package_id, .. } => {
+ write!(fmt, "{} feature \"{}\"", package_id.name(), name)?;
+ if self.graph.is_cli_feature(self.node_index) {
+ write!(fmt, " (command-line)")?;
+ }
+ }
+ // The node_index in Node::Feature must point to a package
+ // node, see `add_feature`.
+ _ => panic!("unexpected feature node {:?}", for_node),
+ }
+ }
+ }
+
+ Ok(())
+ }
+}
diff --git a/src/tools/cargo/src/cargo/ops/tree/format/parse.rs b/src/tools/cargo/src/cargo/ops/tree/format/parse.rs
new file mode 100644
index 000000000..ee112fbee
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/tree/format/parse.rs
@@ -0,0 +1,123 @@
+//! Parser for the `--format` string for `cargo tree`.
+
+use std::iter;
+use std::str;
+
+pub enum RawChunk<'a> {
+ /// Raw text to include in the output.
+ Text(&'a str),
+ /// A substitution to place in the output. For example, the argument "p"
+ /// emits the package name.
+ Argument(&'a str),
+ /// Indicates an error in the format string. The given string is a
+ /// human-readable message explaining the error.
+ Error(&'static str),
+}
+
+/// `cargo tree` format parser.
+///
+/// The format string indicates how each package should be displayed. It
+/// includes simple markers surrounded in curly braces that will be
+/// substituted with their corresponding values. For example, the text
+/// "{p} license:{l}" will substitute the `{p}` with the package name/version
+/// (and optionally source), and the `{l}` will be the license from
+/// `Cargo.toml`.
+///
+/// Substitutions are alphabetic characters between curly braces, like `{p}`
+/// or `{foo}`. The actual interpretation of these are done in the `Pattern`
+/// struct.
+///
+/// Bare curly braces can be included in the output with double braces; for
+/// example, `{{` will emit a single `{`, similar to Rust's format strings.
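+///
+/// As a rough sketch of the chunking behavior (not a doctest; `Parser` and
+/// `RawChunk` are internal to this module):
+///
+/// ```ignore
+/// let chunks: Vec<_> = Parser::new("{p} {{").collect();
+/// assert!(matches!(chunks[0], RawChunk::Argument("p")));
+/// assert!(matches!(chunks[1], RawChunk::Text(" ")));
+/// assert!(matches!(chunks[2], RawChunk::Text("{")));
+/// ```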
+pub struct Parser<'a> {
+ s: &'a str,
+ it: iter::Peekable<str::CharIndices<'a>>,
+}
+
+impl<'a> Parser<'a> {
+ pub fn new(s: &'a str) -> Parser<'a> {
+ Parser {
+ s,
+ it: s.char_indices().peekable(),
+ }
+ }
+
+ fn consume(&mut self, ch: char) -> bool {
+ match self.it.peek() {
+ Some(&(_, c)) if c == ch => {
+ self.it.next();
+ true
+ }
+ _ => false,
+ }
+ }
+
+ fn argument(&mut self) -> RawChunk<'a> {
+ RawChunk::Argument(self.name())
+ }
+
+ fn name(&mut self) -> &'a str {
+ let start = match self.it.peek() {
+ Some(&(pos, ch)) if ch.is_alphabetic() => {
+ self.it.next();
+ pos
+ }
+ _ => return "",
+ };
+
+ loop {
+ match self.it.peek() {
+ Some(&(_, ch)) if ch.is_alphanumeric() => {
+ self.it.next();
+ }
+ Some(&(end, _)) => return &self.s[start..end],
+ None => return &self.s[start..],
+ }
+ }
+ }
+
+ fn text(&mut self, start: usize) -> RawChunk<'a> {
+ while let Some(&(pos, ch)) = self.it.peek() {
+ match ch {
+ '{' | '}' => return RawChunk::Text(&self.s[start..pos]),
+ _ => {
+ self.it.next();
+ }
+ }
+ }
+ RawChunk::Text(&self.s[start..])
+ }
+}
+
+impl<'a> Iterator for Parser<'a> {
+ type Item = RawChunk<'a>;
+
+ fn next(&mut self) -> Option<RawChunk<'a>> {
+ match self.it.peek() {
+ Some(&(_, '{')) => {
+ self.it.next();
+ if self.consume('{') {
+ Some(RawChunk::Text("{"))
+ } else {
+ let chunk = self.argument();
+ if self.consume('}') {
+ Some(chunk)
+ } else {
+ for _ in &mut self.it {}
+ Some(RawChunk::Error("expected '}'"))
+ }
+ }
+ }
+ Some(&(_, '}')) => {
+ self.it.next();
+ if self.consume('}') {
+ Some(RawChunk::Text("}"))
+ } else {
+ Some(RawChunk::Error("unexpected '}'"))
+ }
+ }
+ Some(&(i, _)) => Some(self.text(i)),
+ None => None,
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/ops/tree/graph.rs b/src/tools/cargo/src/cargo/ops/tree/graph.rs
new file mode 100644
index 000000000..20a9ca0b6
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/tree/graph.rs
@@ -0,0 +1,685 @@
+//! Code for building the graph used by `cargo tree`.
+
+use super::TreeOptions;
+use crate::core::compiler::{CompileKind, RustcTargetData};
+use crate::core::dependency::DepKind;
+use crate::core::resolver::features::{CliFeatures, FeaturesFor, ResolvedFeatures};
+use crate::core::resolver::Resolve;
+use crate::core::{FeatureMap, FeatureValue, Package, PackageId, PackageIdSpec, Workspace};
+use crate::util::interning::InternedString;
+use crate::util::CargoResult;
+use std::collections::{HashMap, HashSet};
+
+#[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)]
+pub enum Node {
+ Package {
+ package_id: PackageId,
+ /// Features that are enabled on this package.
+ features: Vec<InternedString>,
+ kind: CompileKind,
+ },
+ Feature {
+ /// Index of the package node this feature is for.
+ node_index: usize,
+ /// Name of the feature.
+ name: InternedString,
+ },
+}
+
+/// The kind of edge, for separating dependencies into different sections.
+#[derive(Debug, Copy, Hash, Eq, Clone, PartialEq)]
+pub enum EdgeKind {
+ Dep(DepKind),
+ Feature,
+}
+
+/// Set of outgoing edges for a single node.
+///
+/// Edges are separated by the edge kind (`DepKind` or `Feature`). This is
+/// primarily done so that the output can easily display separate sections
+/// like `[build-dependencies]`.
+///
+/// The value is a `Vec` because each edge kind can have multiple outgoing
+/// edges. For example, package "foo" can have multiple normal dependencies.
+#[derive(Clone)]
+struct Edges(HashMap<EdgeKind, Vec<usize>>);
+
+impl Edges {
+ fn new() -> Edges {
+ Edges(HashMap::new())
+ }
+
+ /// Adds an edge pointing to the given node.
+ fn add_edge(&mut self, kind: EdgeKind, index: usize) {
+ let indexes = self.0.entry(kind).or_default();
+ if !indexes.contains(&index) {
+ indexes.push(index)
+ }
+ }
+}
+
+/// A graph of dependencies.
+pub struct Graph<'a> {
+ nodes: Vec<Node>,
+ /// The indexes of `edges` correspond to the `nodes`. That is, `edges[0]`
+ /// is the set of outgoing edges for `nodes[0]`. They should always be in
+ /// sync.
+ edges: Vec<Edges>,
+ /// Index maps a node to an index, for fast lookup.
+ index: HashMap<Node, usize>,
+ /// Map for looking up packages.
+ package_map: HashMap<PackageId, &'a Package>,
+ /// Set of indexes of feature nodes that were added via the command-line.
+ ///
+ /// For example `--features foo` will mark the "foo" node here.
+ cli_features: HashSet<usize>,
+ /// Map of dependency names, used for building internal feature map for
+ /// dep_name/feat_name syntax.
+ ///
+ /// Key is the index of a package node, value is a map of dep_name to a
+ /// set of `(pkg_node_index, is_optional)`.
+ dep_name_map: HashMap<usize, HashMap<InternedString, HashSet<(usize, bool)>>>,
+}
+
+impl<'a> Graph<'a> {
+ fn new(package_map: HashMap<PackageId, &'a Package>) -> Graph<'a> {
+ Graph {
+ nodes: Vec::new(),
+ edges: Vec::new(),
+ index: HashMap::new(),
+ package_map,
+ cli_features: HashSet::new(),
+ dep_name_map: HashMap::new(),
+ }
+ }
+
+ /// Adds a new node to the graph, returning its new index.
+ fn add_node(&mut self, node: Node) -> usize {
+ let from_index = self.nodes.len();
+ self.nodes.push(node);
+ self.edges.push(Edges::new());
+ self.index
+ .insert(self.nodes[from_index].clone(), from_index);
+ from_index
+ }
+
+ /// Returns a list of nodes the given node index points to for the given kind.
+ pub fn connected_nodes(&self, from: usize, kind: &EdgeKind) -> Vec<usize> {
+ match self.edges[from].0.get(kind) {
+ Some(indexes) => {
+ // Create a sorted list for consistent output.
+ let mut indexes = indexes.clone();
+ indexes.sort_unstable_by(|a, b| self.nodes[*a].cmp(&self.nodes[*b]));
+ indexes
+ }
+ None => Vec::new(),
+ }
+ }
+
+ /// Returns `true` if the given node has any outgoing edges.
+ pub fn has_outgoing_edges(&self, index: usize) -> bool {
+ !self.edges[index].0.is_empty()
+ }
+
+ /// Gets a node by index.
+ pub fn node(&self, index: usize) -> &Node {
+ &self.nodes[index]
+ }
+
+ /// Given a slice of PackageIds, returns the indexes of all nodes that match.
+ pub fn indexes_from_ids(&self, package_ids: &[PackageId]) -> Vec<usize> {
+ let mut result: Vec<(&Node, usize)> = self
+ .nodes
+ .iter()
+ .enumerate()
+ .filter(|(_i, node)| match node {
+ Node::Package { package_id, .. } => package_ids.contains(package_id),
+ _ => false,
+ })
+ .map(|(i, node)| (node, i))
+ .collect();
+ // Sort for consistent output (the same command should always return
+ // the same output). "unstable" since nodes should always be unique.
+ result.sort_unstable();
+ result.into_iter().map(|(_node, i)| i).collect()
+ }
+
+ pub fn package_for_id(&self, id: PackageId) -> &Package {
+ self.package_map[&id]
+ }
+
+ fn package_id_for_index(&self, index: usize) -> PackageId {
+ match self.nodes[index] {
+ Node::Package { package_id, .. } => package_id,
+ Node::Feature { .. } => panic!("unexpected feature node"),
+ }
+ }
+
+ /// Returns `true` if the given feature node index is a feature enabled
+ /// via the command-line.
+ pub fn is_cli_feature(&self, index: usize) -> bool {
+ self.cli_features.contains(&index)
+ }
+
+ /// Returns a new graph by removing all nodes not reachable from the
+ /// given nodes.
+ pub fn from_reachable(&self, roots: &[usize]) -> Graph<'a> {
+ // Graph built with features does not (yet) support --duplicates.
+ assert!(self.dep_name_map.is_empty());
+ let mut new_graph = Graph::new(self.package_map.clone());
+ // Maps old index to new index. None if not yet visited.
+ let mut remap: Vec<Option<usize>> = vec![None; self.nodes.len()];
+
+ fn visit(
+ graph: &Graph<'_>,
+ new_graph: &mut Graph<'_>,
+ remap: &mut Vec<Option<usize>>,
+ index: usize,
+ ) -> usize {
+ if let Some(new_index) = remap[index] {
+ // Already visited.
+ return new_index;
+ }
+ let node = graph.node(index).clone();
+ let new_from = new_graph.add_node(node);
+ remap[index] = Some(new_from);
+ // Visit dependencies.
+ for (edge_kind, edge_indexes) in &graph.edges[index].0 {
+ for edge_index in edge_indexes {
+ let new_to_index = visit(graph, new_graph, remap, *edge_index);
+ new_graph.edges[new_from].add_edge(*edge_kind, new_to_index);
+ }
+ }
+ new_from
+ }
+
+ // Walk the roots, generating a new graph as it goes along.
+ for root in roots {
+ visit(self, &mut new_graph, &mut remap, *root);
+ }
+
+ new_graph
+ }
+
+ /// Inverts the direction of all edges.
+ pub fn invert(&mut self) {
+ let mut new_edges = vec![Edges::new(); self.edges.len()];
+ for (from_idx, node_edges) in self.edges.iter().enumerate() {
+ for (kind, edges) in &node_edges.0 {
+ for edge_idx in edges {
+ new_edges[*edge_idx].add_edge(*kind, from_idx);
+ }
+ }
+ }
+ self.edges = new_edges;
+ }
+
+ /// Returns a list of nodes that are considered "duplicates" (same package
+ /// name, with different versions/features/source/etc.).
+ pub fn find_duplicates(&self) -> Vec<usize> {
+ // Graph built with features does not (yet) support --duplicates.
+ assert!(self.dep_name_map.is_empty());
+
+ // Collect a map of package name to Vec<(&Node, usize)>.
+ let mut packages = HashMap::new();
+ for (i, node) in self.nodes.iter().enumerate() {
+ if let Node::Package { package_id, .. } = node {
+ packages
+ .entry(package_id.name())
+ .or_insert_with(Vec::new)
+ .push((node, i));
+ }
+ }
+
+ let mut dupes: Vec<(&Node, usize)> = packages
+ .into_iter()
+ .filter(|(_name, indexes)| {
+ indexes
+ .into_iter()
+ .map(|(node, _)| {
+ match node {
+ Node::Package {
+ package_id,
+ features,
+ ..
+ } => {
+ // Do not treat duplicates on the host or target as duplicates.
+ Node::Package {
+ package_id: package_id.clone(),
+ features: features.clone(),
+ kind: CompileKind::Host,
+ }
+ }
+ _ => unreachable!(),
+ }
+ })
+ .collect::<HashSet<_>>()
+ .len()
+ > 1
+ })
+ .flat_map(|(_name, indexes)| indexes)
+ .collect();
+
+ // For consistent output.
+ dupes.sort_unstable();
+ dupes.into_iter().map(|(_node, i)| i).collect()
+ }
+}
+
+/// Builds the graph.
+pub fn build<'a>(
+ ws: &Workspace<'_>,
+ resolve: &Resolve,
+ resolved_features: &ResolvedFeatures,
+ specs: &[PackageIdSpec],
+ cli_features: &CliFeatures,
+ target_data: &RustcTargetData<'_>,
+ requested_kinds: &[CompileKind],
+ package_map: HashMap<PackageId, &'a Package>,
+ opts: &TreeOptions,
+) -> CargoResult<Graph<'a>> {
+ let mut graph = Graph::new(package_map);
+ let mut members_with_features = ws.members_with_features(specs, cli_features)?;
+ members_with_features.sort_unstable_by_key(|e| e.0.package_id());
+ for (member, cli_features) in members_with_features {
+ let member_id = member.package_id();
+ let features_for = FeaturesFor::from_for_host(member.proc_macro());
+ for kind in requested_kinds {
+ let member_index = add_pkg(
+ &mut graph,
+ resolve,
+ resolved_features,
+ member_id,
+ features_for,
+ target_data,
+ *kind,
+ opts,
+ );
+ if opts.graph_features {
+ let fmap = resolve.summary(member_id).features();
+ add_cli_features(&mut graph, member_index, &cli_features, fmap);
+ }
+ }
+ }
+ if opts.graph_features {
+ add_internal_features(&mut graph, resolve);
+ }
+ Ok(graph)
+}
+
+/// Adds a single package node (if it does not already exist).
+///
+/// This will also recursively add all of its dependencies.
+///
+/// Returns the index to the package node.
+fn add_pkg(
+ graph: &mut Graph<'_>,
+ resolve: &Resolve,
+ resolved_features: &ResolvedFeatures,
+ package_id: PackageId,
+ features_for: FeaturesFor,
+ target_data: &RustcTargetData<'_>,
+ requested_kind: CompileKind,
+ opts: &TreeOptions,
+) -> usize {
+ let node_features = resolved_features.activated_features(package_id, features_for);
+ let node_kind = match features_for {
+ FeaturesFor::HostDep => CompileKind::Host,
+ FeaturesFor::ArtifactDep(target) => CompileKind::Target(target),
+ FeaturesFor::NormalOrDev => requested_kind,
+ };
+ let node = Node::Package {
+ package_id,
+ features: node_features,
+ kind: node_kind,
+ };
+ if let Some(idx) = graph.index.get(&node) {
+ return *idx;
+ }
+ let from_index = graph.add_node(node);
+ // Compute the dep name map which is later used for foo/bar feature lookups.
+ let mut dep_name_map: HashMap<InternedString, HashSet<(usize, bool)>> = HashMap::new();
+ let mut deps: Vec<_> = resolve.deps(package_id).collect();
+ deps.sort_unstable_by_key(|(dep_id, _)| *dep_id);
+ let show_all_targets = opts.target == super::Target::All;
+ for (dep_id, deps) in deps {
+ let mut deps: Vec<_> = deps
+ .iter()
+ // This filter is *similar* to the one found in `unit_dependencies::compute_deps`.
+ // Try to keep them in sync!
+ .filter(|dep| {
+ let kind = match (node_kind, dep.kind()) {
+ (CompileKind::Host, _) => CompileKind::Host,
+ (_, DepKind::Build) => CompileKind::Host,
+ (_, DepKind::Normal) => node_kind,
+ (_, DepKind::Development) => node_kind,
+ };
+ // Filter out inactivated targets.
+ if !show_all_targets && !target_data.dep_platform_activated(dep, kind) {
+ return false;
+ }
+ // Filter out dev-dependencies if requested.
+ if !opts.edge_kinds.contains(&EdgeKind::Dep(dep.kind())) {
+ return false;
+ }
+ if dep.is_optional() {
+ // If the new feature resolver does not enable this
+ // optional dep, then don't use it.
+ if !resolved_features.is_dep_activated(
+ package_id,
+ features_for,
+ dep.name_in_toml(),
+ ) {
+ return false;
+ }
+ }
+ true
+ })
+ .collect();
+
+ // This dependency is eliminated from the dependency tree under
+ // the current target and feature set.
+ if deps.is_empty() {
+ continue;
+ }
+
+ deps.sort_unstable_by_key(|dep| dep.name_in_toml());
+ let dep_pkg = graph.package_map[&dep_id];
+
+ for dep in deps {
+ let dep_features_for = if dep.is_build() || dep_pkg.proc_macro() {
+ FeaturesFor::HostDep
+ } else {
+ features_for
+ };
+ let dep_index = add_pkg(
+ graph,
+ resolve,
+ resolved_features,
+ dep_id,
+ dep_features_for,
+ target_data,
+ requested_kind,
+ opts,
+ );
+ if opts.graph_features {
+ // Add the dependency node with feature nodes in-between.
+ dep_name_map
+ .entry(dep.name_in_toml())
+ .or_default()
+ .insert((dep_index, dep.is_optional()));
+ if dep.uses_default_features() {
+ add_feature(
+ graph,
+ InternedString::new("default"),
+ Some(from_index),
+ dep_index,
+ EdgeKind::Dep(dep.kind()),
+ );
+ }
+ for feature in dep.features().iter() {
+ add_feature(
+ graph,
+ *feature,
+ Some(from_index),
+ dep_index,
+ EdgeKind::Dep(dep.kind()),
+ );
+ }
+ if !dep.uses_default_features() && dep.features().is_empty() {
+ // No features, use a direct connection.
+ graph.edges[from_index].add_edge(EdgeKind::Dep(dep.kind()), dep_index);
+ }
+ } else {
+ graph.edges[from_index].add_edge(EdgeKind::Dep(dep.kind()), dep_index);
+ }
+ }
+ }
+ if opts.graph_features {
+ assert!(graph
+ .dep_name_map
+ .insert(from_index, dep_name_map)
+ .is_none());
+ }
+
+ from_index
+}
+
+/// Adds a feature node between two nodes.
+///
+/// That is, it adds the following:
+///
+/// ```text
+/// from -Edge-> featname -Edge::Feature-> to
+/// ```
+///
+/// Returns a tuple `(missing, index)`.
+/// `missing` is true if the `Feature` node was newly added to the graph
+/// (i.e., it did not already exist).
+/// `index` is the index of the `Feature` node in the graph.
+fn add_feature(
+ graph: &mut Graph<'_>,
+ name: InternedString,
+ from: Option<usize>,
+ to: usize,
+ kind: EdgeKind,
+) -> (bool, usize) {
+ // `to` *must* point to a package node.
+ assert!(matches! {graph.nodes[to], Node::Package{..}});
+ let node = Node::Feature {
+ node_index: to,
+ name,
+ };
+ let (missing, node_index) = match graph.index.get(&node) {
+ Some(idx) => (false, *idx),
+ None => (true, graph.add_node(node)),
+ };
+ if let Some(from) = from {
+ graph.edges[from].add_edge(kind, node_index);
+ }
+ graph.edges[node_index].add_edge(EdgeKind::Feature, to);
+ (missing, node_index)
+}
+
+/// Adds nodes for features requested on the command-line for the given member.
+///
+/// Feature nodes are added as "roots" (i.e., they have no "from" index),
+/// because they come from the outside world. They usually only appear with
+/// `--invert`.
+fn add_cli_features(
+ graph: &mut Graph<'_>,
+ package_index: usize,
+ cli_features: &CliFeatures,
+ feature_map: &FeatureMap,
+) {
+ // NOTE: Recursive enabling of features will be handled by
+ // add_internal_features.
+
+ // Create a set of feature names requested on the command-line.
+ let mut to_add: HashSet<FeatureValue> = HashSet::new();
+ if cli_features.all_features {
+ to_add.extend(feature_map.keys().map(|feat| FeatureValue::Feature(*feat)));
+ }
+
+ if cli_features.uses_default_features {
+ to_add.insert(FeatureValue::Feature(InternedString::new("default")));
+ }
+ to_add.extend(cli_features.features.iter().cloned());
+
+ // Add each feature as a node, and mark as "from command-line" in graph.cli_features.
+ for fv in to_add {
+ match fv {
+ FeatureValue::Feature(feature) => {
+ let index = add_feature(graph, feature, None, package_index, EdgeKind::Feature).1;
+ graph.cli_features.insert(index);
+ }
+ // This is enforced by CliFeatures.
+ FeatureValue::Dep { .. } => panic!("unexpected cli dep feature {}", fv),
+ FeatureValue::DepFeature {
+ dep_name,
+ dep_feature,
+ weak,
+ } => {
+ let dep_connections = match graph.dep_name_map[&package_index].get(&dep_name) {
+ // Clone to deal with immutable borrow of `graph`. :(
+ Some(dep_connections) => dep_connections.clone(),
+ None => {
+ // --features bar?/feat where `bar` is not activated should be ignored.
+ // If this wasn't weak, then this is a bug.
+ if weak {
+ continue;
+ }
+ panic!(
+ "missing dep graph connection for CLI feature `{}` for member {:?}\n\
+ Please file a bug report at https://github.com/rust-lang/cargo/issues",
+ fv,
+ graph.nodes.get(package_index)
+ );
+ }
+ };
+ for (dep_index, is_optional) in dep_connections {
+ if is_optional {
+ // Activate the optional dep on self.
+ let index =
+ add_feature(graph, dep_name, None, package_index, EdgeKind::Feature).1;
+ graph.cli_features.insert(index);
+ }
+ let index =
+ add_feature(graph, dep_feature, None, dep_index, EdgeKind::Feature).1;
+ graph.cli_features.insert(index);
+ }
+ }
+ }
+ }
+}
+
+/// Recursively adds connections between features in the `[features]` table
+/// for every package.
+fn add_internal_features(graph: &mut Graph<'_>, resolve: &Resolve) {
+ // Collect features already activated by dependencies or command-line.
+ let feature_nodes: Vec<(PackageId, usize, usize, InternedString)> = graph
+ .nodes
+ .iter()
+ .enumerate()
+ .filter_map(|(i, node)| match node {
+ Node::Package { .. } => None,
+ Node::Feature { node_index, name } => {
+ let package_id = graph.package_id_for_index(*node_index);
+ Some((package_id, *node_index, i, *name))
+ }
+ })
+ .collect();
+
+ for (package_id, package_index, feature_index, feature_name) in feature_nodes {
+ add_feature_rec(
+ graph,
+ resolve,
+ feature_name,
+ package_id,
+ feature_index,
+ package_index,
+ );
+ }
+}
+
+/// Recursively add feature nodes for all features enabled by the given feature.
+///
+/// `from` is the index of the node that enables this feature.
+/// `package_index` is the index of the package node for the feature.
+fn add_feature_rec(
+ graph: &mut Graph<'_>,
+ resolve: &Resolve,
+ feature_name: InternedString,
+ package_id: PackageId,
+ from: usize,
+ package_index: usize,
+) {
+ let feature_map = resolve.summary(package_id).features();
+ let fvs = match feature_map.get(&feature_name) {
+ Some(fvs) => fvs,
+ None => return,
+ };
+ for fv in fvs {
+ match fv {
+ FeatureValue::Feature(dep_name) => {
+ let (missing, feat_index) = add_feature(
+ graph,
+ *dep_name,
+ Some(from),
+ package_index,
+ EdgeKind::Feature,
+ );
+ // Don't recurse if the edge already exists, to deal with cycles.
+ if missing {
+ add_feature_rec(
+ graph,
+ resolve,
+ *dep_name,
+ package_id,
+ feat_index,
+ package_index,
+ );
+ }
+ }
+ // Dependencies are already shown in the graph as dep edges. I'm
+ // uncertain whether or not this might be confusing in some cases
+ // (like feature `"somefeat" = ["dep:somedep"]`), so maybe in the
+ // future consider explicitly showing this?
+ FeatureValue::Dep { .. } => {}
+ FeatureValue::DepFeature {
+ dep_name,
+ dep_feature,
+ // Note: `weak` is mostly handled when the graph is built in
+ // `is_dep_activated` which is responsible for skipping
+ // unactivated weak dependencies. Here it is only used to
+ // determine if the feature of the dependency name is
+ // activated on self.
+ weak,
+ } => {
+ let dep_indexes = match graph.dep_name_map[&package_index].get(dep_name) {
+ Some(indexes) => indexes.clone(),
+ None => {
+ log::debug!(
+ "enabling feature {} on {}, found {}/{}, \
+ dep appears to not be enabled",
+ feature_name,
+ package_id,
+ dep_name,
+ dep_feature
+ );
+ continue;
+ }
+ };
+ for (dep_index, is_optional) in dep_indexes {
+ let dep_pkg_id = graph.package_id_for_index(dep_index);
+ if is_optional && !weak {
+ // Activate the optional dep on self.
+ add_feature(
+ graph,
+ *dep_name,
+ Some(from),
+ package_index,
+ EdgeKind::Feature,
+ );
+ }
+ let (missing, feat_index) = add_feature(
+ graph,
+ *dep_feature,
+ Some(from),
+ dep_index,
+ EdgeKind::Feature,
+ );
+ if missing {
+ add_feature_rec(
+ graph,
+ resolve,
+ *dep_feature,
+ dep_pkg_id,
+ feat_index,
+ dep_index,
+ );
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/ops/tree/mod.rs b/src/tools/cargo/src/cargo/ops/tree/mod.rs
new file mode 100644
index 000000000..02459f78f
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/tree/mod.rs
@@ -0,0 +1,451 @@
+//! Implementation of `cargo tree`.
+
+use self::format::Pattern;
+use crate::core::compiler::{CompileKind, RustcTargetData};
+use crate::core::dependency::DepKind;
+use crate::core::resolver::{features::CliFeatures, ForceAllTargets, HasDevUnits};
+use crate::core::{Package, PackageId, PackageIdSpec, Workspace};
+use crate::ops::{self, Packages};
+use crate::util::{CargoResult, Config};
+use crate::{drop_print, drop_println};
+use anyhow::Context;
+use graph::Graph;
+use std::collections::{HashMap, HashSet};
+use std::str::FromStr;
+
+mod format;
+mod graph;
+
+pub use {graph::EdgeKind, graph::Node};
+
+pub struct TreeOptions {
+ pub cli_features: CliFeatures,
+ /// The packages to display the tree for.
+ pub packages: Packages,
+ /// The platform to filter for.
+ pub target: Target,
+ /// The dependency kinds to display.
+ pub edge_kinds: HashSet<EdgeKind>,
+ pub invert: Vec<String>,
+ /// The packages to prune from the display of the dependency tree.
+ pub pkgs_to_prune: Vec<String>,
+ /// The style of prefix for each line.
+ pub prefix: Prefix,
+ /// If `true`, duplicates will be repeated.
+ /// If `false`, duplicates will be marked with `*`, and their dependencies
+ /// won't be shown.
+ pub no_dedupe: bool,
+ /// If `true`, run in a special mode where it will scan for packages that
+ /// appear with different versions, and report if any were found. Implies
+ /// `invert`.
+ pub duplicates: bool,
+ /// The style of characters to use.
+ pub charset: Charset,
+ /// A format string indicating how each package should be displayed.
+ pub format: String,
+ /// Includes features in the tree as separate nodes.
+ pub graph_features: bool,
+ /// Maximum display depth of the dependency tree.
+ pub max_display_depth: u32,
+ /// Excludes proc-macro dependencies.
+ pub no_proc_macro: bool,
+}
+
+#[derive(PartialEq)]
+pub enum Target {
+ Host,
+ Specific(Vec<String>),
+ All,
+}
+
+impl Target {
+ pub fn from_cli(targets: Vec<String>) -> Target {
+ match targets.len() {
+ 0 => Target::Host,
+ 1 if targets[0] == "all" => Target::All,
+ _ => Target::Specific(targets),
+ }
+ }
+}
+
+pub enum Charset {
+ Utf8,
+ Ascii,
+}
+
+impl FromStr for Charset {
+ type Err = &'static str;
+
+ fn from_str(s: &str) -> Result<Charset, &'static str> {
+ match s {
+ "utf8" => Ok(Charset::Utf8),
+ "ascii" => Ok(Charset::Ascii),
+ _ => Err("invalid charset"),
+ }
+ }
+}
+
+#[derive(Clone, Copy)]
+pub enum Prefix {
+ None,
+ Indent,
+ Depth,
+}
+
+impl FromStr for Prefix {
+ type Err = &'static str;
+
+ fn from_str(s: &str) -> Result<Prefix, &'static str> {
+ match s {
+ "none" => Ok(Prefix::None),
+ "indent" => Ok(Prefix::Indent),
+ "depth" => Ok(Prefix::Depth),
+ _ => Err("invalid prefix"),
+ }
+ }
+}
+
+struct Symbols {
+ down: &'static str,
+ tee: &'static str,
+ ell: &'static str,
+ right: &'static str,
+}
+
+static UTF8_SYMBOLS: Symbols = Symbols {
+ down: "│",
+ tee: "├",
+ ell: "└",
+ right: "─",
+};
+
+static ASCII_SYMBOLS: Symbols = Symbols {
+ down: "|",
+ tee: "|",
+ ell: "`",
+ right: "-",
+};
+
+/// Entry point for the `cargo tree` command.
+pub fn build_and_print(ws: &Workspace<'_>, opts: &TreeOptions) -> CargoResult<()> {
+ let requested_targets = match &opts.target {
+ Target::All | Target::Host => Vec::new(),
+ Target::Specific(t) => t.clone(),
+ };
+ // TODO: Target::All is broken with -Zfeatures=itarget. To handle that properly,
+ // `FeatureResolver` will need to be taught what "all" means.
+ let requested_kinds = CompileKind::from_requested_targets(ws.config(), &requested_targets)?;
+ let target_data = RustcTargetData::new(ws, &requested_kinds)?;
+ let specs = opts.packages.to_package_id_specs(ws)?;
+ let has_dev = if opts
+ .edge_kinds
+ .contains(&EdgeKind::Dep(DepKind::Development))
+ {
+ HasDevUnits::Yes
+ } else {
+ HasDevUnits::No
+ };
+ let force_all = if opts.target == Target::All {
+ ForceAllTargets::Yes
+ } else {
+ ForceAllTargets::No
+ };
+ let ws_resolve = ops::resolve_ws_with_opts(
+ ws,
+ &target_data,
+ &requested_kinds,
+ &opts.cli_features,
+ &specs,
+ has_dev,
+ force_all,
+ )?;
+
+ let package_map: HashMap<PackageId, &Package> = ws_resolve
+ .pkg_set
+ .packages()
+ .map(|pkg| (pkg.package_id(), pkg))
+ .collect();
+
+ let mut graph = graph::build(
+ ws,
+ &ws_resolve.targeted_resolve,
+ &ws_resolve.resolved_features,
+ &specs,
+ &opts.cli_features,
+ &target_data,
+ &requested_kinds,
+ package_map,
+ opts,
+ )?;
+
+ let root_specs = if opts.invert.is_empty() {
+ specs
+ } else {
+ opts.invert
+ .iter()
+ .map(|p| PackageIdSpec::parse(p))
+ .collect::<CargoResult<Vec<PackageIdSpec>>>()?
+ };
+ let root_ids = ws_resolve.targeted_resolve.specs_to_ids(&root_specs)?;
+ let root_indexes = graph.indexes_from_ids(&root_ids);
+
+ let root_indexes = if opts.duplicates {
+ // `-d -p foo` will only show duplicates within foo's subtree
+ graph = graph.from_reachable(root_indexes.as_slice());
+ graph.find_duplicates()
+ } else {
+ root_indexes
+ };
+
+ if !opts.invert.is_empty() || opts.duplicates {
+ graph.invert();
+ }
+
+ // Packages to prune.
+ let pkgs_to_prune = opts
+ .pkgs_to_prune
+ .iter()
+ .map(|p| PackageIdSpec::parse(p))
+ .map(|r| {
+ // Provide an error message if pkgid is not within the resolved
+ // dependencies graph.
+ r.and_then(|spec| spec.query(ws_resolve.targeted_resolve.iter()).and(Ok(spec)))
+ })
+ .collect::<CargoResult<Vec<PackageIdSpec>>>()?;
+
+ if root_indexes.is_empty() {
+ ws.config().shell().warn(
+ "nothing to print.\n\n\
+ To find dependencies that require specific target platforms, \
+ try to use option `--target all` first, and then narrow your search scope accordingly.",
+ )?;
+ } else {
+ print(ws.config(), opts, root_indexes, &pkgs_to_prune, &graph)?;
+ }
+ Ok(())
+}
+
+/// Prints a tree for each given root.
+fn print(
+ config: &Config,
+ opts: &TreeOptions,
+ roots: Vec<usize>,
+ pkgs_to_prune: &[PackageIdSpec],
+ graph: &Graph<'_>,
+) -> CargoResult<()> {
+ let format = Pattern::new(&opts.format)
+ .with_context(|| format!("tree format `{}` not valid", opts.format))?;
+
+ let symbols = match opts.charset {
+ Charset::Utf8 => &UTF8_SYMBOLS,
+ Charset::Ascii => &ASCII_SYMBOLS,
+ };
+
+ // The set of visited deps is used to display a (*) whenever a dep has
+ // already been printed (ignored with --no-dedupe).
+ let mut visited_deps = HashSet::new();
+
+ for (i, root_index) in roots.into_iter().enumerate() {
+ if i != 0 {
+ drop_println!(config);
+ }
+
+ // A stack of bools used to determine where | symbols should appear
+ // when printing a line.
+ let mut levels_continue = vec![];
+ // The print stack is used to detect dependency cycles when
+ // --no-dedupe is used. It contains a Node for each level.
+ let mut print_stack = vec![];
+
+ print_node(
+ config,
+ graph,
+ root_index,
+ &format,
+ symbols,
+ pkgs_to_prune,
+ opts.prefix,
+ opts.no_dedupe,
+ opts.max_display_depth,
+ opts.no_proc_macro,
+ &mut visited_deps,
+ &mut levels_continue,
+ &mut print_stack,
+ );
+ }
+
+ Ok(())
+}
+
+/// Prints a package and all of its dependencies.
+fn print_node<'a>(
+ config: &Config,
+ graph: &'a Graph<'_>,
+ node_index: usize,
+ format: &Pattern,
+ symbols: &Symbols,
+ pkgs_to_prune: &[PackageIdSpec],
+ prefix: Prefix,
+ no_dedupe: bool,
+ max_display_depth: u32,
+ no_proc_macro: bool,
+ visited_deps: &mut HashSet<usize>,
+ levels_continue: &mut Vec<bool>,
+ print_stack: &mut Vec<usize>,
+) {
+ let new = no_dedupe || visited_deps.insert(node_index);
+
+ match prefix {
+ Prefix::Depth => drop_print!(config, "{}", levels_continue.len()),
+ Prefix::Indent => {
+ if let Some((last_continues, rest)) = levels_continue.split_last() {
+ for continues in rest {
+ let c = if *continues { symbols.down } else { " " };
+ drop_print!(config, "{} ", c);
+ }
+
+ let c = if *last_continues {
+ symbols.tee
+ } else {
+ symbols.ell
+ };
+ drop_print!(config, "{0}{1}{1} ", c, symbols.right);
+ }
+ }
+ Prefix::None => {}
+ }
+
+ let in_cycle = print_stack.contains(&node_index);
+ // If this node does not have any outgoing edges, don't include the (*)
+ // since there isn't really anything "deduplicated", and it generally just
+ // adds noise.
+ let has_deps = graph.has_outgoing_edges(node_index);
+ let star = if (new && !in_cycle) || !has_deps {
+ ""
+ } else {
+ " (*)"
+ };
+ drop_println!(config, "{}{}", format.display(graph, node_index), star);
+
+ if !new || in_cycle {
+ return;
+ }
+ print_stack.push(node_index);
+
+ for kind in &[
+ EdgeKind::Dep(DepKind::Normal),
+ EdgeKind::Dep(DepKind::Build),
+ EdgeKind::Dep(DepKind::Development),
+ EdgeKind::Feature,
+ ] {
+ print_dependencies(
+ config,
+ graph,
+ node_index,
+ format,
+ symbols,
+ pkgs_to_prune,
+ prefix,
+ no_dedupe,
+ max_display_depth,
+ no_proc_macro,
+ visited_deps,
+ levels_continue,
+ print_stack,
+ kind,
+ );
+ }
+ print_stack.pop();
+}
+
+/// Prints all the dependencies of a package for the given dependency kind.
+fn print_dependencies<'a>(
+ config: &Config,
+ graph: &'a Graph<'_>,
+ node_index: usize,
+ format: &Pattern,
+ symbols: &Symbols,
+ pkgs_to_prune: &[PackageIdSpec],
+ prefix: Prefix,
+ no_dedupe: bool,
+ max_display_depth: u32,
+ no_proc_macro: bool,
+ visited_deps: &mut HashSet<usize>,
+ levels_continue: &mut Vec<bool>,
+ print_stack: &mut Vec<usize>,
+ kind: &EdgeKind,
+) {
+ let deps = graph.connected_nodes(node_index, kind);
+ if deps.is_empty() {
+ return;
+ }
+
+ let name = match kind {
+ EdgeKind::Dep(DepKind::Normal) => None,
+ EdgeKind::Dep(DepKind::Build) => Some("[build-dependencies]"),
+ EdgeKind::Dep(DepKind::Development) => Some("[dev-dependencies]"),
+ EdgeKind::Feature => None,
+ };
+
+ if let Prefix::Indent = prefix {
+ if let Some(name) = name {
+ for continues in &**levels_continue {
+ let c = if *continues { symbols.down } else { " " };
+ drop_print!(config, "{} ", c);
+ }
+
+ drop_println!(config, "{}", name);
+ }
+ }
+
+ // Current level exceeds maximum display depth. Skip.
+ if levels_continue.len() + 1 > max_display_depth as usize {
+ return;
+ }
+
+ let mut it = deps
+ .iter()
+ .filter(|dep| {
+ // Filter out proc-macro dependencies.
+ if no_proc_macro {
+ match graph.node(**dep) {
+ &Node::Package { package_id, .. } => {
+ !graph.package_for_id(package_id).proc_macro()
+ }
+ _ => true,
+ }
+ } else {
+ true
+ }
+ })
+ .filter(|dep| {
+ // Filter out packages to prune.
+ match graph.node(**dep) {
+ Node::Package { package_id, .. } => {
+ !pkgs_to_prune.iter().any(|spec| spec.matches(*package_id))
+ }
+ _ => true,
+ }
+ })
+ .peekable();
+
+ while let Some(dependency) = it.next() {
+ levels_continue.push(it.peek().is_some());
+ print_node(
+ config,
+ graph,
+ *dependency,
+ format,
+ symbols,
+ pkgs_to_prune,
+ prefix,
+ no_dedupe,
+ max_display_depth,
+ no_proc_macro,
+ visited_deps,
+ levels_continue,
+ print_stack,
+ );
+ levels_continue.pop();
+ }
+}
diff --git a/src/tools/cargo/src/cargo/ops/vendor.rs b/src/tools/cargo/src/cargo/ops/vendor.rs
new file mode 100644
index 000000000..3ee46db32
--- /dev/null
+++ b/src/tools/cargo/src/cargo/ops/vendor.rs
@@ -0,0 +1,426 @@
+use crate::core::package::MANIFEST_PREAMBLE;
+use crate::core::shell::Verbosity;
+use crate::core::{GitReference, Package, Workspace};
+use crate::ops;
+use crate::sources::path::PathSource;
+use crate::sources::CRATES_IO_REGISTRY;
+use crate::util::{try_canonicalize, CargoResult, Config};
+use anyhow::{bail, Context as _};
+use cargo_util::{paths, Sha256};
+use serde::Serialize;
+use std::collections::HashSet;
+use std::collections::{BTreeMap, BTreeSet, HashMap};
+use std::ffi::OsStr;
+use std::fs::{self, File, OpenOptions};
+use std::io::{Read, Write};
+use std::path::{Path, PathBuf};
+
+pub struct VendorOptions<'a> {
+ pub no_delete: bool,
+ pub versioned_dirs: bool,
+ pub destination: &'a Path,
+ pub extra: Vec<PathBuf>,
+}
+
+pub fn vendor(ws: &Workspace<'_>, opts: &VendorOptions<'_>) -> CargoResult<()> {
+ let config = ws.config();
+ let mut extra_workspaces = Vec::new();
+ for extra in opts.extra.iter() {
+ let extra = config.cwd().join(extra);
+ let ws = Workspace::new(&extra, config)?;
+ extra_workspaces.push(ws);
+ }
+ let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::<Vec<_>>();
+ let vendor_config = sync(config, &workspaces, opts).with_context(|| "failed to sync")?;
+
+ if config.shell().verbosity() != Verbosity::Quiet {
+ if vendor_config.source.is_empty() {
+ crate::drop_eprintln!(config, "There is no dependency to vendor in this project.");
+ } else {
+ crate::drop_eprint!(
+ config,
+ "To use vendored sources, add this to your .cargo/config.toml for this project:\n\n"
+ );
+ crate::drop_print!(
+ config,
+ "{}",
+ &toml::to_string_pretty(&vendor_config).unwrap()
+ );
+ }
+ }
+
+ Ok(())
+}
+
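+/// The `[source]` replacement table printed for `.cargo/config.toml`.
+///
+/// For a project that only pulls from crates.io and vendors into `vendor/`,
+/// the serialized form looks roughly like this (an illustrative sketch; the
+/// directory value comes from the `--destination` the user passed):
+///
+/// ```toml
+/// [source.crates-io]
+/// replace-with = "vendored-sources"
+///
+/// [source.vendored-sources]
+/// directory = "vendor"
+/// ```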
+#[derive(Serialize)]
+struct VendorConfig {
+ source: BTreeMap<String, VendorSource>,
+}
+
+#[derive(Serialize)]
+#[serde(rename_all = "lowercase", untagged)]
+enum VendorSource {
+ Directory {
+ directory: String,
+ },
+ Registry {
+ registry: Option<String>,
+ #[serde(rename = "replace-with")]
+ replace_with: String,
+ },
+ Git {
+ git: String,
+ branch: Option<String>,
+ tag: Option<String>,
+ rev: Option<String>,
+ #[serde(rename = "replace-with")]
+ replace_with: String,
+ },
+}
+
+fn sync(
+ config: &Config,
+ workspaces: &[&Workspace<'_>],
+ opts: &VendorOptions<'_>,
+) -> CargoResult<VendorConfig> {
+ let canonical_destination = try_canonicalize(opts.destination);
+ let canonical_destination = canonical_destination.as_deref().unwrap_or(opts.destination);
+ let dest_dir_already_exists = canonical_destination.exists();
+
+ paths::create_dir_all(&canonical_destination)?;
+ let mut to_remove = HashSet::new();
+ if !opts.no_delete {
+ for entry in canonical_destination.read_dir()? {
+ let entry = entry?;
+ if !entry
+ .file_name()
+ .to_str()
+ .map_or(false, |s| s.starts_with('.'))
+ {
+ to_remove.insert(entry.path());
+ }
+ }
+ }
+
+ // First up, attempt to work around rust-lang/cargo#5956. Apparently build
+ // artifacts sprout up in Cargo's global cache for whatever reason, although
+ // it's unclear what tool is causing these issues at this time. For now we
+ // apply a heavy-hammer approach which is to delete Cargo's unpacked version
+ // of each crate to start off with. After we do this we'll re-resolve and
+ // redownload again, which should trigger Cargo to re-extract all the
+ // crates.
+ //
+ // Note that errors are largely ignored here as this is a best-effort
+ // attempt. If anything fails here we basically just move on to the next
+ // crate to work with.
+ for ws in workspaces {
+ let (packages, resolve) =
+ ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?;
+
+ packages
+ .get_many(resolve.iter())
+ .with_context(|| "failed to download packages")?;
+
+ for pkg in resolve.iter() {
+ // Don't delete actual source code!
+ if pkg.source_id().is_path() {
+ if let Ok(path) = pkg.source_id().url().to_file_path() {
+ if let Ok(path) = try_canonicalize(path) {
+ to_remove.remove(&path);
+ }
+ }
+ continue;
+ }
+ if pkg.source_id().is_git() {
+ continue;
+ }
+ if let Ok(pkg) = packages.get_one(pkg) {
+ drop(fs::remove_dir_all(pkg.manifest_path().parent().unwrap()));
+ }
+ }
+ }
+
+ let mut checksums = HashMap::new();
+ let mut ids = BTreeMap::new();
+
+ // Next up let's actually download all crates and start storing internal
+ // tables about them.
+ for ws in workspaces {
+ let (packages, resolve) =
+ ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?;
+
+ packages
+ .get_many(resolve.iter())
+ .with_context(|| "failed to download packages")?;
+
+ for pkg in resolve.iter() {
+ // No need to vendor path crates since they're already in the
+ // repository
+ if pkg.source_id().is_path() {
+ continue;
+ }
+ ids.insert(
+ pkg,
+ packages
+ .get_one(pkg)
+ .with_context(|| "failed to fetch package")?
+ .clone(),
+ );
+
+ checksums.insert(pkg, resolve.checksums().get(&pkg).cloned());
+ }
+ }
+
+ let mut versions = HashMap::new();
+ for id in ids.keys() {
+ let map = versions.entry(id.name()).or_insert_with(BTreeMap::default);
+ if let Some(prev) = map.get(&id.version()) {
+ bail!(
+ "found duplicate version of package `{} v{}` \
+ vendored from two sources:\n\
+ \n\
+ \tsource 1: {}\n\
+ \tsource 2: {}",
+ id.name(),
+ id.version(),
+ prev,
+ id.source_id()
+ );
+ }
+ map.insert(id.version(), id.source_id());
+ }
+
+ let mut sources = BTreeSet::new();
+ let mut tmp_buf = [0; 64 * 1024];
+ for (id, pkg) in ids.iter() {
+ // Next up, copy it to the vendor directory
+ let src = pkg
+ .manifest_path()
+ .parent()
+ .expect("manifest_path should point to a file");
+ let max_version = *versions[&id.name()].iter().rev().next().unwrap().0;
+ let dir_has_version_suffix = opts.versioned_dirs || id.version() != max_version;
+ let dst_name = if dir_has_version_suffix {
+ // Eg vendor/futures-0.1.13
+ format!("{}-{}", id.name(), id.version())
+ } else {
+ // Eg vendor/futures
+ id.name().to_string()
+ };
+
+ sources.insert(id.source_id());
+ let dst = canonical_destination.join(&dst_name);
+ to_remove.remove(&dst);
+ let cksum = dst.join(".cargo-checksum.json");
+ if dir_has_version_suffix && cksum.exists() {
+ // Always re-copy directory without version suffix in case the version changed
+ continue;
+ }
+
+ config.shell().status(
+ "Vendoring",
+ &format!("{} ({}) to {}", id, src.to_string_lossy(), dst.display()),
+ )?;
+
+ let _ = fs::remove_dir_all(&dst);
+ let pathsource = PathSource::new(src, id.source_id(), config);
+ let paths = pathsource.list_files(pkg)?;
+ let mut map = BTreeMap::new();
+ cp_sources(pkg, src, &paths, &dst, &mut map, &mut tmp_buf)
+ .with_context(|| format!("failed to copy over vendored sources for: {}", id))?;
+
+ // Finally, emit the metadata about this package
+ let json = serde_json::json!({
+ "package": checksums.get(id),
+ "files": map,
+ });
+
+ paths::write(&cksum, json.to_string())?;
+ }
+
+ for path in to_remove {
+ if path.is_dir() {
+ paths::remove_dir_all(&path)?;
+ } else {
+ paths::remove_file(&path)?;
+ }
+ }
+
+ // add our vendored source
+ let mut config = BTreeMap::new();
+
+ let merged_source_name = "vendored-sources";
+
+ // replace original sources with vendor
+ for source_id in sources {
+ let name = if source_id.is_crates_io() {
+ CRATES_IO_REGISTRY.to_string()
+ } else {
+ // Remove `precise` since that makes the source name very long,
+ // and isn't needed to disambiguate multiple sources.
+ source_id.with_precise(None).as_url().to_string()
+ };
+
+ let source = if source_id.is_crates_io() {
+ VendorSource::Registry {
+ registry: None,
+ replace_with: merged_source_name.to_string(),
+ }
+ } else if source_id.is_remote_registry() {
+ let registry = source_id.url().to_string();
+ VendorSource::Registry {
+ registry: Some(registry),
+ replace_with: merged_source_name.to_string(),
+ }
+ } else if source_id.is_git() {
+ let mut branch = None;
+ let mut tag = None;
+ let mut rev = None;
+ if let Some(reference) = source_id.git_reference() {
+ match *reference {
+ GitReference::Branch(ref b) => branch = Some(b.clone()),
+ GitReference::Tag(ref t) => tag = Some(t.clone()),
+ GitReference::Rev(ref r) => rev = Some(r.clone()),
+ GitReference::DefaultBranch => {}
+ }
+ }
+ VendorSource::Git {
+ git: source_id.url().to_string(),
+ branch,
+ tag,
+ rev,
+ replace_with: merged_source_name.to_string(),
+ }
+ } else {
+ panic!("Invalid source ID: {}", source_id)
+ };
+ config.insert(name, source);
+ }
+
+ if !config.is_empty() {
+ config.insert(
+ merged_source_name.to_string(),
+ VendorSource::Directory {
+ // Windows-flavour paths are valid here on Windows but not on Unix.
+ // This backslash normalization is for making output paths more
+ // cross-platform compatible.
+ directory: opts.destination.to_string_lossy().replace("\\", "/"),
+ },
+ );
+ } else if !dest_dir_already_exists {
+ // Nothing to vendor. Remove the destination dir we've just created.
+ paths::remove_dir(canonical_destination)?;
+ }
+
+ Ok(VendorConfig { source: config })
+}
+
+fn cp_sources(
+ pkg: &Package,
+ src: &Path,
+ paths: &[PathBuf],
+ dst: &Path,
+ cksums: &mut BTreeMap<String, String>,
+ tmp_buf: &mut [u8],
+) -> CargoResult<()> {
+ for p in paths {
+ let relative = p.strip_prefix(&src).unwrap();
+
+ match relative.to_str() {
+ // Skip git config files as they're not relevant to builds most of
+ // the time. If we respected them (e.g. in git), they would likely
+ // mess with the checksums when a vendor dir is checked into
+ // someone else's source control.
+ Some(".gitattributes") | Some(".gitignore") | Some(".git") => continue,
+
+ // Temporary Cargo files
+ Some(".cargo-ok") => continue,
+
+ // Skip patch-style orig/rej files. Published crates on crates.io
+ // have `Cargo.toml.orig` which we don't want to use here and
+ // otherwise these are rarely used as part of the build process.
+ Some(filename) => {
+ if filename.ends_with(".orig") || filename.ends_with(".rej") {
+ continue;
+ }
+ }
+ _ => {}
+ };
+
+ // Join pathname components individually to make sure that the joined
+ // path uses the correct directory separators everywhere, since
+ // `relative` may use Unix-style and `dst` may require Windows-style
+ // backslashes.
+ let dst = relative
+ .iter()
+ .fold(dst.to_owned(), |acc, component| acc.join(&component));
+
+ paths::create_dir_all(dst.parent().unwrap())?;
+ let mut dst_opts = OpenOptions::new();
+ dst_opts.write(true).create(true).truncate(true);
+ // When vendoring git dependencies, the manifest has not been normalized like it would be
+ // when published. This causes issues when the manifest uses workspace inheritance.
+ // To get around this issue we use the "original" manifest after `{}.workspace = true`
+ // has been resolved for git dependencies.
+ let cksum = if dst.file_name() == Some(OsStr::new("Cargo.toml"))
+ && pkg.package_id().source_id().is_git()
+ {
+ let original_toml = toml::to_string_pretty(pkg.manifest().original())?;
+ let contents = format!("{}\n{}", MANIFEST_PREAMBLE, original_toml);
+ copy_and_checksum(
+ &dst,
+ &mut dst_opts,
+ &mut contents.as_bytes(),
+ "Generated Cargo.toml",
+ tmp_buf,
+ )?
+ } else {
+ let mut src = File::open(&p).with_context(|| format!("failed to open {:?}", &p))?;
+ #[cfg(unix)]
+ {
+ use std::os::unix::fs::{MetadataExt, OpenOptionsExt};
+ let src_metadata = src
+ .metadata()
+ .with_context(|| format!("failed to stat {:?}", p))?;
+ dst_opts.mode(src_metadata.mode());
+ }
+ copy_and_checksum(
+ &dst,
+ &mut dst_opts,
+ &mut src,
+ &p.display().to_string(),
+ tmp_buf,
+ )?
+ };
+
+ cksums.insert(relative.to_str().unwrap().replace("\\", "/"), cksum);
+ }
+ Ok(())
+}
+
+fn copy_and_checksum<T: Read>(
+ dst_path: &Path,
+ dst_opts: &mut OpenOptions,
+ contents: &mut T,
+ contents_path: &str,
+ buf: &mut [u8],
+) -> CargoResult<String> {
+ let mut dst = dst_opts
+ .open(dst_path)
+ .with_context(|| format!("failed to create {:?}", dst_path))?;
+ // Not going to bother setting mode on pre-existing files, since there
+ // shouldn't be any under normal conditions.
+ let mut cksum = Sha256::new();
+ loop {
+ let n = contents
+ .read(buf)
+ .with_context(|| format!("failed to read from {:?}", contents_path))?;
+ if n == 0 {
+ break Ok(cksum.finish_hex());
+ }
+ let data = &buf[..n];
+ cksum.update(data);
+ dst.write_all(data)
+ .with_context(|| format!("failed to write to {:?}", dst_path))?;
+ }
+}
diff --git a/src/tools/cargo/src/cargo/sources/config.rs b/src/tools/cargo/src/cargo/sources/config.rs
new file mode 100644
index 000000000..97a23a0b4
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/config.rs
@@ -0,0 +1,317 @@
+//! Implementation of configuration for various sources
+//!
+//! This module will parse the various `source.*` TOML configuration keys into a
+//! structure usable by Cargo itself. Currently this is primarily used to map
+//! sources to one another via the `replace-with` key in `.cargo/config`.
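+//!
+//! For example, a configuration that redirects crates.io to a mirror looks
+//! roughly like this (the mirror name and URL are illustrative only):
+//!
+//! ```toml
+//! [source.crates-io]
+//! replace-with = "my-mirror"
+//!
+//! [source.my-mirror]
+//! registry = "https://example.com/index"
+//! ```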
+
+use crate::core::{GitReference, PackageId, Source, SourceId};
+use crate::sources::{ReplacedSource, CRATES_IO_REGISTRY};
+use crate::util::config::{self, ConfigRelativePath, OptValue};
+use crate::util::errors::CargoResult;
+use crate::util::{Config, IntoUrl};
+use anyhow::{bail, Context as _};
+use log::debug;
+use std::collections::{HashMap, HashSet};
+use url::Url;
+
+#[derive(Clone)]
+pub struct SourceConfigMap<'cfg> {
+ /// Mapping of source name to the toml configuration.
+ cfgs: HashMap<String, SourceConfig>,
+ /// Mapping of `SourceId` to the source name.
+ id2name: HashMap<SourceId, String>,
+ config: &'cfg Config,
+}
+
+/// Definition of a source in a config file.
+#[derive(Debug, serde::Deserialize)]
+#[serde(rename_all = "kebab-case")]
+struct SourceConfigDef {
+ /// Indicates this source should be replaced with another of the given name.
+ replace_with: OptValue<String>,
+ /// A directory source.
+ directory: Option<ConfigRelativePath>,
+ /// A registry source. Value is a URL.
+ registry: OptValue<String>,
+ /// A local registry source.
+ local_registry: Option<ConfigRelativePath>,
+ /// A git source. Value is a URL.
+ git: OptValue<String>,
+ /// The git branch.
+ branch: OptValue<String>,
+ /// The git tag.
+ tag: OptValue<String>,
+ /// The git revision.
+ rev: OptValue<String>,
+}
+
+/// Configuration for a particular source, found in TOML looking like:
+///
+/// ```toml
+/// [source.crates-io]
+/// registry = 'https://github.com/rust-lang/crates.io-index'
+/// replace-with = 'foo' # optional
+/// ```
+#[derive(Clone)]
+struct SourceConfig {
+ /// `SourceId` this source corresponds to, inferred from the various
+ /// defined keys in the configuration.
+ id: SourceId,
+
+ /// Whether or not this source is replaced with another.
+ ///
+ /// This field is a tuple of `(name, location)` where `location` is where
+ /// this configuration key was defined (such as the `.cargo/config` path
+ /// or the environment variable name).
+ replace_with: Option<(String, String)>,
+}
+
+impl<'cfg> SourceConfigMap<'cfg> {
+ pub fn new(config: &'cfg Config) -> CargoResult<SourceConfigMap<'cfg>> {
+ let mut base = SourceConfigMap::empty(config)?;
+ let sources: Option<HashMap<String, SourceConfigDef>> = config.get("source")?;
+ if let Some(sources) = sources {
+ for (key, value) in sources.into_iter() {
+ base.add_config(key, value)?;
+ }
+ }
+ Ok(base)
+ }
+
+ pub fn empty(config: &'cfg Config) -> CargoResult<SourceConfigMap<'cfg>> {
+ let mut base = SourceConfigMap {
+ cfgs: HashMap::new(),
+ id2name: HashMap::new(),
+ config,
+ };
+ base.add(
+ CRATES_IO_REGISTRY,
+ SourceConfig {
+ id: SourceId::crates_io(config)?,
+ replace_with: None,
+ },
+ )?;
+ if SourceId::crates_io_is_sparse(config)? {
+ base.add(
+ CRATES_IO_REGISTRY,
+ SourceConfig {
+ id: SourceId::crates_io_maybe_sparse_http(config)?,
+ replace_with: None,
+ },
+ )?;
+ }
+ if let Ok(url) = config.get_env("__CARGO_TEST_CRATES_IO_URL_DO_NOT_USE_THIS") {
+ base.add(
+ CRATES_IO_REGISTRY,
+ SourceConfig {
+ id: SourceId::for_alt_registry(&url.parse()?, CRATES_IO_REGISTRY)?,
+ replace_with: None,
+ },
+ )?;
+ }
+ Ok(base)
+ }
+
+ pub fn config(&self) -> &'cfg Config {
+ self.config
+ }
+
+ /// Get the `Source` for a given `SourceId`.
+ pub fn load(
+ &self,
+ id: SourceId,
+ yanked_whitelist: &HashSet<PackageId>,
+ ) -> CargoResult<Box<dyn Source + 'cfg>> {
+ debug!("loading: {}", id);
+
+ let mut name = match self.id2name.get(&id) {
+ Some(name) => name,
+ None => return id.load(self.config, yanked_whitelist),
+ };
+ let mut cfg_loc = "";
+ let orig_name = name;
+ let new_id = loop {
+ let cfg = match self.cfgs.get(name) {
+ Some(cfg) => cfg,
+ None => {
+ // Attempt to interpret the source name as an alt registry name
+ if let Ok(alt_id) = SourceId::alt_registry(self.config, name) {
+ debug!("following pointer to registry {}", name);
+ break alt_id.with_precise(id.precise().map(str::to_string));
+ }
+ bail!(
+ "could not find a configured source with the \
+ name `{}` when attempting to lookup `{}` \
+ (configuration in `{}`)",
+ name,
+ orig_name,
+ cfg_loc
+ );
+ }
+ };
+ match &cfg.replace_with {
+ Some((s, c)) => {
+ name = s;
+ cfg_loc = c;
+ }
+ None if id == cfg.id => return id.load(self.config, yanked_whitelist),
+ None => {
+ break cfg.id.with_precise(id.precise().map(|s| s.to_string()));
+ }
+ }
+ debug!("following pointer to {}", name);
+ if name == orig_name {
+ bail!(
+ "detected a cycle of `replace-with` sources, the source \
+ `{}` is eventually replaced with itself \
+ (configuration in `{}`)",
+ name,
+ cfg_loc
+ )
+ }
+ };
+
+ let new_src = new_id.load(
+ self.config,
+ &yanked_whitelist
+ .iter()
+ .map(|p| p.map_source(id, new_id))
+ .collect(),
+ )?;
+ let old_src = id.load(self.config, yanked_whitelist)?;
+ if !new_src.supports_checksums() && old_src.supports_checksums() {
+ bail!(
+ "\
+cannot replace `{orig}` with `{name}`, the source `{orig}` supports \
+checksums, but `{name}` does not
+
+a lock file compatible with `{orig}` cannot be generated in this situation
+",
+ orig = orig_name,
+ name = name
+ );
+ }
+
+ if old_src.requires_precise() && id.precise().is_none() {
+ bail!(
+ "\
+the source {orig} requires a lock file to be present first before it can be
+used against vendored source code
+
+remove the source replacement configuration, generate a lock file, and then
+restore the source replacement configuration to continue the build
+",
+ orig = orig_name
+ );
+ }
+
+ Ok(Box::new(ReplacedSource::new(id, new_id, new_src)))
+ }
+
+ fn add(&mut self, name: &str, cfg: SourceConfig) -> CargoResult<()> {
+ if let Some(old_name) = self.id2name.insert(cfg.id, name.to_string()) {
+ // The user is allowed to redefine the built-in crates-io
+ // definition from `empty()`.
+ if name != CRATES_IO_REGISTRY {
+ bail!(
+ "source `{}` defines source {}, but that source is already defined by `{}`\n\
+ note: Sources are not allowed to be defined multiple times.",
+ name,
+ cfg.id,
+ old_name
+ );
+ }
+ }
+ self.cfgs.insert(name.to_string(), cfg);
+ Ok(())
+ }
+
+ fn add_config(&mut self, name: String, def: SourceConfigDef) -> CargoResult<()> {
+ let mut srcs = Vec::new();
+ if let Some(registry) = def.registry {
+ let url = url(&registry, &format!("source.{}.registry", name))?;
+ srcs.push(SourceId::for_alt_registry(&url, &name)?);
+ }
+ if let Some(local_registry) = def.local_registry {
+ let path = local_registry.resolve_path(self.config);
+ srcs.push(SourceId::for_local_registry(&path)?);
+ }
+ if let Some(directory) = def.directory {
+ let path = directory.resolve_path(self.config);
+ srcs.push(SourceId::for_directory(&path)?);
+ }
+ if let Some(git) = def.git {
+ let url = url(&git, &format!("source.{}.git", name))?;
+ let reference = match def.branch {
+ Some(b) => GitReference::Branch(b.val),
+ None => match def.tag {
+ Some(b) => GitReference::Tag(b.val),
+ None => match def.rev {
+ Some(b) => GitReference::Rev(b.val),
+ None => GitReference::DefaultBranch,
+ },
+ },
+ };
+ srcs.push(SourceId::for_git(&url, reference)?);
+ } else {
+ let check_not_set = |key, v: OptValue<String>| {
+ if let Some(val) = v {
+ bail!(
+ "source definition `source.{}` specifies `{}`, \
+ but that requires a `git` key to be specified (in {})",
+ name,
+ key,
+ val.definition
+ );
+ }
+ Ok(())
+ };
+ check_not_set("branch", def.branch)?;
+ check_not_set("tag", def.tag)?;
+ check_not_set("rev", def.rev)?;
+ }
+ if name == CRATES_IO_REGISTRY && srcs.is_empty() {
+ srcs.push(SourceId::crates_io_maybe_sparse_http(self.config)?);
+ }
+
+ match srcs.len() {
+ 0 => bail!(
+ "no source location specified for `source.{}`, need \
+ `registry`, `local-registry`, `directory`, or `git` defined",
+ name
+ ),
+ 1 => {}
+ _ => bail!(
+ "more than one source location specified for `source.{}`",
+ name
+ ),
+ }
+ let src = srcs[0];
+
+ let replace_with = def
+ .replace_with
+ .map(|val| (val.val, val.definition.to_string()));
+
+ self.add(
+ &name,
+ SourceConfig {
+ id: src,
+ replace_with,
+ },
+ )?;
+
+ return Ok(());
+
+ fn url(val: &config::Value<String>, key: &str) -> CargoResult<Url> {
+ let url = val.val.into_url().with_context(|| {
+ format!(
+ "configuration key `{}` specified an invalid \
+ URL (in {})",
+ key, val.definition
+ )
+ })?;
+
+ Ok(url)
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/sources/directory.rs b/src/tools/cargo/src/cargo/sources/directory.rs
new file mode 100644
index 000000000..46acb9f86
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/directory.rs
@@ -0,0 +1,226 @@
+use std::collections::HashMap;
+use std::fmt::{self, Debug, Formatter};
+use std::path::{Path, PathBuf};
+use std::task::Poll;
+
+use crate::core::source::MaybePackage;
+use crate::core::{Dependency, Package, PackageId, QueryKind, Source, SourceId, Summary};
+use crate::sources::PathSource;
+use crate::util::errors::CargoResult;
+use crate::util::Config;
+
+use anyhow::Context as _;
+use cargo_util::{paths, Sha256};
+use serde::Deserialize;
+
+pub struct DirectorySource<'cfg> {
+ source_id: SourceId,
+ root: PathBuf,
+ packages: HashMap<PackageId, (Package, Checksum)>,
+ config: &'cfg Config,
+ updated: bool,
+}
+
+#[derive(Deserialize)]
+struct Checksum {
+ package: Option<String>,
+ files: HashMap<String, String>,
+}
+
+impl<'cfg> DirectorySource<'cfg> {
+ pub fn new(path: &Path, id: SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
+ DirectorySource {
+ source_id: id,
+ root: path.to_path_buf(),
+ config,
+ packages: HashMap::new(),
+ updated: false,
+ }
+ }
+}
+
+impl<'cfg> Debug for DirectorySource<'cfg> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+ write!(f, "DirectorySource {{ root: {:?} }}", self.root)
+ }
+}
+
+impl<'cfg> Source for DirectorySource<'cfg> {
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ if !self.updated {
+ return Poll::Pending;
+ }
+ let packages = self.packages.values().map(|p| &p.0);
+ let matches = packages.filter(|pkg| match kind {
+ QueryKind::Exact => dep.matches(pkg.summary()),
+ QueryKind::Fuzzy => true,
+ });
+ for summary in matches.map(|pkg| pkg.summary().clone()) {
+ f(summary);
+ }
+ Poll::Ready(Ok(()))
+ }
+
+ fn supports_checksums(&self) -> bool {
+ true
+ }
+
+ fn requires_precise(&self) -> bool {
+ true
+ }
+
+ fn source_id(&self) -> SourceId {
+ self.source_id
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ if self.updated {
+ return Ok(());
+ }
+ self.packages.clear();
+ let entries = self.root.read_dir().with_context(|| {
+ format!(
+ "failed to read root of directory source: {}",
+ self.root.display()
+ )
+ })?;
+
+ for entry in entries {
+ let entry = entry?;
+ let path = entry.path();
+
+ // Ignore hidden/dot directories as they typically don't contain
+ // crates and otherwise may conflict with a VCS
+ // (rust-lang/cargo#3414).
+ if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
+ if s.starts_with('.') {
+ continue;
+ }
+ }
+
+ // Vendor directories are often checked into a VCS, but throughout
+ // the lifetime of a vendor dir crates are often added and deleted.
+ // Some VCS implementations don't always fully delete the directory
+ // when a dir is removed from a different checkout. Sometimes a
+ // mostly-empty dir is left behind.
+ //
+ // Additionally vendor directories are sometimes accompanied with
+ // readme files and other auxiliary information not too interesting
+ // to Cargo.
+ //
+ // To help handle all this we only try processing folders with a
+ // `Cargo.toml` in them. This has the upside of being pretty
+ // flexible with the contents of vendor directories, but has the
+ // downside that accidentally misconfigured vendor directories
+ // silently return fewer crates.
+ if !path.join("Cargo.toml").exists() {
+ continue;
+ }
+
+ let mut src = PathSource::new(&path, self.source_id, self.config);
+ src.update()?;
+ let mut pkg = src.root_package()?;
+
+ let cksum_file = path.join(".cargo-checksum.json");
+ let cksum = paths::read(&path.join(cksum_file)).with_context(|| {
+ format!(
+ "failed to load checksum `.cargo-checksum.json` \
+ of {} v{}",
+ pkg.package_id().name(),
+ pkg.package_id().version()
+ )
+ })?;
+ let cksum: Checksum = serde_json::from_str(&cksum).with_context(|| {
+ format!(
+ "failed to decode `.cargo-checksum.json` of \
+ {} v{}",
+ pkg.package_id().name(),
+ pkg.package_id().version()
+ )
+ })?;
+
+ if let Some(package) = &cksum.package {
+ pkg.manifest_mut()
+ .summary_mut()
+ .set_checksum(package.clone());
+ }
+ self.packages.insert(pkg.package_id(), (pkg, cksum));
+ }
+
+ self.updated = true;
+ Ok(())
+ }
+
+ fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+ self.packages
+ .get(&id)
+ .map(|p| &p.0)
+ .cloned()
+ .map(MaybePackage::Ready)
+ .ok_or_else(|| anyhow::format_err!("failed to find package with id: {}", id))
+ }
+
+ fn finish_download(&mut self, _id: PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+ panic!("no downloads to do")
+ }
+
+ fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+ Ok(pkg.package_id().version().to_string())
+ }
+
+ fn verify(&self, id: PackageId) -> CargoResult<()> {
+ let (pkg, cksum) = match self.packages.get(&id) {
+ Some(&(ref pkg, ref cksum)) => (pkg, cksum),
+ None => anyhow::bail!("failed to find entry for `{}` in directory source", id),
+ };
+
+ for (file, cksum) in cksum.files.iter() {
+ let file = pkg.root().join(file);
+ let actual = Sha256::new()
+ .update_path(&file)
+ .with_context(|| format!("failed to calculate checksum of: {}", file.display()))?
+ .finish_hex();
+ if &*actual != cksum {
+ anyhow::bail!(
+ "the listed checksum of `{}` has changed:\n\
+ expected: {}\n\
+ actual: {}\n\
+ \n\
+ directory sources are not intended to be edited, if \
+ modifications are required then it is recommended \
+ that `[patch]` is used with a forked copy of the \
+ source\
+ ",
+ file.display(),
+ cksum,
+ actual
+ );
+ }
+ }
+
+ Ok(())
+ }
+
+ fn describe(&self) -> String {
+ format!("directory source `{}`", self.root.display())
+ }
+
+ fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {}
+
+ fn is_yanked(&mut self, _pkg: PackageId) -> Poll<CargoResult<bool>> {
+ Poll::Ready(Ok(false))
+ }
+
+ fn invalidate_cache(&mut self) {
+ // Directory source has no local cache.
+ }
+
+ fn set_quiet(&mut self, _quiet: bool) {
+ // Directory source does not display status
+ }
+}
diff --git a/src/tools/cargo/src/cargo/sources/git/known_hosts.rs b/src/tools/cargo/src/cargo/sources/git/known_hosts.rs
new file mode 100644
index 000000000..9a623151e
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/git/known_hosts.rs
@@ -0,0 +1,925 @@
+//! SSH host key validation support.
+//!
+//! A primary goal with this implementation is to provide user-friendly error
+//! messages, guiding them to understand the issue and how to resolve it.
+//!
+//! Note that there are a lot of limitations here. This reads OpenSSH
+//! known_hosts files from well-known locations, but it does not read OpenSSH
+//! config files. The config file can change the behavior of how OpenSSH
+//! handles known_hosts files. For example, some things we don't handle:
+//!
+//! - `GlobalKnownHostsFile` — Changes the location of the global host file.
+//! - `UserKnownHostsFile` — Changes the location of the user's host file.
+//! - `KnownHostsCommand` — A command to fetch known hosts.
+//! - `CheckHostIP` — DNS spoofing checks.
+//! - `VisualHostKey` — Shows a visual ascii-art key.
+//! - `VerifyHostKeyDNS` — Uses SSHFP DNS records to fetch a host key.
+//!
+//! There are also a number of things that aren't supported but could be easily
+//! added (they just add a little complexity), such as wildcard (`*` and `?`)
+//! hostname patterns. See "FIXME" comments littered in this file.
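+//!
+//! For reference, the known_hosts lines handled below look roughly like this
+//! (keys truncated for illustration):
+//!
+//! ```text
+//! github.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAA... comment
+//! @revoked revoked.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAA... comment
+//! |1|<base64 salt>|<base64 HMAC-SHA1 of host>= ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAA... comment
+//! ```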
+
+use crate::util::config::{Config, Definition, Value};
+use base64::engine::general_purpose::STANDARD;
+use base64::engine::general_purpose::STANDARD_NO_PAD;
+use base64::Engine as _;
+use git2::cert::{Cert, SshHostKeyType};
+use git2::CertificateCheckStatus;
+use hmac::Mac;
+use std::collections::HashSet;
+use std::fmt::{Display, Write};
+use std::path::{Path, PathBuf};
+
+/// These are host keys that are hard-coded in cargo to provide convenience.
+///
+/// If GitHub ever publishes new keys, the user can add them to their own
+/// configuration file to use those instead.
+///
+/// The GitHub keys are sourced from <https://api.github.com/meta> or
+/// <https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/githubs-ssh-key-fingerprints>.
+///
+/// These will be ignored if the user adds their own entries for `github.com`,
+/// which can be useful if GitHub ever revokes their old keys.
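+///
+/// User-supplied entries go through the `net.ssh.known-hosts` config array,
+/// e.g. (key truncated for illustration):
+///
+/// ```toml
+/// [net.ssh]
+/// known-hosts = ["github.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAA..."]
+/// ```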
+static BUNDLED_KEYS: &[(&str, &str, &str)] = &[
+ ("github.com", "ssh-ed25519", "AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl"),
+ ("github.com", "ecdsa-sha2-nistp256", "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg="),
+ ("github.com", "ssh-rsa", "AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk="),
+];
+
+/// List of keys that public hosts have rotated away from.
+///
+/// We explicitly distrust these keys as users with the old key in their
+/// local configuration will otherwise be vulnerable to MITM attacks if the
+/// attacker has access to the old key. As there is no other way to distribute
+/// revocations of ssh host keys, we need to bundle them with the client.
+///
+/// Unlike [`BUNDLED_KEYS`], these revocations will not be ignored if the user
+/// has their own entries: we *know* that these keys are bad.
+static BUNDLED_REVOCATIONS: &[(&str, &str, &str)] = &[
+ // Used until March 24, 2023: https://github.blog/2023-03-23-we-updated-our-rsa-ssh-host-key/
+ ("github.com", "ssh-rsa", "AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ=="),
+];
+
+enum KnownHostError {
+ /// Some general error happened while validating the known hosts.
+ CheckError(anyhow::Error),
+ /// The host key was not found.
+ HostKeyNotFound {
+ hostname: String,
+ key_type: SshHostKeyType,
+ remote_host_key: String,
+ remote_fingerprint: String,
+ other_hosts: Vec<KnownHost>,
+ },
+ /// The host key was found, but does not match the remote's key.
+ HostKeyHasChanged {
+ hostname: String,
+ key_type: SshHostKeyType,
+ old_known_host: KnownHost,
+ remote_host_key: String,
+ remote_fingerprint: String,
+ },
+ /// The host key was found with a @revoked marker, it must not be accepted.
+ HostKeyRevoked {
+ hostname: String,
+ key_type: SshHostKeyType,
+ remote_host_key: String,
+ location: KnownHostLocation,
+ },
+ /// The host key was not found, but there was a matching known host with a
+ /// @cert-authority marker (which Cargo doesn't yet support).
+ HostHasOnlyCertAuthority {
+ hostname: String,
+ location: KnownHostLocation,
+ },
+}
+
+impl From<anyhow::Error> for KnownHostError {
+ fn from(err: anyhow::Error) -> KnownHostError {
+ KnownHostError::CheckError(err.into())
+ }
+}
+
+/// The location where a host key was located.
+#[derive(Clone)]
+enum KnownHostLocation {
+ /// Loaded from a file from disk.
+ File { path: PathBuf, lineno: u32 },
+ /// Loaded from cargo's config system.
+ Config { definition: Definition },
+ /// Part of the hard-coded bundled keys in Cargo.
+ Bundled,
+}
+
+impl Display for KnownHostLocation {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let loc = match self {
+ KnownHostLocation::File { path, lineno } => {
+ format!("{} line {lineno}", path.display())
+ }
+ KnownHostLocation::Config { definition } => {
+ format!("config value from {definition}")
+ }
+ KnownHostLocation::Bundled => format!("bundled with cargo"),
+ };
+ f.write_str(&loc)
+ }
+}
+
+/// The git2 callback used to validate a certificate (only ssh known hosts are validated).
+pub fn certificate_check(
+ config: &Config,
+ cert: &Cert<'_>,
+ host: &str,
+ port: Option<u16>,
+ config_known_hosts: Option<&Vec<Value<String>>>,
+ diagnostic_home_config: &str,
+) -> Result<CertificateCheckStatus, git2::Error> {
+ let Some(host_key) = cert.as_hostkey() else {
+ // Return passthrough for TLS X509 certificates to use whatever validation
+ // was done in git2.
+ return Ok(CertificateCheckStatus::CertificatePassthrough)
+ };
+ // If a nonstandard port is in use, check for that first.
+ // The fallback to check without a port is handled in the HostKeyNotFound handler.
+ let host_maybe_port = match port {
+ Some(port) if port != 22 => format!("[{host}]:{port}"),
+ _ => host.to_string(),
+ };
+ // The error message must be constructed as a string to pass through the libgit2 C API.
+ let err_msg = match check_ssh_known_hosts(
+ config,
+ host_key,
+ &host_maybe_port,
+ config_known_hosts,
+ ) {
+ Ok(()) => {
+ return Ok(CertificateCheckStatus::CertificateOk);
+ }
+ Err(KnownHostError::CheckError(e)) => {
+ format!("error: failed to validate host key:\n{:#}", e)
+ }
+ Err(KnownHostError::HostKeyNotFound {
+ hostname,
+ key_type,
+ remote_host_key,
+ remote_fingerprint,
+ other_hosts,
+ }) => {
+ // Try checking without the port.
+ if port.is_some()
+ && !matches!(port, Some(22))
+ && check_ssh_known_hosts(config, host_key, host, config_known_hosts).is_ok()
+ {
+ return Ok(CertificateCheckStatus::CertificateOk);
+ }
+ let key_type_short_name = key_type.short_name();
+ let key_type_name = key_type.name();
+ let known_hosts_location = user_known_host_location_to_add(diagnostic_home_config);
+ let other_hosts_message = if other_hosts.is_empty() {
+ String::new()
+ } else {
+ let mut msg = String::from(
+ "Note: This host key was found, \
+ but is associated with a different host:\n",
+ );
+ for known_host in other_hosts {
+ write!(
+ msg,
+ " {loc}: {patterns}\n",
+ loc = known_host.location,
+ patterns = known_host.patterns
+ )
+ .unwrap();
+ }
+ msg
+ };
+ format!("error: unknown SSH host key\n\
+ The SSH host key for `{hostname}` is not known and cannot be validated.\n\
+ \n\
+ To resolve this issue, add the host key to {known_hosts_location}\n\
+ \n\
+ The key to add is:\n\
+ \n\
+ {hostname} {key_type_name} {remote_host_key}\n\
+ \n\
+ The {key_type_short_name} key fingerprint is: SHA256:{remote_fingerprint}\n\
+ This fingerprint should be validated with the server administrator that it is correct.\n\
+ {other_hosts_message}\n\
+ See https://doc.rust-lang.org/stable/cargo/appendix/git-authentication.html#ssh-known-hosts \
+ for more information.\n\
+ ")
+ }
+ Err(KnownHostError::HostKeyHasChanged {
+ hostname,
+ key_type,
+ old_known_host,
+ remote_host_key,
+ remote_fingerprint,
+ }) => {
+ let key_type_short_name = key_type.short_name();
+ let key_type_name = key_type.name();
+ let known_hosts_location = user_known_host_location_to_add(diagnostic_home_config);
+ let old_key_resolution = match old_known_host.location {
+ KnownHostLocation::File { path, lineno } => {
+ let old_key_location = path.display();
+ format!(
+ "removing the old {key_type_name} key for `{hostname}` \
+ located at {old_key_location} line {lineno}, \
+ and adding the new key to {known_hosts_location}",
+ )
+ }
+ KnownHostLocation::Config { definition } => {
+ format!(
+ "removing the old {key_type_name} key for `{hostname}` \
+ loaded from Cargo's config at {definition}, \
+ and adding the new key to {known_hosts_location}"
+ )
+ }
+ KnownHostLocation::Bundled => {
+ format!(
+ "adding the new key to {known_hosts_location}\n\
+ The current host key is bundled as part of Cargo."
+ )
+ }
+ };
+ format!("error: SSH host key has changed for `{hostname}`\n\
+ *********************************\n\
+ * WARNING: HOST KEY HAS CHANGED *\n\
+ *********************************\n\
+ This may be caused by a man-in-the-middle attack, or the \
+ server may have changed its host key.\n\
+ \n\
+ The {key_type_short_name} fingerprint for the key from the remote host is:\n\
+ SHA256:{remote_fingerprint}\n\
+ \n\
+ You are strongly encouraged to contact the server \
+ administrator for `{hostname}` to verify that this new key is \
+ correct.\n\
+ \n\
+ If you can verify that the server has a new key, you can \
+ resolve this error by {old_key_resolution}\n\
+ \n\
+ The key provided by the remote host is:\n\
+ \n\
+ {hostname} {key_type_name} {remote_host_key}\n\
+ \n\
+ See https://doc.rust-lang.org/stable/cargo/appendix/git-authentication.html#ssh-known-hosts \
+ for more information.\n\
+ ")
+ }
+ Err(KnownHostError::HostKeyRevoked {
+ hostname,
+ key_type,
+ remote_host_key,
+ location,
+ }) => {
+ let key_type_short_name = key_type.short_name();
+ format!(
+ "error: Key has been revoked for `{hostname}`\n\
+ **************************************\n\
+ * WARNING: REVOKED HOST KEY DETECTED *\n\
+ **************************************\n\
+ This may indicate that the key provided by this host has been\n\
+ compromised and should not be accepted.\n\
+ \n\
+ The host key {key_type_short_name} {remote_host_key} is revoked\n\
+ in {location} and has been rejected.\n\
+ "
+ )
+ }
+ Err(KnownHostError::HostHasOnlyCertAuthority { hostname, location }) => {
+ format!("error: Found a `@cert-authority` marker for `{hostname}`\n\
+ \n\
+ Cargo doesn't support certificate authorities for host key verification. It is\n\
+ recommended that the command line Git client is used instead. This can be achieved\n\
+ by setting `net.git-fetch-with-cli` to `true` in the Cargo config.\n\
+ \n\
+ The `@cert-authority` line was found in {location}.\n\
+ \n\
+ See https://doc.rust-lang.org/stable/cargo/appendix/git-authentication.html#ssh-known-hosts \
+ for more information.\n\
+ ")
+ }
+ };
+ Err(git2::Error::new(
+ git2::ErrorCode::GenericError,
+ git2::ErrorClass::Callback,
+ err_msg,
+ ))
+}
+
+/// Checks if the given host/host key pair is known.
+fn check_ssh_known_hosts(
+ config: &Config,
+ cert_host_key: &git2::cert::CertHostkey<'_>,
+ host: &str,
+ config_known_hosts: Option<&Vec<Value<String>>>,
+) -> Result<(), KnownHostError> {
+ let Some(remote_host_key) = cert_host_key.hostkey() else {
+ return Err(anyhow::format_err!("remote host key is not available").into());
+ };
+ let remote_key_type = cert_host_key.hostkey_type().unwrap();
+
+ // Collect all the known host entries from disk.
+ let mut known_hosts = Vec::new();
+ for path in known_host_files(config) {
+ if !path.exists() {
+ continue;
+ }
+ let hosts = load_hostfile(&path)?;
+ known_hosts.extend(hosts);
+ }
+ if let Some(config_known_hosts) = config_known_hosts {
+ // Format errors aren't treated as hard errors, so the format can change
+ // in the future while retaining forwards compatibility.
+ for line_value in config_known_hosts {
+ let location = KnownHostLocation::Config {
+ definition: line_value.definition.clone(),
+ };
+ match parse_known_hosts_line(&line_value.val, location) {
+ Some(known_host) => known_hosts.push(known_host),
+ None => log::warn!(
+ "failed to parse known host {} from {}",
+ line_value.val,
+ line_value.definition
+ ),
+ }
+ }
+ }
+ // Load the bundled keys. Don't add keys for hosts that the user has
+ // configured, which gives them the option to override them. This could be
+ // useful if the keys are ever revoked.
+ let configured_hosts: HashSet<_> = known_hosts
+ .iter()
+ .flat_map(|known_host| {
+ known_host
+ .patterns
+ .split(',')
+ .map(|pattern| pattern.to_lowercase())
+ })
+ .collect();
+ for (patterns, key_type, key) in BUNDLED_KEYS {
+ if !configured_hosts.contains(*patterns) {
+ let key = STANDARD.decode(key).unwrap();
+ known_hosts.push(KnownHost {
+ location: KnownHostLocation::Bundled,
+ patterns: patterns.to_string(),
+ key_type: key_type.to_string(),
+ key,
+ line_type: KnownHostLineType::Key,
+ });
+ }
+ }
+ for (patterns, key_type, key) in BUNDLED_REVOCATIONS {
+ let key = STANDARD.decode(key).unwrap();
+ known_hosts.push(KnownHost {
+ location: KnownHostLocation::Bundled,
+ patterns: patterns.to_string(),
+ key_type: key_type.to_string(),
+ key,
+ line_type: KnownHostLineType::Revoked,
+ });
+ }
+ check_ssh_known_hosts_loaded(&known_hosts, host, remote_key_type, remote_host_key)
+}
+
+/// Checks a host key against a loaded set of known hosts.
+fn check_ssh_known_hosts_loaded(
+ known_hosts: &[KnownHost],
+ host: &str,
+ remote_key_type: SshHostKeyType,
+ remote_host_key: &[u8],
+) -> Result<(), KnownHostError> {
+ // `latent_errors` keeps track of potential errors that will be returned
+ // in case a matching host key isn't found.
+ let mut latent_errors: Vec<KnownHostError> = Vec::new();
+
+ // `other_hosts` keeps track of any entries that have an identical key,
+ // but a different hostname.
+ let mut other_hosts = Vec::new();
+
+ // `accepted_known_host_found` keeps track of whether we've found a matching
+ // line in the `known_hosts` file that we would accept. We can't return that
+ // immediately, because there may be a subsequent @revoked key.
+ let mut accepted_known_host_found = false;
+
+ // Older versions of OpenSSH (before 6.8, March 2015) showed MD5
+ // fingerprints (see FingerprintHash ssh config option). Here we only
+ // support SHA256.
+ let mut remote_fingerprint = cargo_util::Sha256::new();
+ remote_fingerprint.update(remote_host_key.clone());
+ let remote_fingerprint = STANDARD_NO_PAD.encode(remote_fingerprint.finish());
+ let remote_host_key_encoded = STANDARD.encode(remote_host_key);
+
+ for known_host in known_hosts {
+ // The key type from libgit2 needs to match the key type from the host file.
+ if known_host.key_type != remote_key_type.name() {
+ continue;
+ }
+ let key_matches = known_host.key == remote_host_key;
+ if !known_host.host_matches(host) {
+ if key_matches {
+ other_hosts.push(known_host.clone());
+ }
+ continue;
+ }
+ match known_host.line_type {
+ KnownHostLineType::Key => {
+ if key_matches {
+ accepted_known_host_found = true;
+ } else {
+ // The host and key type matched, but the key itself did not.
+ // This indicates the key has changed.
+ // This is only reported as an error if no subsequent lines have a
+ // correct key.
+ latent_errors.push(KnownHostError::HostKeyHasChanged {
+ hostname: host.to_string(),
+ key_type: remote_key_type,
+ old_known_host: known_host.clone(),
+ remote_host_key: remote_host_key_encoded.clone(),
+ remote_fingerprint: remote_fingerprint.clone(),
+ });
+ }
+ }
+ KnownHostLineType::Revoked => {
+ if key_matches {
+ return Err(KnownHostError::HostKeyRevoked {
+ hostname: host.to_string(),
+ key_type: remote_key_type,
+ remote_host_key: remote_host_key_encoded,
+ location: known_host.location.clone(),
+ });
+ }
+ }
+ KnownHostLineType::CertAuthority => {
+ // The host matches a @cert-authority line, which is unsupported.
+ latent_errors.push(KnownHostError::HostHasOnlyCertAuthority {
+ hostname: host.to_string(),
+ location: known_host.location.clone(),
+ });
+ }
+ }
+ }
+
+ // We have an accepted host key and it hasn't been revoked.
+ if accepted_known_host_found {
+ return Ok(());
+ }
+
+ if latent_errors.is_empty() {
+ // FIXME: Ideally the error message should include the IP address of the
+ // remote host (to help the user validate that they are connecting to the
+ // host they were expecting to). However, I don't see a way to obtain that
+ // information from libgit2.
+ Err(KnownHostError::HostKeyNotFound {
+ hostname: host.to_string(),
+ key_type: remote_key_type,
+ remote_host_key: remote_host_key_encoded,
+ remote_fingerprint,
+ other_hosts,
+ })
+ } else {
+ // We're going to take the first HostKeyHasChanged error if
+ // we find one, otherwise we'll take the last error recorded (which
+ // we expect to be a CertAuthority error).
+ if let Some(index) = latent_errors
+ .iter()
+ .position(|e| matches!(e, KnownHostError::HostKeyHasChanged { .. }))
+ {
+ return Err(latent_errors.remove(index));
+ } else {
+ // Otherwise, we take the last error recorded (which we expect to be
+ // a CertAuthority error).
+ Err(latent_errors.pop().unwrap())
+ }
+ }
+}
+
+/// Returns a list of files to try loading OpenSSH-formatted known hosts.
+fn known_host_files(config: &Config) -> Vec<PathBuf> {
+ let mut result = Vec::new();
+ if config
+ .get_env_os("__CARGO_TEST_DISABLE_GLOBAL_KNOWN_HOST")
+ .is_some()
+ {
+ } else if cfg!(unix) {
+ result.push(PathBuf::from("/etc/ssh/ssh_known_hosts"));
+ } else if cfg!(windows) {
+ // The msys/cygwin version of OpenSSH uses `/etc` from the posix root
+ // filesystem there (such as `C:\msys64\etc\ssh\ssh_known_hosts`).
+ // However, I do not know of a way to obtain that location from
+ // Windows-land. The ProgramData version here is what the PowerShell
+ // port of OpenSSH does.
+ if let Some(progdata) = config.get_env_os("ProgramData") {
+ let mut progdata = PathBuf::from(progdata);
+ progdata.push("ssh");
+ progdata.push("ssh_known_hosts");
+ result.push(progdata)
+ }
+ }
+ result.extend(user_known_host_location());
+ result
+}
+
+/// The location of the user's known_hosts file.
+fn user_known_host_location() -> Option<PathBuf> {
+ // NOTE: This is a potentially inaccurate prediction of what the user
+ // actually wants. The actual location depends on several factors:
+ //
+ // - Windows OpenSSH Powershell version: I believe this looks up the home
+ // directory via ProfileImagePath in the registry, falling back to
+ // `GetWindowsDirectoryW` if that fails.
+ // - OpenSSH Portable (under msys): This is very complicated. I got lost
+ // after following it through some ldap/active directory stuff.
+ // - OpenSSH (most unix platforms): Uses `pw->pw_dir` from `getpwuid()`.
+ //
+ // This doesn't do anything close to that. home_dir's behavior is:
+ // - Windows: $USERPROFILE, or SHGetFolderPathW()
+ // - Unix: $HOME, or getpwuid_r()
+ //
+ // Since there is a mismatch here, the location returned here might be
+ // different than what the user's `ssh` CLI command uses. We may want to
+ // consider trying to align it better.
+ home::home_dir().map(|mut home| {
+ home.push(".ssh");
+ home.push("known_hosts");
+ home
+ })
+}
+
+/// The location to display in an error message instructing the user where to
+/// add the new key.
+fn user_known_host_location_to_add(diagnostic_home_config: &str) -> String {
+ // Note that we don't bother with the legacy known_hosts2 files.
+ let user = user_known_host_location();
+ let openssh_loc = match &user {
+ Some(path) => path.to_str().expect("utf-8 home"),
+ None => "~/.ssh/known_hosts",
+ };
+ format!(
+ "the `net.ssh.known-hosts` array in your Cargo configuration \
+ (such as {diagnostic_home_config}) \
+ or in your OpenSSH known_hosts file at {openssh_loc}"
+ )
+}
+
+const HASH_HOSTNAME_PREFIX: &str = "|1|";
+
+#[derive(Clone)]
+enum KnownHostLineType {
+ Key,
+ CertAuthority,
+ Revoked,
+}
+
+/// A single known host entry.
+#[derive(Clone)]
+struct KnownHost {
+ location: KnownHostLocation,
+ /// The hostname. May be comma separated to match multiple hosts.
+ patterns: String,
+ key_type: String,
+ key: Vec<u8>,
+ line_type: KnownHostLineType,
+}
+
+impl KnownHost {
+ /// Returns whether or not the given host matches this known host entry.
+ fn host_matches(&self, host: &str) -> bool {
+ let mut match_found = false;
+ let host = host.to_lowercase();
+ if let Some(hashed) = self.patterns.strip_prefix(HASH_HOSTNAME_PREFIX) {
+ return hashed_hostname_matches(&host, hashed);
+ }
+ for pattern in self.patterns.split(',') {
+ let pattern = pattern.to_lowercase();
+ // FIXME: support * and ? wildcards
+ if let Some(pattern) = pattern.strip_prefix('!') {
+ if pattern == host {
+ return false;
+ }
+ } else {
+ match_found |= pattern == host;
+ }
+ }
+ match_found
+ }
+}
+
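+/// Checks whether a hostname matches an OpenSSH-style hashed pattern of the
+/// form `base64(salt)|base64(HMAC-SHA1(salt, hostname))`; the leading `|1|`
+/// marker has already been stripped by the caller.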
+fn hashed_hostname_matches(host: &str, hashed: &str) -> bool {
+ let Some((b64_salt, b64_host)) = hashed.split_once('|') else { return false; };
+ let Ok(salt) = STANDARD.decode(b64_salt) else { return false; };
+ let Ok(hashed_host) = STANDARD.decode(b64_host) else { return false; };
+ let Ok(mut mac) = hmac::Hmac::<sha1::Sha1>::new_from_slice(&salt) else { return false; };
+ mac.update(host.as_bytes());
+ let result = mac.finalize().into_bytes();
+ hashed_host == &result[..]
+}
+
+/// Loads an OpenSSH known_hosts file.
+fn load_hostfile(path: &Path) -> Result<Vec<KnownHost>, anyhow::Error> {
+ let contents = cargo_util::paths::read(path)?;
+ Ok(load_hostfile_contents(path, &contents))
+}
+
+fn load_hostfile_contents(path: &Path, contents: &str) -> Vec<KnownHost> {
+ let entries = contents
+ .lines()
+ .enumerate()
+ .filter_map(|(lineno, line)| {
+ let location = KnownHostLocation::File {
+ path: path.to_path_buf(),
+ lineno: lineno as u32 + 1,
+ };
+ parse_known_hosts_line(line, location)
+ })
+ .collect();
+ entries
+}
+
+fn parse_known_hosts_line(line: &str, location: KnownHostLocation) -> Option<KnownHost> {
+ let line = line.trim();
+ if line.is_empty() || line.starts_with('#') {
+ return None;
+ }
+ let mut parts = line.split([' ', '\t']).filter(|s| !s.is_empty());
+
+ let line_type = if line.starts_with("@") {
+ let line_type = parts.next()?;
+
+ if line_type == "@cert-authority" {
+ KnownHostLineType::CertAuthority
+ } else if line_type == "@revoked" {
+ KnownHostLineType::Revoked
+ } else {
+ // No other markers are defined
+ return None;
+ }
+ } else {
+ KnownHostLineType::Key
+ };
+
+ let patterns = parts.next()?;
+ let key_type = parts.next()?;
+ let key = parts.next().map(|p| STANDARD.decode(p))?.ok()?;
+ Some(KnownHost {
+ line_type,
+ location,
+ patterns: patterns.to_string(),
+ key_type: key_type.to_string(),
+ key,
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ static COMMON_CONTENTS: &str = r#"
+ # Comments allowed at start of line
+
+ example.com,rust-lang.org ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC5MzWIpZwpkpDjyCNiTIEVFhSA9OUUQvjFo7CgZBGCAj/cqeUIgiLsgtfmtBsfWIkAECQpM7ePP7NLZFGJcHvoyg5jXJiIX5s0eKo9IlcuTLLrMkW5MkHXE7bNklVbW1WdCfF2+y7Ao25B4L8FFRokMh0yp/H6+8xZ7PdVwL3FRPEg8ftZ5R0kuups6xiMHPRX+f/07vfJzA47YDPmXfhkn+JK8kL0JYw8iy8BtNBfRQL99d9iXJzWXnNce5NHMuKD5rOonD3aQHLDlwK+KhrFRrdaxQEM8ZWxNti0ux8yT4Dl5jJY0CrIu3Xl6+qroVgTqJGNkTbhs5DGWdFh6BLPTTH15rN4buisg7uMyLyHqx06ckborqD33gWu+Jig7O+PV6KJmL5mp1O1HXvZqkpBdTiT6GiDKG3oECCIXkUk0BSU9VG9VQcrMxxvgiHlyoXUAfYQoXv/lnxkTnm+Sr36kutsVOs7n5B43ZKAeuaxyQ11huJZpxamc0RA1HM641s= eric@host
+ Example.net ssh-dss AAAAB3NzaC1kc3MAAACBAK2Ek3jVxisXmz5UcZ7W65BAj/nDJCCVvSe0Aytndn4PH6k7sVesut5OoY6PdksZ9tEfuFjjS9HR5SJb8j1GW0GxtaSHHbf+rNc36PeU75bffzyIWwpA8uZFONt5swUAXJXcsHOoapNbUFuhHsRhB2hXxz9QGNiiwIwRJeSHixKRAAAAFQChKfxO1z9H2/757697xP5nJ/Z5dwAAAIEAoc+HIWas+4WowtB/KtAp6XE0B9oHI+55wKtdcGwwb7zHKK9scWNXwxIcMhSvyB3Oe2I7dQQlvyIWxsdZlzOkX0wdsTHjIAnBAP68MyvMv4kq3+I5GAVcFsqoLZfZvh0dlcgUq1/YNYZwKlt89tnzk8Fp4KLWmuw8Bd8IShYVa78AAACAL3qd8kNTY7CthgsQ8iWdjbkGSF/1KCeFyt8UjurInp9wvPDjqagwakbyLOzN7y3/ItTPCaGuX+RjFP0zZTf8i9bsAVyjFJiJ7vzRXcWytuFWANrpzLTn1qzPfh63iK92Aw8AVBYvEA/4bxo+XReAvhNBB/m78G6OedTeu6ZoTsI= eric@host
+ [example.net]:2222 ssh-dss AAAAB3NzaC1kc3MAAACBAJJN5kLZEpOJpXWyMT4KwYvLAj+b9ErNtglxOi86C6Kw7oZeYdDMCfD3lc3PJyX64udQcWGfO4abSESMiYdY43yFAZH279QGH5Q/B5CklVvTqYpfAUR+1r9TQxy3OVQHk7FB2wOi4xNQ3myO0vaYlBOB9il+P223aERbXx4JTWdvAAAAFQCTHWTcXxLK5Z6ZVPmfdSDyHzkF2wAAAIEAhp41/mTnM0Y0EWSyCXuETMW1QSpKGF8sqoZKp6wdzyhLXu0i32gLdXj4p24em/jObYh93hr+MwgxqWq+FHgD+D80Qg5f6vj4yEl4Uu5hqtTpCBFWUQoyEckbUkPf8uZ4/XzAne+tUSjZm09xATCmK9U2IGqZE+D+90eBkf1Svc8AAACAeKhi4EtfwenFYqKz60ZoEEhIsE1yI2jH73akHnfHpcW84w+fk3YlwjcfDfyYso+D0jZBdJeK5qIdkbUWhAX8wDjJVO0WL6r/YPr4yu/CgEyW1H59tAbujGJ4NR0JDqioulzYqNHnxpiw1RJukZnPBfSFKzRElvPOCq/NkQM/Mwk= eric@host
+ nistp256.example.org ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJ4iYGCcJrUIfrHfzlsv8e8kaF36qpcUpe3VNAKVCZX/BDptIdlEe8u8vKNRTPgUO9jqS0+tjTcPiQd8/8I9qng= eric@host
+ nistp384.example.org ecdsa-sha2-nistp384 AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBNuGT3TqMz2rcwOt2ZqkiNqq7dvWPE66W2qPCoZsh0pQhVU3BnhKIc6nEr6+Wts0Z3jdF3QWwxbbTjbVTVhdr8fMCFhDCWiQFm9xLerYPKnu9qHvx9K87/fjc5+0pu4hLA== eric@host
+ nistp521.example.org ecdsa-sha2-nistp521 AAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAACFBAD35HH6OsK4DN75BrKipVj/GvZaUzjPNa1F8wMjUdPB1JlVcUfgzJjWSxrhmaNN3u0soiZw8WNRFINsGPCw5E7DywF1689WcIj2Ye2rcy99je15FknScTzBBD04JgIyOI50mCUaPCBoF14vFlN6BmO00cFo+yzy5N8GuQ2sx9kr21xmFQ== eric@host
+ # Revoked is supported, but without Cert-Authority support, it will only negate some other fixed key.
+ @revoked revoked.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKtQsi+KPYispwm2rkMidQf30fG1Niy8XNkvASfePoca eric@host
+ # Cert-Authority is not supported (below key should not be valid anyway)
+ @cert-authority ca.example.com ssh-rsa AABBB5Wm
+ example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAWkjI6XT2SZh3xNk5NhisA3o3sGzWR+VAKMSqHtI0aY eric@host
+ 192.168.42.12 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKVYJpa0yUGaNk0NXQTPWa0tHjqRpx+7hl2diReH6DtR eric@host
+ |1|QxzZoTXIWLhUsuHAXjuDMIV3FjQ=|M6NCOIkjiWdCWqkh5+Q+/uFLGjs= ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIHgN3O21U4LWtP5OzjTzPnUnSDmCNDvyvlaj6Hi65JC eric@host
+ # Negation isn't terribly useful without globs.
+ neg.example.com,!neg.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOXfUnaAHTlo1Qi//rNk26OcmHikmkns1Z6WW/UuuS3K eric@host
+ "#;
+
+ #[test]
+ fn known_hosts_parse() {
+ let kh_path = Path::new("/home/abc/.known_hosts");
+ let khs = load_hostfile_contents(kh_path, COMMON_CONTENTS);
+ assert_eq!(khs.len(), 12);
+ match &khs[0].location {
+ KnownHostLocation::File { path, lineno } => {
+ assert_eq!(path, kh_path);
+ assert_eq!(*lineno, 4);
+ }
+ _ => panic!("unexpected"),
+ }
+ assert_eq!(khs[0].patterns, "example.com,rust-lang.org");
+ assert_eq!(khs[0].key_type, "ssh-rsa");
+ assert_eq!(khs[0].key.len(), 407);
+ assert_eq!(&khs[0].key[..30], b"\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x03\x01\x00\x01\x00\x00\x01\x81\x00\xb935\x88\xa5\x9c)");
+ match &khs[1].location {
+ KnownHostLocation::File { path, lineno } => {
+ assert_eq!(path, kh_path);
+ assert_eq!(*lineno, 5);
+ }
+ _ => panic!("unexpected"),
+ }
+ assert_eq!(khs[2].patterns, "[example.net]:2222");
+ assert_eq!(khs[3].patterns, "nistp256.example.org");
+ assert_eq!(khs[9].patterns, "192.168.42.12");
+ }
+
+ #[test]
+ fn host_matches() {
+ let kh_path = Path::new("/home/abc/.known_hosts");
+ let khs = load_hostfile_contents(kh_path, COMMON_CONTENTS);
+ assert!(khs[0].host_matches("example.com"));
+ assert!(khs[0].host_matches("rust-lang.org"));
+ assert!(khs[0].host_matches("EXAMPLE.COM"));
+ assert!(khs[1].host_matches("example.net"));
+ assert!(!khs[0].host_matches("example.net"));
+ assert!(khs[2].host_matches("[example.net]:2222"));
+ assert!(!khs[2].host_matches("example.net"));
+ assert!(khs[10].host_matches("hashed.example.com"));
+ assert!(!khs[10].host_matches("example.com"));
+ assert!(!khs[11].host_matches("neg.example.com"));
+ }
+
+ #[test]
+ fn check_match() {
+ let kh_path = Path::new("/home/abc/.known_hosts");
+ let khs = load_hostfile_contents(kh_path, COMMON_CONTENTS);
+
+ assert!(check_ssh_known_hosts_loaded(
+ &khs,
+ "example.com",
+ SshHostKeyType::Rsa,
+ &khs[0].key
+ )
+ .is_ok());
+
+ match check_ssh_known_hosts_loaded(&khs, "example.com", SshHostKeyType::Dss, &khs[0].key) {
+ Err(KnownHostError::HostKeyNotFound {
+ hostname,
+ remote_fingerprint,
+ other_hosts,
+ ..
+ }) => {
+ assert_eq!(
+ remote_fingerprint,
+ "yn+pONDn0EcgdOCVptgB4RZd/wqmsVKrPnQMLtrvhw8"
+ );
+ assert_eq!(hostname, "example.com");
+ assert_eq!(other_hosts.len(), 0);
+ }
+ _ => panic!("unexpected"),
+ }
+
+ match check_ssh_known_hosts_loaded(
+ &khs,
+ "foo.example.com",
+ SshHostKeyType::Rsa,
+ &khs[0].key,
+ ) {
+ Err(KnownHostError::HostKeyNotFound { other_hosts, .. }) => {
+ assert_eq!(other_hosts.len(), 1);
+ assert_eq!(other_hosts[0].patterns, "example.com,rust-lang.org");
+ }
+ _ => panic!("unexpected"),
+ }
+
+ let mut modified_key = khs[0].key.clone();
+ modified_key[0] = 1;
+ match check_ssh_known_hosts_loaded(&khs, "example.com", SshHostKeyType::Rsa, &modified_key)
+ {
+ Err(KnownHostError::HostKeyHasChanged { old_known_host, .. }) => {
+ assert!(matches!(
+ old_known_host.location,
+ KnownHostLocation::File { lineno: 4, .. }
+ ));
+ }
+ _ => panic!("unexpected"),
+ }
+ }
+
+ #[test]
+ fn revoked() {
+ let kh_path = Path::new("/home/abc/.known_hosts");
+ let khs = load_hostfile_contents(kh_path, COMMON_CONTENTS);
+
+ match check_ssh_known_hosts_loaded(
+ &khs,
+ "revoked.example.com",
+ SshHostKeyType::Ed255219,
+ &khs[6].key,
+ ) {
+ Err(KnownHostError::HostKeyRevoked {
+ hostname, location, ..
+ }) => {
+ assert_eq!("revoked.example.com", hostname);
+ assert!(matches!(
+ location,
+ KnownHostLocation::File { lineno: 11, .. }
+ ));
+ }
+ _ => panic!("Expected key to be revoked for revoked.example.com."),
+ }
+ }
+
+ #[test]
+ fn cert_authority() {
+ let kh_path = Path::new("/home/abc/.known_hosts");
+ let khs = load_hostfile_contents(kh_path, COMMON_CONTENTS);
+
+ match check_ssh_known_hosts_loaded(
+ &khs,
+ "ca.example.com",
+ SshHostKeyType::Rsa,
+ &khs[0].key, // The key should not matter
+ ) {
+ Err(KnownHostError::HostHasOnlyCertAuthority {
+ hostname, location, ..
+ }) => {
+ assert_eq!("ca.example.com", hostname);
+ assert!(matches!(
+ location,
+ KnownHostLocation::File { lineno: 13, .. }
+ ));
+ }
+ Err(KnownHostError::HostKeyNotFound { hostname, .. }) => {
+ panic!("host key not found... {}", hostname);
+ }
+ _ => panic!("Expected host to only have @cert-authority line (which is unsupported)."),
+ }
+ }
+
+ #[test]
+ fn multiple_errors() {
+ let contents = r#"
+ not-used.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAWkjI6XT2SZh3xNk5NhisA3o3sGzWR+VAKMSqHtI0aY eric@host
+ # Cert-authority and changed key for the same host - changed key error should prevail
+ @cert-authority example.com ssh-ed25519 AABBB5Wm
+ example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKVYJpa0yUGaNk0NXQTPWa0tHjqRpx+7hl2diReH6DtR eric@host
+ "#;
+
+ let kh_path = Path::new("/home/abc/.known_hosts");
+ let khs = load_hostfile_contents(kh_path, contents);
+
+ match check_ssh_known_hosts_loaded(
+ &khs,
+ "example.com",
+ SshHostKeyType::Ed255219,
+ &khs[0].key,
+ ) {
+ Err(KnownHostError::HostKeyHasChanged {
+ hostname,
+ old_known_host,
+ remote_host_key,
+ ..
+ }) => {
+ assert_eq!("example.com", hostname);
+ assert_eq!(
+ "AAAAC3NzaC1lZDI1NTE5AAAAIAWkjI6XT2SZh3xNk5NhisA3o3sGzWR+VAKMSqHtI0aY",
+ remote_host_key
+ );
+ assert!(matches!(
+ old_known_host.location,
+ KnownHostLocation::File { lineno: 5, .. }
+ ));
+ }
+ _ => panic!("Expected error to be of type HostKeyHasChanged."),
+ }
+ }
+
+ #[test]
+ fn known_host_and_revoked() {
+ let contents = r#"
+ example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKVYJpa0yUGaNk0NXQTPWa0tHjqRpx+7hl2diReH6DtR eric@host
+ # Later in the file the same host key is revoked
+ @revoked example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKVYJpa0yUGaNk0NXQTPWa0tHjqRpx+7hl2diReH6DtR eric@host
+ "#;
+
+ let kh_path = Path::new("/home/abc/.known_hosts");
+ let khs = load_hostfile_contents(kh_path, contents);
+
+ match check_ssh_known_hosts_loaded(
+ &khs,
+ "example.com",
+ SshHostKeyType::Ed255219,
+ &khs[0].key,
+ ) {
+ Err(KnownHostError::HostKeyRevoked {
+ hostname,
+ remote_host_key,
+ location,
+ ..
+ }) => {
+ assert_eq!("example.com", hostname);
+ assert_eq!(
+ "AAAAC3NzaC1lZDI1NTE5AAAAIKVYJpa0yUGaNk0NXQTPWa0tHjqRpx+7hl2diReH6DtR",
+ remote_host_key
+ );
+ assert!(matches!(
+ location,
+ KnownHostLocation::File { lineno: 4, .. }
+ ));
+ }
+ _ => panic!("Expected host key to be reject with error HostKeyRevoked."),
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/sources/git/mod.rs b/src/tools/cargo/src/cargo/sources/git/mod.rs
new file mode 100644
index 000000000..6c230be93
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/git/mod.rs
@@ -0,0 +1,10 @@
+pub use self::source::GitSource;
+pub use self::utils::{fetch, GitCheckout, GitDatabase, GitRemote};
+mod known_hosts;
+mod oxide;
+mod source;
+mod utils;
+
+pub mod fetch {
+ pub type Error = gix::env::collate::fetch::Error<gix::refspec::parse::Error>;
+}
diff --git a/src/tools/cargo/src/cargo/sources/git/oxide.rs b/src/tools/cargo/src/cargo/sources/git/oxide.rs
new file mode 100644
index 000000000..0270579da
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/git/oxide.rs
@@ -0,0 +1,355 @@
+//! This module contains all code using `gitoxide` for operations on `git` repositories, and it mirrors
+//! `utils` closely for now. One day it can be renamed to `utils` once `git2` isn't required anymore.
+
+use crate::ops::HttpTimeout;
+use crate::util::{human_readable_bytes, network, MetricsCounter, Progress};
+use crate::{CargoResult, Config};
+use cargo_util::paths;
+use gix::bstr::{BString, ByteSlice};
+use log::debug;
+use std::cell::RefCell;
+use std::path::Path;
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::{Arc, Weak};
+use std::time::{Duration, Instant};
+
+/// For the time being, `repo_path` makes it easy to instantiate a gitoxide repo just for fetching.
+/// In future this may change to be the gitoxide repository itself.
+pub fn with_retry_and_progress(
+ repo_path: &std::path::Path,
+ config: &Config,
+ cb: &(dyn Fn(
+ &std::path::Path,
+ &AtomicBool,
+ &mut gix::progress::tree::Item,
+ &mut dyn FnMut(&gix::bstr::BStr),
+ ) -> Result<(), crate::sources::git::fetch::Error>
+ + Send
+ + Sync),
+) -> CargoResult<()> {
+ std::thread::scope(|s| {
+ let mut progress_bar = Progress::new("Fetch", config);
+ network::retry::with_retry(config, || {
+ let progress_root: Arc<gix::progress::tree::Root> =
+ gix::progress::tree::root::Options {
+ initial_capacity: 10,
+ message_buffer_capacity: 10,
+ }
+ .into();
+ let root = Arc::downgrade(&progress_root);
+ let thread = s.spawn(move || {
+ let mut progress = progress_root.add_child("operation");
+ let mut urls = RefCell::new(Default::default());
+ let res = cb(
+ &repo_path,
+ &AtomicBool::default(),
+ &mut progress,
+ &mut |url| {
+ *urls.borrow_mut() = Some(url.to_owned());
+ },
+ );
+ amend_authentication_hints(res, urls.get_mut().take())
+ });
+ translate_progress_to_bar(&mut progress_bar, root)?;
+ thread.join().expect("no panic in scoped thread")
+ })
+ })
+}
+
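+/// Polls the `gitoxide` progress tree until the fetch thread drops it,
+/// translating the pack-reading, object-indexing, and delta-resolution tasks
+/// into ticks of Cargo's own progress bar.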
+fn translate_progress_to_bar(
+ progress_bar: &mut Progress<'_>,
+ root: Weak<gix::progress::tree::Root>,
+) -> CargoResult<()> {
+ let read_pack_bytes: gix::progress::Id =
+ gix::odb::pack::bundle::write::ProgressId::ReadPackBytes.into();
+ let delta_index_objects: gix::progress::Id =
+ gix::odb::pack::index::write::ProgressId::IndexObjects.into();
+ let resolve_objects: gix::progress::Id =
+ gix::odb::pack::index::write::ProgressId::ResolveObjects.into();
+
+ // We choose `N=10` here to make a `300ms * 10 slots ~= 3000ms`
+ // sliding window for tracking the data transfer rate (in bytes/s).
+ let mut last_percentage_update = Instant::now();
+ let mut last_fast_update = Instant::now();
+ let mut counter = MetricsCounter::<10>::new(0, last_percentage_update);
+
+ let mut tasks = Vec::with_capacity(10);
+ let slow_check_interval = std::time::Duration::from_millis(300);
+ let fast_check_interval = Duration::from_millis(50);
+ let sleep_interval = Duration::from_millis(10);
+ debug_assert_eq!(
+ slow_check_interval.as_millis() % fast_check_interval.as_millis(),
+ 0,
+ "progress should be smoother by keeping these as multiples of each other"
+ );
+ debug_assert_eq!(
+ fast_check_interval.as_millis() % sleep_interval.as_millis(),
+ 0,
+ "progress should be smoother by keeping these as multiples of each other"
+ );
+
+ while let Some(root) = root.upgrade() {
+ std::thread::sleep(sleep_interval);
+ let needs_update = last_fast_update.elapsed() >= fast_check_interval;
+ if !needs_update {
+ continue;
+ }
+ let now = Instant::now();
+ last_fast_update = now;
+
+ root.sorted_snapshot(&mut tasks);
+
+ fn progress_by_id(
+ id: gix::progress::Id,
+ task: &gix::progress::Task,
+ ) -> Option<&gix::progress::Value> {
+ (task.id == id).then(|| task.progress.as_ref()).flatten()
+ }
+ fn find_in<K>(
+ tasks: &[(K, gix::progress::Task)],
+ cb: impl Fn(&gix::progress::Task) -> Option<&gix::progress::Value>,
+ ) -> Option<&gix::progress::Value> {
+ tasks.iter().find_map(|(_, t)| cb(t))
+ }
+
+ const NUM_PHASES: usize = 2; // indexing + delta-resolution, both with the same number of objects to handle
+ if let Some(objs) = find_in(&tasks, |t| progress_by_id(resolve_objects, t)) {
+ // Resolving deltas.
+ let objects = objs.step.load(Ordering::Relaxed);
+ let total_objects = objs.done_at.expect("known amount of objects");
+ let msg = format!(", ({objects}/{total_objects}) resolving deltas");
+
+ progress_bar.tick(total_objects + objects, total_objects * NUM_PHASES, &msg)?;
+ } else if let Some((objs, read_pack)) =
+ find_in(&tasks, |t| progress_by_id(read_pack_bytes, t)).and_then(|read| {
+ find_in(&tasks, |t| progress_by_id(delta_index_objects, t))
+ .map(|delta| (delta, read))
+ })
+ {
+ // Receiving objects.
+ let objects = objs.step.load(Ordering::Relaxed);
+ let total_objects = objs.done_at.expect("known amount of objects");
+ let received_bytes = read_pack.step.load(Ordering::Relaxed);
+
+ let needs_percentage_update = last_percentage_update.elapsed() >= slow_check_interval;
+ if needs_percentage_update {
+ counter.add(received_bytes, now);
+ last_percentage_update = now;
+ }
+ let (rate, unit) = human_readable_bytes(counter.rate() as u64);
+ let msg = format!(", {rate:.2}{unit}/s");
+
+ progress_bar.tick(objects, total_objects * NUM_PHASES, &msg)?;
+ }
+ }
+ Ok(())
+}
+
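+/// Inspects a failed `gitoxide` fetch and, for credential or transport
+/// handshake errors, attaches the same kind of hints the `git2` code path
+/// emits (credential helpers, `net.git-fetch-with-cli`).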
+fn amend_authentication_hints(
+ res: Result<(), crate::sources::git::fetch::Error>,
+ last_url_for_authentication: Option<gix::bstr::BString>,
+) -> CargoResult<()> {
+ let Err(err) = res else { return Ok(()) };
+ let e = match &err {
+ crate::sources::git::fetch::Error::PrepareFetch(
+ gix::remote::fetch::prepare::Error::RefMap(gix::remote::ref_map::Error::Handshake(err)),
+ ) => Some(err),
+ _ => None,
+ };
+ if let Some(e) = e {
+ use anyhow::Context;
+ let auth_message = match e {
+ gix::protocol::handshake::Error::Credentials(_) => {
+ "\n* attempted to find username/password via \
+ git's `credential.helper` support, but failed"
+ .into()
+ }
+ gix::protocol::handshake::Error::InvalidCredentials { .. } => {
+ "\n* attempted to find username/password via \
+ `credential.helper`, but maybe the found \
+ credentials were incorrect"
+ .into()
+ }
+ gix::protocol::handshake::Error::Transport(_) => {
+ let msg = concat!(
+ "network failure seems to have happened\n",
+ "if a proxy or similar is necessary `net.git-fetch-with-cli` may help here\n",
+ "https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli"
+ );
+ return Err(anyhow::Error::from(err)).context(msg);
+ }
+ _ => None,
+ };
+ if let Some(auth_message) = auth_message {
+ let mut msg = "failed to authenticate when downloading \
+ repository"
+ .to_string();
+ if let Some(url) = last_url_for_authentication {
+ msg.push_str(": ");
+ msg.push_str(url.to_str_lossy().as_ref());
+ }
+ msg.push('\n');
+ msg.push_str(auth_message);
+ msg.push_str("\n\n");
+ msg.push_str("if the git CLI succeeds then `net.git-fetch-with-cli` may help here\n");
+ msg.push_str(
+ "https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli",
+ );
+ return Err(anyhow::Error::from(err)).context(msg);
+ }
+ }
+ Err(err.into())
+}
+
+/// The reason we are opening a git repository.
+///
+/// This can affect the way we open it and the cost associated with it.
+pub enum OpenMode {
+ /// We need the `git_binary` configuration as well in order to see credential helpers
+ /// that are configured with the `git` installation itself.
+ /// However, this is slow on Windows (~150ms) and most people won't need it, as they use the
+ /// standard index which won't ever need authentication, so we only enable this when needed.
+ ForFetch,
+}
+
+impl OpenMode {
+ /// Sometimes we don't need to pay for figuring out the system's git installation, and this tells
+ /// us if that is the case.
+ pub fn needs_git_binary_config(&self) -> bool {
+ match self {
+ OpenMode::ForFetch => true,
+ }
+ }
+}
+
+/// Produce a repository with everything pre-configured according to `config`. Most notably this includes
+/// transport configuration. Knowing its `purpose` helps to optimize the way we open the repository.
+/// Use `config_overrides` to configure the new repository.
+pub fn open_repo(
+ repo_path: &std::path::Path,
+ config_overrides: Vec<BString>,
+ purpose: OpenMode,
+) -> Result<gix::Repository, gix::open::Error> {
+ gix::open_opts(repo_path, {
+ let mut opts = gix::open::Options::default();
+ opts.permissions.config = gix::permissions::Config::all();
+ opts.permissions.config.git_binary = purpose.needs_git_binary_config();
+ opts.with(gix::sec::Trust::Full)
+ .config_overrides(config_overrides)
+ })
+}
+
+/// Convert `git` related cargo configuration into the respective `git` configuration which can be
+/// used when opening new repositories.
+pub fn cargo_config_to_gitoxide_overrides(config: &Config) -> CargoResult<Vec<BString>> {
+ use gix::config::tree::{gitoxide, Core, Http, Key};
+ let timeout = HttpTimeout::new(config)?;
+ let http = config.http_config()?;
+
+ let mut values = vec![
+ gitoxide::Http::CONNECT_TIMEOUT.validated_assignment_fmt(&timeout.dur.as_millis())?,
+ Http::LOW_SPEED_LIMIT.validated_assignment_fmt(&timeout.low_speed_limit)?,
+ Http::LOW_SPEED_TIME.validated_assignment_fmt(&timeout.dur.as_secs())?,
+ // Ensure we are not depending on committer information when updating refs after cloning.
+ Core::LOG_ALL_REF_UPDATES.validated_assignment_fmt(&false)?,
+ ];
+ if let Some(proxy) = &http.proxy {
+ values.push(Http::PROXY.validated_assignment_fmt(proxy)?);
+ }
+ if let Some(check_revoke) = http.check_revoke {
+ values.push(Http::SCHANNEL_CHECK_REVOKE.validated_assignment_fmt(&check_revoke)?);
+ }
+ if let Some(cainfo) = &http.cainfo {
+ values.push(
+ Http::SSL_CA_INFO.validated_assignment_fmt(&cainfo.resolve_path(config).display())?,
+ );
+ }
+
+ values.push(if let Some(user_agent) = &http.user_agent {
+ Http::USER_AGENT.validated_assignment_fmt(user_agent)
+ } else {
+ Http::USER_AGENT.validated_assignment_fmt(&format!("cargo {}", crate::version()))
+ }?);
+ if let Some(ssl_version) = &http.ssl_version {
+ use crate::util::config::SslVersionConfig;
+ match ssl_version {
+ SslVersionConfig::Single(version) => {
+ values.push(Http::SSL_VERSION.validated_assignment_fmt(&version)?);
+ }
+ SslVersionConfig::Range(range) => {
+ values.push(
+ gitoxide::Http::SSL_VERSION_MIN
+ .validated_assignment_fmt(&range.min.as_deref().unwrap_or("default"))?,
+ );
+ values.push(
+ gitoxide::Http::SSL_VERSION_MAX
+ .validated_assignment_fmt(&range.max.as_deref().unwrap_or("default"))?,
+ );
+ }
+ }
+ } else if cfg!(windows) {
+ // This text is copied from https://github.com/rust-lang/cargo/blob/39c13e67a5962466cc7253d41bc1099bbcb224c3/src/cargo/ops/registry.rs#L658-L674 .
+ // This is a temporary workaround for some bugs with libcurl and
+ // schannel and TLS 1.3.
+ //
+ // Our libcurl on Windows is usually built with schannel.
+ // On Windows 11 (or Windows Server 2022), libcurl recently (late
+ // 2022) gained support for TLS 1.3 with schannel, and it now defaults
+ // to 1.3. Unfortunately there have been some bugs with this.
+ // https://github.com/curl/curl/issues/9431 is the most recent. Once
+ // that has been fixed, and some time has passed where we can be more
+ // confident that the 1.3 support won't cause issues, this can be
+ // removed.
+ //
+ // Windows 10 is unaffected. libcurl does not support TLS 1.3 on
+ // Windows 10. (Windows 10 sorta had support, but it required enabling
+ // an advanced option in the registry which was buggy, and libcurl
+ // does runtime checks to prevent it.)
+ values.push(gitoxide::Http::SSL_VERSION_MIN.validated_assignment_fmt(&"default")?);
+ values.push(gitoxide::Http::SSL_VERSION_MAX.validated_assignment_fmt(&"tlsv1.2")?);
+ }
+ if let Some(debug) = http.debug {
+ values.push(gitoxide::Http::VERBOSE.validated_assignment_fmt(&debug)?);
+ }
+ if let Some(multiplexing) = http.multiplexing {
+ let http_version = multiplexing.then(|| "HTTP/2").unwrap_or("HTTP/1.1");
+ // Note that failing to set the HTTP version in `gix-transport` isn't fatal,
+ // which is why we don't have to try to figure out if HTTP V2 is supported in the
+ // currently linked version (see `try_old_curl!()`)
+ values.push(Http::VERSION.validated_assignment_fmt(&http_version)?);
+ }
+
+ Ok(values)
+}
+
+pub fn reinitialize(git_dir: &Path) -> CargoResult<()> {
+ fn init(path: &Path, bare: bool) -> CargoResult<()> {
+ let mut opts = git2::RepositoryInitOptions::new();
+ // Skip anything related to templates; they just cause all sorts of issues as
+ // we really don't want to use them, yet they insist on being used. See #6240
+ // for an example issue that comes up.
+ opts.external_template(false);
+ opts.bare(bare);
+ git2::Repository::init_opts(&path, &opts)?;
+ Ok(())
+ }
+ // Here we want to drop the current repository object pointed to by `repo`,
+ // so we initialize temporary repository in a sub-folder, blow away the
+ // existing git folder, and then recreate the git repo. Finally we blow away
+ // the `tmp` folder we allocated.
+ debug!("reinitializing git repo at {:?}", git_dir);
+ let tmp = git_dir.join("tmp");
+ let bare = !git_dir.ends_with(".git");
+ init(&tmp, false)?;
+ for entry in git_dir.read_dir()? {
+ let entry = entry?;
+ if entry.file_name().to_str() == Some("tmp") {
+ continue;
+ }
+ let path = entry.path();
+ drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path)));
+ }
+ init(git_dir, bare)?;
+ paths::remove_dir_all(&tmp)?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/cargo/sources/git/source.rs b/src/tools/cargo/src/cargo/sources/git/source.rs
new file mode 100644
index 000000000..90c47093d
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/git/source.rs
@@ -0,0 +1,295 @@
+use crate::core::source::{MaybePackage, QueryKind, Source, SourceId};
+use crate::core::GitReference;
+use crate::core::{Dependency, Package, PackageId, Summary};
+use crate::sources::git::utils::GitRemote;
+use crate::sources::PathSource;
+use crate::util::errors::CargoResult;
+use crate::util::hex::short_hash;
+use crate::util::Config;
+use anyhow::Context;
+use cargo_util::paths::exclude_from_backups_and_indexing;
+use log::trace;
+use std::fmt::{self, Debug, Formatter};
+use std::task::Poll;
+use url::Url;
+
+pub struct GitSource<'cfg> {
+ remote: GitRemote,
+ manifest_reference: GitReference,
+ locked_rev: Option<git2::Oid>,
+ source_id: SourceId,
+ path_source: Option<PathSource<'cfg>>,
+ ident: String,
+ config: &'cfg Config,
+ quiet: bool,
+}
+
+impl<'cfg> GitSource<'cfg> {
+ pub fn new(source_id: SourceId, config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
+ assert!(source_id.is_git(), "id is not git, id={}", source_id);
+
+ let remote = GitRemote::new(source_id.url());
+ let ident = ident(&source_id);
+
+ let source = GitSource {
+ remote,
+ manifest_reference: source_id.git_reference().unwrap().clone(),
+ locked_rev: match source_id.precise() {
+ Some(s) => Some(git2::Oid::from_str(s).with_context(|| {
+ format!("precise value for git is not a git revision: {}", s)
+ })?),
+ None => None,
+ },
+ source_id,
+ path_source: None,
+ ident,
+ config,
+ quiet: false,
+ };
+
+ Ok(source)
+ }
+
+ pub fn url(&self) -> &Url {
+ self.remote.url()
+ }
+
+ pub fn read_packages(&mut self) -> CargoResult<Vec<Package>> {
+ if self.path_source.is_none() {
+ self.invalidate_cache();
+ self.block_until_ready()?;
+ }
+ self.path_source.as_mut().unwrap().read_packages()
+ }
+}
+
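+/// Derives the directory name used for this source under `$CARGO_HOME/git`,
+/// combining the last path segment of the canonical URL with a short hash of
+/// the whole canonical URL, e.g. `cargo-0155f0a3ebb8cfb1` (hash illustrative).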
+fn ident(id: &SourceId) -> String {
+ let ident = id
+ .canonical_url()
+ .raw_canonicalized_url()
+ .path_segments()
+ .and_then(|s| s.rev().next())
+ .unwrap_or("");
+
+ let ident = if ident.is_empty() { "_empty" } else { ident };
+
+ format!("{}-{}", ident, short_hash(id.canonical_url()))
+}
+
+impl<'cfg> Debug for GitSource<'cfg> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+ write!(f, "git repo at {}", self.remote.url())?;
+
+ match self.manifest_reference.pretty_ref() {
+ Some(s) => write!(f, " ({})", s),
+ None => Ok(()),
+ }
+ }
+}
+
+impl<'cfg> Source for GitSource<'cfg> {
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ if let Some(src) = self.path_source.as_mut() {
+ src.query(dep, kind, f)
+ } else {
+ Poll::Pending
+ }
+ }
+
+ fn supports_checksums(&self) -> bool {
+ false
+ }
+
+ fn requires_precise(&self) -> bool {
+ true
+ }
+
+ fn source_id(&self) -> SourceId {
+ self.source_id
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ if self.path_source.is_some() {
+ return Ok(());
+ }
+
+ let git_fs = self.config.git_path();
+ // Ignore errors creating it, in case this is a read-only filesystem:
+ // perhaps the later operations can succeed anyhow.
+ let _ = git_fs.create_dir();
+ let git_path = self.config.assert_package_cache_locked(&git_fs);
+
+ // Before getting a checkout, make sure that `<cargo_home>/git` is
+ // marked as excluded from indexing and backups. Older versions of Cargo
+ // didn't do this, so we do it here regardless of whether `<cargo_home>`
+ // exists.
+ //
+ // This does not use `create_dir_all_excluded_from_backups_atomic` for
+ // the same reason: we want to exclude it even if the directory already
+ // exists.
+ exclude_from_backups_and_indexing(&git_path);
+
+ let db_path = git_path.join("db").join(&self.ident);
+
+ let db = self.remote.db_at(&db_path).ok();
+ let (db, actual_rev) = match (self.locked_rev, db) {
+ // If we have a locked revision, and we have a preexisting database
+ // which has that revision, then no update needs to happen.
+ (Some(rev), Some(db)) if db.contains(rev) => (db, rev),
+
+ // If we're in offline mode, we're not locked, and we have a
+ // database, then try to resolve our reference with the preexisting
+ // repository.
+ (None, Some(db)) if self.config.offline() => {
+ let rev = db.resolve(&self.manifest_reference).with_context(|| {
+ "failed to lookup reference in preexisting repository, and \
+ can't check for updates in offline mode (--offline)"
+ })?;
+ (db, rev)
+ }
+
+ // ... otherwise we use this state to update the git database. Note
+ // that we still check for being offline here, for example in the
+ // situation that we have a locked revision but the database
+ // doesn't have it.
+ (locked_rev, db) => {
+ if self.config.offline() {
+ anyhow::bail!(
+ "can't checkout from '{}': you are in the offline mode (--offline)",
+ self.remote.url()
+ );
+ }
+ if !self.quiet {
+ self.config.shell().status(
+ "Updating",
+ format!("git repository `{}`", self.remote.url()),
+ )?;
+ }
+
+ trace!("updating git source `{:?}`", self.remote);
+
+ self.remote.checkout(
+ &db_path,
+ db,
+ &self.manifest_reference,
+ locked_rev,
+ self.config,
+ )?
+ }
+ };
+
+ // Don’t use the full hash, in order to contribute less to reaching the
+ // path length limit on Windows. See
+ // <https://github.com/servo/servo/pull/14397>.
+ let short_id = db.to_short_id(actual_rev)?;
+
+ // Check out `actual_rev` from the database to a scoped location on the
+ // filesystem. This will use hard links and such to ideally make the
+ // checkout operation here pretty fast.
+ let checkout_path = git_path
+ .join("checkouts")
+ .join(&self.ident)
+ .join(short_id.as_str());
+ let parent_remote_url = self.url();
+ db.copy_to(actual_rev, &checkout_path, self.config, parent_remote_url)?;
+
+ let source_id = self.source_id.with_precise(Some(actual_rev.to_string()));
+ let path_source = PathSource::new_recursive(&checkout_path, source_id, self.config);
+
+ self.path_source = Some(path_source);
+ self.locked_rev = Some(actual_rev);
+ self.path_source.as_mut().unwrap().update()
+ }
+
+ fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+ trace!(
+ "getting packages for package ID `{}` from `{:?}`",
+ id,
+ self.remote
+ );
+ self.path_source
+ .as_mut()
+ .expect("BUG: `update()` must be called before `get()`")
+ .download(id)
+ }
+
+ fn finish_download(&mut self, _id: PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+ panic!("no download should have started")
+ }
+
+ fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {
+ Ok(self.locked_rev.as_ref().unwrap().to_string())
+ }
+
+ fn describe(&self) -> String {
+ format!("Git repository {}", self.source_id)
+ }
+
+ fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {}
+
+ fn is_yanked(&mut self, _pkg: PackageId) -> Poll<CargoResult<bool>> {
+ Poll::Ready(Ok(false))
+ }
+
+ fn invalidate_cache(&mut self) {}
+
+ fn set_quiet(&mut self, quiet: bool) {
+ self.quiet = quiet;
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::ident;
+ use crate::core::{GitReference, SourceId};
+ use crate::util::IntoUrl;
+
+ #[test]
+ pub fn test_url_to_path_ident_with_path() {
+ let ident = ident(&src("https://github.com/carlhuda/cargo"));
+ assert!(ident.starts_with("cargo-"));
+ }
+
+ #[test]
+ pub fn test_url_to_path_ident_without_path() {
+ let ident = ident(&src("https://github.com"));
+ assert!(ident.starts_with("_empty-"));
+ }
+
+ #[test]
+ fn test_canonicalize_idents_by_stripping_trailing_url_slash() {
+ let ident1 = ident(&src("https://github.com/PistonDevelopers/piston/"));
+ let ident2 = ident(&src("https://github.com/PistonDevelopers/piston"));
+ assert_eq!(ident1, ident2);
+ }
+
+ #[test]
+ fn test_canonicalize_idents_by_lowercasing_github_urls() {
+ let ident1 = ident(&src("https://github.com/PistonDevelopers/piston"));
+ let ident2 = ident(&src("https://github.com/pistondevelopers/piston"));
+ assert_eq!(ident1, ident2);
+ }
+
+ #[test]
+ fn test_canonicalize_idents_by_stripping_dot_git() {
+ let ident1 = ident(&src("https://github.com/PistonDevelopers/piston"));
+ let ident2 = ident(&src("https://github.com/PistonDevelopers/piston.git"));
+ assert_eq!(ident1, ident2);
+ }
+
+ #[test]
+ fn test_canonicalize_idents_different_protocols() {
+ let ident1 = ident(&src("https://github.com/PistonDevelopers/piston"));
+ let ident2 = ident(&src("git://github.com/PistonDevelopers/piston"));
+ assert_eq!(ident1, ident2);
+ }
+
+ fn src(s: &str) -> SourceId {
+ SourceId::for_git(&s.into_url().unwrap(), GitReference::DefaultBranch).unwrap()
+ }
+}
diff --git a/src/tools/cargo/src/cargo/sources/git/utils.rs b/src/tools/cargo/src/cargo/sources/git/utils.rs
new file mode 100644
index 000000000..a7ffccf79
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/git/utils.rs
@@ -0,0 +1,1366 @@
+//! Utilities for handling git repositories, mainly around
+//! authentication/cloning.
+
+use crate::core::{GitReference, Verbosity};
+use crate::sources::git::oxide;
+use crate::sources::git::oxide::cargo_config_to_gitoxide_overrides;
+use crate::util::errors::CargoResult;
+use crate::util::{human_readable_bytes, network, Config, IntoUrl, MetricsCounter, Progress};
+use anyhow::{anyhow, Context as _};
+use cargo_util::{paths, ProcessBuilder};
+use curl::easy::List;
+use git2::{self, ErrorClass, ObjectType, Oid};
+use log::{debug, info};
+use serde::ser;
+use serde::Serialize;
+use std::borrow::Cow;
+use std::fmt;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::str;
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::time::{Duration, Instant};
+use url::Url;
+
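+/// Serializes any `Display` value as a string; used for the `Url` and
+/// `git2::Oid` fields below.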
+fn serialize_str<T, S>(t: &T, s: S) -> Result<S::Ok, S::Error>
+where
+ T: fmt::Display,
+ S: ser::Serializer,
+{
+ s.collect_str(t)
+}
+
+pub struct GitShortID(git2::Buf);
+
+impl GitShortID {
+ pub fn as_str(&self) -> &str {
+ self.0.as_str().unwrap()
+ }
+}
+
+/// `GitRemote` represents a remote repository. It gets cloned into a local
+/// `GitDatabase`.
+#[derive(PartialEq, Clone, Debug, Serialize)]
+pub struct GitRemote {
+ #[serde(serialize_with = "serialize_str")]
+ url: Url,
+}
+
+/// `GitDatabase` is a local clone of a remote repository's database. Multiple
+/// `GitCheckouts` can be cloned from this `GitDatabase`.
+#[derive(Serialize)]
+pub struct GitDatabase {
+ remote: GitRemote,
+ path: PathBuf,
+ #[serde(skip_serializing)]
+ repo: git2::Repository,
+}
+
+/// `GitCheckout` is a local checkout of a particular revision. Calling
+/// `clone_into` with a reference will resolve the reference into a revision,
+/// and return an `anyhow::Error` if no revision for that reference was found.
+#[derive(Serialize)]
+pub struct GitCheckout<'a> {
+ database: &'a GitDatabase,
+ location: PathBuf,
+ #[serde(serialize_with = "serialize_str")]
+ revision: git2::Oid,
+ #[serde(skip_serializing)]
+ repo: git2::Repository,
+}
+
+// Implementations
+
+impl GitRemote {
+ pub fn new(url: &Url) -> GitRemote {
+ GitRemote { url: url.clone() }
+ }
+
+ pub fn url(&self) -> &Url {
+ &self.url
+ }
+
+ pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult<git2::Oid> {
+ reference.resolve(&self.db_at(path)?.repo)
+ }
+
+ pub fn checkout(
+ &self,
+ into: &Path,
+ db: Option<GitDatabase>,
+ reference: &GitReference,
+ locked_rev: Option<git2::Oid>,
+ cargo_config: &Config,
+ ) -> CargoResult<(GitDatabase, git2::Oid)> {
+ // If we have a previous instance of `GitDatabase` then fetch into that
+ // if we can. If that can successfully load our revision then we've
+ // populated the database with the latest version of `reference`, so
+ // return that database and the rev we resolve to.
+ if let Some(mut db) = db {
+ fetch(&mut db.repo, self.url.as_str(), reference, cargo_config)
+ .context(format!("failed to fetch into: {}", into.display()))?;
+ match locked_rev {
+ Some(rev) => {
+ if db.contains(rev) {
+ return Ok((db, rev));
+ }
+ }
+ None => {
+ if let Ok(rev) = reference.resolve(&db.repo) {
+ return Ok((db, rev));
+ }
+ }
+ }
+ }
+
+ // Otherwise start from scratch to handle corrupt git repositories.
+ // After our fetch (which is interpreted as a clone now) we do the same
+ // resolution to figure out what we cloned.
+ if into.exists() {
+ paths::remove_dir_all(into)?;
+ }
+ paths::create_dir_all(into)?;
+ let mut repo = init(into, true)?;
+ fetch(&mut repo, self.url.as_str(), reference, cargo_config)
+ .context(format!("failed to clone into: {}", into.display()))?;
+ let rev = match locked_rev {
+ Some(rev) => rev,
+ None => reference.resolve(&repo)?,
+ };
+
+ Ok((
+ GitDatabase {
+ remote: self.clone(),
+ path: into.to_path_buf(),
+ repo,
+ },
+ rev,
+ ))
+ }
+
+ pub fn db_at(&self, db_path: &Path) -> CargoResult<GitDatabase> {
+ let repo = git2::Repository::open(db_path)?;
+ Ok(GitDatabase {
+ remote: self.clone(),
+ path: db_path.to_path_buf(),
+ repo,
+ })
+ }
+}
+
+impl GitDatabase {
+ pub fn copy_to(
+ &self,
+ rev: git2::Oid,
+ dest: &Path,
+ cargo_config: &Config,
+ parent_remote_url: &Url,
+ ) -> CargoResult<GitCheckout<'_>> {
+ // If the existing checkout exists, and it is fresh, use it.
+ // A non-fresh checkout can happen if the checkout operation was
+ // interrupted. In that case, the checkout gets deleted and a new
+ // clone is created.
+ let checkout = match git2::Repository::open(dest)
+ .ok()
+ .map(|repo| GitCheckout::new(dest, self, rev, repo))
+ .filter(|co| co.is_fresh())
+ {
+ Some(co) => co,
+ None => GitCheckout::clone_into(dest, self, rev, cargo_config)?,
+ };
+ checkout.update_submodules(cargo_config, parent_remote_url)?;
+ Ok(checkout)
+ }
+
+ pub fn to_short_id(&self, revision: git2::Oid) -> CargoResult<GitShortID> {
+ let obj = self.repo.find_object(revision, None)?;
+ Ok(GitShortID(obj.short_id()?))
+ }
+
+ pub fn contains(&self, oid: git2::Oid) -> bool {
+ self.repo.revparse_single(&oid.to_string()).is_ok()
+ }
+
+ pub fn resolve(&self, r: &GitReference) -> CargoResult<git2::Oid> {
+ r.resolve(&self.repo)
+ }
+}
+
+impl GitReference {
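+ /// Resolves this reference to the commit id it points to within `repo`;
+ /// branches, tags, and the default branch are looked up under the
+ /// `refs/remotes/origin/*` namespace that `fetch` populates.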
+ pub fn resolve(&self, repo: &git2::Repository) -> CargoResult<git2::Oid> {
+ let id = match self {
+ // Note that we resolve the named tag here in sync with where it's
+ // fetched into via `fetch` below.
+ GitReference::Tag(s) => (|| -> CargoResult<git2::Oid> {
+ let refname = format!("refs/remotes/origin/tags/{}", s);
+ let id = repo.refname_to_id(&refname)?;
+ let obj = repo.find_object(id, None)?;
+ let obj = obj.peel(ObjectType::Commit)?;
+ Ok(obj.id())
+ })()
+ .with_context(|| format!("failed to find tag `{}`", s))?,
+
+ // Resolve the remote name since that's all we're configuring in
+ // `fetch` below.
+ GitReference::Branch(s) => {
+ let name = format!("origin/{}", s);
+ let b = repo
+ .find_branch(&name, git2::BranchType::Remote)
+ .with_context(|| format!("failed to find branch `{}`", s))?;
+ b.get()
+ .target()
+ .ok_or_else(|| anyhow::format_err!("branch `{}` did not have a target", s))?
+ }
+
+ // We'll be using the HEAD commit
+ GitReference::DefaultBranch => {
+ let head_id = repo.refname_to_id("refs/remotes/origin/HEAD")?;
+ let head = repo.find_object(head_id, None)?;
+ head.peel(ObjectType::Commit)?.id()
+ }
+
+ GitReference::Rev(s) => {
+ let obj = repo.revparse_single(s)?;
+ match obj.as_tag() {
+ Some(tag) => tag.target_id(),
+ None => obj.id(),
+ }
+ }
+ };
+ Ok(id)
+ }
+}
+
+impl<'a> GitCheckout<'a> {
+ fn new(
+ path: &Path,
+ database: &'a GitDatabase,
+ revision: git2::Oid,
+ repo: git2::Repository,
+ ) -> GitCheckout<'a> {
+ GitCheckout {
+ location: path.to_path_buf(),
+ database,
+ revision,
+ repo,
+ }
+ }
+
+ fn clone_into(
+ into: &Path,
+ database: &'a GitDatabase,
+ revision: git2::Oid,
+ config: &Config,
+ ) -> CargoResult<GitCheckout<'a>> {
+ let dirname = into.parent().unwrap();
+ paths::create_dir_all(&dirname)?;
+ if into.exists() {
+ paths::remove_dir_all(into)?;
+ }
+
+ // We're doing a local filesystem-to-filesystem clone, so there should
+ // be no need to respect global configuration options; pass in
+ // an empty instance of `git2::Config` below.
+ let git_config = git2::Config::new()?;
+
+ // Clone the repository, but make sure we use the "local" option in
+ // libgit2 which will attempt to use hardlinks to set up the database.
+ // This should speed up the clone operation quite a bit if it works.
+ //
+ // Note that we still use the same fetch options because while we don't
+ // need authentication information we may want progress bars and such.
+ let url = database.path.into_url()?;
+ let mut repo = None;
+ with_fetch_options(&git_config, url.as_str(), config, &mut |fopts| {
+ let mut checkout = git2::build::CheckoutBuilder::new();
+ checkout.dry_run(); // we'll do this below during a `reset`
+
+ let r = git2::build::RepoBuilder::new()
+ // Use hard links and/or copy the database; we're doing a
+ // filesystem clone, so this'll speed things up quite a bit.
+ .clone_local(git2::build::CloneLocal::Local)
+ .with_checkout(checkout)
+ .fetch_options(fopts)
+ .clone(url.as_str(), into)?;
+ repo = Some(r);
+ Ok(())
+ })?;
+ let repo = repo.unwrap();
+
+ let checkout = GitCheckout::new(into, database, revision, repo);
+ checkout.reset(config)?;
+ Ok(checkout)
+ }
+
+ fn is_fresh(&self) -> bool {
+ match self.repo.revparse_single("HEAD") {
+ Ok(ref head) if head.id() == self.revision => {
+ // See comments in reset() for why we check this
+ self.location.join(".cargo-ok").exists()
+ }
+ _ => false,
+ }
+ }
+
+ fn reset(&self, config: &Config) -> CargoResult<()> {
+ // If we're interrupted while performing this reset (e.g., we die because
+ // of a signal) Cargo needs to be sure to try to check out this repo
+ // again on the next go-round.
+ //
+ // To enable this we have a dummy file in our checkout, .cargo-ok, which
+ // if present means that the repo has been successfully reset and is
+ // ready to go. Hence if we start to do a reset, we make sure this file
+ // *doesn't* exist, and then once we're done we create the file.
+ let ok_file = self.location.join(".cargo-ok");
+ let _ = paths::remove_file(&ok_file);
+ info!("reset {} to {}", self.repo.path().display(), self.revision);
+
+ // Ensure libgit2 won't mess with newlines when we vendor.
+ if let Ok(mut git_config) = self.repo.config() {
+ git_config.set_bool("core.autocrlf", false)?;
+ }
+
+ let object = self.repo.find_object(self.revision, None)?;
+ reset(&self.repo, &object, config)?;
+ paths::create(ok_file)?;
+ Ok(())
+ }
+
+ fn update_submodules(&self, cargo_config: &Config, parent_remote_url: &Url) -> CargoResult<()> {
+ return update_submodules(&self.repo, cargo_config, parent_remote_url);
+
+ fn update_submodules(
+ repo: &git2::Repository,
+ cargo_config: &Config,
+ parent_remote_url: &Url,
+ ) -> CargoResult<()> {
+ debug!("update submodules for: {:?}", repo.workdir().unwrap());
+
+ for mut child in repo.submodules()? {
+ update_submodule(repo, &mut child, cargo_config, parent_remote_url).with_context(
+ || {
+ format!(
+ "failed to update submodule `{}`",
+ child.name().unwrap_or("")
+ )
+ },
+ )?;
+ }
+ Ok(())
+ }
+
+ fn update_submodule(
+ parent: &git2::Repository,
+ child: &mut git2::Submodule<'_>,
+ cargo_config: &Config,
+ parent_remote_url: &Url,
+ ) -> CargoResult<()> {
+ child.init(false)?;
+
+ let child_url_str = child.url().ok_or_else(|| {
+ anyhow::format_err!("non-utf8 url for submodule {:?}?", child.path())
+ })?;
+
+ // Skip the submodule if the config says not to update it.
+ if child.update_strategy() == git2::SubmoduleUpdate::None {
+ cargo_config.shell().status(
+ "Skipping",
+ format!(
+ "git submodule `{}` due to update strategy in .gitmodules",
+ child_url_str
+ ),
+ )?;
+ return Ok(());
+ }
+
+ // Git only assumes a URL is a relative path if it starts with `./` or `../`.
+ // See [`git submodule add`] documentation.
+ //
+ // [`git submodule add`]: https://git-scm.com/docs/git-submodule
+ let url = if child_url_str.starts_with("./") || child_url_str.starts_with("../") {
+ let mut new_parent_remote_url = parent_remote_url.clone();
+
+ let mut new_path = Cow::from(parent_remote_url.path());
+ if !new_path.ends_with('/') {
+ new_path.to_mut().push('/');
+ }
+ new_parent_remote_url.set_path(&new_path);
+
+ match new_parent_remote_url.join(child_url_str) {
+ Ok(x) => x.to_string(),
+ Err(err) => Err(err).with_context(|| {
+ format!(
+ "failed to parse relative child submodule url `{}` using parent base url `{}`",
+ child_url_str, new_parent_remote_url
+ )
+ })?,
+ }
+ } else {
+ child_url_str.to_string()
+ };
+
+ // A submodule which is listed in .gitmodules but not actually
+ // checked out will not have a head id, so we should ignore it.
+ let head = match child.head_id() {
+ Some(head) => head,
+ None => return Ok(()),
+ };
+
+ // If the submodule hasn't been checked out yet, we need to
+ // clone it. If it has been checked out and the head is the same
+ // as the submodule's head, then we can skip an update and keep
+ // recursing.
+ let head_and_repo = child.open().and_then(|repo| {
+ let target = repo.head()?.target();
+ Ok((target, repo))
+ });
+ let mut repo = match head_and_repo {
+ Ok((head, repo)) => {
+ if child.head_id() == head {
+ return update_submodules(&repo, cargo_config, parent_remote_url);
+ }
+ repo
+ }
+ Err(..) => {
+ let path = parent.workdir().unwrap().join(child.path());
+ let _ = paths::remove_dir_all(&path);
+ init(&path, false)?
+ }
+ };
+ // Fetch data from origin and reset to the head commit
+ let reference = GitReference::Rev(head.to_string());
+ cargo_config
+ .shell()
+ .status("Updating", format!("git submodule `{}`", url))?;
+ fetch(&mut repo, &url, &reference, cargo_config).with_context(|| {
+ format!(
+ "failed to fetch submodule `{}` from {}",
+ child.name().unwrap_or(""),
+ url
+ )
+ })?;
+
+ let obj = repo.find_object(head, None)?;
+ reset(&repo, &obj, cargo_config)?;
+ update_submodules(&repo, cargo_config, parent_remote_url)
+ }
+ }
+}
+
+/// Prepare the authentication callbacks for cloning a git repository.
+///
+/// The main purpose of this function is to construct the "authentication
+/// callback" which is used to clone a repository. This callback will attempt to
+/// find the right authentication on the system (without user input) and will
+/// guide libgit2 in doing so.
+///
+/// The callback is provided `allowed` types of credentials, and we try to do as
+/// much as possible based on that:
+///
+/// * Prioritize SSH keys from the local ssh agent as they're likely the most
+/// reliable. The username here is prioritized from the credential
+/// callback, then from whatever is configured in git itself, and finally
+/// we fall back to the generic user of `git`.
+///
+/// * If a username/password is allowed, then we fallback to git2-rs's
+/// implementation of the credential helper. This is what is configured
+/// with `credential.helper` in git, and is the interface for the macOS
+/// keychain, for example.
+///
+/// * After the above two have failed, we more or less just try whatever we can, attempting to
+/// return *something*.
+///
+/// If any form of authentication fails, libgit2 will repeatedly ask us for
+/// credentials until we give it a reason to not do so. To ensure we don't
+/// just sit here looping forever we keep track of authentications we've
+/// attempted and we don't try the same ones again.
+fn with_authentication<T, F>(
+ cargo_config: &Config,
+ url: &str,
+ cfg: &git2::Config,
+ mut f: F,
+) -> CargoResult<T>
+where
+ F: FnMut(&mut git2::Credentials<'_>) -> CargoResult<T>,
+{
+ let mut cred_helper = git2::CredentialHelper::new(url);
+ cred_helper.config(cfg);
+
+ let mut ssh_username_requested = false;
+ let mut cred_helper_bad = None;
+ let mut ssh_agent_attempts = Vec::new();
+ let mut any_attempts = false;
+ let mut tried_sshkey = false;
+ let mut url_attempt = None;
+
+ let orig_url = url;
+ let mut res = f(&mut |url, username, allowed| {
+ any_attempts = true;
+ if url != orig_url {
+ url_attempt = Some(url.to_string());
+ }
+ // libgit2's "USERNAME" authentication actually means that it's just
+ // asking us for a username to keep going. This is currently only really
+ // used for SSH authentication and isn't really an authentication type.
+ // The logic currently looks like:
+ //
+ // let user = ...;
+ // if (user.is_null())
+ // user = callback(USERNAME, null, ...);
+ //
+ // callback(SSH_KEY, user, ...)
+ //
+ // So if we're being called here then we know that (a) we're using ssh
+ // authentication and (b) no username was specified in the URL that
+ // we're trying to clone. We need to guess an appropriate username here,
+ // but that may involve a few attempts. Unfortunately we can't switch
+ // usernames during one authentication session with libgit2, so to
+ // handle this we bail out of this authentication session after setting
+ // the flag `ssh_username_requested`, and then we handle this below.
+ if allowed.contains(git2::CredentialType::USERNAME) {
+ debug_assert!(username.is_none());
+ ssh_username_requested = true;
+ return Err(git2::Error::from_str("gonna try usernames later"));
+ }
+
+ // An "SSH_KEY" authentication indicates that we need some sort of SSH
+ // authentication. This can currently either come from the ssh-agent
+ // process or from a raw in-memory SSH key. Cargo only supports using
+ // ssh-agent currently.
+ //
+ // If we get called with this then the only way that should be possible
+ // is if a username is specified in the URL itself (e.g., `username` is
+ // Some), hence the unwrap() here. We try custom usernames down below.
+ if allowed.contains(git2::CredentialType::SSH_KEY) && !tried_sshkey {
+ // If ssh-agent authentication fails, libgit2 will keep
+ // calling this callback asking for other authentication
+ // methods to try. Make sure we only try ssh-agent once,
+ // to avoid looping forever.
+ tried_sshkey = true;
+ let username = username.unwrap();
+ debug_assert!(!ssh_username_requested);
+ ssh_agent_attempts.push(username.to_string());
+ return git2::Cred::ssh_key_from_agent(username);
+ }
+
+ // Sometimes libgit2 will ask for a username/password in plaintext. This
+ // is where Cargo would have an interactive prompt if we supported it,
+ // but we currently don't! Right now the only way we support fetching a
+ // plaintext password is through the `credential.helper` support, so
+ // fetch that here.
+ //
+ // If ssh-agent authentication fails, libgit2 will keep calling this
+ // callback asking for other authentication methods to try. Check
+ // cred_helper_bad to make sure we only try the git credential helper
+ // once, to avoid looping forever.
+ if allowed.contains(git2::CredentialType::USER_PASS_PLAINTEXT) && cred_helper_bad.is_none()
+ {
+ let r = git2::Cred::credential_helper(cfg, url, username);
+ cred_helper_bad = Some(r.is_err());
+ return r;
+ }
+
+ // I'm... not sure what the DEFAULT kind of authentication is, but seems
+ // easy to support?
+ if allowed.contains(git2::CredentialType::DEFAULT) {
+ return git2::Cred::default();
+ }
+
+ // Whelp, we tried our best
+ Err(git2::Error::from_str("no authentication methods succeeded"))
+ });
+
+ // Ok, so if it looks like we're going to be doing ssh authentication, we
+ // want to try a few different usernames as one wasn't specified in the URL
+ // for us to use. In order, we'll try:
+ //
+ // * A credential helper's username for this URL, if available.
+ // * This account's username.
+ // * "git"
+ //
+ // We have to restart the authentication session each time (due to
+ // constraints in libssh2 I guess? maybe this is inherent to ssh?), so we
+ // call our callback, `f`, in a loop here.
+ if ssh_username_requested {
+ debug_assert!(res.is_err());
+ let mut attempts = vec![String::from("git")];
+ if let Ok(s) = cargo_config
+ .get_env("USER")
+ .or_else(|_| cargo_config.get_env("USERNAME"))
+ {
+ attempts.push(s);
+ }
+ if let Some(ref s) = cred_helper.username {
+ attempts.push(s.clone());
+ }
+
+ while let Some(s) = attempts.pop() {
+ // We should get `USERNAME` first, where we just return our attempt,
+ // and then after that we should get `SSH_KEY`. If the first attempt
+ // fails we'll get called again, but we don't have another option so
+ // we bail out.
+ let mut attempts = 0;
+ res = f(&mut |_url, username, allowed| {
+ if allowed.contains(git2::CredentialType::USERNAME) {
+ return git2::Cred::username(&s);
+ }
+ if allowed.contains(git2::CredentialType::SSH_KEY) {
+ debug_assert_eq!(Some(&s[..]), username);
+ attempts += 1;
+ if attempts == 1 {
+ ssh_agent_attempts.push(s.to_string());
+ return git2::Cred::ssh_key_from_agent(&s);
+ }
+ }
+ Err(git2::Error::from_str("no authentication methods succeeded"))
+ });
+
+ // If we made two attempts then that means:
+ //
+ // 1. A username was requested, we returned `s`.
+ // 2. An ssh key was requested, we returned to look up `s` in the
+ // ssh agent.
+ // 3. For whatever reason that lookup failed, so we were asked again
+ // for another mode of authentication.
+ //
+ // Essentially, if `attempts == 2` then in theory the only error was
+ // that this username failed to authenticate (e.g., no other network
+ // errors happened). Otherwise something else is funny so we bail
+ // out.
+ if attempts != 2 {
+ break;
+ }
+ }
+ }
+ let mut err = match res {
+ Ok(e) => return Ok(e),
+ Err(e) => e,
+ };
+
+ // In the case of an authentication failure (where we tried something),
+ // we try to give a more helpful error message about precisely what we
+ // tried.
+ if any_attempts {
+ let mut msg = "failed to authenticate when downloading \
+ repository"
+ .to_string();
+
+ if let Some(attempt) = &url_attempt {
+ if url != attempt {
+ msg.push_str(": ");
+ msg.push_str(attempt);
+ }
+ }
+ msg.push('\n');
+ if !ssh_agent_attempts.is_empty() {
+ let names = ssh_agent_attempts
+ .iter()
+ .map(|s| format!("`{}`", s))
+ .collect::<Vec<_>>()
+ .join(", ");
+ msg.push_str(&format!(
+ "\n* attempted ssh-agent authentication, but \
+ no usernames succeeded: {}",
+ names
+ ));
+ }
+ if let Some(failed_cred_helper) = cred_helper_bad {
+ if failed_cred_helper {
+ msg.push_str(
+ "\n* attempted to find username/password via \
+ git's `credential.helper` support, but failed",
+ );
+ } else {
+ msg.push_str(
+ "\n* attempted to find username/password via \
+ `credential.helper`, but maybe the found \
+ credentials were incorrect",
+ );
+ }
+ }
+ msg.push_str("\n\n");
+ msg.push_str("if the git CLI succeeds then `net.git-fetch-with-cli` may help here\n");
+ msg.push_str("https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli");
+ err = err.context(msg);
+
+ // Otherwise, if we didn't even get to the authentication phase then we may
+ // have failed to set up a connection; in these cases, hint at the
+ // `net.git-fetch-with-cli` configuration option.
+ } else if let Some(e) = err.downcast_ref::<git2::Error>() {
+ match e.class() {
+ ErrorClass::Net
+ | ErrorClass::Ssl
+ | ErrorClass::Submodule
+ | ErrorClass::FetchHead
+ | ErrorClass::Ssh
+ | ErrorClass::Http => {
+ let mut msg = "network failure seems to have happened\n".to_string();
+ msg.push_str(
+ "if a proxy or similar is necessary `net.git-fetch-with-cli` may help here\n",
+ );
+ msg.push_str(
+ "https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli",
+ );
+ err = err.context(msg);
+ }
+ ErrorClass::Callback => {
+ // This unwraps the git2 error. We're using the callback error
+ // specifically to convey errors from Rust land through the C
+ // callback interface. We don't need the `; class=Callback
+ // (26)` that gets tacked on to the git2 error message.
+ err = anyhow::format_err!("{}", e.message());
+ }
+ _ => {}
+ }
+ }
+
+ Err(err)
+}
+
+fn reset(repo: &git2::Repository, obj: &git2::Object<'_>, config: &Config) -> CargoResult<()> {
+ let mut pb = Progress::new("Checkout", config);
+ let mut opts = git2::build::CheckoutBuilder::new();
+ opts.progress(|_, cur, max| {
+ drop(pb.tick(cur, max, ""));
+ });
+ debug!("doing reset");
+ repo.reset(obj, git2::ResetType::Hard, Some(&mut opts))?;
+ debug!("reset done");
+ Ok(())
+}
+
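+/// Builds `git2::FetchOptions` wired up with Cargo's authentication handling,
+/// known-hosts certificate checking, and progress reporting, then hands them
+/// to `cb`, retrying on spurious network errors as configured.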
+pub fn with_fetch_options(
+ git_config: &git2::Config,
+ url: &str,
+ config: &Config,
+ cb: &mut dyn FnMut(git2::FetchOptions<'_>) -> CargoResult<()>,
+) -> CargoResult<()> {
+ let mut progress = Progress::new("Fetch", config);
+ let ssh_config = config.net_config()?.ssh.as_ref();
+ let config_known_hosts = ssh_config.and_then(|ssh| ssh.known_hosts.as_ref());
+ let diagnostic_home_config = config.diagnostic_home_config();
+ network::retry::with_retry(config, || {
+ with_authentication(config, url, git_config, |f| {
+ let port = Url::parse(url).ok().and_then(|url| url.port());
+ let mut last_update = Instant::now();
+ let mut rcb = git2::RemoteCallbacks::new();
+ // We choose `N=10` here to make a `300ms * 10 slots ~= 3000ms`
+ // sliding window for tracking the data transfer rate (in bytes/s).
+ let mut counter = MetricsCounter::<10>::new(0, last_update);
+ rcb.credentials(f);
+ rcb.certificate_check(|cert, host| {
+ super::known_hosts::certificate_check(
+ config,
+ cert,
+ host,
+ port,
+ config_known_hosts,
+ &diagnostic_home_config,
+ )
+ });
+ rcb.transfer_progress(|stats| {
+ let indexed_deltas = stats.indexed_deltas();
+ let msg = if indexed_deltas > 0 {
+ // Resolving deltas.
+ format!(
+ ", ({}/{}) resolving deltas",
+ indexed_deltas,
+ stats.total_deltas()
+ )
+ } else {
+ // Receiving objects.
+ //
+ // # Caveat
+ //
+ // The progress bar relies on git2 calling `transfer_progress`
+ // to update its transfer rate, but we cannot guarantee a
+ // periodic call of that callback. Thus if we don't receive
+ // any data for, say, 10 seconds, the rate will get stuck
+ // and never go down to 0B/s.
+ // In the future, we need to find a way to update the rate
+ // even when the callback is not called.
+ let now = Instant::now();
+ // Sample `received_bytes` into the counter every 300ms.
+ if now - last_update > Duration::from_millis(300) {
+ counter.add(stats.received_bytes(), now);
+ last_update = now;
+ }
+ let (rate, unit) = human_readable_bytes(counter.rate() as u64);
+ format!(", {:.2}{}/s", rate, unit)
+ };
+ progress
+ .tick(stats.indexed_objects(), stats.total_objects(), &msg)
+ .is_ok()
+ });
+
+ // Create a local anonymous remote in the repository to fetch the
+ // url
+ let mut opts = git2::FetchOptions::new();
+ opts.remote_callbacks(rcb);
+ cb(opts)
+ })?;
+ Ok(())
+ })
+}
+
+pub fn fetch(
+ repo: &mut git2::Repository,
+ orig_url: &str,
+ reference: &GitReference,
+ config: &Config,
+) -> CargoResult<()> {
+ if config.frozen() {
+ anyhow::bail!(
+ "attempting to update a git repository, but --frozen \
+ was specified"
+ )
+ }
+ if !config.network_allowed() {
+ anyhow::bail!("can't update a git repository in the offline mode")
+ }
+
+ // If we're fetching from GitHub, attempt GitHub's special fast path for
+ // testing if we've already got an up-to-date copy of the repository
+ let oid_to_fetch = match github_fast_path(repo, orig_url, reference, config) {
+ Ok(FastPathRev::UpToDate) => return Ok(()),
+ Ok(FastPathRev::NeedsFetch(rev)) => Some(rev),
+ Ok(FastPathRev::Indeterminate) => None,
+ Err(e) => {
+ debug!("failed to check github {:?}", e);
+ None
+ }
+ };
+
+ // We reuse repositories quite a lot, so before we go through and update the
+ // repo check to see if it's a little too old and could benefit from a gc.
+ // In theory this shouldn't be too expensive compared to the network
+ // request we're about to issue.
+ maybe_gc_repo(repo, config)?;
+
+ clean_repo_temp_files(repo);
+
+ // Translate the reference desired here into an actual list of refspecs
+ // which need to get fetched. Additionally record if we're fetching tags.
+ let mut refspecs = Vec::new();
+ let mut tags = false;
+ // The `+` symbol on the refspec allows the local reference to be
+ // force-updated (a non-fast-forward update), which is needed if the
+ // upstream branch is ever force-pushed.
+ match reference {
+ // For branches and tags we can fetch simply one reference and copy it
+ // locally, no need to fetch other branches/tags.
+ GitReference::Branch(b) => {
+ refspecs.push(format!("+refs/heads/{0}:refs/remotes/origin/{0}", b));
+ }
+ GitReference::Tag(t) => {
+ refspecs.push(format!("+refs/tags/{0}:refs/remotes/origin/tags/{0}", t));
+ }
+
+ GitReference::DefaultBranch => {
+ refspecs.push(String::from("+HEAD:refs/remotes/origin/HEAD"));
+ }
+
+ GitReference::Rev(rev) => {
+ if rev.starts_with("refs/") {
+ refspecs.push(format!("+{0}:{0}", rev));
+ } else if let Some(oid_to_fetch) = oid_to_fetch {
+ refspecs.push(format!("+{0}:refs/commit/{0}", oid_to_fetch));
+ } else {
+ // We don't know what the rev will point to. To handle this
+ // situation we fetch all branches and tags, and then we pray
+ // it's somewhere in there.
+ refspecs.push(String::from("+refs/heads/*:refs/remotes/origin/*"));
+ refspecs.push(String::from("+HEAD:refs/remotes/origin/HEAD"));
+ tags = true;
+ }
+ }
+ }
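+ // As a concrete illustration of the expansion above: `Branch("master")`
+ // produces `+refs/heads/master:refs/remotes/origin/master`, `Tag("v1.0.0")`
+ // produces `+refs/tags/v1.0.0:refs/remotes/origin/tags/v1.0.0`, and the
+ // default branch produces `+HEAD:refs/remotes/origin/HEAD`.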
+
+ // Unfortunately `libgit2` is notably lacking in the realm of authentication
+ // when compared to the `git` command line. As a result, allow an escape
+ // hatch for users that would prefer to use `git`-the-CLI for fetching
+ // repositories instead of `libgit2`-the-library. This should make more
+ // flavors of authentication possible while also still giving us all the
+ // speed and portability of using `libgit2`.
+ if let Some(true) = config.net_config()?.git_fetch_with_cli {
+ return fetch_with_cli(repo, orig_url, &refspecs, tags, config);
+ }
+ if config
+ .cli_unstable()
+ .gitoxide
+ .map_or(false, |git| git.fetch)
+ {
+ let git2_repo = repo;
+ let config_overrides = cargo_config_to_gitoxide_overrides(config)?;
+ let repo_reinitialized = AtomicBool::default();
+ let res = oxide::with_retry_and_progress(
+ &git2_repo.path().to_owned(),
+ config,
+ &|repo_path,
+ should_interrupt,
+ mut progress,
+ url_for_authentication: &mut dyn FnMut(&gix::bstr::BStr)| {
+ // The `fetch` operation here may fail spuriously due to a corrupt
+ // repository. It could also fail, however, for a whole slew of other
+ // reasons (aka network related reasons). We want Cargo to automatically
+ // recover from corrupt repositories, but we don't want Cargo to stomp
+ // over other legitimate errors.
+ //
+ // Consequently we save off the error of the `fetch` operation and if it
+ // looks like a "corrupt repo" error then we blow away the repo and try
+ // again. If it looks like any other kind of error, or if we've already
+ // blown away the repository, then we want to return the error as-is.
+ loop {
+ let res = oxide::open_repo(
+ repo_path,
+ config_overrides.clone(),
+ oxide::OpenMode::ForFetch,
+ )
+ .map_err(crate::sources::git::fetch::Error::from)
+ .and_then(|repo| {
+ debug!("initiating fetch of {:?} from {}", refspecs, orig_url);
+ let url_for_authentication = &mut *url_for_authentication;
+ let remote = repo
+ .remote_at(orig_url)?
+ .with_fetch_tags(if tags {
+ gix::remote::fetch::Tags::All
+ } else {
+ gix::remote::fetch::Tags::Included
+ })
+ .with_refspecs(
+ refspecs.iter().map(|s| s.as_str()),
+ gix::remote::Direction::Fetch,
+ )
+ .map_err(crate::sources::git::fetch::Error::Other)?;
+ let url = remote
+ .url(gix::remote::Direction::Fetch)
+ .expect("set at init")
+ .to_owned();
+ let connection =
+ remote.connect(gix::remote::Direction::Fetch, &mut progress)?;
+ let mut authenticate = connection.configured_credentials(url)?;
+ let connection = connection.with_credentials(
+ move |action: gix::protocol::credentials::helper::Action| {
+ if let Some(url) = action
+ .context()
+ .and_then(|ctx| ctx.url.as_ref().filter(|url| *url != orig_url))
+ {
+ url_for_authentication(url.as_ref());
+ }
+ authenticate(action)
+ },
+ );
+ let outcome = connection
+ .prepare_fetch(gix::remote::ref_map::Options::default())?
+ .receive(should_interrupt)?;
+ Ok(outcome)
+ });
+ let err = match res {
+ Ok(_) => break,
+ Err(e) => e,
+ };
+ debug!("fetch failed: {}", err);
+
+ if !repo_reinitialized.load(Ordering::Relaxed)
+ // We check for errors that could occur if the configuration, refs or odb files are corrupted.
+ // We don't check for errors related to writing, as `gitoxide` is expected to create missing leading
+ // folders before writing files into them, or else not even open a directory as a git repository (which is
+ // also handled here).
+ && err.is_corrupted()
+ {
+ repo_reinitialized.store(true, Ordering::Relaxed);
+ debug!(
+ "looks like this is a corrupt repository, reinitializing \
+ and trying again"
+ );
+ if oxide::reinitialize(repo_path).is_ok() {
+ continue;
+ }
+ }
+
+ return Err(err.into());
+ }
+ Ok(())
+ },
+ );
+ if repo_reinitialized.load(Ordering::Relaxed) {
+ *git2_repo = git2::Repository::open(git2_repo.path())?;
+ }
+ res
+ } else {
+ debug!("doing a fetch for {}", orig_url);
+ let git_config = git2::Config::open_default()?;
+ with_fetch_options(&git_config, orig_url, config, &mut |mut opts| {
+ if tags {
+ opts.download_tags(git2::AutotagOption::All);
+ }
+ // The `fetch` operation here may fail spuriously due to a corrupt
+ // repository. It could also fail, however, for a whole slew of other
+ // reasons (aka network related reasons). We want Cargo to automatically
+ // recover from corrupt repositories, but we don't want Cargo to stomp
+ // over other legitimate errors.
+ //
+ // Consequently we save off the error of the `fetch` operation and if it
+ // looks like a "corrupt repo" error then we blow away the repo and try
+ // again. If it looks like any other kind of error, or if we've already
+ // blown away the repository, then we want to return the error as-is.
+ let mut repo_reinitialized = false;
+ loop {
+ debug!("initiating fetch of {:?} from {}", refspecs, orig_url);
+ let res = repo
+ .remote_anonymous(orig_url)?
+ .fetch(&refspecs, Some(&mut opts), None);
+ let err = match res {
+ Ok(()) => break,
+ Err(e) => e,
+ };
+ debug!("fetch failed: {}", err);
+
+ if !repo_reinitialized
+ && matches!(err.class(), ErrorClass::Reference | ErrorClass::Odb)
+ {
+ repo_reinitialized = true;
+ debug!(
+ "looks like this is a corrupt repository, reinitializing \
+ and trying again"
+ );
+ if reinitialize(repo).is_ok() {
+ continue;
+ }
+ }
+
+ return Err(err.into());
+ }
+ Ok(())
+ })
+ }
+}
+
+fn fetch_with_cli(
+ repo: &mut git2::Repository,
+ url: &str,
+ refspecs: &[String],
+ tags: bool,
+ config: &Config,
+) -> CargoResult<()> {
+ let mut cmd = ProcessBuilder::new("git");
+ cmd.arg("fetch");
+ if tags {
+ cmd.arg("--tags");
+ }
+ match config.shell().verbosity() {
+ Verbosity::Normal => {}
+ Verbosity::Verbose => {
+ cmd.arg("--verbose");
+ }
+ Verbosity::Quiet => {
+ cmd.arg("--quiet");
+ }
+ }
+ cmd.arg("--force") // handle force pushes
+ .arg("--update-head-ok") // see discussion in #2078
+ .arg(url)
+ .args(refspecs)
+ // If cargo is run by git (for example, the `exec` command in `git
+ // rebase`), the GIT_DIR is set by git and will point to the wrong
+ // location (this takes precedence over the cwd). Make sure this is
+ // unset so git will look at cwd for the repo.
+ .env_remove("GIT_DIR")
+ // The rest of these may not be necessary, but I'm including them
+ // just to be extra paranoid and avoid any issues.
+ .env_remove("GIT_WORK_TREE")
+ .env_remove("GIT_INDEX_FILE")
+ .env_remove("GIT_OBJECT_DIRECTORY")
+ .env_remove("GIT_ALTERNATE_OBJECT_DIRECTORIES")
+ .cwd(repo.path());
+ config
+ .shell()
+ .verbose(|s| s.status("Running", &cmd.to_string()))?;
+ cmd.exec()?;
+ Ok(())
+}
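A minimal sketch, assuming hypothetical repository path, URL, and refspec, of the command that `fetch_with_cli` ends up spawning, written here with `std::process::Command` rather than the `ProcessBuilder` used above:

// Rough equivalent of the spawned `git fetch` invocation (values are hypothetical).
use std::process::Command;

fn main() -> std::io::Result<()> {
    let status = Command::new("git")
        .args(["fetch", "--force", "--update-head-ok"])
        .arg("https://github.com/rust-lang/log")
        .arg("+HEAD:refs/remotes/origin/HEAD")
        // GIT_DIR is cleared so git derives the repository from the cwd.
        .env_remove("GIT_DIR")
        .current_dir("/path/to/cargo/git/db/log-0123456789abcdef")
        .status()?;
    println!("git fetch exited with {status}");
    Ok(())
}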
+
+/// Cargo has a bunch of long-lived git repositories in its global cache and
+/// some, like the index, are updated very frequently. Right now each update
+/// creates a new "pack file" inside the git database, and over time this can
+/// cause bad performance and bad behavior in libgit2.
+///
+/// One pathological use case today is where libgit2 opens hundreds of file
+/// descriptors, getting us dangerously close to blowing out the OS limits of
+/// how many fds we can have open. This is detailed in #4403.
+///
+/// To try to combat this problem we attempt a `git gc` here. Note, though, that
+/// we may not even have `git` installed on the system! As a result we
+/// opportunistically try a `git gc` when the pack directory looks too big, and
+/// failing that we just blow away the repository and start over.
+fn maybe_gc_repo(repo: &mut git2::Repository, config: &Config) -> CargoResult<()> {
+ // Here we arbitrarily declare that if you have more than 100 files in your
+ // `pack` folder, we need to do a gc.
+ let entries = match repo.path().join("objects/pack").read_dir() {
+ Ok(e) => e.count(),
+ Err(_) => {
+ debug!("skipping gc as pack dir appears gone");
+ return Ok(());
+ }
+ };
+ let max = config
+ .get_env("__CARGO_PACKFILE_LIMIT")
+ .ok()
+ .and_then(|s| s.parse::<usize>().ok())
+ .unwrap_or(100);
+ if entries < max {
+ debug!("skipping gc as there's only {} pack files", entries);
+ return Ok(());
+ }
+
+ // First up, try a literal `git gc` by shelling out to git. This is pretty
+ // likely to fail though as we may not have `git` installed. Note that
+ // libgit2 doesn't currently implement the gc operation, so there's no
+ // equivalent there.
+ match Command::new("git")
+ .arg("gc")
+ .current_dir(repo.path())
+ .output()
+ {
+ Ok(out) => {
+ debug!(
+ "git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}",
+ out.status,
+ String::from_utf8_lossy(&out.stdout),
+ String::from_utf8_lossy(&out.stderr)
+ );
+ if out.status.success() {
+ let new = git2::Repository::open(repo.path())?;
+ *repo = new;
+ return Ok(());
+ }
+ }
+ Err(e) => debug!("git-gc failed to spawn: {}", e),
+ }
+
+ // Alright all else failed, let's start over.
+ reinitialize(repo)
+}
+
+/// Removes temporary files left from previous activity.
+///
+/// If libgit2 is interrupted while indexing pack files, it will leave behind
+/// some temporary files that it doesn't clean up. These can be quite large in
+/// size, so this tries to clean things up.
+///
+/// This intentionally ignores errors. This is only an opportunistic cleaning,
+/// and we don't really care if there are issues (there's likely nothing
+/// that can be done about them).
+///
+/// The git CLI has similar behavior (its temp files look like
+/// `objects/pack/tmp_pack_9kUSA8`). Those files are normally deleted via `git
+/// prune` which is run by `git gc`. However, it doesn't know about libgit2's
+/// filenames, so they never get cleaned up.
+fn clean_repo_temp_files(repo: &git2::Repository) {
+ let path = repo.path().join("objects/pack/pack_git2_*");
+ let pattern = match path.to_str() {
+ Some(p) => p,
+ None => {
+ log::warn!("cannot convert {path:?} to a string");
+ return;
+ }
+ };
+ let paths = match glob::glob(pattern) {
+ Ok(paths) => paths,
+ Err(_) => return,
+ };
+ for path in paths {
+ if let Ok(path) = path {
+ match paths::remove_file(&path) {
+ Ok(_) => log::debug!("removed stale temp git file {path:?}"),
+ Err(e) => log::warn!("failed to remove {path:?} while cleaning temp files: {e}"),
+ }
+ }
+ }
+}
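A minimal sketch of the same opportunistic cleanup, assuming the `glob` crate used above (the repository path is hypothetical):

// Best-effort removal of libgit2's leftover temp pack files.
fn main() {
    let pattern = "/path/to/repo/objects/pack/pack_git2_*";
    if let Ok(entries) = glob::glob(pattern) {
        for path in entries.flatten() {
            // Errors are deliberately ignored; this is only opportunistic cleanup.
            let _ = std::fs::remove_file(&path);
        }
    }
}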
+
+fn reinitialize(repo: &mut git2::Repository) -> CargoResult<()> {
+ // Here we want to drop the current repository object pointed to by `repo`,
+ // so we initialize a temporary repository in a sub-folder, blow away the
+ // existing git folder, and then recreate the git repo. Finally we blow away
+ // the `tmp` folder we allocated.
+ let path = repo.path().to_path_buf();
+ debug!("reinitializing git repo at {:?}", path);
+ let tmp = path.join("tmp");
+ let bare = !repo.path().ends_with(".git");
+ *repo = init(&tmp, false)?;
+ for entry in path.read_dir()? {
+ let entry = entry?;
+ if entry.file_name().to_str() == Some("tmp") {
+ continue;
+ }
+ let path = entry.path();
+ drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path)));
+ }
+ *repo = init(&path, bare)?;
+ paths::remove_dir_all(&tmp)?;
+ Ok(())
+}
+
+fn init(path: &Path, bare: bool) -> CargoResult<git2::Repository> {
+ let mut opts = git2::RepositoryInitOptions::new();
+ // Skip anything related to templates; they just cause all sorts of issues, as
+ // we really don't want to use them yet they insist on being used. See #6240
+ // for an example issue that comes up.
+ opts.external_template(false);
+ opts.bare(bare);
+ Ok(git2::Repository::init_opts(&path, &opts)?)
+}
+
+enum FastPathRev {
+ /// The local rev (determined by `reference.resolve(repo)`) is already up to
+ /// date with what this rev resolves to on GitHub's server.
+ UpToDate,
+ /// The following SHA must be fetched in order for the local rev to become
+ /// up to date.
+ NeedsFetch(Oid),
+ /// Don't know whether local rev is up to date. We'll fetch _all_ branches
+ /// and tags from the server and see what happens.
+ Indeterminate,
+}
+
+/// Updating the index is done pretty regularly so we want it to be as fast as
+/// possible. For registries hosted on GitHub (like the crates.io index) there's
+/// a fast path available [1] that can tell us whether there are any updates to
+/// be made.
+///
+/// This function will attempt to hit that fast path and verify that the `oid`
+/// is actually the current branch of the repository.
+///
+/// [1]: https://developer.github.com/v3/repos/commits/#get-the-sha-1-of-a-commit-reference
+///
+/// Note that this function should never cause an actual failure because it's
+/// just a fast path. As a result, any errors it returns are simply logged and
+/// ignored by the caller, which falls back to the normal update path above.
+fn github_fast_path(
+ repo: &mut git2::Repository,
+ url: &str,
+ reference: &GitReference,
+ config: &Config,
+) -> CargoResult<FastPathRev> {
+ let url = Url::parse(url)?;
+ if !is_github(&url) {
+ return Ok(FastPathRev::Indeterminate);
+ }
+
+ let local_object = reference.resolve(repo).ok();
+
+ let github_branch_name = match reference {
+ GitReference::Branch(branch) => branch,
+ GitReference::Tag(tag) => tag,
+ GitReference::DefaultBranch => "HEAD",
+ GitReference::Rev(rev) => {
+ if rev.starts_with("refs/") {
+ rev
+ } else if looks_like_commit_hash(rev) {
+ // `revparse_single` (used by `resolve`) is the only way to turn
+ // short hash -> long hash, but it also parses other things,
+ // like branch and tag names, which might coincidentally be
+ // valid hex.
+ //
+ // We only return early if `rev` is a prefix of the object found
+ // by `revparse_single`. Don't bother talking to GitHub in that
+ // case, since commit hashes are permanent. If a commit with the
+ // requested hash is already present in the local clone, its
+ // contents must be the same as what is on the server for that
+ // hash.
+ //
+ // If `rev` is not found locally by `revparse_single`, we'll
+ // need GitHub to resolve it and get a hash. If `rev` is found
+ // but is not a short hash of the found object, it's probably a
+ // branch and we also need to get a hash from GitHub, in case
+ // the branch has moved.
+ if let Some(local_object) = local_object {
+ if is_short_hash_of(rev, local_object) {
+ return Ok(FastPathRev::UpToDate);
+ }
+ }
+ rev
+ } else {
+ debug!("can't use github fast path with `rev = \"{}\"`", rev);
+ return Ok(FastPathRev::Indeterminate);
+ }
+ }
+ };
+
+ // This expects GitHub URLs in the form `github.com/user/repo` and nothing
+ // else
+ let mut pieces = url
+ .path_segments()
+ .ok_or_else(|| anyhow!("no path segments on url"))?;
+ let username = pieces
+ .next()
+ .ok_or_else(|| anyhow!("couldn't find username"))?;
+ let repository = pieces
+ .next()
+ .ok_or_else(|| anyhow!("couldn't find repository name"))?;
+ if pieces.next().is_some() {
+ anyhow::bail!("too many segments on URL");
+ }
+
+ // Trim off the `.git` from the repository, if present, since that's
+ // optional for GitHub and won't work when we try to use the API as well.
+ let repository = repository.strip_suffix(".git").unwrap_or(repository);
+
+ let url = format!(
+ "https://api.github.com/repos/{}/{}/commits/{}",
+ username, repository, github_branch_name,
+ );
+ let mut handle = config.http()?.borrow_mut();
+ debug!("attempting GitHub fast path for {}", url);
+ handle.get(true)?;
+ handle.url(&url)?;
+ handle.useragent("cargo")?;
+ handle.http_headers({
+ let mut headers = List::new();
+ headers.append("Accept: application/vnd.github.3.sha")?;
+ if let Some(local_object) = local_object {
+ headers.append(&format!("If-None-Match: \"{}\"", local_object))?;
+ }
+ headers
+ })?;
+
+ let mut response_body = Vec::new();
+ let mut transfer = handle.transfer();
+ transfer.write_function(|data| {
+ response_body.extend_from_slice(data);
+ Ok(data.len())
+ })?;
+ transfer.perform()?;
+ drop(transfer); // end borrow of handle so that response_code can be called
+
+ let response_code = handle.response_code()?;
+ if response_code == 304 {
+ Ok(FastPathRev::UpToDate)
+ } else if response_code == 200 {
+ let oid_to_fetch = str::from_utf8(&response_body)?.parse::<Oid>()?;
+ Ok(FastPathRev::NeedsFetch(oid_to_fetch))
+ } else {
+ // Usually response_code == 404 if the repository does not exist, and
+ // response_code == 422 if exists but GitHub is unable to resolve the
+ // requested rev.
+ Ok(FastPathRev::Indeterminate)
+ }
+}
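As a hedged illustration of the request shape built above (the owner, repository, and reference are hypothetical):

// Sketch of the GitHub fast-path request.
fn main() {
    let (username, repository, reference) = ("rust-lang", "log", "HEAD");
    let url = format!("https://api.github.com/repos/{username}/{repository}/commits/{reference}");
    assert_eq!(url, "https://api.github.com/repos/rust-lang/log/commits/HEAD");
    // The request carries `Accept: application/vnd.github.3.sha` and, when a
    // local object exists, `If-None-Match: "<oid>"`; a 304 response means the
    // local clone is already up to date, while a 200 response body is the sha
    // that still needs to be fetched.
}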
+
+fn is_github(url: &Url) -> bool {
+ url.host_str() == Some("github.com")
+}
+
+fn looks_like_commit_hash(rev: &str) -> bool {
+ rev.len() >= 7 && rev.chars().all(|ch| ch.is_ascii_hexdigit())
+}
+
+fn is_short_hash_of(rev: &str, oid: Oid) -> bool {
+ let long_hash = oid.to_string();
+ match long_hash.get(..rev.len()) {
+ Some(truncated_long_hash) => truncated_long_hash.eq_ignore_ascii_case(rev),
+ None => false,
+ }
+}
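And a small, self-contained sketch of the two helpers directly above (the hashes are made up):

// Mirrors the checks performed by `looks_like_commit_hash` and `is_short_hash_of`.
fn main() {
    let rev = "4f339aB"; // user-supplied rev, mixed case on purpose
    assert!(rev.len() >= 7 && rev.chars().all(|ch| ch.is_ascii_hexdigit()));

    // A rev counts as a short hash of an object id when it is a
    // case-insensitive prefix of the full 40-character hex id.
    let long_hash = "4f339ab73a160a0a347eca5e12976de836b1d9bd";
    assert!(long_hash[..rev.len()].eq_ignore_ascii_case(rev));
}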
diff --git a/src/tools/cargo/src/cargo/sources/mod.rs b/src/tools/cargo/src/cargo/sources/mod.rs
new file mode 100644
index 000000000..7d238d47d
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/mod.rs
@@ -0,0 +1,13 @@
+pub use self::config::SourceConfigMap;
+pub use self::directory::DirectorySource;
+pub use self::git::GitSource;
+pub use self::path::PathSource;
+pub use self::registry::{RegistrySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY};
+pub use self::replaced::ReplacedSource;
+
+pub mod config;
+pub mod directory;
+pub mod git;
+pub mod path;
+pub mod registry;
+pub mod replaced;
diff --git a/src/tools/cargo/src/cargo/sources/path.rs b/src/tools/cargo/src/cargo/sources/path.rs
new file mode 100644
index 000000000..37e1e1f0f
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/path.rs
@@ -0,0 +1,581 @@
+use std::collections::HashSet;
+use std::fmt::{self, Debug, Formatter};
+use std::path::{Path, PathBuf};
+use std::task::Poll;
+
+use crate::core::source::MaybePackage;
+use crate::core::{Dependency, Package, PackageId, QueryKind, Source, SourceId, Summary};
+use crate::ops;
+use crate::util::{internal, CargoResult, Config};
+use anyhow::Context as _;
+use cargo_util::paths;
+use filetime::FileTime;
+use ignore::gitignore::GitignoreBuilder;
+use log::{trace, warn};
+use walkdir::WalkDir;
+
+pub struct PathSource<'cfg> {
+ source_id: SourceId,
+ path: PathBuf,
+ updated: bool,
+ packages: Vec<Package>,
+ config: &'cfg Config,
+ recursive: bool,
+}
+
+impl<'cfg> PathSource<'cfg> {
+ /// Invoked with an absolute path to a directory that contains a `Cargo.toml`.
+ ///
+ /// This source will only return the package at precisely the `path`
+ /// specified, and it will be an error if there's not a package at `path`.
+ pub fn new(path: &Path, source_id: SourceId, config: &'cfg Config) -> PathSource<'cfg> {
+ PathSource {
+ source_id,
+ path: path.to_path_buf(),
+ updated: false,
+ packages: Vec::new(),
+ config,
+ recursive: false,
+ }
+ }
+
+ /// Creates a new source which is walked recursively to discover packages.
+ ///
+ /// This is similar to the `new` method except that instead of requiring a
+ /// valid package to be present at `root` the folder is walked entirely to
+ /// crawl for packages.
+ ///
+ /// Note that this should be used with care and likely shouldn't be chosen
+ /// by default!
+ pub fn new_recursive(root: &Path, id: SourceId, config: &'cfg Config) -> PathSource<'cfg> {
+ PathSource {
+ recursive: true,
+ ..PathSource::new(root, id, config)
+ }
+ }
+
+ pub fn preload_with(&mut self, pkg: Package) {
+ assert!(!self.updated);
+ assert!(!self.recursive);
+ assert!(self.packages.is_empty());
+ self.updated = true;
+ self.packages.push(pkg);
+ }
+
+ pub fn root_package(&mut self) -> CargoResult<Package> {
+ trace!("root_package; source={:?}", self);
+
+ self.update()?;
+
+ match self.packages.iter().find(|p| p.root() == &*self.path) {
+ Some(pkg) => Ok(pkg.clone()),
+ None => Err(internal(format!(
+ "no package found in source {:?}",
+ self.path
+ ))),
+ }
+ }
+
+ pub fn read_packages(&self) -> CargoResult<Vec<Package>> {
+ if self.updated {
+ Ok(self.packages.clone())
+ } else if self.recursive {
+ ops::read_packages(&self.path, self.source_id, self.config)
+ } else {
+ let path = self.path.join("Cargo.toml");
+ let (pkg, _) = ops::read_package(&path, self.source_id, self.config)?;
+ Ok(vec![pkg])
+ }
+ }
+
+ /// List all files relevant to building this package inside this source.
+ ///
+ /// This function will use the appropriate methods to determine the
+ /// set of files underneath this source's directory which are relevant for
+ /// building `pkg`.
+ ///
+ /// The basic assumption of this method is that all files in the directory
+ /// are relevant for building this package, but it also contains logic to
+ /// use other methods like .gitignore to filter the list of files.
+ pub fn list_files(&self, pkg: &Package) -> CargoResult<Vec<PathBuf>> {
+ self._list_files(pkg).with_context(|| {
+ format!(
+ "failed to determine list of files in {}",
+ pkg.root().display()
+ )
+ })
+ }
+
+ fn _list_files(&self, pkg: &Package) -> CargoResult<Vec<PathBuf>> {
+ let root = pkg.root();
+ let no_include_option = pkg.manifest().include().is_empty();
+ let git_repo = if no_include_option {
+ self.discover_git_repo(root)?
+ } else {
+ None
+ };
+
+ let mut exclude_builder = GitignoreBuilder::new(root);
+ if no_include_option && git_repo.is_none() {
+ // no include option and not git repo discovered (see rust-lang/cargo#7183).
+ exclude_builder.add_line(None, ".*")?;
+ }
+ for rule in pkg.manifest().exclude() {
+ exclude_builder.add_line(None, rule)?;
+ }
+ let ignore_exclude = exclude_builder.build()?;
+
+ let mut include_builder = GitignoreBuilder::new(root);
+ for rule in pkg.manifest().include() {
+ include_builder.add_line(None, rule)?;
+ }
+ let ignore_include = include_builder.build()?;
+
+ let ignore_should_package = |relative_path: &Path, is_dir: bool| {
+ // "Include" and "exclude" options are mutually exclusive.
+ if no_include_option {
+ !ignore_exclude
+ .matched_path_or_any_parents(relative_path, is_dir)
+ .is_ignore()
+ } else {
+ if is_dir {
+ // Generally, include directives don't list every
+ // directory (nor should they!). Just skip all directory
+ // checks, and only check files.
+ return true;
+ }
+ ignore_include
+ .matched_path_or_any_parents(relative_path, /* is_dir */ false)
+ .is_ignore()
+ }
+ };
+
+ let filter = |path: &Path, is_dir: bool| {
+ let relative_path = match path.strip_prefix(root) {
+ Ok(p) => p,
+ Err(_) => return false,
+ };
+
+ let rel = relative_path.as_os_str();
+ if rel == "Cargo.lock" {
+ return pkg.include_lockfile();
+ } else if rel == "Cargo.toml" {
+ return true;
+ }
+
+ ignore_should_package(relative_path, is_dir)
+ };
+
+ // Attempt Git-prepopulate only if no `include` (see rust-lang/cargo#4135).
+ if no_include_option {
+ if let Some(repo) = git_repo {
+ return self.list_files_git(pkg, &repo, &filter);
+ }
+ }
+ self.list_files_walk(pkg, &filter)
+ }
+
+ /// Returns `Some(git2::Repository)` if a sibling `Cargo.toml` and `.git`
+ /// directory are found; otherwise, the caller should fall back on the full file list.
+ fn discover_git_repo(&self, root: &Path) -> CargoResult<Option<git2::Repository>> {
+ let repo = match git2::Repository::discover(root) {
+ Ok(repo) => repo,
+ Err(e) => {
+ log::debug!(
+ "could not discover git repo at or above {}: {}",
+ root.display(),
+ e
+ );
+ return Ok(None);
+ }
+ };
+ let index = repo
+ .index()
+ .with_context(|| format!("failed to open git index at {}", repo.path().display()))?;
+ let repo_root = repo.workdir().ok_or_else(|| {
+ anyhow::format_err!(
+ "did not expect repo at {} to be bare",
+ repo.path().display()
+ )
+ })?;
+ let repo_relative_path = match paths::strip_prefix_canonical(root, repo_root) {
+ Ok(p) => p,
+ Err(e) => {
+ log::warn!(
+ "cannot determine if path `{:?}` is in git repo `{:?}`: {:?}",
+ root,
+ repo_root,
+ e
+ );
+ return Ok(None);
+ }
+ };
+ let manifest_path = repo_relative_path.join("Cargo.toml");
+ if index.get_path(&manifest_path, 0).is_some() {
+ return Ok(Some(repo));
+ }
+ // Package Cargo.toml is not in git, don't use git to guide our selection.
+ Ok(None)
+ }
+
+ fn list_files_git(
+ &self,
+ pkg: &Package,
+ repo: &git2::Repository,
+ filter: &dyn Fn(&Path, bool) -> bool,
+ ) -> CargoResult<Vec<PathBuf>> {
+ warn!("list_files_git {}", pkg.package_id());
+ let index = repo.index()?;
+ let root = repo
+ .workdir()
+ .ok_or_else(|| anyhow::format_err!("can't list files on a bare repository"))?;
+ let pkg_path = pkg.root();
+
+ let mut ret = Vec::<PathBuf>::new();
+
+ // We use information from the Git repository to guide us in traversing
+ // its tree. The primary purpose of this is to take advantage of the
+ // `.gitignore` and auto-ignore files that don't matter.
+ //
+ // Here we're also careful to look at both tracked and untracked files as
+ // the untracked files are often part of a build and may become relevant
+ // as part of a future commit.
+ let index_files = index.iter().map(|entry| {
+ use libgit2_sys::{GIT_FILEMODE_COMMIT, GIT_FILEMODE_LINK};
+ // `is_dir` is an optimization to avoid calling
+ // `fs::metadata` on every file.
+ let is_dir = if entry.mode == GIT_FILEMODE_LINK as u32 {
+ // Let the code below figure out if this symbolic link points
+ // to a directory or not.
+ None
+ } else {
+ Some(entry.mode == GIT_FILEMODE_COMMIT as u32)
+ };
+ (join(root, &entry.path), is_dir)
+ });
+ let mut opts = git2::StatusOptions::new();
+ opts.include_untracked(true);
+ if let Ok(suffix) = pkg_path.strip_prefix(root) {
+ opts.pathspec(suffix);
+ }
+ let statuses = repo.statuses(Some(&mut opts))?;
+ let mut skip_paths = HashSet::new();
+ let untracked: Vec<_> = statuses
+ .iter()
+ .filter_map(|entry| {
+ match entry.status() {
+ // Don't include Cargo.lock if it is untracked. Packaging will
+ // generate a new one as needed.
+ git2::Status::WT_NEW if entry.path() != Some("Cargo.lock") => {
+ Some(Ok((join(root, entry.path_bytes()), None)))
+ }
+ git2::Status::WT_DELETED => {
+ let path = match join(root, entry.path_bytes()) {
+ Ok(p) => p,
+ Err(e) => return Some(Err(e)),
+ };
+ skip_paths.insert(path);
+ None
+ }
+ _ => None,
+ }
+ })
+ .collect::<CargoResult<_>>()?;
+
+ let mut subpackages_found = Vec::new();
+
+ for (file_path, is_dir) in index_files.chain(untracked) {
+ let file_path = file_path?;
+ if skip_paths.contains(&file_path) {
+ continue;
+ }
+
+ // Filter out files blatantly outside this package. This is helped a
+ // bit above via the `pathspec` function call, but we need to filter
+ // the entries in the index as well.
+ if !file_path.starts_with(pkg_path) {
+ continue;
+ }
+
+ match file_path.file_name().and_then(|s| s.to_str()) {
+ // The `target` directory is never included.
+ Some("target") => continue,
+
+ // Keep track of all sub-packages found and also strip out all
+ // matches we've found so far. Note, though, that if we find
+ // our own `Cargo.toml`, we keep going.
+ Some("Cargo.toml") => {
+ let path = file_path.parent().unwrap();
+ if path != pkg_path {
+ warn!("subpackage found: {}", path.display());
+ ret.retain(|p| !p.starts_with(path));
+ subpackages_found.push(path.to_path_buf());
+ continue;
+ }
+ }
+
+ _ => {}
+ }
+
+ // If this file is part of any other sub-package we've found so far,
+ // skip it.
+ if subpackages_found.iter().any(|p| file_path.starts_with(p)) {
+ continue;
+ }
+
+ // `is_dir` is `None` for symlinks. The `unwrap_or_else` checks whether the
+ // symlink points to a directory.
+ let is_dir = is_dir.unwrap_or_else(|| file_path.is_dir());
+ if is_dir {
+ warn!(" found submodule {}", file_path.display());
+ let rel = file_path.strip_prefix(root)?;
+ let rel = rel.to_str().ok_or_else(|| {
+ anyhow::format_err!("invalid utf-8 filename: {}", rel.display())
+ })?;
+ // Git submodules are currently only named through `/` path
+ // separators, explicitly not `\` which windows uses. Who knew?
+ let rel = rel.replace(r"\", "/");
+ match repo.find_submodule(&rel).and_then(|s| s.open()) {
+ Ok(repo) => {
+ let files = self.list_files_git(pkg, &repo, filter)?;
+ ret.extend(files.into_iter());
+ }
+ Err(..) => {
+ self.walk(&file_path, &mut ret, false, filter)?;
+ }
+ }
+ } else if filter(&file_path, is_dir) {
+ assert!(!is_dir);
+ // We found a file!
+ warn!(" found {}", file_path.display());
+ ret.push(file_path);
+ }
+ }
+ return Ok(ret);
+
+ #[cfg(unix)]
+ fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
+ use std::ffi::OsStr;
+ use std::os::unix::prelude::*;
+ Ok(path.join(<OsStr as OsStrExt>::from_bytes(data)))
+ }
+ #[cfg(windows)]
+ fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
+ use std::str;
+ match str::from_utf8(data) {
+ Ok(s) => Ok(path.join(s)),
+ Err(e) => Err(anyhow::format_err!(
+ "cannot process path in git with a non utf8 filename: {}\n{:?}",
+ e,
+ data
+ )),
+ }
+ }
+ }
+
+ fn list_files_walk(
+ &self,
+ pkg: &Package,
+ filter: &dyn Fn(&Path, bool) -> bool,
+ ) -> CargoResult<Vec<PathBuf>> {
+ let mut ret = Vec::new();
+ self.walk(pkg.root(), &mut ret, true, filter)?;
+ Ok(ret)
+ }
+
+ fn walk(
+ &self,
+ path: &Path,
+ ret: &mut Vec<PathBuf>,
+ is_root: bool,
+ filter: &dyn Fn(&Path, bool) -> bool,
+ ) -> CargoResult<()> {
+ let walkdir = WalkDir::new(path)
+ .follow_links(true)
+ .into_iter()
+ .filter_entry(|entry| {
+ let path = entry.path();
+ let at_root = is_root && entry.depth() == 0;
+ let is_dir = entry.file_type().is_dir();
+
+ if !at_root && !filter(path, is_dir) {
+ return false;
+ }
+
+ if !is_dir {
+ return true;
+ }
+
+ // Don't recurse into any sub-packages that we have.
+ if !at_root && path.join("Cargo.toml").exists() {
+ return false;
+ }
+
+ // Skip root Cargo artifacts.
+ if is_root
+ && entry.depth() == 1
+ && path.file_name().and_then(|s| s.to_str()) == Some("target")
+ {
+ return false;
+ }
+
+ true
+ });
+ for entry in walkdir {
+ match entry {
+ Ok(entry) => {
+ if !entry.file_type().is_dir() {
+ ret.push(entry.into_path());
+ }
+ }
+ Err(err) if err.loop_ancestor().is_some() => {
+ self.config.shell().warn(err)?;
+ }
+ Err(err) => match err.path() {
+ // If an error occurs with a path, run it through the filter again;
+ // if it is excluded, just ignore the error in this case.
+ // See issue rust-lang/cargo#10917
+ Some(path) if !filter(path, path.is_dir()) => {}
+ // Otherwise, simply recover from it.
+ // Don't worry about skipping the error here; the callers would
+ // still hit the I/O error if they access the path thereafter.
+ Some(path) => ret.push(path.to_path_buf()),
+ None => return Err(err.into()),
+ },
+ }
+ }
+
+ Ok(())
+ }
+
+ pub fn last_modified_file(&self, pkg: &Package) -> CargoResult<(FileTime, PathBuf)> {
+ if !self.updated {
+ return Err(internal(format!(
+ "BUG: source `{:?}` was not updated",
+ self.path
+ )));
+ }
+
+ let mut max = FileTime::zero();
+ let mut max_path = PathBuf::new();
+ for file in self.list_files(pkg).with_context(|| {
+ format!(
+ "failed to determine the most recently modified file in {}",
+ pkg.root().display()
+ )
+ })? {
+ // An `fs::stat` error here is either because the path is a
+ // broken symlink, a permissions error, or a race
+ // condition where this path was `rm`-ed -- either way,
+ // we can ignore the error and treat the path's `mtime`
+ // as `0`.
+ let mtime = paths::mtime(&file).unwrap_or_else(|_| FileTime::zero());
+ if mtime > max {
+ max = mtime;
+ max_path = file;
+ }
+ }
+ trace!("last modified file {}: {}", self.path.display(), max);
+ Ok((max, max_path))
+ }
+
+ pub fn path(&self) -> &Path {
+ &self.path
+ }
+
+ pub fn update(&mut self) -> CargoResult<()> {
+ if !self.updated {
+ let packages = self.read_packages()?;
+ self.packages.extend(packages.into_iter());
+ self.updated = true;
+ }
+
+ Ok(())
+ }
+}
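A minimal sketch, assuming the same `ignore::gitignore::GitignoreBuilder` API used in `_list_files` above, of how an `exclude` rule is evaluated against a package-relative path (the rule and path are made up):

// An excluded path is reported as ignored and filtered out of the packaged file list.
use ignore::gitignore::GitignoreBuilder;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut exclude_builder = GitignoreBuilder::new("/pkg");
    exclude_builder.add_line(None, "*.tmp")?;
    let ignore_exclude = exclude_builder.build()?;

    assert!(ignore_exclude
        .matched_path_or_any_parents("notes.tmp", /* is_dir */ false)
        .is_ignore());
    Ok(())
}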
+
+impl<'cfg> Debug for PathSource<'cfg> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+ write!(f, "the paths source")
+ }
+}
+
+impl<'cfg> Source for PathSource<'cfg> {
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ self.update()?;
+ for s in self.packages.iter().map(|p| p.summary()) {
+ let matched = match kind {
+ QueryKind::Exact => dep.matches(s),
+ QueryKind::Fuzzy => true,
+ };
+ if matched {
+ f(s.clone())
+ }
+ }
+ Poll::Ready(Ok(()))
+ }
+
+ fn supports_checksums(&self) -> bool {
+ false
+ }
+
+ fn requires_precise(&self) -> bool {
+ false
+ }
+
+ fn source_id(&self) -> SourceId {
+ self.source_id
+ }
+
+ fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+ trace!("getting packages; id={}", id);
+ self.update()?;
+ let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
+ pkg.cloned()
+ .map(MaybePackage::Ready)
+ .ok_or_else(|| internal(format!("failed to find {} in path source", id)))
+ }
+
+ fn finish_download(&mut self, _id: PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+ panic!("no download should have started")
+ }
+
+ fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+ let (max, max_path) = self.last_modified_file(pkg)?;
+ // Note that we try to strip the prefix of this package to get a
+ // relative path to ensure that the fingerprint remains consistent
+ // across entire project directory renames.
+ let max_path = max_path.strip_prefix(&self.path).unwrap_or(&max_path);
+ Ok(format!("{} ({})", max, max_path.display()))
+ }
+
+ fn describe(&self) -> String {
+ match self.source_id.url().to_file_path() {
+ Ok(path) => path.display().to_string(),
+ Err(_) => self.source_id.to_string(),
+ }
+ }
+
+ fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {}
+
+ fn is_yanked(&mut self, _pkg: PackageId) -> Poll<CargoResult<bool>> {
+ Poll::Ready(Ok(false))
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ self.update()
+ }
+
+ fn invalidate_cache(&mut self) {
+ // Path source has no local cache.
+ }
+
+ fn set_quiet(&mut self, _quiet: bool) {
+ // Path source does not display status
+ }
+}
diff --git a/src/tools/cargo/src/cargo/sources/registry/download.rs b/src/tools/cargo/src/cargo/sources/registry/download.rs
new file mode 100644
index 000000000..723c55ffd
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/registry/download.rs
@@ -0,0 +1,130 @@
+use anyhow::Context;
+use cargo_util::Sha256;
+
+use crate::core::PackageId;
+use crate::sources::registry::make_dep_prefix;
+use crate::sources::registry::MaybeLock;
+use crate::sources::registry::{
+ RegistryConfig, CHECKSUM_TEMPLATE, CRATE_TEMPLATE, LOWER_PREFIX_TEMPLATE, PREFIX_TEMPLATE,
+ VERSION_TEMPLATE,
+};
+use crate::util::auth;
+use crate::util::errors::CargoResult;
+use crate::util::{Config, Filesystem};
+use std::fmt::Write as FmtWrite;
+use std::fs::{self, File, OpenOptions};
+use std::io::prelude::*;
+use std::io::SeekFrom;
+use std::str;
+
+pub(super) fn filename(pkg: PackageId) -> String {
+ format!("{}-{}.crate", pkg.name(), pkg.version())
+}
+
+pub(super) fn download(
+ cache_path: &Filesystem,
+ config: &Config,
+ pkg: PackageId,
+ checksum: &str,
+ registry_config: RegistryConfig,
+) -> CargoResult<MaybeLock> {
+ let filename = filename(pkg);
+ let path = cache_path.join(&filename);
+ let path = config.assert_package_cache_locked(&path);
+
+ // Attempt to open a read-only copy first to avoid an exclusive write
+ // lock and also work with read-only filesystems. Note that we check the
+ // length of the file below to handle interrupted downloads.
+ //
+ // If this fails then we fall through to the exclusive path where we may
+ // have to redownload the file.
+ if let Ok(dst) = File::open(path) {
+ let meta = dst.metadata()?;
+ if meta.len() > 0 {
+ return Ok(MaybeLock::Ready(dst));
+ }
+ }
+
+ let mut url = registry_config.dl;
+ if !url.contains(CRATE_TEMPLATE)
+ && !url.contains(VERSION_TEMPLATE)
+ && !url.contains(PREFIX_TEMPLATE)
+ && !url.contains(LOWER_PREFIX_TEMPLATE)
+ && !url.contains(CHECKSUM_TEMPLATE)
+ {
+ // This is the original URL format from before customizing the download URL was supported.
+ write!(
+ url,
+ "/{}/{}/download",
+ pkg.name(),
+ pkg.version().to_string()
+ )
+ .unwrap();
+ } else {
+ let prefix = make_dep_prefix(&*pkg.name());
+ url = url
+ .replace(CRATE_TEMPLATE, &*pkg.name())
+ .replace(VERSION_TEMPLATE, &pkg.version().to_string())
+ .replace(PREFIX_TEMPLATE, &prefix)
+ .replace(LOWER_PREFIX_TEMPLATE, &prefix.to_lowercase())
+ .replace(CHECKSUM_TEMPLATE, checksum);
+ }
+
+ let authorization = if registry_config.auth_required {
+ Some(auth::auth_token(config, &pkg.source_id(), None, None)?)
+ } else {
+ None
+ };
+
+ Ok(MaybeLock::Download {
+ url,
+ descriptor: pkg.to_string(),
+ authorization,
+ })
+}
+
+pub(super) fn finish_download(
+ cache_path: &Filesystem,
+ config: &Config,
+ pkg: PackageId,
+ checksum: &str,
+ data: &[u8],
+) -> CargoResult<File> {
+ // Verify what we just downloaded
+ let actual = Sha256::new().update(data).finish_hex();
+ if actual != checksum {
+ anyhow::bail!("failed to verify the checksum of `{}`", pkg)
+ }
+
+ let filename = filename(pkg);
+ cache_path.create_dir()?;
+ let path = cache_path.join(&filename);
+ let path = config.assert_package_cache_locked(&path);
+ let mut dst = OpenOptions::new()
+ .create(true)
+ .read(true)
+ .write(true)
+ .open(&path)
+ .with_context(|| format!("failed to open `{}`", path.display()))?;
+ let meta = dst.metadata()?;
+ if meta.len() > 0 {
+ return Ok(dst);
+ }
+
+ dst.write_all(data)?;
+ dst.seek(SeekFrom::Start(0))?;
+ Ok(dst)
+}
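A hedged, self-contained sketch of the checksum verification performed above, using the `sha2` crate in place of `cargo_util::Sha256` (the registry supplies the checksum as lowercase hex):

// Verifies downloaded bytes against the registry-provided SHA-256 checksum.
use sha2::{Digest, Sha256};

fn checksum_matches(data: &[u8], expected_hex: &str) -> bool {
    let actual: String = Sha256::digest(data)
        .iter()
        .map(|byte| format!("{:02x}", byte))
        .collect();
    actual == expected_hex
}

fn main() {
    // SHA-256 of the empty input, a well-known constant.
    assert!(checksum_matches(
        b"",
        "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    ));
}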
+
+pub(super) fn is_crate_downloaded(
+ cache_path: &Filesystem,
+ config: &Config,
+ pkg: PackageId,
+) -> bool {
+ let path = cache_path.join(filename(pkg));
+ let path = config.assert_package_cache_locked(&path);
+ if let Ok(meta) = fs::metadata(path) {
+ return meta.len() > 0;
+ }
+ false
+}
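For illustration, a minimal sketch of the `dl` template substitution that `download` performs above; the placeholder names (`{crate}`, `{version}`) follow the registry `config.json` convention and the template and values are hypothetical:

// Substituting the download-URL template for a specific package.
fn main() {
    let dl_template = "https://crates.example.com/api/v1/crates/{crate}/{version}/download";
    let url = dl_template
        .replace("{crate}", "serde")
        .replace("{version}", "1.0.0");
    assert_eq!(
        url,
        "https://crates.example.com/api/v1/crates/serde/1.0.0/download"
    );
}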
diff --git a/src/tools/cargo/src/cargo/sources/registry/http_remote.rs b/src/tools/cargo/src/cargo/sources/registry/http_remote.rs
new file mode 100644
index 000000000..7e1f2a587
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/registry/http_remote.rs
@@ -0,0 +1,854 @@
+//! Access to a HTTP-based crate registry.
+//!
+//! See [`HttpRegistry`] for details.
+
+use crate::core::{PackageId, SourceId};
+use crate::ops::{self};
+use crate::sources::registry::download;
+use crate::sources::registry::MaybeLock;
+use crate::sources::registry::{LoadResponse, RegistryConfig, RegistryData};
+use crate::util::errors::{CargoResult, HttpNotSuccessful, DEBUG_HEADERS};
+use crate::util::network::retry::{Retry, RetryResult};
+use crate::util::network::sleep::SleepTracker;
+use crate::util::{auth, Config, Filesystem, IntoUrl, Progress, ProgressStyle};
+use anyhow::Context;
+use cargo_util::paths;
+use curl::easy::{Easy, HttpVersion, List};
+use curl::multi::{EasyHandle, Multi};
+use log::{debug, trace, warn};
+use std::cell::RefCell;
+use std::collections::{HashMap, HashSet};
+use std::fs::{self, File};
+use std::io::ErrorKind;
+use std::path::{Path, PathBuf};
+use std::str;
+use std::task::{ready, Poll};
+use std::time::Duration;
+use url::Url;
+
+// HTTP headers
+const ETAG: &'static str = "etag";
+const LAST_MODIFIED: &'static str = "last-modified";
+const WWW_AUTHENTICATE: &'static str = "www-authenticate";
+const IF_NONE_MATCH: &'static str = "if-none-match";
+const IF_MODIFIED_SINCE: &'static str = "if-modified-since";
+
+const UNKNOWN: &'static str = "Unknown";
+
+/// A registry served by the HTTP-based registry API.
+///
+/// This type is primarily accessed through the [`RegistryData`] trait.
+///
+/// `HttpRegistry` implements the HTTP-based registry API outlined in [RFC 2789]. Read the RFC for
+/// the complete protocol, but _roughly_ the implementation loads each index file (e.g.,
+/// config.json or re/ge/regex) from an HTTP service rather than from a locally cloned git
+/// repository. The remote service can more or less be a static file server that simply serves the
+/// contents of the origin git repository.
+///
+/// Implemented naively, this leads to a significant amount of network traffic, as a lookup of any
+/// index file would need to check with the remote backend if the index file has changed. This
+/// cost is somewhat mitigated by the use of HTTP conditional fetches (`If-Modified-Since` and
+/// `If-None-Match` for `ETag`s) which can be efficiently handled by HTTP/2.
+///
+/// [RFC 2789]: https://github.com/rust-lang/rfcs/pull/2789
+pub struct HttpRegistry<'cfg> {
+ index_path: Filesystem,
+ cache_path: Filesystem,
+ source_id: SourceId,
+ config: &'cfg Config,
+
+ /// Store the server URL without the protocol prefix (sparse+)
+ url: Url,
+
+ /// HTTP multi-handle for asynchronous/parallel requests.
+ multi: Multi,
+
+ /// Has the client requested a cache update?
+ ///
+ /// Only if they have do we double-check the freshness of each locally-stored index file.
+ requested_update: bool,
+
+ /// State for currently pending index downloads.
+ downloads: Downloads<'cfg>,
+
+ /// Does the config say that we can use HTTP multiplexing?
+ multiplexing: bool,
+
+ /// What paths have we already fetched since the last index update?
+ ///
+ /// We do not need to double-check any of these index files since we have already done so.
+ fresh: HashSet<PathBuf>,
+
+ /// Have we started to download any index files?
+ fetch_started: bool,
+
+ /// Cached registry configuration.
+ registry_config: Option<RegistryConfig>,
+
+ /// Should we include the authorization header?
+ auth_required: bool,
+
+ /// Url to get a token for the registry.
+ login_url: Option<Url>,
+
+ /// Disables status messages.
+ quiet: bool,
+}
+
+/// Helper for downloading crates.
+struct Downloads<'cfg> {
+ /// When a download is started, it is added to this map. The key is a
+ /// "token" (see `Download::token`). It is removed once the download is
+ /// finished.
+ pending: HashMap<usize, (Download<'cfg>, EasyHandle)>,
+ /// Set of paths currently being downloaded.
+ /// This should stay in sync with `pending`.
+ pending_paths: HashSet<PathBuf>,
+ /// Downloads that have failed and are waiting to retry again later.
+ sleeping: SleepTracker<(Download<'cfg>, Easy)>,
+ /// The final result of each download.
+ results: HashMap<PathBuf, CargoResult<CompletedDownload>>,
+ /// The next ID to use for creating a token (see `Download::token`).
+ next: usize,
+ /// Progress bar.
+ progress: RefCell<Option<Progress<'cfg>>>,
+ /// Number of downloads that have successfully finished.
+ downloads_finished: usize,
+ /// Number of times the caller has requested blocking. This is used for
+ /// an estimate of progress.
+ blocking_calls: usize,
+}
+
+struct Download<'cfg> {
+ /// The token for this download, used as the key of the `Downloads::pending` map
+ /// and stored in `EasyHandle` as well.
+ token: usize,
+
+ /// The path of the package that we're downloading.
+ path: PathBuf,
+
+ /// Actual downloaded data, updated throughout the lifetime of this download.
+ data: RefCell<Vec<u8>>,
+
+ /// HTTP headers.
+ header_map: RefCell<Headers>,
+
+ /// Logic used to track retrying this download if it's a spurious failure.
+ retry: Retry<'cfg>,
+}
+
+#[derive(Default)]
+struct Headers {
+ last_modified: Option<String>,
+ etag: Option<String>,
+ www_authenticate: Vec<String>,
+ others: Vec<String>,
+}
+
+enum StatusCode {
+ Success,
+ NotModified,
+ NotFound,
+ Unauthorized,
+}
+
+struct CompletedDownload {
+ response_code: StatusCode,
+ data: Vec<u8>,
+ header_map: Headers,
+}
+
+impl<'cfg> HttpRegistry<'cfg> {
+ pub fn new(
+ source_id: SourceId,
+ config: &'cfg Config,
+ name: &str,
+ ) -> CargoResult<HttpRegistry<'cfg>> {
+ let url = source_id.url().as_str();
+ // Ensure the url ends with a slash so we can concatenate paths.
+ if !url.ends_with('/') {
+ anyhow::bail!("sparse registry url must end in a slash `/`: {url}")
+ }
+ assert!(source_id.is_sparse());
+ let url = url
+ .strip_prefix("sparse+")
+ .expect("sparse registry needs sparse+ prefix")
+ .into_url()
+ .expect("a url with the sparse+ stripped should still be valid");
+
+ Ok(HttpRegistry {
+ index_path: config.registry_index_path().join(name),
+ cache_path: config.registry_cache_path().join(name),
+ source_id,
+ config,
+ url,
+ multi: Multi::new(),
+ multiplexing: false,
+ downloads: Downloads {
+ next: 0,
+ pending: HashMap::new(),
+ pending_paths: HashSet::new(),
+ sleeping: SleepTracker::new(),
+ results: HashMap::new(),
+ progress: RefCell::new(Some(Progress::with_style(
+ "Fetch",
+ ProgressStyle::Indeterminate,
+ config,
+ ))),
+ downloads_finished: 0,
+ blocking_calls: 0,
+ },
+ fresh: HashSet::new(),
+ requested_update: false,
+ fetch_started: false,
+ registry_config: None,
+ auth_required: false,
+ login_url: None,
+ quiet: false,
+ })
+ }
+
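+ /// Splits one raw HTTP response header line into a `(name, value)` pair,
+ /// e.g. `b"etag: \"abc\""` becomes `Some(("etag", "\"abc\""))`; malformed
+ /// or multi-line input yields `None`.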
+ fn handle_http_header(buf: &[u8]) -> Option<(&str, &str)> {
+ if buf.is_empty() {
+ return None;
+ }
+ let buf = std::str::from_utf8(buf).ok()?.trim_end();
+ // Don't let the server sneak extra lines in anywhere.
+ if buf.contains('\n') {
+ return None;
+ }
+ let (tag, value) = buf.split_once(':')?;
+ let value = value.trim();
+ Some((tag, value))
+ }
+
+ fn start_fetch(&mut self) -> CargoResult<()> {
+ if self.fetch_started {
+ // We only need to run the setup code once.
+ return Ok(());
+ }
+ self.fetch_started = true;
+
+ // We've enabled the `http2` feature of `curl` in Cargo, so treat
+ // failures here as fatal, as they would indicate a build-time problem.
+ self.multiplexing = self.config.http_config()?.multiplexing.unwrap_or(true);
+
+ self.multi
+ .pipelining(false, self.multiplexing)
+ .with_context(|| "failed to enable multiplexing/pipelining in curl")?;
+
+ // let's not flood the server with connections
+ self.multi.set_max_host_connections(2)?;
+
+ if !self.quiet {
+ self.config
+ .shell()
+ .status("Updating", self.source_id.display_index())?;
+ }
+
+ Ok(())
+ }
+
+ fn handle_completed_downloads(&mut self) -> CargoResult<()> {
+ assert_eq!(
+ self.downloads.pending.len(),
+ self.downloads.pending_paths.len()
+ );
+
+ // Collect the results from the Multi handle.
+ let results = {
+ let mut results = Vec::new();
+ let pending = &mut self.downloads.pending;
+ self.multi.messages(|msg| {
+ let token = msg.token().expect("failed to read token");
+ let (_, handle) = &pending[&token];
+ if let Some(result) = msg.result_for(handle) {
+ results.push((token, result));
+ };
+ });
+ results
+ };
+ for (token, result) in results {
+ let (mut download, handle) = self.downloads.pending.remove(&token).unwrap();
+ let was_present = self.downloads.pending_paths.remove(&download.path);
+ assert!(
+ was_present,
+ "expected pending_paths to contain {:?}",
+ download.path
+ );
+ let mut handle = self.multi.remove(handle)?;
+ let data = download.data.take();
+ let url = self.full_url(&download.path);
+ let result = match download.retry.r#try(|| {
+ result.with_context(|| format!("failed to download from `{}`", url))?;
+ let code = handle.response_code()?;
+ // Keep this list of expected status codes in sync with the codes handled in `load`
+ let code = match code {
+ 200 => StatusCode::Success,
+ 304 => StatusCode::NotModified,
+ 401 => StatusCode::Unauthorized,
+ 404 | 410 | 451 => StatusCode::NotFound,
+ _ => {
+ return Err(HttpNotSuccessful::new_from_handle(
+ &mut handle,
+ &url,
+ data,
+ download.header_map.take().others,
+ )
+ .into());
+ }
+ };
+ Ok((data, code))
+ }) {
+ RetryResult::Success((data, code)) => Ok(CompletedDownload {
+ response_code: code,
+ data,
+ header_map: download.header_map.take(),
+ }),
+ RetryResult::Err(e) => Err(e),
+ RetryResult::Retry(sleep) => {
+ debug!("download retry {:?} for {sleep}ms", download.path);
+ self.downloads.sleeping.push(sleep, (download, handle));
+ continue;
+ }
+ };
+
+ self.downloads.results.insert(download.path, result);
+ self.downloads.downloads_finished += 1;
+ }
+
+ self.downloads.tick()?;
+
+ Ok(())
+ }
+
+ fn full_url(&self, path: &Path) -> String {
+ // self.url always ends with a slash.
+ format!("{}{}", self.url, path.display())
+ }
+
+ fn is_fresh(&self, path: &Path) -> bool {
+ if !self.requested_update {
+ trace!(
+ "using local {} as user did not request update",
+ path.display()
+ );
+ true
+ } else if self.config.cli_unstable().no_index_update {
+ trace!("using local {} in no_index_update mode", path.display());
+ true
+ } else if self.config.offline() {
+ trace!("using local {} in offline mode", path.display());
+ true
+ } else if self.fresh.contains(path) {
+ trace!("using local {} as it was already fetched", path.display());
+ true
+ } else {
+ debug!("checking freshness of {}", path.display());
+ false
+ }
+ }
+
+ /// Get the cached registry configuration, if it exists.
+ fn config_cached(&mut self) -> CargoResult<Option<&RegistryConfig>> {
+ if self.registry_config.is_some() {
+ return Ok(self.registry_config.as_ref());
+ }
+ let config_json_path = self
+ .assert_index_locked(&self.index_path)
+ .join("config.json");
+ match fs::read(&config_json_path) {
+ Ok(raw_data) => match serde_json::from_slice(&raw_data) {
+ Ok(json) => {
+ self.registry_config = Some(json);
+ }
+ Err(e) => log::debug!("failed to decode cached config.json: {}", e),
+ },
+ Err(e) => {
+ if e.kind() != ErrorKind::NotFound {
+ log::debug!("failed to read config.json cache: {}", e)
+ }
+ }
+ }
+ Ok(self.registry_config.as_ref())
+ }
+
+ /// Get the registry configuration.
+ fn config(&mut self) -> Poll<CargoResult<&RegistryConfig>> {
+ debug!("loading config");
+ let index_path = self.assert_index_locked(&self.index_path);
+ let config_json_path = index_path.join("config.json");
+ if self.is_fresh(Path::new("config.json")) && self.config_cached()?.is_some() {
+ return Poll::Ready(Ok(self.registry_config.as_ref().unwrap()));
+ }
+
+ match ready!(self.load(Path::new(""), Path::new("config.json"), None)?) {
+ LoadResponse::Data {
+ raw_data,
+ index_version: _,
+ } => {
+ trace!("config loaded");
+ self.registry_config = Some(serde_json::from_slice(&raw_data)?);
+ if paths::create_dir_all(&config_json_path.parent().unwrap()).is_ok() {
+ if let Err(e) = fs::write(&config_json_path, &raw_data) {
+ log::debug!("failed to write config.json cache: {}", e);
+ }
+ }
+ Poll::Ready(Ok(self.registry_config.as_ref().unwrap()))
+ }
+ LoadResponse::NotFound => {
+ Poll::Ready(Err(anyhow::anyhow!("config.json not found in registry")))
+ }
+ LoadResponse::CacheValid => Poll::Ready(Err(crate::util::internal(
+ "config.json is never stored in the index cache",
+ ))),
+ }
+ }
+
+ fn add_sleepers(&mut self) -> CargoResult<()> {
+ for (dl, handle) in self.downloads.sleeping.to_retry() {
+ let mut handle = self.multi.add(handle)?;
+ handle.set_token(dl.token)?;
+ let is_new = self.downloads.pending_paths.insert(dl.path.to_path_buf());
+ assert!(is_new, "path queued for download more than once");
+ let previous = self.downloads.pending.insert(dl.token, (dl, handle));
+ assert!(previous.is_none(), "dl token queued more than once");
+ }
+ Ok(())
+ }
+}
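A small, hedged sketch of the URL handling in `HttpRegistry::new` and `full_url` above (the registry URL and index path are hypothetical):

// Stripping the `sparse+` scheme prefix and joining index paths onto the base URL.
fn main() {
    let configured = "sparse+https://index.example.com/";
    assert!(configured.ends_with('/'));
    let base = configured.strip_prefix("sparse+").unwrap();
    // `full_url` simply concatenates, relying on the trailing slash.
    let full = format!("{}{}", base, "se/rd/serde");
    assert_eq!(full, "https://index.example.com/se/rd/serde");
}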
+
+impl<'cfg> RegistryData for HttpRegistry<'cfg> {
+ fn prepare(&self) -> CargoResult<()> {
+ Ok(())
+ }
+
+ fn index_path(&self) -> &Filesystem {
+ &self.index_path
+ }
+
+ fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path {
+ self.config.assert_package_cache_locked(path)
+ }
+
+ fn is_updated(&self) -> bool {
+ self.requested_update
+ }
+
+ fn load(
+ &mut self,
+ _root: &Path,
+ path: &Path,
+ index_version: Option<&str>,
+ ) -> Poll<CargoResult<LoadResponse>> {
+ trace!("load: {}", path.display());
+ if let Some(_token) = self.downloads.pending_paths.get(path) {
+ debug!("dependency is still pending: {}", path.display());
+ return Poll::Pending;
+ }
+
+ if let Some(index_version) = index_version {
+ trace!(
+ "local cache of {} is available at version `{}`",
+ path.display(),
+ index_version
+ );
+ if self.is_fresh(path) {
+ return Poll::Ready(Ok(LoadResponse::CacheValid));
+ }
+ } else if self.fresh.contains(path) {
+ // We have no cached copy of this file, and we already downloaded it.
+ debug!(
+ "cache did not contain previously downloaded file {}",
+ path.display()
+ );
+ return Poll::Ready(Ok(LoadResponse::NotFound));
+ }
+
+ if self.config.offline() || self.config.cli_unstable().no_index_update {
+ // Return NotFound in offline mode when the file doesn't exist in the cache.
+ // If this results in resolution failure, the resolver will suggest
+ // removing the --offline flag.
+ return Poll::Ready(Ok(LoadResponse::NotFound));
+ }
+
+ if let Some(result) = self.downloads.results.remove(path) {
+ let result =
+ result.with_context(|| format!("download of {} failed", path.display()))?;
+
+ let is_new = self.fresh.insert(path.to_path_buf());
+ assert!(
+ is_new,
+ "downloaded the index file `{}` twice",
+ path.display()
+ );
+
+ // The statuses handled here need to be kept in sync with the codes handled
+ // in `handle_completed_downloads`.
+ match result.response_code {
+ StatusCode::Success => {
+ let response_index_version = if let Some(etag) = result.header_map.etag {
+ format!("{}: {}", ETAG, etag)
+ } else if let Some(lm) = result.header_map.last_modified {
+ format!("{}: {}", LAST_MODIFIED, lm)
+ } else {
+ UNKNOWN.to_string()
+ };
+ trace!("index file version: {}", response_index_version);
+ return Poll::Ready(Ok(LoadResponse::Data {
+ raw_data: result.data,
+ index_version: Some(response_index_version),
+ }));
+ }
+ StatusCode::NotModified => {
+ // Not Modified: the data in the cache is still the latest.
+ if index_version.is_none() {
+ return Poll::Ready(Err(anyhow::anyhow!(
+ "server said not modified (HTTP 304) when no local cache exists"
+ )));
+ }
+ return Poll::Ready(Ok(LoadResponse::CacheValid));
+ }
+ StatusCode::NotFound => {
+ // The crate was not found or deleted from the registry.
+ return Poll::Ready(Ok(LoadResponse::NotFound));
+ }
+ StatusCode::Unauthorized
+ if !self.auth_required
+ && path == Path::new("config.json")
+ && self.config.cli_unstable().registry_auth =>
+ {
+ debug!("re-attempting request for config.json with authorization included.");
+ self.fresh.remove(path);
+ self.auth_required = true;
+
+ // Look for a `www-authenticate` header with the `Cargo` scheme.
+ for header in &result.header_map.www_authenticate {
+ for challenge in http_auth::ChallengeParser::new(header) {
+ match challenge {
+ Ok(challenge) if challenge.scheme.eq_ignore_ascii_case("Cargo") => {
+ // Look for the `login_url` parameter.
+ for (param, value) in challenge.params {
+ if param.eq_ignore_ascii_case("login_url") {
+ self.login_url = Some(value.to_unescaped().into_url()?);
+ }
+ }
+ }
+ Ok(challenge) => {
+ debug!("ignoring non-Cargo challenge: {}", challenge.scheme)
+ }
+ Err(e) => debug!("failed to parse challenge: {}", e),
+ }
+ }
+ }
+ }
+ StatusCode::Unauthorized => {
+ let err = Err(HttpNotSuccessful {
+ code: 401,
+ body: result.data,
+ url: self.full_url(path),
+ ip: None,
+ headers: result.header_map.others,
+ }
+ .into());
+ if self.auth_required {
+ return Poll::Ready(err.context(auth::AuthorizationError {
+ sid: self.source_id.clone(),
+ default_registry: self.config.default_registry()?,
+ login_url: self.login_url.clone(),
+ reason: auth::AuthorizationErrorReason::TokenRejected,
+ }));
+ } else {
+ return Poll::Ready(err);
+ }
+ }
+ }
+ }
+
+ if path != Path::new("config.json") {
+ self.auth_required = ready!(self.config()?).auth_required;
+ } else if !self.auth_required {
+ // Check if there's a cached config that says auth is required.
+ // This allows avoiding the initial unauthenticated request to probe.
+ if let Some(config) = self.config_cached()? {
+ self.auth_required = config.auth_required;
+ }
+ }
+
+ if !self.config.cli_unstable().registry_auth {
+ self.auth_required = false;
+ }
+
+ // Looks like we're going to have to do a network request.
+ self.start_fetch()?;
+
+ let mut handle = ops::http_handle(self.config)?;
+ let full_url = self.full_url(path);
+ debug!("fetch {}", full_url);
+ handle.get(true)?;
+ handle.url(&full_url)?;
+ handle.follow_location(true)?;
+
+ // Enable HTTP/2 if possible.
+ if self.multiplexing {
+ crate::try_old_curl!(handle.http_version(HttpVersion::V2), "HTTP2");
+ } else {
+ handle.http_version(HttpVersion::V11)?;
+ }
+
+ // This is an option to `libcurl` which indicates that if there's a
+ // bunch of parallel requests to the same host they all wait until the
+ // pipelining status of the host is known. This means that we won't
+ // initiate dozens of connections to crates.io, but rather only one.
+ // Once the main one is opened we learn whether pipelining and multiplexing
+ // are possible with static.crates.io. All in all this reduces the number of
+ // connections made to a more manageable state.
+ crate::try_old_curl!(handle.pipewait(true), "pipewait");
+
+ let mut headers = List::new();
+ // Include a header to identify the protocol. This allows the server to
+ // know that Cargo is attempting to use the sparse protocol.
+ headers.append("cargo-protocol: version=1")?;
+ headers.append("accept: text/plain")?;
+
+ // If we have a cached copy of the file, include IF_NONE_MATCH or IF_MODIFIED_SINCE header.
+ if let Some(index_version) = index_version {
+ if let Some((key, value)) = index_version.split_once(':') {
+ match key {
+ ETAG => headers.append(&format!("{}: {}", IF_NONE_MATCH, value.trim()))?,
+ LAST_MODIFIED => {
+ headers.append(&format!("{}: {}", IF_MODIFIED_SINCE, value.trim()))?
+ }
+ _ => debug!("unexpected index version: {}", index_version),
+ }
+ }
+ }
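+ // (For illustration, assuming the `ETAG` and `IF_NONE_MATCH` constants hold
+ // the literal header names, a stored index version of `etag: "abc123"`
+ // would be replayed here as an `if-none-match: "abc123"` request header.)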
+ if self.auth_required {
+ let authorization =
+ auth::auth_token(self.config, &self.source_id, self.login_url.as_ref(), None)?;
+ headers.append(&format!("Authorization: {}", authorization))?;
+ trace!("including authorization for {}", full_url);
+ }
+ handle.http_headers(headers)?;
+
+ // We're going to have a bunch of downloads all happening "at the same time".
+ // So, we need some way to track what headers/data/responses are for which request.
+ // We do that through this token. Each request (and associated response) gets one.
+ let token = self.downloads.next;
+ self.downloads.next += 1;
+ debug!("downloading {} as {}", path.display(), token);
+ let is_new = self.downloads.pending_paths.insert(path.to_path_buf());
+ assert!(is_new, "path queued for download more than once");
+
+ // Each write should go to self.downloads.pending[&token].data.
+ // Since the write function must be 'static, we access downloads through a thread-local.
+ // That thread-local is set up in `block_until_ready` when it calls self.multi.perform,
+ // which is what ultimately calls this method.
+ handle.write_function(move |buf| {
+ trace!("{} - {} bytes of data", token, buf.len());
+ tls::with(|downloads| {
+ if let Some(downloads) = downloads {
+ downloads.pending[&token]
+ .0
+ .data
+ .borrow_mut()
+ .extend_from_slice(buf);
+ }
+ });
+ Ok(buf.len())
+ })?;
+
+ // And ditto for the header function.
+ handle.header_function(move |buf| {
+ if let Some((tag, value)) = Self::handle_http_header(buf) {
+ tls::with(|downloads| {
+ if let Some(downloads) = downloads {
+ let mut header_map = downloads.pending[&token].0.header_map.borrow_mut();
+ match tag.to_ascii_lowercase().as_str() {
+ LAST_MODIFIED => header_map.last_modified = Some(value.to_string()),
+ ETAG => header_map.etag = Some(value.to_string()),
+ WWW_AUTHENTICATE => header_map.www_authenticate.push(value.to_string()),
+ _ => {
+ if DEBUG_HEADERS.iter().any(|prefix| tag.starts_with(prefix)) {
+ header_map.others.push(format!("{tag}: {value}"));
+ }
+ }
+ }
+ }
+ });
+ }
+
+ true
+ })?;
+
+ let dl = Download {
+ token,
+ path: path.to_path_buf(),
+ data: RefCell::new(Vec::new()),
+ header_map: Default::default(),
+ retry: Retry::new(self.config)?,
+ };
+
+ // Finally add the request we've lined up to the pool of requests that cURL manages.
+ let mut handle = self.multi.add(handle)?;
+ handle.set_token(token)?;
+ self.downloads.pending.insert(dl.token, (dl, handle));
+
+ Poll::Pending
+ }
+
+ fn config(&mut self) -> Poll<CargoResult<Option<RegistryConfig>>> {
+ let mut cfg = ready!(self.config()?).clone();
+ if !self.config.cli_unstable().registry_auth {
+ cfg.auth_required = false;
+ }
+ Poll::Ready(Ok(Some(cfg)))
+ }
+
+ fn invalidate_cache(&mut self) {
+ // Actually updating the index is more or less a no-op for this implementation.
+ // All it does is ensure that a subsequent load will double-check files with the
+ // server rather than rely on a locally cached copy of the index files.
+ debug!("invalidated index cache");
+ self.fresh.clear();
+ self.requested_update = true;
+ }
+
+ fn set_quiet(&mut self, quiet: bool) {
+ self.quiet = quiet;
+ self.downloads.progress.replace(None);
+ }
+
+ fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult<MaybeLock> {
+ let registry_config = loop {
+ match self.config()? {
+ Poll::Pending => self.block_until_ready()?,
+ Poll::Ready(cfg) => break cfg.to_owned(),
+ }
+ };
+ download::download(
+ &self.cache_path,
+ &self.config,
+ pkg,
+ checksum,
+ registry_config,
+ )
+ }
+
+ fn finish_download(
+ &mut self,
+ pkg: PackageId,
+ checksum: &str,
+ data: &[u8],
+ ) -> CargoResult<File> {
+ download::finish_download(&self.cache_path, &self.config, pkg, checksum, data)
+ }
+
+ fn is_crate_downloaded(&self, pkg: PackageId) -> bool {
+ download::is_crate_downloaded(&self.cache_path, &self.config, pkg)
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ trace!(
+ "block_until_ready: {} transfers pending",
+ self.downloads.pending.len()
+ );
+ self.downloads.blocking_calls += 1;
+
+ loop {
+ self.handle_completed_downloads()?;
+ self.add_sleepers()?;
+
+ let remaining_in_multi = tls::set(&self.downloads, || {
+ self.multi
+ .perform()
+ .with_context(|| "failed to perform http requests")
+ })?;
+ trace!("{} transfers remaining", remaining_in_multi);
+
+ if remaining_in_multi + self.downloads.sleeping.len() as u32 == 0 {
+ return Ok(());
+ }
+
+ if self.downloads.pending.is_empty() {
+ let delay = self.downloads.sleeping.time_to_next().unwrap();
+ debug!("sleeping main thread for {delay:?}");
+ std::thread::sleep(delay);
+ } else {
+ // We have no more replies to provide the caller with,
+ // so we need to wait until cURL has something new for us.
+ let timeout = self
+ .multi
+ .get_timeout()?
+ .unwrap_or_else(|| Duration::new(1, 0));
+ self.multi
+ .wait(&mut [], timeout)
+ .with_context(|| "failed to wait on curl `Multi`")?;
+ }
+ }
+ }
+}
+
+impl<'cfg> Downloads<'cfg> {
+ fn tick(&self) -> CargoResult<()> {
+ let mut progress = self.progress.borrow_mut();
+ let Some(progress) = progress.as_mut() else { return Ok(()); };
+
+ // Since the sparse protocol discovers dependencies as it goes,
+ // it's not possible to get an accurate progress indication.
+ //
+ // As an approximation, we assume that the depth of the dependency graph
+ // is fixed, and base the progress on how many times the caller has asked
+ // for blocking. If there are actually additional dependencies, the progress
+ // bar will get stuck. If there are fewer dependencies, it will disappear
+ // early. It will never go backwards.
+ //
+ // The status text also contains the number of completed & pending requests, which
+ // gives a better indication of forward progress.
+ let approximate_tree_depth = 10;
+
+ progress.tick(
+ self.blocking_calls.min(approximate_tree_depth),
+ approximate_tree_depth + 1,
+ &format!(
+ " {} complete; {} pending",
+ self.downloads_finished,
+ self.pending.len() + self.sleeping.len()
+ ),
+ )
+ }
+}
+
+mod tls {
+ use super::Downloads;
+ use std::cell::Cell;
+
+ thread_local!(static PTR: Cell<usize> = Cell::new(0));
+
+ pub(super) fn with<R>(f: impl FnOnce(Option<&Downloads<'_>>) -> R) -> R {
+ let ptr = PTR.with(|p| p.get());
+ if ptr == 0 {
+ f(None)
+ } else {
+ // Safety: `ptr` is only set by `set` below, which ensures the type is correct.
+ let ptr = unsafe { &*(ptr as *const Downloads<'_>) };
+ f(Some(ptr))
+ }
+ }
+
+ pub(super) fn set<R>(dl: &Downloads<'_>, f: impl FnOnce() -> R) -> R {
+ struct Reset<'a, T: Copy>(&'a Cell<T>, T);
+
+ impl<'a, T: Copy> Drop for Reset<'a, T> {
+ fn drop(&mut self) {
+ self.0.set(self.1);
+ }
+ }
+
+ PTR.with(|p| {
+ let _reset = Reset(p, p.get());
+ p.set(dl as *const Downloads<'_> as usize);
+ f()
+ })
+ }
+}
diff --git a/src/tools/cargo/src/cargo/sources/registry/index.rs b/src/tools/cargo/src/cargo/sources/registry/index.rs
new file mode 100644
index 000000000..a21511434
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/registry/index.rs
@@ -0,0 +1,864 @@
+//! Management of the index of a registry source
+//!
+//! This module contains management of the index and various operations, such as
+//! actually parsing the index, looking for crates, etc. This is intended to be
+//! abstract over remote indices (downloaded via git) and local registry indices
+//! (which are all just present on the filesystem).
+//!
+//! ## Index Performance
+//!
+//! One important aspect of the index is that we want to optimize the "happy
+//! path" as much as possible. Whenever you type `cargo build` Cargo will
+//! *always* reparse the registry and learn about dependency information. This
+//! is done because Cargo needs to learn about the upstream crates.io crates
+//! that you're using and ensure that the preexisting `Cargo.lock` still matches
+//! the current state of the world.
+//!
+//! Consequently, Cargo "null builds" (the overhead that Cargo itself adds to
+//! each build) need to be fast when accessing the index. The primary performance
+//! optimization here is to avoid parsing JSON blobs from the registry if we
+//! don't need them. Most secondary optimizations are centered around removing
+//! allocations and such, but avoiding parsing JSON is the #1 optimization.
+//!
+//! When we get queries from the resolver we're given a `Dependency`. This
+//! dependency in turn has a version requirement, and with lock files that
+//! already exist these version requirements are exact version requirements
+//! `=a.b.c`. This means that we in theory only need to parse one line of JSON
+//! per query in the registry, the one that matches version `a.b.c`.
+//!
+//! The crates.io index, however, is not amenable to this form of query. Instead
+//! the crates.io index simply is a file where each line is a JSON blob. To
+//! learn about the versions in each JSON blob we would need to parse the JSON,
+//! defeating the purpose of trying to parse as little as possible.
+//!
+//! > Note that as a small aside even *loading* the JSON from the registry is
+//! > actually pretty slow. For crates.io and remote registries we don't
+//! > actually check out the git index on disk because that takes quite some
+//! > time and is quite large. Instead we use `libgit2` to read the JSON from
+//! > the raw git objects. This in turn can be slow (aka show up high in
+//! > profiles) because libgit2 has to do deflate decompression and such.
+//!
+//! To solve all these issues a strategy is employed here where Cargo basically
+//! creates an index into the index. The first time a package is queried about
+//! (the first time ever on a given machine) Cargo will load the contents
+//! (slowly via libgit2) from the registry. It will then (slowly) parse every
+//! single line to learn about its versions. Afterwards, however, Cargo will
+//! emit a new file (a cache) which is amenable for speedily parsing in future
+//! invocations.
+//!
+//! This cache file is currently organized by basically having the semver
+//! version extracted from each JSON blob. That way Cargo can quickly and easily
+//! parse all versions contained and which JSON blob they're associated with.
+//! The JSON blob then doesn't actually need to get parsed unless that version
+//! is actually needed.
+//!
+//! Altogether the initial measurements of this shows a massive improvement for
+//! Cargo null build performance. It's expected that the improvements earned
+//! here will continue to grow over time in the sense that the previous
+//! implementation (parse all lines each time) actually continues to slow down
+//! over time as new versions of a crate are published. In any case when first
+//! implemented a null build of Cargo itself would parse 3700 JSON blobs from
+//! the registry and load 150 blobs from git. Afterwards it parses 150 JSON
+//! blobs and loads 0 files from git. Removing 200ms or more from Cargo's startup
+//! time is certainly nothing to sneeze at!
+//!
+//! Note that this is just a high-level overview, there's of course lots of
+//! details like invalidating caches and whatnot which are handled below, but
+//! hopefully those are more obvious inline in the code itself.
+
+use crate::core::{PackageId, SourceId, Summary};
+use crate::sources::registry::{LoadResponse, RegistryData, RegistryPackage, INDEX_V_MAX};
+use crate::util::interning::InternedString;
+use crate::util::{internal, CargoResult, Config, Filesystem, OptVersionReq, ToSemver};
+use anyhow::bail;
+use cargo_util::{paths, registry::make_dep_path};
+use log::{debug, info};
+use semver::Version;
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::io::ErrorKind;
+use std::path::Path;
+use std::str;
+use std::task::{ready, Poll};
+
+/// Crates.io treats hyphens and underscores as interchangeable, but the index and old Cargo do not.
+/// Therefore, the index must store the uncanonicalized version of the name so old Cargos can find it.
+/// This iterator tries all possible combinations of switching hyphens and underscores to find the
+/// uncanonicalized one. As all stored inputs have the correct spelling, we start with the spelling
+/// as-provided.
+pub struct UncanonicalizedIter<'s> {
+ input: &'s str,
+ num_hyphen_underscore: u32,
+ hyphen_combination_num: u16,
+}
+
+impl<'s> UncanonicalizedIter<'s> {
+ pub fn new(input: &'s str) -> Self {
+ let num_hyphen_underscore = input.chars().filter(|&c| c == '_' || c == '-').count() as u32;
+ UncanonicalizedIter {
+ input,
+ num_hyphen_underscore,
+ hyphen_combination_num: 0,
+ }
+ }
+}
+
+impl<'s> Iterator for UncanonicalizedIter<'s> {
+ type Item = String;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.hyphen_combination_num > 0
+ && self.hyphen_combination_num.trailing_zeros() >= self.num_hyphen_underscore
+ {
+ return None;
+ }
+
+ let ret = Some(
+ self.input
+ .chars()
+ .scan(0u16, |s, c| {
+ // the check against 15 here is to prevent
+ // shift overflow on inputs with more than 15 hyphens
+ if (c == '_' || c == '-') && *s <= 15 {
+ let switch = (self.hyphen_combination_num & (1u16 << *s)) > 0;
+ let out = if (c == '_') ^ switch { '_' } else { '-' };
+ *s += 1;
+ Some(out)
+ } else {
+ Some(c)
+ }
+ })
+ .collect(),
+ );
+ self.hyphen_combination_num += 1;
+ ret
+ }
+}
+
+#[test]
+fn no_hyphen() {
+ assert_eq!(
+ UncanonicalizedIter::new("test").collect::<Vec<_>>(),
+ vec!["test".to_string()]
+ )
+}
+
+#[test]
+fn two_hyphen() {
+ assert_eq!(
+ UncanonicalizedIter::new("te-_st").collect::<Vec<_>>(),
+ vec![
+ "te-_st".to_string(),
+ "te__st".to_string(),
+ "te--st".to_string(),
+ "te_-st".to_string()
+ ]
+ )
+}
+
+#[test]
+fn overflow_hyphen() {
+ assert_eq!(
+ UncanonicalizedIter::new("te-_-_-_-_-_-_-_-_-st")
+ .take(100)
+ .count(),
+ 100
+ )
+}
+
+/// Manager for handling the on-disk index.
+///
+/// Note that local and remote registries store the index differently. Local
+/// is a simple on-disk tree of files of the raw index. Remote registries are
+/// stored as a raw git repository. The different means of access are handled
+/// via the [`RegistryData`] trait abstraction.
+///
+/// This transparently handles caching of the index in a more efficient format.
+pub struct RegistryIndex<'cfg> {
+ source_id: SourceId,
+ /// Root directory of the index for the registry.
+ path: Filesystem,
+ /// Cache of summary data.
+ ///
+ /// This is keyed off the package name. The [`Summaries`] value handles
+ /// loading the summary data. It keeps an optimized on-disk representation
+ /// of the JSON files, which is created in an as-needed fashion. If it
+ /// hasn't been cached already, it uses [`RegistryData::load`] to access
+ /// to JSON files from the index, and the creates the optimized on-disk
+ /// summary cache.
+ summaries_cache: HashMap<InternedString, Summaries>,
+ /// [`Config`] reference for convenience.
+ config: &'cfg Config,
+}
+
+/// An internal cache of summaries for a particular package.
+///
+/// A list of summaries is loaded from disk via one of two methods:
+///
+/// 1. Primarily Cargo will parse the corresponding file for a crate in the
+/// upstream crates.io registry. That's just a JSON blob per line which we
+/// can parse, extract the version, and then store here.
+///
+/// 2. Alternatively, if Cargo has previously run, we'll have a cached index of
+/// dependencies for the upstream index. This is a file that Cargo maintains
+/// lazily on the local filesystem and is much faster to parse since it
+/// doesn't involve parsing all of the JSON.
+///
+/// For the outward-facing interface it doesn't matter too much where the data
+/// is loaded from, but when reading the implementation it's important to note
+/// that we try to parse as little as possible!
+#[derive(Default)]
+struct Summaries {
+ /// A raw vector of uninterpreted bytes. This is what `Unparsed` start/end
+ /// fields are indexes into. If a `Summaries` is loaded from the crates.io
+ /// index then this field will be empty since nothing is `Unparsed`.
+ raw_data: Vec<u8>,
+
+ /// All known versions of a crate, mapped from their `Version` to the
+ /// possibly parsed or unparsed form of the full summary.
+ versions: HashMap<Version, MaybeIndexSummary>,
+}
+
+/// A lazily parsed `IndexSummary`.
+enum MaybeIndexSummary {
+ /// A summary which has not been parsed. The `start` and `end` are offsets
+ /// into `Summaries::raw_data`, of which this is an entry.
+ Unparsed { start: usize, end: usize },
+
+ /// An actually parsed summary.
+ Parsed(IndexSummary),
+}
+
+/// A parsed representation of a summary from the index.
+///
+/// In addition to a full `Summary` we have information on whether it is `yanked`.
+pub struct IndexSummary {
+ pub summary: Summary,
+ pub yanked: bool,
+ /// Schema version, see [`RegistryPackage`].
+ v: u32,
+}
+
+/// A representation of the cache on disk that Cargo maintains of summaries.
+/// Cargo will initially parse all summaries in the registry and will then
+/// serialize that into this form and place it in a new location on disk,
+/// ensuring that access in the future is much speedier.
+#[derive(Default)]
+struct SummariesCache<'a> {
+ versions: Vec<(Version, &'a [u8])>,
+ index_version: &'a str,
+}
+
+impl<'cfg> RegistryIndex<'cfg> {
+ pub fn new(
+ source_id: SourceId,
+ path: &Filesystem,
+ config: &'cfg Config,
+ ) -> RegistryIndex<'cfg> {
+ RegistryIndex {
+ source_id,
+ path: path.clone(),
+ summaries_cache: HashMap::new(),
+ config,
+ }
+ }
+
+ /// Returns the hash listed for a specified `PackageId`.
+ pub fn hash(&mut self, pkg: PackageId, load: &mut dyn RegistryData) -> Poll<CargoResult<&str>> {
+ let req = OptVersionReq::exact(pkg.version());
+ let summary = self.summaries(&pkg.name(), &req, load)?;
+ let summary = ready!(summary).next();
+ Poll::Ready(Ok(summary
+ .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))?
+ .summary
+ .checksum()
+ .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))?))
+ }
+
+ /// Load a list of summaries for the `name` package in this registry which
+ /// match `req`.
+ ///
+ /// This function will semantically parse the on-disk index, match all
+ /// versions, and then return an iterator over all summaries which matched.
+ /// Internally there are quite a few layers of caching to amortize this cost,
+ /// though, since this method is called quite a lot on null builds in Cargo.
+ pub fn summaries<'a, 'b>(
+ &'a mut self,
+ name: &str,
+ req: &'b OptVersionReq,
+ load: &mut dyn RegistryData,
+ ) -> Poll<CargoResult<impl Iterator<Item = &'a IndexSummary> + 'b>>
+ where
+ 'a: 'b,
+ {
+ let source_id = self.source_id;
+ let config = self.config;
+
+ // First up actually parse what summaries we have available. If Cargo
+ // has run previously this will parse a Cargo-specific cache file rather
+ // than the registry itself. In effect this is intended to be a quite
+ // cheap operation.
+ let name = InternedString::new(name);
+ let summaries = ready!(self.load_summaries(name, load)?);
+
+ // Iterate over our summaries, extract all relevant ones which match our
+ // version requirement, and then parse all corresponding rows in the
+ // registry. As a reminder this `summaries` method is called for each
+ // entry in a lock file on every build, so we want to absolutely
+ // minimize the amount of work being done here and parse as little as
+ // necessary.
+ let raw_data = &summaries.raw_data;
+ Poll::Ready(Ok(summaries
+ .versions
+ .iter_mut()
+ .filter_map(move |(k, v)| if req.matches(k) { Some(v) } else { None })
+ .filter_map(
+ move |maybe| match maybe.parse(config, raw_data, source_id) {
+ Ok(summary) => Some(summary),
+ Err(e) => {
+ info!("failed to parse `{}` registry package: {}", name, e);
+ None
+ }
+ },
+ )
+ .filter(move |is| {
+ if is.v > INDEX_V_MAX {
+ debug!(
+ "unsupported schema version {} ({} {})",
+ is.v,
+ is.summary.name(),
+ is.summary.version()
+ );
+ false
+ } else {
+ true
+ }
+ })))
+ }
+
+ fn load_summaries(
+ &mut self,
+ name: InternedString,
+ load: &mut dyn RegistryData,
+ ) -> Poll<CargoResult<&mut Summaries>> {
+ // If we've previously loaded what versions are present for `name`, just
+ // return that since our cache should still be valid.
+ if self.summaries_cache.contains_key(&name) {
+ return Poll::Ready(Ok(self.summaries_cache.get_mut(&name).unwrap()));
+ }
+
+ // Prepare the `RegistryData` which will lazily initialize internal data
+ // structures.
+ load.prepare()?;
+
+ let root = load.assert_index_locked(&self.path);
+ let cache_root = root.join(".cache");
+
+ // See module comment in `registry/mod.rs` for why this is structured
+ // the way it is.
+ let fs_name = name
+ .chars()
+ .flat_map(|c| c.to_lowercase())
+ .collect::<String>();
+
+ let path = make_dep_path(&fs_name, false);
+ let summaries = ready!(Summaries::parse(
+ root,
+ &cache_root,
+ path.as_ref(),
+ self.source_id,
+ load,
+ self.config,
+ ))?
+ .unwrap_or_default();
+ self.summaries_cache.insert(name, summaries);
+ Poll::Ready(Ok(self.summaries_cache.get_mut(&name).unwrap()))
+ }
+
+ /// Clears the in-memory summaries cache.
+ pub fn clear_summaries_cache(&mut self) {
+ self.summaries_cache.clear();
+ }
+
+ pub fn query_inner(
+ &mut self,
+ name: &str,
+ req: &OptVersionReq,
+ load: &mut dyn RegistryData,
+ yanked_whitelist: &HashSet<PackageId>,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ if self.config.offline() {
+ // This should only return `Poll::Ready(Ok(()))` if there is at least 1 match.
+ //
+ // If there are 0 matches it should fall through and try again with online.
+ // This is necessary for dependencies that are not used (such as
+ // target-cfg or optional), but are not downloaded. Normally the
+ // build should succeed if they are not downloaded and not used,
+ // but they still need to resolve. If they are actually needed
+ // then cargo will fail to download and an error message
+ // indicating that the required dependency is unavailable while
+ // offline will be displayed.
+ if ready!(self.query_inner_with_online(name, req, load, yanked_whitelist, f, false)?)
+ > 0
+ {
+ return Poll::Ready(Ok(()));
+ }
+ }
+ self.query_inner_with_online(name, req, load, yanked_whitelist, f, true)
+ .map_ok(|_| ())
+ }
+
+ fn query_inner_with_online(
+ &mut self,
+ name: &str,
+ req: &OptVersionReq,
+ load: &mut dyn RegistryData,
+ yanked_whitelist: &HashSet<PackageId>,
+ f: &mut dyn FnMut(Summary),
+ online: bool,
+ ) -> Poll<CargoResult<usize>> {
+ let source_id = self.source_id;
+
+ let summaries = ready!(self.summaries(name, req, load))?;
+
+ let summaries = summaries
+ // First filter summaries for `--offline`. If we're online then
+ // everything is a candidate, otherwise if we're offline we're only
+ // going to consider candidates which are actually present on disk.
+ //
+ // Note: This particular logic can cause problems with
+ // optional dependencies when offline. If at least 1 version
+ // of an optional dependency is downloaded, but that version
+ // does not satisfy the requirements, then resolution will
+ // fail. Unfortunately, whether or not something is optional
+ // is not known here.
+ .filter(|s| (online || load.is_crate_downloaded(s.summary.package_id())))
+ // Next filter out all yanked packages. Some yanked packages may
+ // leak through if they're in a whitelist (aka if they were
+ // previously in `Cargo.lock`
+ .filter(|s| !s.yanked || yanked_whitelist.contains(&s.summary.package_id()))
+ .map(|s| s.summary.clone());
+
+ // Handle `cargo update --precise` here. If specified, our own source
+ // will have a precise version listed of the form
+ // `<pkg>=<p_req>-><f_req>` (e.g. `foo=1.0.0->1.0.2`) where `<pkg>` is the
+ // name of a crate on this source, `<p_req>` is the version installed, and
+ // `<f_req>` is the version requested (argument to `--precise`).
+ let precise = match source_id.precise() {
+ Some(p) if p.starts_with(name) && p[name.len()..].starts_with('=') => {
+ let mut vers = p[name.len() + 1..].splitn(2, "->");
+ let current_vers = vers.next().unwrap().to_semver().unwrap();
+ let requested_vers = vers.next().unwrap().to_semver().unwrap();
+ Some((current_vers, requested_vers))
+ }
+ _ => None,
+ };
+ let summaries = summaries.filter(|s| match &precise {
+ Some((current, requested)) => {
+ if req.matches(current) {
+ // Unfortunately crates.io allows versions to differ only
+ // by build metadata. This shouldn't be allowed, but since
+ // it is, this will honor it if requested. However, if not
+ // specified, then ignore it.
+ let s_vers = s.version();
+ match (s_vers.build.is_empty(), requested.build.is_empty()) {
+ (true, true) => s_vers == requested,
+ (true, false) => false,
+ (false, true) => {
+ // Strip out the metadata.
+ s_vers.major == requested.major
+ && s_vers.minor == requested.minor
+ && s_vers.patch == requested.patch
+ && s_vers.pre == requested.pre
+ }
+ (false, false) => s_vers == requested,
+ }
+ } else {
+ true
+ }
+ }
+ None => true,
+ });
+
+ let mut count = 0;
+ for summary in summaries {
+ f(summary);
+ count += 1;
+ }
+ Poll::Ready(Ok(count))
+ }
+
+ pub fn is_yanked(
+ &mut self,
+ pkg: PackageId,
+ load: &mut dyn RegistryData,
+ ) -> Poll<CargoResult<bool>> {
+ let req = OptVersionReq::exact(pkg.version());
+ let found = self
+ .summaries(&pkg.name(), &req, load)
+ .map_ok(|mut p| p.any(|summary| summary.yanked));
+ found
+ }
+}
+
+impl Summaries {
+ /// Parse out a `Summaries` instance from on-disk state.
+ ///
+ /// This will attempt to prefer parsing a previous cache file that already
+ /// exists from a previous invocation of Cargo (aka you're typing `cargo
+ /// build` again after typing it previously). If parsing fails or the cache
+ /// isn't found, then we take a slower path which loads the full descriptor
+ /// for `relative` from the underlying index (aka typically libgit2 with
+ /// crates.io) and then parse everything in there.
+ ///
+ /// * `root` - this is the root argument passed to `load`
+ /// * `cache_root` - this is the root on the filesystem itself of where to
+ /// store cache files.
+ /// * `relative` - this is the file we're loading from cache or the index
+ /// data
+ /// * `source_id` - the registry's SourceId used when parsing JSON blobs to
+ /// create summaries.
+ /// * `load` - the actual index implementation which may be very slow to
+ /// call. We avoid this if we can.
+ pub fn parse(
+ root: &Path,
+ cache_root: &Path,
+ relative: &Path,
+ source_id: SourceId,
+ load: &mut dyn RegistryData,
+ config: &Config,
+ ) -> Poll<CargoResult<Option<Summaries>>> {
+ // First up, attempt to load the cache. This could fail for all manner
+ // of reasons, but consider all of them non-fatal and just log their
+ // occurrence in case anyone is debugging anything.
+ let cache_path = cache_root.join(relative);
+ let mut cached_summaries = None;
+ let mut index_version = None;
+ match fs::read(&cache_path) {
+ Ok(contents) => match Summaries::parse_cache(contents) {
+ Ok((s, v)) => {
+ cached_summaries = Some(s);
+ index_version = Some(v);
+ }
+ Err(e) => {
+ log::debug!("failed to parse {:?} cache: {}", relative, e);
+ }
+ },
+ Err(e) => log::debug!("cache missing for {:?} error: {}", relative, e),
+ }
+
+ let response = ready!(load.load(root, relative, index_version.as_deref())?);
+
+ match response {
+ LoadResponse::CacheValid => {
+ log::debug!("fast path for registry cache of {:?}", relative);
+ return Poll::Ready(Ok(cached_summaries));
+ }
+ LoadResponse::NotFound => {
+ if let Err(e) = fs::remove_file(cache_path) {
+ if e.kind() != ErrorKind::NotFound {
+ log::debug!("failed to remove from cache: {}", e);
+ }
+ }
+ return Poll::Ready(Ok(None));
+ }
+ LoadResponse::Data {
+ raw_data,
+ index_version,
+ } => {
+ // This is the fallback path where we actually talk to the registry backend to load
+ // information. Here we parse every single line in the index (as we need
+ // to find the versions)
+ log::debug!("slow path for {:?}", relative);
+ let mut cache = SummariesCache::default();
+ let mut ret = Summaries::default();
+ ret.raw_data = raw_data;
+ for line in split(&ret.raw_data, b'\n') {
+ // Attempt forwards-compatibility on the index by ignoring
+ // everything that we ourselves don't understand. That should
+ // allow future cargo implementations to change the
+ // interpretation of each line here, and older cargo will simply
+ // ignore the new lines.
+ let summary = match IndexSummary::parse(config, line, source_id) {
+ Ok(summary) => summary,
+ Err(e) => {
+ // This should only happen when there is an index
+ // entry from a future version of cargo that this
+ // version doesn't understand. Hopefully, those future
+ // versions of cargo correctly set INDEX_V_MAX and
+ // CURRENT_CACHE_VERSION, otherwise this will skip
+ // entries in the cache preventing those newer
+ // versions from reading them (that is, until the
+ // cache is rebuilt).
+ log::info!("failed to parse {:?} registry package: {}", relative, e);
+ continue;
+ }
+ };
+ let version = summary.summary.package_id().version().clone();
+ cache.versions.push((version.clone(), line));
+ ret.versions.insert(version, summary.into());
+ }
+ if let Some(index_version) = index_version {
+ log::trace!("caching index_version {}", index_version);
+ let cache_bytes = cache.serialize(index_version.as_str());
+ // Once we have our `cache_bytes` which represents the `Summaries` we're
+ // about to return, write that back out to disk so future Cargo
+ // invocations can use it.
+ //
+ // This is opportunistic so we ignore failure here but are sure to log
+ // something in case of error.
+ if paths::create_dir_all(cache_path.parent().unwrap()).is_ok() {
+ let path = Filesystem::new(cache_path.clone());
+ config.assert_package_cache_locked(&path);
+ if let Err(e) = fs::write(cache_path, &cache_bytes) {
+ log::info!("failed to write cache: {}", e);
+ }
+ }
+
+ // If we've got debug assertions enabled read back in the cached values
+ // and assert they match the expected result.
+ #[cfg(debug_assertions)]
+ {
+ let readback = SummariesCache::parse(&cache_bytes)
+ .expect("failed to parse cache we just wrote");
+ assert_eq!(
+ readback.index_version, index_version,
+ "index_version mismatch"
+ );
+ assert_eq!(readback.versions, cache.versions, "versions mismatch");
+ }
+ }
+ Poll::Ready(Ok(Some(ret)))
+ }
+ }
+ }
+
+ /// Parses an open `File` which represents information previously cached by
+ /// Cargo.
+ pub fn parse_cache(contents: Vec<u8>) -> CargoResult<(Summaries, InternedString)> {
+ let cache = SummariesCache::parse(&contents)?;
+ let index_version = InternedString::new(cache.index_version);
+ let mut ret = Summaries::default();
+ for (version, summary) in cache.versions {
+ let (start, end) = subslice_bounds(&contents, summary);
+ ret.versions
+ .insert(version, MaybeIndexSummary::Unparsed { start, end });
+ }
+ ret.raw_data = contents;
+ return Ok((ret, index_version));
+
+ // Returns the start/end offsets of `inner` within `outer`. Asserts that
+ // `inner` is a subslice of `outer`.
+ fn subslice_bounds(outer: &[u8], inner: &[u8]) -> (usize, usize) {
+ let outer_start = outer.as_ptr() as usize;
+ let outer_end = outer_start + outer.len();
+ let inner_start = inner.as_ptr() as usize;
+ let inner_end = inner_start + inner.len();
+ assert!(inner_start >= outer_start);
+ assert!(inner_end <= outer_end);
+ (inner_start - outer_start, inner_end - outer_start)
+ }
+ }
+}
+
+// Implementation of serializing/deserializing the cache of summaries on disk.
+// Currently the format looks like:
+//
+// +--------------------+----------------------+-------------+---+
+// | cache version byte | index format version | git sha rev | 0 |
+// +--------------------+----------------------+-------------+---+
+//
+// followed by...
+//
+// +----------------+---+------------+---+
+// | semver version | 0 | JSON blob | 0 | ...
+// +----------------+---+------------+---+
+//
+// The idea is that this is a very easy file for Cargo to parse in future
+// invocations. The read from disk should be quite fast and then afterwards all
+// we need to know is what versions correspond to which JSON blob.
+//
+// The leading version byte is intended to ensure that there's some level of
+// future compatibility against changes to this cache format so if different
+// versions of Cargo share the same cache they don't get too confused. The git
+// sha lets us know when the file needs to be regenerated (it needs regeneration
+// whenever the index itself updates).
+//
+// Cache versions:
+// * `1`: The original version.
+// * `2`: Added the "index format version" field so that if the index format
+// changes, different versions of cargo won't get confused reading each
+// other's caches.
+// * `3`: Bumped the version to work around an issue where multiple versions of
+// a package were published that differ only by semver metadata. For
+// example, openssl-src 110.0.0 and 110.0.0+1.1.0f. Previously, the cache
+// would be incorrectly populated with two entries, both 110.0.0. After
+// this, the metadata will be correctly included. This isn't really a format
+// change, just a version bump to clear the incorrect cache entries. Note:
+// the index shouldn't allow these, but unfortunately crates.io doesn't
+// check it.
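+//
+// As a rough illustration (not a byte-for-byte dump), a cache holding a single
+// version of a crate, saved with index version "etag: abc", would look like:
+//
+//   [3] [02 00 00 00] "etag: abc" [00] "1.0.0" [00] {"name":...} [00]
+//
+// where `3` is CURRENT_CACHE_VERSION and `02 00 00 00` is INDEX_V_MAX (2) as
+// little-endian bytes; see `serialize` and `parse` below.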
+
+const CURRENT_CACHE_VERSION: u8 = 3;
+
+impl<'a> SummariesCache<'a> {
+ fn parse(data: &'a [u8]) -> CargoResult<SummariesCache<'a>> {
+ // NB: keep this method in sync with `serialize` below
+ let (first_byte, rest) = data
+ .split_first()
+ .ok_or_else(|| anyhow::format_err!("malformed cache"))?;
+ if *first_byte != CURRENT_CACHE_VERSION {
+ bail!("looks like a different Cargo's cache, bailing out");
+ }
+ let index_v_bytes = rest
+ .get(..4)
+ .ok_or_else(|| anyhow::anyhow!("cache expected 4 bytes for index version"))?;
+ let index_v = u32::from_le_bytes(index_v_bytes.try_into().unwrap());
+ if index_v != INDEX_V_MAX {
+ bail!(
+ "index format version {} doesn't match the version I know ({})",
+ index_v,
+ INDEX_V_MAX
+ );
+ }
+ let rest = &rest[4..];
+
+ let mut iter = split(rest, 0);
+ let last_index_update = if let Some(update) = iter.next() {
+ str::from_utf8(update)?
+ } else {
+ bail!("malformed file");
+ };
+ let mut ret = SummariesCache::default();
+ ret.index_version = last_index_update;
+ while let Some(version) = iter.next() {
+ let version = str::from_utf8(version)?;
+ let version = Version::parse(version)?;
+ let summary = iter.next().unwrap();
+ ret.versions.push((version, summary));
+ }
+ Ok(ret)
+ }
+
+ fn serialize(&self, index_version: &str) -> Vec<u8> {
+ // NB: keep this method in sync with `parse` above
+ let size = self
+ .versions
+ .iter()
+ .map(|(_version, data)| (10 + data.len()))
+ .sum();
+ let mut contents = Vec::with_capacity(size);
+ contents.push(CURRENT_CACHE_VERSION);
+ contents.extend(&u32::to_le_bytes(INDEX_V_MAX));
+ contents.extend_from_slice(index_version.as_bytes());
+ contents.push(0);
+ for (version, data) in self.versions.iter() {
+ contents.extend_from_slice(version.to_string().as_bytes());
+ contents.push(0);
+ contents.extend_from_slice(data);
+ contents.push(0);
+ }
+ contents
+ }
+}
+
+impl MaybeIndexSummary {
+ /// Parses this "maybe a summary" into a definitely `Parsed` variant.
+ ///
+ /// Does nothing if this is already `Parsed`, and otherwise the `raw_data`
+ /// passed in is sliced with the bounds in `Unparsed` and then actually
+ /// parsed.
+ fn parse(
+ &mut self,
+ config: &Config,
+ raw_data: &[u8],
+ source_id: SourceId,
+ ) -> CargoResult<&IndexSummary> {
+ let (start, end) = match self {
+ MaybeIndexSummary::Unparsed { start, end } => (*start, *end),
+ MaybeIndexSummary::Parsed(summary) => return Ok(summary),
+ };
+ let summary = IndexSummary::parse(config, &raw_data[start..end], source_id)?;
+ *self = MaybeIndexSummary::Parsed(summary);
+ match self {
+ MaybeIndexSummary::Unparsed { .. } => unreachable!(),
+ MaybeIndexSummary::Parsed(summary) => Ok(summary),
+ }
+ }
+}
+
+impl From<IndexSummary> for MaybeIndexSummary {
+ fn from(summary: IndexSummary) -> MaybeIndexSummary {
+ MaybeIndexSummary::Parsed(summary)
+ }
+}
+
+impl IndexSummary {
+ /// Parses a line from the registry's index file into an `IndexSummary` for
+ /// a package.
+ ///
+ /// The `line` provided is expected to be valid JSON.
+ fn parse(config: &Config, line: &[u8], source_id: SourceId) -> CargoResult<IndexSummary> {
+ // ****CAUTION**** Please be extremely careful with returning errors
+ // from this function. Entries that error are not included in the
+ // index cache, and can cause cargo to get confused when switching
+ // between different versions that understand the index differently.
+ // Make sure to consider the INDEX_V_MAX and CURRENT_CACHE_VERSION
+ // values carefully when making changes here.
+ let RegistryPackage {
+ name,
+ vers,
+ cksum,
+ deps,
+ mut features,
+ features2,
+ yanked,
+ links,
+ v,
+ } = serde_json::from_slice(line)?;
+ let v = v.unwrap_or(1);
+ log::trace!("json parsed registry {}/{}", name, vers);
+ let pkgid = PackageId::new(name, &vers, source_id)?;
+ let deps = deps
+ .into_iter()
+ .map(|dep| dep.into_dep(source_id))
+ .collect::<CargoResult<Vec<_>>>()?;
+ if let Some(features2) = features2 {
+ for (name, values) in features2 {
+ features.entry(name).or_default().extend(values);
+ }
+ }
+ let mut summary = Summary::new(config, pkgid, deps, &features, links)?;
+ summary.set_checksum(cksum);
+ Ok(IndexSummary {
+ summary,
+ yanked: yanked.unwrap_or(false),
+ v,
+ })
+ }
+}
+
+fn split(haystack: &[u8], needle: u8) -> impl Iterator<Item = &[u8]> {
+ struct Split<'a> {
+ haystack: &'a [u8],
+ needle: u8,
+ }
+
+ impl<'a> Iterator for Split<'a> {
+ type Item = &'a [u8];
+
+ fn next(&mut self) -> Option<&'a [u8]> {
+ if self.haystack.is_empty() {
+ return None;
+ }
+ let (ret, remaining) = match memchr::memchr(self.needle, self.haystack) {
+ Some(pos) => (&self.haystack[..pos], &self.haystack[pos + 1..]),
+ None => (self.haystack, &[][..]),
+ };
+ self.haystack = remaining;
+ Some(ret)
+ }
+ }
+
+ Split { haystack, needle }
+}
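+
+// A small sanity check of the `split` helper above (illustrative example).
+#[test]
+fn split_on_newlines() {
+ // Splitting on b'\n' yields each segment; the trailing segment without a
+ // terminator is still returned, and nothing is returned for empty input.
+ let parts: Vec<&[u8]> = split(b"a\nb\nc", b'\n').collect();
+ assert_eq!(parts, vec![&b"a"[..], &b"b"[..], &b"c"[..]]);
+ assert_eq!(split(b"", b'\n').count(), 0);
+}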
diff --git a/src/tools/cargo/src/cargo/sources/registry/local.rs b/src/tools/cargo/src/cargo/sources/registry/local.rs
new file mode 100644
index 000000000..89419191f
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/registry/local.rs
@@ -0,0 +1,157 @@
+use crate::core::PackageId;
+use crate::sources::registry::{LoadResponse, MaybeLock, RegistryConfig, RegistryData};
+use crate::util::errors::CargoResult;
+use crate::util::{Config, Filesystem};
+use cargo_util::{paths, Sha256};
+use std::fs::File;
+use std::io::SeekFrom;
+use std::io::{self, prelude::*};
+use std::path::Path;
+use std::task::Poll;
+
+/// A local registry is a registry that lives on the filesystem as a set of
+/// `.crate` files with an `index` directory in the same format as a remote
+/// registry.
+pub struct LocalRegistry<'cfg> {
+ index_path: Filesystem,
+ root: Filesystem,
+ src_path: Filesystem,
+ config: &'cfg Config,
+ updated: bool,
+ quiet: bool,
+}
+
+impl<'cfg> LocalRegistry<'cfg> {
+ pub fn new(root: &Path, config: &'cfg Config, name: &str) -> LocalRegistry<'cfg> {
+ LocalRegistry {
+ src_path: config.registry_source_path().join(name),
+ index_path: Filesystem::new(root.join("index")),
+ root: Filesystem::new(root.to_path_buf()),
+ config,
+ updated: false,
+ quiet: false,
+ }
+ }
+}
+
+impl<'cfg> RegistryData for LocalRegistry<'cfg> {
+ fn prepare(&self) -> CargoResult<()> {
+ Ok(())
+ }
+
+ fn index_path(&self) -> &Filesystem {
+ &self.index_path
+ }
+
+ fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path {
+ // Note that the `*_unlocked` variant is used here since we're not
+ // modifying the index and it's required to be externally synchronized.
+ path.as_path_unlocked()
+ }
+
+ fn load(
+ &mut self,
+ root: &Path,
+ path: &Path,
+ _index_version: Option<&str>,
+ ) -> Poll<CargoResult<LoadResponse>> {
+ if self.updated {
+ let raw_data = match paths::read_bytes(&root.join(path)) {
+ Err(e)
+ if e.downcast_ref::<io::Error>()
+ .map_or(false, |ioe| ioe.kind() == io::ErrorKind::NotFound) =>
+ {
+ return Poll::Ready(Ok(LoadResponse::NotFound));
+ }
+ r => r,
+ }?;
+ Poll::Ready(Ok(LoadResponse::Data {
+ raw_data,
+ index_version: None,
+ }))
+ } else {
+ Poll::Pending
+ }
+ }
+
+ fn config(&mut self) -> Poll<CargoResult<Option<RegistryConfig>>> {
+ // Local registries don't have configuration for remote APIs or anything
+ // like that
+ Poll::Ready(Ok(None))
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ if self.updated {
+ return Ok(());
+ }
+ // Nothing to update, we just use what's on disk. Verify it actually
+ // exists though. We don't use any locks as we're just checking whether
+ // these directories exist.
+ let root = self.root.clone().into_path_unlocked();
+ if !root.is_dir() {
+ anyhow::bail!("local registry path is not a directory: {}", root.display());
+ }
+ let index_path = self.index_path.clone().into_path_unlocked();
+ if !index_path.is_dir() {
+ anyhow::bail!(
+ "local registry index path is not a directory: {}",
+ index_path.display()
+ );
+ }
+ self.updated = true;
+ Ok(())
+ }
+
+ fn invalidate_cache(&mut self) {
+ // Local registry has no cache - just reads from disk.
+ }
+
+ fn set_quiet(&mut self, _quiet: bool) {
+ self.quiet = true;
+ }
+
+ fn is_updated(&self) -> bool {
+ self.updated
+ }
+
+ fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult<MaybeLock> {
+ let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
+
+ // Note that the usage of `into_path_unlocked` here is because the local
+ // crate files never change since we're not the ones writing them,
+ // so it's not our responsibility to synchronize access to them.
+ let path = self.root.join(&crate_file).into_path_unlocked();
+ let mut crate_file = paths::open(&path)?;
+
+ // If we've already got an unpacked version of this crate, then skip the
+ // checksum below as it is in theory already verified.
+ let dst = format!("{}-{}", pkg.name(), pkg.version());
+ if self.src_path.join(dst).into_path_unlocked().exists() {
+ return Ok(MaybeLock::Ready(crate_file));
+ }
+
+ if !self.quiet {
+ self.config.shell().status("Unpacking", pkg)?;
+ }
+
+ // We don't actually need to download anything per se; we just need to
+ // verify that the checksum matches the .crate file itself.
+ let actual = Sha256::new().update_file(&crate_file)?.finish_hex();
+ if actual != checksum {
+ anyhow::bail!("failed to verify the checksum of `{}`", pkg)
+ }
+
+ crate_file.seek(SeekFrom::Start(0))?;
+
+ Ok(MaybeLock::Ready(crate_file))
+ }
+
+ fn finish_download(
+ &mut self,
+ _pkg: PackageId,
+ _checksum: &str,
+ _data: &[u8],
+ ) -> CargoResult<File> {
+ panic!("this source doesn't download")
+ }
+}
diff --git a/src/tools/cargo/src/cargo/sources/registry/mod.rs b/src/tools/cargo/src/cargo/sources/registry/mod.rs
new file mode 100644
index 000000000..aa3f5dc5f
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/registry/mod.rs
@@ -0,0 +1,1014 @@
+//! A `Source` for registry-based packages.
+//!
+//! # What's a Registry?
+//!
+//! Registries are central locations where packages can be uploaded to,
+//! discovered, and searched for. The purpose of a registry is to have a
+//! location that serves as permanent storage for versions of a crate over time.
+//!
+//! Compared to git sources, a registry provides many packages as well as many
+//! versions simultaneously. Git sources can also have commits deleted through
+//! rebases, whereas registries cannot have their versions deleted.
+//!
+//! # The Index of a Registry
+//!
+//! One of the major difficulties with a registry is that hosting so many
+//! packages may quickly run into performance problems when dealing with
+//! dependency graphs. It's infeasible for cargo to download the entire contents
+//! of the registry just to resolve one package's dependencies, for example. As
+//! a result, cargo needs some efficient method of querying what packages are
+//! available on a registry, what versions are available, and what the
+//! dependencies for each version is.
+//!
+//! One method of doing so would be having the registry expose an HTTP endpoint
+//! which can be queried with a list of packages, returning a response with
+//! their dependencies and versions. This is somewhat inefficient, however,
+//! as we may have to hit the endpoint many times and may have already
+//! queried for much of the data locally (for other packages, for
+//! example). This also involves inventing a transport format between the
+//! registry and Cargo itself, so this route was not taken.
+//!
+//! Instead, Cargo communicates with registries through a git repository
+//! referred to as the Index. The Index of a registry is essentially an easily
+//! query-able version of the registry's database for a list of versions of a
+//! package as well as a list of dependencies for each version.
+//!
+//! Using git to host this index provides a number of benefits:
+//!
+//! * The entire index can be stored efficiently locally on disk. This means
+//! that all queries of a registry can happen locally and don't need to touch
+//! the network.
+//!
+//! * Updates of the index are quite efficient. Using git buys incremental
+//! updates, compressed transmission, etc for free. The index must be updated
+//! each time we need fresh information from a registry, but this is one
+//! update of a git repository that probably hasn't changed a whole lot so
+//! it shouldn't be too expensive.
+//!
+//! Additionally, each modification to the index is just appending a line at
+//! the end of a file (the exact format is described later). This means that
+//! the commits for an index are quite small and easily applied/compressible.
+//!
+//! ## The format of the Index
+//!
+//! The index is a store for the list of versions for all packages known, so its
+//! format on disk is optimized slightly to ensure that `ls registry` doesn't
+//! produce a list of all packages ever known. The index also wants to ensure
+//! that there aren't a million files, which may actually end up hitting
+//! filesystem limits at some point. To this end, a few decisions were made
+//! about the format of the registry:
+//!
+//! 1. Each crate will have one file corresponding to it. Each version for a
+//! crate will just be a line in this file.
+//! 2. There will be two tiers of directories for crate names, under which
+//! crates corresponding to those tiers will be located.
+//!
+//! Here is an example hierarchy of an index:
+//!
+//! ```notrust
+//! .
+//! ├── 3
+//! │   └── u
+//! │   └── url
+//! ├── bz
+//! │   └── ip
+//! │   └── bzip2
+//! ├── config.json
+//! ├── en
+//! │   └── co
+//! │   └── encoding
+//! └── li
+//!    ├── bg
+//!    │   └── libgit2
+//!    └── nk
+//!    └── link-config
+//! ```
+//!
+//! The root of the index contains a `config.json` file with a few entries
+//! corresponding to the registry (see [`RegistryConfig`] below).
+//!
+//! Otherwise, there are three numbered directories (1, 2, 3) for crates with
+//! names 1, 2, and 3 characters in length. The 1/2 directories simply have the
+//! crate files underneath them, while the 3 directory is sharded by the first
+//! letter of the crate name.
+//!
+//! Beyond those, the top-level directory contains many two-letter directory names,
+//! each of which has many sub-folders with two letters. At the end of all these
+//! are the actual crate files themselves.
+//!
+//! The purpose of this layout is to hopefully cut down on `ls` sizes as well as
+//! to support efficient lookup based on the crate name itself.
+//!
+//! ## Crate files
+//!
+//! Each file in the index is the history of one crate over time. Each line in
+//! the file corresponds to one version of a crate, stored in JSON format (see
+//! the `RegistryPackage` structure below).
+//!
+//! As new versions are published, new lines are appended to this file. The only
+//! modifications to this file that should happen over time are yanks of a
+//! particular version.
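+//!
+//! For illustration, a single (simplified) line for a hypothetical crate `foo`
+//! might look like the following; real entries carry additional fields such as
+//! `yanked`, `links`, and `v` (see `RegistryPackage`):
+//!
+//! ```notrust
+//! {"name":"foo","vers":"0.1.0","deps":[],"cksum":"...","features":{}}
+//! ```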
+//!
+//! # Downloading Packages
+//!
+//! The purpose of the Index was to provide an efficient method to resolve the
+//! dependency graph for a package. So far we only required one network
+//! interaction to update the registry's repository (yay!). After resolution has
+//! been performed, however, we need to download the contents of packages so we
+//! can read the full manifest and build the source code.
+//!
+//! To accomplish this, this source's `download` method will make an HTTP
+//! request per-package requested to download tarballs into a local cache. These
+//! tarballs will then be unpacked into a destination folder.
+//!
+//! Note that because versions uploaded to the registry are frozen forever, the
+//! HTTP download and unpacking can all be skipped if the version has
+//! already been downloaded and unpacked. This caching allows us to only
+//! download a package when absolutely necessary.
+//!
+//! # Filesystem Hierarchy
+//!
+//! Overall, the `$HOME/.cargo` directory looks like this when talking about the registry:
+//!
+//! ```notrust
+//! # A folder under which all registry metadata is hosted (similar to
+//! # $HOME/.cargo/git)
+//! $HOME/.cargo/registry/
+//!
+//! # For each registry that cargo knows about (keyed by hostname + hash)
+//! # there is a folder which is the checked out version of the index for
+//! # the registry in this location. Note that this is done so cargo can
+//! # support multiple registries simultaneously
+//! index/
+//! registry1-<hash>/
+//! registry2-<hash>/
+//! ...
+//!
+//! # This folder is a cache for all downloaded tarballs from a registry.
+//! # Once downloaded and verified, a tarball never changes.
+//! cache/
+//! registry1-<hash>/<pkg>-<version>.crate
+//! ...
+//!
+//! # Location in which all tarballs are unpacked. Each tarball is known to
+//! # be frozen after downloading, so transitively this folder is also
+//! # frozen once it's unpacked (it's never unpacked again)
+//! src/
+//! registry1-<hash>/<pkg>-<version>/...
+//! ...
+//! ```
+
+use std::borrow::Cow;
+use std::collections::BTreeMap;
+use std::collections::HashSet;
+use std::fs::{File, OpenOptions};
+use std::io::{self, Write};
+use std::path::{Path, PathBuf};
+use std::task::{ready, Poll};
+
+use anyhow::Context as _;
+use cargo_util::paths::{self, exclude_from_backups_and_indexing};
+use flate2::read::GzDecoder;
+use log::debug;
+use semver::Version;
+use serde::Deserialize;
+use tar::Archive;
+
+use crate::core::dependency::{DepKind, Dependency};
+use crate::core::source::MaybePackage;
+use crate::core::{Package, PackageId, QueryKind, Source, SourceId, Summary};
+use crate::sources::PathSource;
+use crate::util::hex;
+use crate::util::interning::InternedString;
+use crate::util::into_url::IntoUrl;
+use crate::util::network::PollExt;
+use crate::util::{
+ restricted_names, CargoResult, Config, Filesystem, LimitErrorReader, OptVersionReq,
+};
+
+const PACKAGE_SOURCE_LOCK: &str = ".cargo-ok";
+pub const CRATES_IO_INDEX: &str = "https://github.com/rust-lang/crates.io-index";
+pub const CRATES_IO_HTTP_INDEX: &str = "sparse+https://index.crates.io/";
+pub const CRATES_IO_REGISTRY: &str = "crates-io";
+pub const CRATES_IO_DOMAIN: &str = "crates.io";
+const CRATE_TEMPLATE: &str = "{crate}";
+const VERSION_TEMPLATE: &str = "{version}";
+const PREFIX_TEMPLATE: &str = "{prefix}";
+const LOWER_PREFIX_TEMPLATE: &str = "{lowerprefix}";
+const CHECKSUM_TEMPLATE: &str = "{sha256-checksum}";
+const MAX_UNPACK_SIZE: u64 = 512 * 1024 * 1024;
+const MAX_COMPRESSION_RATIO: usize = 20; // 20:1
+
+/// A "source" for a local (see `local::LocalRegistry`) or remote (see
+/// `remote::RemoteRegistry`) registry.
+///
+/// This contains common functionality that is shared between the two registry
+/// kinds, with the registry-specific logic implemented as part of the
+/// [`RegistryData`] trait referenced via the `ops` field.
+pub struct RegistrySource<'cfg> {
+ source_id: SourceId,
+ /// The path where crate files are extracted (`$CARGO_HOME/registry/src/$REG-HASH`).
+ src_path: Filesystem,
+ /// Local reference to [`Config`] for convenience.
+ config: &'cfg Config,
+ /// Abstraction for interfacing to the different registry kinds.
+ ops: Box<dyn RegistryData + 'cfg>,
+ /// Interface for managing the on-disk index.
+ index: index::RegistryIndex<'cfg>,
+ /// A set of packages that should be allowed to be used, even if they are
+ /// yanked.
+ ///
+ /// This is populated from the entries in `Cargo.lock` to ensure that
+ /// `cargo update -p somepkg` won't unlock yanked entries in `Cargo.lock`.
+ /// Otherwise, the resolver would think that those entries no longer
+ /// exist, and it would trigger updates to unrelated packages.
+ yanked_whitelist: HashSet<PackageId>,
+}
+
+/// The `config.json` file stored in the index.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "kebab-case")]
+pub struct RegistryConfig {
+ /// Download endpoint for all crates.
+ ///
+ /// The string is a template which will generate the download URL for the
+ /// tarball of a specific version of a crate. The substrings `{crate}` and
+ /// `{version}` will be replaced with the crate's name and version
+ /// respectively. The substring `{prefix}` will be replaced with the
+ /// crate's prefix directory name, and the substring `{lowerprefix}` will
+ /// be replaced with the crate's prefix directory name converted to
+ /// lowercase. The substring `{sha256-checksum}` will be replaced with the
+ /// crate's sha256 checksum.
+ ///
+ /// For backwards compatibility, if the string does not contain any
+    /// markers (`{crate}`, `{version}`, `{prefix}`, or `{lowerprefix}`), it
+ /// will be extended with `/{crate}/{version}/download` to
+ /// support registries like crates.io which were created before the
+ /// templating setup was created.
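+    ///
+    /// As an illustrative (not authoritative) example, a template such as
+    /// `https://example.com/api/v1/crates/{crate}/{version}/download` would
+    /// expand to `.../cargo/1.0.0/download` for `cargo@1.0.0`; for a name of
+    /// four or more characters like `cargo`, `{prefix}` expands to the index
+    /// prefix `ca/rg` (see `make_dep_prefix` at the bottom of this module).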
+ pub dl: String,
+
+ /// API endpoint for the registry. This is what's actually hit to perform
+ /// operations like yanks, owner modifications, publish new crates, etc.
+ /// If this is None, the registry does not support API commands.
+ pub api: Option<String>,
+
+ /// Whether all operations require authentication.
+ #[serde(default)]
+ pub auth_required: bool,
+}
+
+/// The maximum version of the `v` field in the index this version of cargo
+/// understands.
+pub(crate) const INDEX_V_MAX: u32 = 2;
+
+/// A single line in the index representing a single version of a package.
+#[derive(Deserialize)]
+pub struct RegistryPackage<'a> {
+ name: InternedString,
+ vers: Version,
+ #[serde(borrow)]
+ deps: Vec<RegistryDependency<'a>>,
+ features: BTreeMap<InternedString, Vec<InternedString>>,
+ /// This field contains features with new, extended syntax. Specifically,
+ /// namespaced features (`dep:`) and weak dependencies (`pkg?/feat`).
+ ///
+ /// This is separated from `features` because versions older than 1.19
+ /// will fail to load due to not being able to parse the new syntax, even
+ /// with a `Cargo.lock` file.
+ features2: Option<BTreeMap<InternedString, Vec<InternedString>>>,
+ cksum: String,
+ /// If `true`, Cargo will skip this version when resolving.
+ ///
+ /// This was added in 2014. Everything in the crates.io index has this set
+ /// now, so this probably doesn't need to be an option anymore.
+ yanked: Option<bool>,
+ /// Native library name this package links to.
+ ///
+ /// Added early 2018 (see <https://github.com/rust-lang/cargo/pull/4978>),
+ /// can be `None` if published before then.
+ links: Option<InternedString>,
+ /// The schema version for this entry.
+ ///
+ /// If this is None, it defaults to version 1. Entries with unknown
+ /// versions are ignored.
+ ///
+ /// Version `2` format adds the `features2` field.
+ ///
+    /// This provides a way to safely introduce changes to index entries
+    /// and allow older versions of cargo to ignore newer entries they don't
+    /// understand. This is honored as of 1.51, so unfortunately versions
+    /// older than that will ignore it, and potentially misinterpret
+    /// version 2 and newer entries.
+ ///
+ /// The intent is that versions older than 1.51 will work with a
+ /// pre-existing `Cargo.lock`, but they may not correctly process `cargo
+ /// update` or build a lock from scratch. In that case, cargo may
+ /// incorrectly select a new package that uses a new index format. A
+    /// workaround is to downgrade any incompatible packages using the
+    /// `--precise` flag of `cargo update`.
+ v: Option<u32>,
+}
+
+#[test]
+fn escaped_char_in_json() {
+ let _: RegistryPackage<'_> = serde_json::from_str(
+ r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{}}"#,
+ )
+ .unwrap();
+ let _: RegistryPackage<'_> = serde_json::from_str(
+ r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{"test":["k","q"]},"links":"a-sys"}"#
+ ).unwrap();
+
+    // Now we add escaped chars in all the places they can go.
+    // These are not valid, but any error should come later than JSON parsing.
+ let _: RegistryPackage<'_> = serde_json::from_str(
+ r#"{
+ "name":"This name has a escaped cher in it \n\t\" ",
+ "vers":"0.0.1",
+ "deps":[{
+ "name": " \n\t\" ",
+ "req": " \n\t\" ",
+ "features": [" \n\t\" "],
+ "optional": true,
+ "default_features": true,
+ "target": " \n\t\" ",
+ "kind": " \n\t\" ",
+ "registry": " \n\t\" "
+ }],
+ "cksum":"bae3",
+ "features":{"test \n\t\" ":["k \n\t\" ","q \n\t\" "]},
+ "links":" \n\t\" "}"#,
+ )
+ .unwrap();
+}
+
+/// A dependency as encoded in the index JSON.
+#[derive(Deserialize)]
+struct RegistryDependency<'a> {
+ name: InternedString,
+ #[serde(borrow)]
+ req: Cow<'a, str>,
+ features: Vec<InternedString>,
+ optional: bool,
+ default_features: bool,
+ target: Option<Cow<'a, str>>,
+ kind: Option<Cow<'a, str>>,
+ registry: Option<Cow<'a, str>>,
+ package: Option<InternedString>,
+ public: Option<bool>,
+}
+
+impl<'a> RegistryDependency<'a> {
+ /// Converts an encoded dependency in the registry to a cargo dependency
+ pub fn into_dep(self, default: SourceId) -> CargoResult<Dependency> {
+ let RegistryDependency {
+ name,
+ req,
+ mut features,
+ optional,
+ default_features,
+ target,
+ kind,
+ registry,
+ package,
+ public,
+ } = self;
+
+ let id = if let Some(registry) = &registry {
+ SourceId::for_registry(&registry.into_url()?)?
+ } else {
+ default
+ };
+
+ let mut dep = Dependency::parse(package.unwrap_or(name), Some(&req), id)?;
+ if package.is_some() {
+ dep.set_explicit_name_in_toml(name);
+ }
+ let kind = match kind.as_deref().unwrap_or("") {
+ "dev" => DepKind::Development,
+ "build" => DepKind::Build,
+ _ => DepKind::Normal,
+ };
+
+ let platform = match target {
+ Some(target) => Some(target.parse()?),
+ None => None,
+ };
+
+ // All dependencies are private by default
+ let public = public.unwrap_or(false);
+
+ // Unfortunately older versions of cargo and/or the registry ended up
+ // publishing lots of entries where the features array contained the
+ // empty feature, "", inside. This confuses the resolution process much
+ // later on and these features aren't actually valid, so filter them all
+ // out here.
+ features.retain(|s| !s.is_empty());
+
+ // In index, "registry" is null if it is from the same index.
+ // In Cargo.toml, "registry" is None if it is from the default
+ if !id.is_crates_io() {
+ dep.set_registry_id(id);
+ }
+
+ dep.set_optional(optional)
+ .set_default_features(default_features)
+ .set_features(features)
+ .set_platform(platform)
+ .set_kind(kind)
+ .set_public(public);
+
+ Ok(dep)
+ }
+}
+
+/// Result from loading data from a registry.
+pub enum LoadResponse {
+ /// The cache is valid. The cached data should be used.
+ CacheValid,
+
+ /// The cache is out of date. Returned data should be used.
+ Data {
+ raw_data: Vec<u8>,
+ index_version: Option<String>,
+ },
+
+    /// The requested crate was not found.
+ NotFound,
+}
+
+/// An abstract interface to handle both a local (see `local::LocalRegistry`)
+/// and remote (see `remote::RemoteRegistry`) registry.
+///
+/// This allows [`RegistrySource`] to abstractly handle both registry kinds.
+pub trait RegistryData {
+ /// Performs initialization for the registry.
+ ///
+    /// This should be safe to call multiple times; the implementation is
+    /// expected to do no work if it is already prepared.
+ fn prepare(&self) -> CargoResult<()>;
+
+ /// Returns the path to the index.
+ ///
+ /// Note that different registries store the index in different formats
+ /// (remote=git, local=files).
+ fn index_path(&self) -> &Filesystem;
+
+ /// Loads the JSON for a specific named package from the index.
+ ///
+ /// * `root` is the root path to the index.
+ /// * `path` is the relative path to the package to load (like `ca/rg/cargo`).
+ /// * `index_version` is the version of the requested crate data currently in cache.
+ fn load(
+ &mut self,
+ root: &Path,
+ path: &Path,
+ index_version: Option<&str>,
+ ) -> Poll<CargoResult<LoadResponse>>;
+
+ /// Loads the `config.json` file and returns it.
+ ///
+ /// Local registries don't have a config, and return `None`.
+ fn config(&mut self) -> Poll<CargoResult<Option<RegistryConfig>>>;
+
+ /// Invalidates locally cached data.
+ fn invalidate_cache(&mut self);
+
+ /// If quiet, the source should not display any progress or status messages.
+ fn set_quiet(&mut self, quiet: bool);
+
+ /// Is the local cached data up-to-date?
+ fn is_updated(&self) -> bool;
+
+ /// Prepare to start downloading a `.crate` file.
+ ///
+ /// Despite the name, this doesn't actually download anything. If the
+ /// `.crate` is already downloaded, then it returns [`MaybeLock::Ready`].
+ /// If it hasn't been downloaded, then it returns [`MaybeLock::Download`]
+ /// which contains the URL to download. The [`crate::core::package::Downloads`]
+ /// system handles the actual download process. After downloading, it
+ /// calls [`Self::finish_download`] to save the downloaded file.
+ ///
+ /// `checksum` is currently only used by local registries to verify the
+ /// file contents (because local registries never actually download
+ /// anything). Remote registries will validate the checksum in
+ /// `finish_download`. For already downloaded `.crate` files, it does not
+ /// validate the checksum, assuming the filesystem does not suffer from
+ /// corruption or manipulation.
+ fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult<MaybeLock>;
+
+ /// Finish a download by saving a `.crate` file to disk.
+ ///
+ /// After [`crate::core::package::Downloads`] has finished a download,
+ /// it will call this to save the `.crate` file. This is only relevant
+ /// for remote registries. This should validate the checksum and save
+ /// the given data to the on-disk cache.
+ ///
+ /// Returns a [`File`] handle to the `.crate` file, positioned at the start.
+ fn finish_download(&mut self, pkg: PackageId, checksum: &str, data: &[u8])
+ -> CargoResult<File>;
+
+ /// Returns whether or not the `.crate` file is already downloaded.
+ fn is_crate_downloaded(&self, _pkg: PackageId) -> bool {
+ true
+ }
+
+ /// Validates that the global package cache lock is held.
+ ///
+ /// Given the [`Filesystem`], this will make sure that the package cache
+ /// lock is held. If not, it will panic. See
+ /// [`Config::acquire_package_cache_lock`] for acquiring the global lock.
+ ///
+ /// Returns the [`Path`] to the [`Filesystem`].
+ fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path;
+
+ /// Block until all outstanding Poll::Pending requests are Poll::Ready.
+ fn block_until_ready(&mut self) -> CargoResult<()>;
+}
+
+/// The status of [`RegistryData::download`] which indicates whether a `.crate`
+/// file has already been downloaded, or, if not, the URL to download it from.
+pub enum MaybeLock {
+ /// The `.crate` file is already downloaded. [`File`] is a handle to the
+ /// opened `.crate` file on the filesystem.
+ Ready(File),
+ /// The `.crate` file is not downloaded, here's the URL to download it from.
+ ///
+    /// `descriptor` is just a text string shown to the user to describe what
+    /// is being downloaded.
+ Download {
+ url: String,
+ descriptor: String,
+ authorization: Option<String>,
+ },
+}
+
+mod download;
+mod http_remote;
+mod index;
+mod local;
+mod remote;
+
+fn short_name(id: SourceId) -> String {
+ let hash = hex::short_hash(&id);
+ let ident = id.url().host_str().unwrap_or("").to_string();
+ format!("{}-{}", ident, hash)
+}
+
+impl<'cfg> RegistrySource<'cfg> {
+ pub fn remote(
+ source_id: SourceId,
+ yanked_whitelist: &HashSet<PackageId>,
+ config: &'cfg Config,
+ ) -> CargoResult<RegistrySource<'cfg>> {
+ assert!(source_id.is_remote_registry());
+ let name = short_name(source_id);
+ let ops = if source_id.is_sparse() {
+ Box::new(http_remote::HttpRegistry::new(source_id, config, &name)?) as Box<_>
+ } else {
+ Box::new(remote::RemoteRegistry::new(source_id, config, &name)) as Box<_>
+ };
+
+ Ok(RegistrySource::new(
+ source_id,
+ config,
+ &name,
+ ops,
+ yanked_whitelist,
+ ))
+ }
+
+ pub fn local(
+ source_id: SourceId,
+ path: &Path,
+ yanked_whitelist: &HashSet<PackageId>,
+ config: &'cfg Config,
+ ) -> RegistrySource<'cfg> {
+ let name = short_name(source_id);
+ let ops = local::LocalRegistry::new(path, config, &name);
+ RegistrySource::new(source_id, config, &name, Box::new(ops), yanked_whitelist)
+ }
+
+ fn new(
+ source_id: SourceId,
+ config: &'cfg Config,
+ name: &str,
+ ops: Box<dyn RegistryData + 'cfg>,
+ yanked_whitelist: &HashSet<PackageId>,
+ ) -> RegistrySource<'cfg> {
+ RegistrySource {
+ src_path: config.registry_source_path().join(name),
+ config,
+ source_id,
+ index: index::RegistryIndex::new(source_id, ops.index_path(), config),
+ yanked_whitelist: yanked_whitelist.clone(),
+ ops,
+ }
+ }
+
+ /// Decode the configuration stored within the registry.
+ ///
+ /// This requires that the index has been at least checked out.
+ pub fn config(&mut self) -> Poll<CargoResult<Option<RegistryConfig>>> {
+ self.ops.config()
+ }
+
+ /// Unpacks a downloaded package into a location where it's ready to be
+ /// compiled.
+ ///
+ /// No action is taken if the source looks like it's already unpacked.
+ fn unpack_package(&self, pkg: PackageId, tarball: &File) -> CargoResult<PathBuf> {
+ // The `.cargo-ok` file is used to track if the source is already
+ // unpacked.
+ let package_dir = format!("{}-{}", pkg.name(), pkg.version());
+ let dst = self.src_path.join(&package_dir);
+ let path = dst.join(PACKAGE_SOURCE_LOCK);
+ let path = self.config.assert_package_cache_locked(&path);
+ let unpack_dir = path.parent().unwrap();
+ match path.metadata() {
+ Ok(meta) if meta.len() > 0 => return Ok(unpack_dir.to_path_buf()),
+ Ok(_meta) => {
+                // The `.cargo-ok` file is not in the state we expect it to be
+                // in (two bytes containing "ok").
+ //
+ // Cargo has always included a `.cargo-ok` file to detect if
+ // extraction was interrupted, but it was originally empty.
+ //
+ // In 1.34, Cargo was changed to create the `.cargo-ok` file
+ // before it started extraction to implement fine-grained
+ // locking. After it was finished extracting, it wrote two
+ // bytes to indicate it was complete. It would use the length
+ // check to detect if it was possibly interrupted.
+ //
+ // In 1.36, Cargo changed to not use fine-grained locking, and
+ // instead used a global lock. The use of `.cargo-ok` was no
+ // longer needed for locking purposes, but was kept to detect
+ // when extraction was interrupted.
+ //
+ // In 1.49, Cargo changed to not create the `.cargo-ok` file
+ // before it started extraction to deal with `.crate` files
+ // that inexplicably had a `.cargo-ok` file in them.
+ //
+ // In 1.64, Cargo changed to detect `.crate` files with
+ // `.cargo-ok` files in them in response to CVE-2022-36113,
+ // which dealt with malicious `.crate` files making
+ // `.cargo-ok` a symlink causing cargo to write "ok" to any
+ // arbitrary file on the filesystem it has permission to.
+ //
+ // This is all a long-winded way of explaining the
+ // circumstances that might cause a directory to contain a
+ // `.cargo-ok` file that is empty or otherwise corrupted.
+                // It may have been extracted by a version of Rust before 1.34,
+                // in which case everything should be fine. However, an empty
+                // file created by versions 1.36 to 1.49 indicates that the
+                // extraction was interrupted and that we need to start over.
+ //
+ // Another possibility is that the filesystem is simply
+ // corrupted, in which case deleting the directory might be
+ // the safe thing to do. That is probably unlikely, though.
+ //
+ // To be safe, this deletes the directory and starts over
+ // again.
+ log::warn!("unexpected length of {path:?}, clearing cache");
+ paths::remove_dir_all(dst.as_path_unlocked())?;
+ }
+ Err(e) if e.kind() == io::ErrorKind::NotFound => {}
+ Err(e) => anyhow::bail!("failed to access package completion {path:?}: {e}"),
+ }
+ dst.create_dir()?;
+ let mut tar = {
+ let size_limit = max_unpack_size(self.config, tarball.metadata()?.len());
+ let gz = GzDecoder::new(tarball);
+ let gz = LimitErrorReader::new(gz, size_limit);
+ Archive::new(gz)
+ };
+ let prefix = unpack_dir.file_name().unwrap();
+ let parent = unpack_dir.parent().unwrap();
+ for entry in tar.entries()? {
+ let mut entry = entry.with_context(|| "failed to iterate over archive")?;
+ let entry_path = entry
+ .path()
+ .with_context(|| "failed to read entry path")?
+ .into_owned();
+
+ // We're going to unpack this tarball into the global source
+ // directory, but we want to make sure that it doesn't accidentally
+ // (or maliciously) overwrite source code from other crates. Cargo
+ // itself should never generate a tarball that hits this error, and
+ // crates.io should also block uploads with these sorts of tarballs,
+ // but be extra sure by adding a check here as well.
+ if !entry_path.starts_with(prefix) {
+ anyhow::bail!(
+ "invalid tarball downloaded, contains \
+ a file at {:?} which isn't under {:?}",
+ entry_path,
+ prefix
+ )
+ }
+ // Prevent unpacking the lockfile from the crate itself.
+ if entry_path
+ .file_name()
+ .map_or(false, |p| p == PACKAGE_SOURCE_LOCK)
+ {
+ continue;
+ }
+            // Unpack the entry; Windows-reserved path names get a clearer error below.
+ let mut result = entry.unpack_in(parent).map_err(anyhow::Error::from);
+ if cfg!(windows) && restricted_names::is_windows_reserved_path(&entry_path) {
+ result = result.with_context(|| {
+ format!(
+ "`{}` appears to contain a reserved Windows path, \
+ it cannot be extracted on Windows",
+ entry_path.display()
+ )
+ });
+ }
+ result
+ .with_context(|| format!("failed to unpack entry at `{}`", entry_path.display()))?;
+ }
+
+ // Now that we've finished unpacking, create and write to the lock file to indicate that
+ // unpacking was successful.
+ let mut ok = OpenOptions::new()
+ .create_new(true)
+ .read(true)
+ .write(true)
+ .open(&path)
+ .with_context(|| format!("failed to open `{}`", path.display()))?;
+ write!(ok, "ok")?;
+
+ Ok(unpack_dir.to_path_buf())
+ }
+
+ fn get_pkg(&mut self, package: PackageId, path: &File) -> CargoResult<Package> {
+ let path = self
+ .unpack_package(package, path)
+ .with_context(|| format!("failed to unpack package `{}`", package))?;
+ let mut src = PathSource::new(&path, self.source_id, self.config);
+ src.update()?;
+ let mut pkg = match src.download(package)? {
+ MaybePackage::Ready(pkg) => pkg,
+ MaybePackage::Download { .. } => unreachable!(),
+ };
+
+ // After we've loaded the package configure its summary's `checksum`
+ // field with the checksum we know for this `PackageId`.
+ let req = OptVersionReq::exact(package.version());
+ let summary_with_cksum = self
+ .index
+ .summaries(&package.name(), &req, &mut *self.ops)?
+ .expect("a downloaded dep now pending!?")
+ .map(|s| s.summary.clone())
+ .next()
+ .expect("summary not found");
+ if let Some(cksum) = summary_with_cksum.checksum() {
+ pkg.manifest_mut()
+ .summary_mut()
+ .set_checksum(cksum.to_string());
+ }
+
+ Ok(pkg)
+ }
+}
+
+impl<'cfg> Source for RegistrySource<'cfg> {
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ // If this is a precise dependency, then it came from a lock file and in
+ // theory the registry is known to contain this version. If, however, we
+ // come back with no summaries, then our registry may need to be
+ // updated, so we fall back to performing a lazy update.
+ if kind == QueryKind::Exact && dep.source_id().precise().is_some() && !self.ops.is_updated()
+ {
+ debug!("attempting query without update");
+ let mut called = false;
+ ready!(self.index.query_inner(
+ &dep.package_name(),
+ dep.version_req(),
+ &mut *self.ops,
+ &self.yanked_whitelist,
+ &mut |s| {
+ if dep.matches(&s) {
+ called = true;
+ f(s);
+ }
+ },
+ ))?;
+ if called {
+ Poll::Ready(Ok(()))
+ } else {
+ debug!("falling back to an update");
+ self.invalidate_cache();
+ Poll::Pending
+ }
+ } else {
+ let mut called = false;
+ ready!(self.index.query_inner(
+ &dep.package_name(),
+ dep.version_req(),
+ &mut *self.ops,
+ &self.yanked_whitelist,
+ &mut |s| {
+ let matched = match kind {
+ QueryKind::Exact => dep.matches(&s),
+ QueryKind::Fuzzy => true,
+ };
+ if matched {
+ f(s);
+ called = true;
+ }
+ }
+ ))?;
+ if called {
+ return Poll::Ready(Ok(()));
+ }
+ let mut any_pending = false;
+ if kind == QueryKind::Fuzzy {
+                // Attempt to handle misspellings by searching for a chain of
+                // names related to the original name. The resolver will later
+                // reject any candidates that have the wrong name, and along
+                // the way this will produce helpful "did you mean?" suggestions.
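+                // For example, a query for `foo_bar` may also try
+                // permutations such as `foo-bar`.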
+ for name_permutation in
+ index::UncanonicalizedIter::new(&dep.package_name()).take(1024)
+ {
+ any_pending |= self
+ .index
+ .query_inner(
+ &name_permutation,
+ dep.version_req(),
+ &mut *self.ops,
+ &self.yanked_whitelist,
+ f,
+ )?
+ .is_pending();
+ }
+ }
+ if any_pending {
+ Poll::Pending
+ } else {
+ Poll::Ready(Ok(()))
+ }
+ }
+ }
+
+ fn supports_checksums(&self) -> bool {
+ true
+ }
+
+ fn requires_precise(&self) -> bool {
+ false
+ }
+
+ fn source_id(&self) -> SourceId {
+ self.source_id
+ }
+
+ fn invalidate_cache(&mut self) {
+ self.index.clear_summaries_cache();
+ self.ops.invalidate_cache();
+ }
+
+ fn set_quiet(&mut self, quiet: bool) {
+ self.ops.set_quiet(quiet);
+ }
+
+ fn download(&mut self, package: PackageId) -> CargoResult<MaybePackage> {
+ let hash = loop {
+ match self.index.hash(package, &mut *self.ops)? {
+ Poll::Pending => self.block_until_ready()?,
+ Poll::Ready(hash) => break hash,
+ }
+ };
+ match self.ops.download(package, hash)? {
+ MaybeLock::Ready(file) => self.get_pkg(package, &file).map(MaybePackage::Ready),
+ MaybeLock::Download {
+ url,
+ descriptor,
+ authorization,
+ } => Ok(MaybePackage::Download {
+ url,
+ descriptor,
+ authorization,
+ }),
+ }
+ }
+
+ fn finish_download(&mut self, package: PackageId, data: Vec<u8>) -> CargoResult<Package> {
+ let hash = loop {
+ match self.index.hash(package, &mut *self.ops)? {
+ Poll::Pending => self.block_until_ready()?,
+ Poll::Ready(hash) => break hash,
+ }
+ };
+ let file = self.ops.finish_download(package, hash, &data)?;
+ self.get_pkg(package, &file)
+ }
+
+ fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+ Ok(pkg.package_id().version().to_string())
+ }
+
+ fn describe(&self) -> String {
+ self.source_id.display_index()
+ }
+
+ fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) {
+ self.yanked_whitelist.extend(pkgs);
+ }
+
+ fn is_yanked(&mut self, pkg: PackageId) -> Poll<CargoResult<bool>> {
+ self.index.is_yanked(pkg, &mut *self.ops)
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ // Before starting to work on the registry, make sure that
+ // `<cargo_home>/registry` is marked as excluded from indexing and
+ // backups. Older versions of Cargo didn't do this, so we do it here
+ // regardless of whether `<cargo_home>` exists.
+ //
+ // This does not use `create_dir_all_excluded_from_backups_atomic` for
+ // the same reason: we want to exclude it even if the directory already
+ // exists.
+ //
+ // IO errors in creating and marking it are ignored, e.g. in case we're on a
+ // read-only filesystem.
+ let registry_base = self.config.registry_base_path();
+ let _ = registry_base.create_dir();
+ exclude_from_backups_and_indexing(&registry_base.into_path_unlocked());
+
+ self.ops.block_until_ready()
+ }
+}
+
+/// Get the maximum unpack size that Cargo permits,
+/// based on a given `size` of your compressed file.
+///
+/// Returns the larger of `size * max_compression_ratio`
+/// and a fixed maximum unpacked size.
+///
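+/// For example, with the default limits a 1 MiB tarball may expand to at most
+/// `max(512 MiB, 1 MiB * 20) = 512 MiB`, while a 100 MiB tarball may expand to
+/// at most `100 MiB * 20 = 2000 MiB`.
+///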
+/// In reality, the compression ratio usually falls in the range of 2:1 to 10:1.
+/// We choose 20:1 to hopefully cover almost all legitimate cases.
+/// Any ratio higher than this is considered a zip bomb.
+///
+/// In the future we might want to introduce a configurable size.
+///
+/// Some of the real world data from common compression algorithms:
+///
+/// * <https://www.zlib.net/zlib_tech.html>
+/// * <https://cran.r-project.org/web/packages/brotli/vignettes/brotli-2015-09-22.pdf>
+/// * <https://blog.cloudflare.com/results-experimenting-brotli/>
+/// * <https://tukaani.org/lzma/benchmarks.html>
+fn max_unpack_size(config: &Config, size: u64) -> u64 {
+ const SIZE_VAR: &str = "__CARGO_TEST_MAX_UNPACK_SIZE";
+ const RATIO_VAR: &str = "__CARGO_TEST_MAX_UNPACK_RATIO";
+ let max_unpack_size = if cfg!(debug_assertions) && config.get_env(SIZE_VAR).is_ok() {
+ // For integration test only.
+ config
+ .get_env(SIZE_VAR)
+ .unwrap()
+ .parse()
+ .expect("a max unpack size in bytes")
+ } else {
+ MAX_UNPACK_SIZE
+ };
+ let max_compression_ratio = if cfg!(debug_assertions) && config.get_env(RATIO_VAR).is_ok() {
+ // For integration test only.
+ config
+ .get_env(RATIO_VAR)
+ .unwrap()
+ .parse()
+ .expect("a max compression ratio in bytes")
+ } else {
+ MAX_COMPRESSION_RATIO
+ };
+
+ u64::max(max_unpack_size, size * max_compression_ratio as u64)
+}
+
+fn make_dep_prefix(name: &str) -> String {
+ match name.len() {
+ 1 => String::from("1"),
+ 2 => String::from("2"),
+ 3 => format!("3/{}", &name[..1]),
+ _ => format!("{}/{}", &name[0..2], &name[2..4]),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::make_dep_prefix;
+
+ #[test]
+ fn dep_prefix() {
+ assert_eq!(make_dep_prefix("a"), "1");
+ assert_eq!(make_dep_prefix("ab"), "2");
+ assert_eq!(make_dep_prefix("abc"), "3/a");
+ assert_eq!(make_dep_prefix("Abc"), "3/A");
+ assert_eq!(make_dep_prefix("AbCd"), "Ab/Cd");
+ assert_eq!(make_dep_prefix("aBcDe"), "aB/cD");
+ }
+}
diff --git a/src/tools/cargo/src/cargo/sources/registry/remote.rs b/src/tools/cargo/src/cargo/sources/registry/remote.rs
new file mode 100644
index 000000000..3e5029144
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/registry/remote.rs
@@ -0,0 +1,366 @@
+use crate::core::{GitReference, PackageId, SourceId};
+use crate::sources::git;
+use crate::sources::registry::download;
+use crate::sources::registry::MaybeLock;
+use crate::sources::registry::{LoadResponse, RegistryConfig, RegistryData};
+use crate::util::errors::CargoResult;
+use crate::util::interning::InternedString;
+use crate::util::{Config, Filesystem};
+use anyhow::Context as _;
+use cargo_util::paths;
+use lazycell::LazyCell;
+use log::{debug, trace};
+use std::cell::{Cell, Ref, RefCell};
+use std::fs::File;
+use std::mem;
+use std::path::Path;
+use std::str;
+use std::task::{ready, Poll};
+
+/// A remote registry is a registry that lives at a remote URL (such as
+/// crates.io). The git index is cloned locally, and `.crate` files are
+/// downloaded as needed and cached locally.
+pub struct RemoteRegistry<'cfg> {
+ index_path: Filesystem,
+    /// Path to the cache of `.crate` files (`$CARGO_HOME/registry/cache/$REG-HASH`).
+ cache_path: Filesystem,
+ source_id: SourceId,
+ index_git_ref: GitReference,
+ config: &'cfg Config,
+ tree: RefCell<Option<git2::Tree<'static>>>,
+ repo: LazyCell<git2::Repository>,
+ head: Cell<Option<git2::Oid>>,
+ current_sha: Cell<Option<InternedString>>,
+ needs_update: bool, // Does this registry need to be updated?
+ quiet: bool,
+}
+
+impl<'cfg> RemoteRegistry<'cfg> {
+ pub fn new(source_id: SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> {
+ RemoteRegistry {
+ index_path: config.registry_index_path().join(name),
+ cache_path: config.registry_cache_path().join(name),
+ source_id,
+ config,
+ // TODO: we should probably make this configurable
+ index_git_ref: GitReference::DefaultBranch,
+ tree: RefCell::new(None),
+ repo: LazyCell::new(),
+ head: Cell::new(None),
+ current_sha: Cell::new(None),
+ needs_update: false,
+ quiet: false,
+ }
+ }
+
+ fn repo(&self) -> CargoResult<&git2::Repository> {
+ self.repo.try_borrow_with(|| {
+ let path = self.config.assert_package_cache_locked(&self.index_path);
+
+ // Fast path without a lock
+ if let Ok(repo) = git2::Repository::open(&path) {
+ trace!("opened a repo without a lock");
+ return Ok(repo);
+ }
+
+ // Ok, now we need to lock and try the whole thing over again.
+ trace!("acquiring registry index lock");
+ match git2::Repository::open(&path) {
+ Ok(repo) => Ok(repo),
+ Err(_) => {
+ drop(paths::remove_dir_all(&path));
+ paths::create_dir_all(&path)?;
+
+ // Note that we'd actually prefer to use a bare repository
+ // here as we're not actually going to check anything out.
+ // All versions of Cargo, though, share the same CARGO_HOME,
+ // so for compatibility with older Cargo which *does* do
+ // checkouts we make sure to initialize a new full
+ // repository (not a bare one).
+ //
+ // We should change this to `init_bare` whenever we feel
+ // like enough time has passed or if we change the directory
+ // that the folder is located in, such as by changing the
+ // hash at the end of the directory.
+ //
+ // Note that in the meantime we also skip `init.templatedir`
+ // as it can be misconfigured sometimes or otherwise add
+ // things that we don't want.
+ let mut opts = git2::RepositoryInitOptions::new();
+ opts.external_template(false);
+ Ok(git2::Repository::init_opts(&path, &opts).with_context(|| {
+ format!("failed to initialize index git repository (in {:?})", path)
+ })?)
+ }
+ }
+ })
+ }
+
+ fn head(&self) -> CargoResult<git2::Oid> {
+ if self.head.get().is_none() {
+ let repo = self.repo()?;
+ let oid = self.index_git_ref.resolve(repo)?;
+ self.head.set(Some(oid));
+ }
+ Ok(self.head.get().unwrap())
+ }
+
+ fn tree(&self) -> CargoResult<Ref<'_, git2::Tree<'_>>> {
+ {
+ let tree = self.tree.borrow();
+ if tree.is_some() {
+ return Ok(Ref::map(tree, |s| s.as_ref().unwrap()));
+ }
+ }
+ let repo = self.repo()?;
+ let commit = repo.find_commit(self.head()?)?;
+ let tree = commit.tree()?;
+
+ // Unfortunately in libgit2 the tree objects look like they've got a
+ // reference to the repository object which means that a tree cannot
+ // outlive the repository that it came from. Here we want to cache this
+ // tree, though, so to accomplish this we transmute it to a static
+ // lifetime.
+ //
+ // Note that we don't actually hand out the static lifetime, instead we
+ // only return a scoped one from this function. Additionally the repo
+ // we loaded from (above) lives as long as this object
+ // (`RemoteRegistry`) so we then just need to ensure that the tree is
+ // destroyed first in the destructor, hence the destructor on
+ // `RemoteRegistry` below.
+ let tree = unsafe { mem::transmute::<git2::Tree<'_>, git2::Tree<'static>>(tree) };
+ *self.tree.borrow_mut() = Some(tree);
+ Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap()))
+ }
+
+ fn current_version(&self) -> Option<InternedString> {
+ if let Some(sha) = self.current_sha.get() {
+ return Some(sha);
+ }
+ let sha = InternedString::new(&self.head().ok()?.to_string());
+ self.current_sha.set(Some(sha));
+ Some(sha)
+ }
+
+ fn is_updated(&self) -> bool {
+ self.config.updated_sources().contains(&self.source_id)
+ }
+
+ fn mark_updated(&self) {
+ self.config.updated_sources().insert(self.source_id);
+ }
+}
+
+const LAST_UPDATED_FILE: &str = ".last-updated";
+
+impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
+ fn prepare(&self) -> CargoResult<()> {
+ self.repo()?; // create intermediate dirs and initialize the repo
+ Ok(())
+ }
+
+ fn index_path(&self) -> &Filesystem {
+ &self.index_path
+ }
+
+ fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path {
+ self.config.assert_package_cache_locked(path)
+ }
+
+    // `index_version` is a string representing the version of the file used to construct the cached copy.
+    // Older versions of Cargo used the single value of the hash of the HEAD commit as an `index_version`.
+    // This is technically correct but a little too conservative: if a new commit is fetched, all cached
+    // files need to be regenerated even if a particular file was not changed.
+    // However, if an old Cargo has written such a file, we still know how to read it, as long as we check for that hash value.
+ //
+ // Cargo now uses a hash of the file's contents as provided by git.
+ fn load(
+ &mut self,
+ _root: &Path,
+ path: &Path,
+ index_version: Option<&str>,
+ ) -> Poll<CargoResult<LoadResponse>> {
+ if self.needs_update {
+ return Poll::Pending;
+ }
+ // Check if the cache is valid.
+ let git_commit_hash = self.current_version();
+ if index_version.is_some() && index_version == git_commit_hash.as_deref() {
+ // This file was written by an old version of cargo, but it is still up-to-date.
+ return Poll::Ready(Ok(LoadResponse::CacheValid));
+ }
+ // Note that the index calls this method and the filesystem is locked
+ // in the index, so we don't need to worry about an `update_index`
+ // happening in a different process.
+ fn load_helper(
+ registry: &RemoteRegistry<'_>,
+ path: &Path,
+ index_version: Option<&str>,
+ ) -> CargoResult<LoadResponse> {
+ let repo = registry.repo()?;
+ let tree = registry.tree()?;
+ let entry = tree.get_path(path);
+ let entry = entry?;
+ let git_file_hash = Some(entry.id().to_string());
+
+ // Check if the cache is valid.
+ if index_version.is_some() && index_version == git_file_hash.as_deref() {
+ return Ok(LoadResponse::CacheValid);
+ }
+
+ let object = entry.to_object(repo)?;
+ let blob = match object.as_blob() {
+ Some(blob) => blob,
+ None => anyhow::bail!("path `{}` is not a blob in the git repo", path.display()),
+ };
+
+ Ok(LoadResponse::Data {
+ raw_data: blob.content().to_vec(),
+ index_version: git_file_hash,
+ })
+ }
+
+ match load_helper(&self, path, index_version) {
+ Ok(result) => Poll::Ready(Ok(result)),
+ Err(_) if !self.is_updated() => {
+ // If git returns an error and we haven't updated the repo, return
+ // pending to allow an update to try again.
+ self.needs_update = true;
+ Poll::Pending
+ }
+ Err(e)
+ if e.downcast_ref::<git2::Error>()
+ .map(|e| e.code() == git2::ErrorCode::NotFound)
+ .unwrap_or_default() =>
+ {
+ // The repo has been updated and the file does not exist.
+ Poll::Ready(Ok(LoadResponse::NotFound))
+ }
+ Err(e) => Poll::Ready(Err(e)),
+ }
+ }
+
+ fn config(&mut self) -> Poll<CargoResult<Option<RegistryConfig>>> {
+ debug!("loading config");
+ self.prepare()?;
+ self.config.assert_package_cache_locked(&self.index_path);
+ match ready!(self.load(Path::new(""), Path::new("config.json"), None)?) {
+ LoadResponse::Data { raw_data, .. } => {
+ trace!("config loaded");
+ let mut cfg: RegistryConfig = serde_json::from_slice(&raw_data)?;
+ if !self.config.cli_unstable().registry_auth {
+ cfg.auth_required = false;
+ }
+ Poll::Ready(Ok(Some(cfg)))
+ }
+ _ => Poll::Ready(Ok(None)),
+ }
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ if !self.needs_update {
+ return Ok(());
+ }
+
+ self.needs_update = false;
+
+ // Make sure the index is only updated once per session since it is an
+ // expensive operation. This generally only happens when the resolver
+ // is run multiple times, such as during `cargo publish`.
+ if self.is_updated() {
+ return Ok(());
+ }
+ self.mark_updated();
+
+ if self.config.offline() {
+ return Ok(());
+ }
+ if self.config.cli_unstable().no_index_update {
+ return Ok(());
+ }
+
+ debug!("updating the index");
+
+ // Ensure that we'll actually be able to acquire an HTTP handle later on
+ // once we start trying to download crates. This will weed out any
+ // problems with `.cargo/config` configuration related to HTTP.
+ //
+ // This way if there's a problem the error gets printed before we even
+ // hit the index, which may not actually read this configuration.
+ self.config.http()?;
+
+ self.prepare()?;
+ self.head.set(None);
+ *self.tree.borrow_mut() = None;
+ self.current_sha.set(None);
+ let path = self.config.assert_package_cache_locked(&self.index_path);
+ if !self.quiet {
+ self.config
+ .shell()
+ .status("Updating", self.source_id.display_index())?;
+ }
+
+ // Fetch the latest version of our `index_git_ref` into the index
+ // checkout.
+ let url = self.source_id.url();
+ let repo = self.repo.borrow_mut().unwrap();
+ git::fetch(repo, url.as_str(), &self.index_git_ref, self.config)
+ .with_context(|| format!("failed to fetch `{}`", url))?;
+
+ // Create a dummy file to record the mtime for when we updated the
+ // index.
+ paths::create(&path.join(LAST_UPDATED_FILE))?;
+
+ Ok(())
+ }
+
+ fn invalidate_cache(&mut self) {
+        // To fully invalidate, undo `mark_updated`'s work.
+ self.needs_update = true;
+ }
+
+ fn set_quiet(&mut self, quiet: bool) {
+ self.quiet = quiet;
+ }
+
+ fn is_updated(&self) -> bool {
+ self.is_updated()
+ }
+
+ fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult<MaybeLock> {
+ let registry_config = loop {
+ match self.config()? {
+ Poll::Pending => self.block_until_ready()?,
+ Poll::Ready(cfg) => break cfg.unwrap(),
+ }
+ };
+
+ download::download(
+ &self.cache_path,
+ &self.config,
+ pkg,
+ checksum,
+ registry_config,
+ )
+ }
+
+ fn finish_download(
+ &mut self,
+ pkg: PackageId,
+ checksum: &str,
+ data: &[u8],
+ ) -> CargoResult<File> {
+ download::finish_download(&self.cache_path, &self.config, pkg, checksum, data)
+ }
+
+ fn is_crate_downloaded(&self, pkg: PackageId) -> bool {
+ download::is_crate_downloaded(&self.cache_path, &self.config, pkg)
+ }
+}
+
+impl<'cfg> Drop for RemoteRegistry<'cfg> {
+ fn drop(&mut self) {
+ // Just be sure to drop this before our other fields
+ self.tree.borrow_mut().take();
+ }
+}
diff --git a/src/tools/cargo/src/cargo/sources/replaced.rs b/src/tools/cargo/src/cargo/sources/replaced.rs
new file mode 100644
index 000000000..13191d223
--- /dev/null
+++ b/src/tools/cargo/src/cargo/sources/replaced.rs
@@ -0,0 +1,141 @@
+use crate::core::source::MaybePackage;
+use crate::core::{Dependency, Package, PackageId, QueryKind, Source, SourceId, Summary};
+use crate::util::errors::CargoResult;
+use std::task::Poll;
+
+use anyhow::Context as _;
+
+pub struct ReplacedSource<'cfg> {
+ to_replace: SourceId,
+ replace_with: SourceId,
+ inner: Box<dyn Source + 'cfg>,
+}
+
+impl<'cfg> ReplacedSource<'cfg> {
+ pub fn new(
+ to_replace: SourceId,
+ replace_with: SourceId,
+ src: Box<dyn Source + 'cfg>,
+ ) -> ReplacedSource<'cfg> {
+ ReplacedSource {
+ to_replace,
+ replace_with,
+ inner: src,
+ }
+ }
+}
+
+impl<'cfg> Source for ReplacedSource<'cfg> {
+ fn source_id(&self) -> SourceId {
+ self.to_replace
+ }
+
+ fn replaced_source_id(&self) -> SourceId {
+ self.replace_with
+ }
+
+ fn supports_checksums(&self) -> bool {
+ self.inner.supports_checksums()
+ }
+
+ fn requires_precise(&self) -> bool {
+ self.inner.requires_precise()
+ }
+
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ let (replace_with, to_replace) = (self.replace_with, self.to_replace);
+ let dep = dep.clone().map_source(to_replace, replace_with);
+
+ self.inner
+ .query(&dep, kind, &mut |summary| {
+ f(summary.map_source(replace_with, to_replace))
+ })
+ .map_err(|e| {
+ e.context(format!(
+ "failed to query replaced source {}",
+ self.to_replace
+ ))
+ })
+ }
+
+ fn invalidate_cache(&mut self) {
+ self.inner.invalidate_cache()
+ }
+
+ fn set_quiet(&mut self, quiet: bool) {
+ self.inner.set_quiet(quiet);
+ }
+
+ fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+ let id = id.with_source_id(self.replace_with);
+ let pkg = self
+ .inner
+ .download(id)
+ .with_context(|| format!("failed to download replaced source {}", self.to_replace))?;
+ Ok(match pkg {
+ MaybePackage::Ready(pkg) => {
+ MaybePackage::Ready(pkg.map_source(self.replace_with, self.to_replace))
+ }
+ other @ MaybePackage::Download { .. } => other,
+ })
+ }
+
+ fn finish_download(&mut self, id: PackageId, data: Vec<u8>) -> CargoResult<Package> {
+ let id = id.with_source_id(self.replace_with);
+ let pkg = self
+ .inner
+ .finish_download(id, data)
+ .with_context(|| format!("failed to download replaced source {}", self.to_replace))?;
+ Ok(pkg.map_source(self.replace_with, self.to_replace))
+ }
+
+ fn fingerprint(&self, id: &Package) -> CargoResult<String> {
+ self.inner.fingerprint(id)
+ }
+
+ fn verify(&self, id: PackageId) -> CargoResult<()> {
+ let id = id.with_source_id(self.replace_with);
+ self.inner.verify(id)
+ }
+
+ fn describe(&self) -> String {
+ if self.replace_with.is_crates_io() && self.to_replace.is_crates_io() {
+            // Built-in source replacement of crates.io for the sparse registry or tests
+            // doesn't need a duplicate description (crates.io replacing crates.io).
+ self.inner.describe()
+ } else {
+ format!(
+ "{} (which is replacing {})",
+ self.inner.describe(),
+ self.to_replace
+ )
+ }
+ }
+
+ fn is_replaced(&self) -> bool {
+ true
+ }
+
+ fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) {
+ let pkgs = pkgs
+ .iter()
+ .map(|id| id.with_source_id(self.replace_with))
+ .collect::<Vec<_>>();
+ self.inner.add_to_yanked_whitelist(&pkgs);
+ }
+
+ fn is_yanked(&mut self, pkg: PackageId) -> Poll<CargoResult<bool>> {
+ self.inner.is_yanked(pkg)
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ self.inner
+ .block_until_ready()
+ .with_context(|| format!("failed to update replaced source {}", self.to_replace))
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/auth.rs b/src/tools/cargo/src/cargo/util/auth.rs
new file mode 100644
index 000000000..f19acaebe
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/auth.rs
@@ -0,0 +1,839 @@
+//! Registry authentication support.
+
+use crate::util::{config, config::ConfigKey, CanonicalUrl, CargoResult, Config, IntoUrl};
+use anyhow::{bail, format_err, Context as _};
+use cargo_util::ProcessError;
+use core::fmt;
+use pasetors::keys::{AsymmetricPublicKey, AsymmetricSecretKey};
+use pasetors::paserk::FormatAsPaserk;
+use serde::Deserialize;
+use std::collections::HashMap;
+use std::error::Error;
+use std::io::{Read, Write};
+use std::ops::Deref;
+use std::path::PathBuf;
+use std::process::{Command, Stdio};
+use time::format_description::well_known::Rfc3339;
+use time::OffsetDateTime;
+use url::Url;
+
+use crate::core::SourceId;
+use crate::ops::RegistryCredentialConfig;
+
+use super::config::CredentialCacheValue;
+
+/// A wrapper for values that should not be printed.
+///
+/// This type does not implement `Display`, and has a `Debug` impl that hides
+/// the contained value.
+///
+/// ```
+/// # use cargo::util::auth::Secret;
+/// let token = Secret::from("super secret string");
+/// assert_eq!(format!("{:?}", token), "Secret { inner: \"REDACTED\" }");
+/// ```
+///
+/// Currently, we write a borrowed `Secret<T>` as `Secret<&T>`.
+/// The [`as_deref`](Secret::as_deref) and [`owned`](Secret::owned) methods can
+/// be used to convert back and forth between `Secret<String>` and `Secret<&str>`.
+#[derive(Default, Clone, PartialEq, Eq)]
+pub struct Secret<T> {
+ inner: T,
+}
+
+impl<T> Secret<T> {
+ /// Unwraps the contained value.
+ ///
+ /// Use of this method marks the boundary of where the contained value is
+ /// hidden.
+ pub fn expose(self) -> T {
+ self.inner
+ }
+
+ /// Converts a `Secret<T>` to a `Secret<&T::Target>`.
+ /// ```
+ /// # use cargo::util::auth::Secret;
+ /// let owned: Secret<String> = Secret::from(String::from("token"));
+ /// let borrowed: Secret<&str> = owned.as_deref();
+ /// ```
+ pub fn as_deref(&self) -> Secret<&<T as Deref>::Target>
+ where
+ T: Deref,
+ {
+ Secret::from(self.inner.deref())
+ }
+
+ /// Converts a `Secret<T>` to a `Secret<&T>`.
+ pub fn as_ref(&self) -> Secret<&T> {
+ Secret::from(&self.inner)
+ }
+
+ /// Converts a `Secret<T>` to a `Secret<U>` by applying `f` to the contained value.
+ pub fn map<U, F>(self, f: F) -> Secret<U>
+ where
+ F: FnOnce(T) -> U,
+ {
+ Secret::from(f(self.inner))
+ }
+}
+
+impl<T: ToOwned + ?Sized> Secret<&T> {
+ /// Converts a `Secret` containing a borrowed type to a `Secret` containing the
+ /// corresponding owned type.
+ /// ```
+ /// # use cargo::util::auth::Secret;
+ /// let borrowed: Secret<&str> = Secret::from("token");
+ /// let owned: Secret<String> = borrowed.owned();
+ /// ```
+ pub fn owned(&self) -> Secret<<T as ToOwned>::Owned> {
+ Secret::from(self.inner.to_owned())
+ }
+}
+
+impl<T, E> Secret<Result<T, E>> {
+ /// Converts a `Secret<Result<T, E>>` to a `Result<Secret<T>, E>`.
+ pub fn transpose(self) -> Result<Secret<T>, E> {
+ self.inner.map(|v| Secret::from(v))
+ }
+}
+
+impl<T: AsRef<str>> Secret<T> {
+ /// Checks if the contained value is empty.
+ pub fn is_empty(&self) -> bool {
+ self.inner.as_ref().is_empty()
+ }
+}
+
+impl<T> From<T> for Secret<T> {
+ fn from(inner: T) -> Self {
+ Self { inner }
+ }
+}
+
+impl<T> fmt::Debug for Secret<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Secret")
+ .field("inner", &"REDACTED")
+ .finish()
+ }
+}
+
+/// Get the credential configuration for a `SourceId`.
+pub fn registry_credential_config(
+ config: &Config,
+ sid: &SourceId,
+) -> CargoResult<RegistryCredentialConfig> {
+ #[derive(Deserialize)]
+ #[serde(rename_all = "kebab-case")]
+ struct RegistryConfig {
+ index: Option<String>,
+ token: Option<String>,
+ credential_process: Option<config::PathAndArgs>,
+ secret_key: Option<String>,
+ secret_key_subject: Option<String>,
+ #[serde(rename = "default")]
+ _default: Option<String>,
+ #[serde(rename = "protocol")]
+ _protocol: Option<String>,
+ }
+
+ log::trace!("loading credential config for {}", sid);
+ config.load_credentials()?;
+ if !sid.is_remote_registry() {
+ bail!(
+ "{} does not support API commands.\n\
+ Check for a source-replacement in .cargo/config.",
+ sid
+ );
+ }
+
+ // Handle crates.io specially, since it uses different configuration keys.
+ if sid.is_crates_io() {
+ config.check_registry_index_not_set()?;
+ let RegistryConfig {
+ token,
+ credential_process,
+ secret_key,
+ secret_key_subject,
+ ..
+ } = config.get::<RegistryConfig>("registry")?;
+ return registry_credential_config_inner(
+ true,
+ None,
+ token.map(Secret::from),
+ credential_process,
+ secret_key.map(Secret::from),
+ secret_key_subject,
+ config,
+ );
+ }
+
+ // Find the SourceId's name by its index URL. If environment variables
+ // are available they will be preferred over configuration values.
+ //
+ // The fundamental problem is that we only know the index url of the registry
+ // for certain. For example, an unnamed registry source can come from the `--index`
+ // command line argument, or from a Cargo.lock file. For this reason, we always
+ // attempt to discover the name by looking it up by the index URL.
+ //
+ // This also allows the authorization token for a registry to be set
+ // without knowing the registry name by using the _INDEX and _TOKEN
+ // environment variables.
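+    // For example (illustrative names and values only), setting
+    // `CARGO_REGISTRIES_MY_REGISTRY_INDEX` to a registry's index URL lets the
+    // matching `CARGO_REGISTRIES_MY_REGISTRY_TOKEN` be used without the
+    // registry ever being named in a configuration file.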
+ let name = {
+ // Discover names from environment variables.
+ let index = sid.canonical_url();
+ let mut names: Vec<_> = config
+ .env()
+ .filter_map(|(k, v)| {
+ Some((
+ k.strip_prefix("CARGO_REGISTRIES_")?
+ .strip_suffix("_INDEX")?,
+ v,
+ ))
+ })
+ .filter_map(|(k, v)| Some((k, CanonicalUrl::new(&v.into_url().ok()?).ok()?)))
+ .filter(|(_, v)| v == index)
+ .map(|(k, _)| k.to_lowercase())
+ .collect();
+
+ // Discover names from the configuration only if none were found in the environment.
+        if names.is_empty() {
+ names = config
+ .get::<HashMap<String, RegistryConfig>>("registries")?
+ .iter()
+ .filter_map(|(k, v)| Some((k, v.index.as_deref()?)))
+ .filter_map(|(k, v)| Some((k, CanonicalUrl::new(&v.into_url().ok()?).ok()?)))
+ .filter(|(_, v)| v == index)
+ .map(|(k, _)| k.to_string())
+ .collect();
+ }
+ names.sort();
+ match names.len() {
+ 0 => None,
+ 1 => Some(std::mem::take(&mut names[0])),
+ _ => anyhow::bail!(
+ "multiple registries are configured with the same index url '{}': {}",
+ &sid.as_url(),
+ names.join(", ")
+ ),
+ }
+ };
+
+ // It's possible to have a registry configured in a Cargo config file,
+ // then override it with configuration from environment variables.
+ // If the name doesn't match, leave a note to help the user understand
+ // the potentially confusing situation.
+ if let Some(name) = name.as_deref() {
+ if Some(name) != sid.alt_registry_key() {
+ config.shell().note(format!(
+ "name of alternative registry `{}` set to `{name}`",
+ sid.url()
+ ))?
+ }
+ }
+
+ let (token, credential_process, secret_key, secret_key_subject) = if let Some(name) = &name {
+ log::debug!("found alternative registry name `{name}` for {sid}");
+ let RegistryConfig {
+ token,
+ secret_key,
+ secret_key_subject,
+ credential_process,
+ ..
+ } = config.get::<RegistryConfig>(&format!("registries.{name}"))?;
+ (token, credential_process, secret_key, secret_key_subject)
+ } else {
+ log::debug!("no registry name found for {sid}");
+ (None, None, None, None)
+ };
+
+ registry_credential_config_inner(
+ false,
+ name.as_deref(),
+ token.map(Secret::from),
+ credential_process,
+ secret_key.map(Secret::from),
+ secret_key_subject,
+ config,
+ )
+}
+
+fn registry_credential_config_inner(
+ is_crates_io: bool,
+ name: Option<&str>,
+ token: Option<Secret<String>>,
+ credential_process: Option<config::PathAndArgs>,
+ secret_key: Option<Secret<String>>,
+ secret_key_subject: Option<String>,
+ config: &Config,
+) -> CargoResult<RegistryCredentialConfig> {
+ let credential_process =
+ credential_process.filter(|_| config.cli_unstable().credential_process);
+ let secret_key = secret_key.filter(|_| config.cli_unstable().registry_auth);
+ let secret_key_subject = secret_key_subject.filter(|_| config.cli_unstable().registry_auth);
+ let err_both = |token_key: &str, proc_key: &str| {
+ let registry = if is_crates_io {
+ "".to_string()
+ } else {
+ format!(" for registry `{}`", name.unwrap_or("UN-NAMED"))
+ };
+ Err(format_err!(
+ "both `{token_key}` and `{proc_key}` \
+ were specified in the config{registry}.\n\
+ Only one of these values may be set, remove one or the other to proceed.",
+ ))
+ };
+ Ok(
+ match (token, credential_process, secret_key, secret_key_subject) {
+ (Some(_), Some(_), _, _) => return err_both("token", "credential-process"),
+ (Some(_), _, Some(_), _) => return err_both("token", "secret-key"),
+ (_, Some(_), Some(_), _) => return err_both("credential-process", "secret-key"),
+ (_, _, None, Some(_)) => {
+ let registry = if is_crates_io {
+ "".to_string()
+ } else {
+ format!(" for registry `{}`", name.as_ref().unwrap())
+ };
+ return Err(format_err!(
+ "`secret-key-subject` was set but `secret-key` was not in the config{}.\n\
+ Either set the `secret-key` or remove the `secret-key-subject`.",
+ registry
+ ));
+ }
+ (Some(token), _, _, _) => RegistryCredentialConfig::Token(token),
+ (_, Some(process), _, _) => RegistryCredentialConfig::Process((
+ process.path.resolve_program(config),
+ process.args,
+ )),
+ (None, None, Some(key), subject) => {
+ RegistryCredentialConfig::AsymmetricKey((key, subject))
+ }
+ (None, None, None, _) => {
+ if !is_crates_io {
+ // If we couldn't find a registry-specific credential, try the global credential process.
+ if let Some(process) = config
+ .get::<Option<config::PathAndArgs>>("registry.credential-process")?
+ .filter(|_| config.cli_unstable().credential_process)
+ {
+ return Ok(RegistryCredentialConfig::Process((
+ process.path.resolve_program(config),
+ process.args,
+ )));
+ }
+ }
+ RegistryCredentialConfig::None
+ }
+ },
+ )
+}
+
+#[derive(Debug, PartialEq)]
+pub enum AuthorizationErrorReason {
+ TokenMissing,
+ TokenRejected,
+}
+
+impl fmt::Display for AuthorizationErrorReason {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ AuthorizationErrorReason::TokenMissing => write!(f, "no token found"),
+ AuthorizationErrorReason::TokenRejected => write!(f, "token rejected"),
+ }
+ }
+}
+
+/// An authorization error from accessing a registry.
+#[derive(Debug)]
+pub struct AuthorizationError {
+ /// Url that was attempted
+ pub sid: SourceId,
+ /// The `registry.default` config value.
+ pub default_registry: Option<String>,
+ /// Url where the user could log in.
+ pub login_url: Option<Url>,
+ /// Specific reason indicating what failed
+ pub reason: AuthorizationErrorReason,
+}
+impl Error for AuthorizationError {}
+impl fmt::Display for AuthorizationError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if self.sid.is_crates_io() {
+ let args = if self.default_registry.is_some() {
+ " --registry crates-io"
+ } else {
+ ""
+ };
+ write!(
+ f,
+ "{}, please run `cargo login{args}`\nor use environment variable CARGO_REGISTRY_TOKEN",
+ self.reason
+ )
+ } else if let Some(name) = self.sid.alt_registry_key() {
+ let key = ConfigKey::from_str(&format!("registries.{name}.token"));
+ write!(
+ f,
+ "{} for `{}`, please run `cargo login --registry {name}`\nor use environment variable {}",
+ self.reason,
+ self.sid.display_registry_name(),
+ key.as_env_key(),
+ )
+ } else if self.reason == AuthorizationErrorReason::TokenMissing {
+ write!(
+ f,
+ r#"{} for `{}`
+consider setting up an alternate registry in Cargo's configuration
+as described by https://doc.rust-lang.org/cargo/reference/registries.html
+
+[registries]
+my-registry = {{ index = "{}" }}
+"#,
+ self.reason,
+ self.sid.display_registry_name(),
+ self.sid.url()
+ )
+ } else {
+ write!(
+ f,
+ r#"{} for `{}`"#,
+ self.reason,
+ self.sid.display_registry_name(),
+ )
+ }
+ }
+}
+
+/// Store a token in the cache for future calls.
+pub fn cache_token(config: &Config, sid: &SourceId, token: Secret<&str>) {
+ let url = sid.canonical_url();
+ config.credential_cache().insert(
+ url.clone(),
+ CredentialCacheValue {
+ from_commandline: true,
+ independent_of_endpoint: true,
+ token_value: token.owned(),
+ },
+ );
+}
+
+/// Returns the token to use for the given registry.
+/// If a `login_url` is provided and a token is not available, the
+/// login_url will be included in the returned error.
+pub fn auth_token(
+ config: &Config,
+ sid: &SourceId,
+ login_url: Option<&Url>,
+ mutation: Option<Mutation<'_>>,
+) -> CargoResult<String> {
+ match auth_token_optional(config, sid, mutation.as_ref())? {
+ Some(token) => Ok(token.expose()),
+ None => Err(AuthorizationError {
+ sid: sid.clone(),
+ default_registry: config.default_registry()?,
+ login_url: login_url.cloned(),
+ reason: AuthorizationErrorReason::TokenMissing,
+ }
+ .into()),
+ }
+}
+
+/// Returns the token to use for the given registry.
+fn auth_token_optional(
+ config: &Config,
+ sid: &SourceId,
+ mutation: Option<&'_ Mutation<'_>>,
+) -> CargoResult<Option<Secret<String>>> {
+ let mut cache = config.credential_cache();
+ let url = sid.canonical_url();
+
+ if let Some(cache_token_value) = cache.get(url) {
+ // Tokens for endpoints that do not involve a mutation can always be reused.
+ // If the value is put in the cache by the command line, then we reuse it without looking at the configuration.
+ if cache_token_value.from_commandline
+ || cache_token_value.independent_of_endpoint
+ || mutation.is_none()
+ {
+ return Ok(Some(cache_token_value.token_value.clone()));
+ }
+ }
+
+ let credential = registry_credential_config(config, sid)?;
+ let (independent_of_endpoint, token) = match credential {
+ RegistryCredentialConfig::None => return Ok(None),
+ RegistryCredentialConfig::Token(config_token) => (true, config_token),
+ RegistryCredentialConfig::Process(process) => {
+ // todo: PASETO with process
+ let (independent_of_endpoint, token) =
+ run_command(config, &process, sid, Action::Get)?.unwrap();
+ (independent_of_endpoint, Secret::from(token))
+ }
+ RegistryCredentialConfig::AsymmetricKey((secret_key, secret_key_subject)) => {
+ let secret: Secret<AsymmetricSecretKey<pasetors::version3::V3>> =
+ secret_key.map(|key| key.as_str().try_into()).transpose()?;
+ let public: AsymmetricPublicKey<pasetors::version3::V3> = secret
+ .as_ref()
+ .map(|key| key.try_into())
+ .transpose()?
+ .expose();
+ let kip: pasetors::paserk::Id = (&public).try_into()?;
+ let iat = OffsetDateTime::now_utc();
+
+ let message = Message {
+ iat: &iat.format(&Rfc3339)?,
+ sub: secret_key_subject.as_deref(),
+ mutation: mutation.and_then(|m| {
+ Some(match m {
+ Mutation::PrePublish => return None,
+ Mutation::Publish { .. } => "publish",
+ Mutation::Yank { .. } => "yank",
+ Mutation::Unyank { .. } => "unyank",
+ Mutation::Owners { .. } => "owners",
+ })
+ }),
+ name: mutation.and_then(|m| {
+ Some(match m {
+ Mutation::PrePublish => return None,
+ Mutation::Publish { name, .. }
+ | Mutation::Yank { name, .. }
+ | Mutation::Unyank { name, .. }
+ | Mutation::Owners { name, .. } => *name,
+ })
+ }),
+ vers: mutation.and_then(|m| {
+ Some(match m {
+ Mutation::PrePublish | Mutation::Owners { .. } => return None,
+ Mutation::Publish { vers, .. }
+ | Mutation::Yank { vers, .. }
+ | Mutation::Unyank { vers, .. } => *vers,
+ })
+ }),
+ cksum: mutation.and_then(|m| {
+ Some(match m {
+ Mutation::PrePublish
+ | Mutation::Yank { .. }
+ | Mutation::Unyank { .. }
+ | Mutation::Owners { .. } => return None,
+ Mutation::Publish { cksum, .. } => *cksum,
+ })
+ }),
+ challenge: None, // todo: PASETO with challenges
+ v: None,
+ };
+ let footer = Footer {
+ url: &sid.url().to_string(),
+ kip,
+ };
+
+ (
+ false,
+ secret
+ .map(|secret| {
+ pasetors::version3::PublicToken::sign(
+ &secret,
+ serde_json::to_string(&message)
+ .expect("cannot serialize")
+ .as_bytes(),
+ Some(
+ serde_json::to_string(&footer)
+ .expect("cannot serialize")
+ .as_bytes(),
+ ),
+ None,
+ )
+ })
+ .transpose()?,
+ )
+ }
+ };
+
+ if independent_of_endpoint || mutation.is_none() {
+ cache.insert(
+ url.clone(),
+ CredentialCacheValue {
+ from_commandline: false,
+ independent_of_endpoint,
+ token_value: token.clone(),
+ },
+ );
+ }
+ Ok(Some(token))
+}
+
+/// A record of the kind of operation that is in progress, for which a token should be generated.
+pub enum Mutation<'a> {
+    /// Before we generate a crate file for the user's attempt to publish,
+    /// we need to check that we are configured correctly to generate a token.
+    /// This variant is used to make sure that we can generate a token,
+    /// so we can error out early if the credentials are not configured correctly.
+ PrePublish,
+ /// The user is attempting to publish a crate.
+ Publish {
+ /// The name of the crate
+ name: &'a str,
+ /// The version of the crate
+ vers: &'a str,
+ /// The checksum of the crate file being uploaded
+ cksum: &'a str,
+ },
+ /// The user is attempting to yank a crate.
+ Yank {
+ /// The name of the crate
+ name: &'a str,
+ /// The version of the crate
+ vers: &'a str,
+ },
+ /// The user is attempting to unyank a crate.
+ Unyank {
+ /// The name of the crate
+ name: &'a str,
+ /// The version of the crate
+ vers: &'a str,
+ },
+ /// The user is attempting to modify the owners of a crate.
+ Owners {
+ /// The name of the crate
+ name: &'a str,
+ },
+}
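+
+// A minimal usage sketch (illustrative only; the real call sites live in the
+// registry ops code, which is not shown here). A publish request builds a
+// `Mutation::Publish` so that an asymmetric token can be scoped to the exact
+// crate file being uploaded:
+//
+//     let mutation = Mutation::Publish {
+//         name: "foo",
+//         vers: "0.1.0",
+//         cksum: "<sha256 of the .crate file>",
+//     };
+//     // `auth_token` refers to the enclosing entry point whose header is
+//     // truncated above; the exact call shape is assumed here.
+//     let token = auth_token(config, &sid, None, Some(mutation))?;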
+
+/// The main body of an asymmetric token as described in RFC 3231.
+#[derive(serde::Serialize)]
+struct Message<'a> {
+ iat: &'a str,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ sub: Option<&'a str>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ mutation: Option<&'a str>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ name: Option<&'a str>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ vers: Option<&'a str>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ cksum: Option<&'a str>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ challenge: Option<&'a str>,
+ /// This field is not yet used. This field can be set to a value >1 to indicate a breaking change in the token format.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ v: Option<u8>,
+}
+/// The footer of an asymmetric token as described in RFC 3231.
+#[derive(serde::Serialize)]
+struct Footer<'a> {
+ url: &'a str,
+ kip: pasetors::paserk::Id,
+}
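+
+// For illustration (all values below are made up), a signed publish request
+// would carry a message body roughly like:
+//
+//     {"iat":"2023-01-23T04:56:07Z","mutation":"publish","name":"foo",
+//      "vers":"0.1.0","cksum":"<sha256 of the .crate file>"}
+//
+// and a footer identifying the registry index and the signing key by its
+// PASERK key id:
+//
+//     {"url":"https://registry.example.com/index","kip":"k3.pid.<key id>"}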
+
+enum Action {
+ Get,
+ Store(String),
+ Erase,
+}
+
+/// Saves the given token.
+pub fn login(config: &Config, sid: &SourceId, token: RegistryCredentialConfig) -> CargoResult<()> {
+ match registry_credential_config(config, sid)? {
+ RegistryCredentialConfig::Process(process) => {
+ let token = token
+ .as_token()
+ .expect("credential_process cannot use login with a secret_key")
+ .expose()
+ .to_owned();
+ run_command(config, &process, sid, Action::Store(token))?;
+ }
+ _ => {
+ config::save_credentials(config, Some(token), &sid)?;
+ }
+ };
+ Ok(())
+}
+
+/// Checks that a secret key is valid, and returns the associated public key in Paserk format.
+pub(crate) fn paserk_public_from_paserk_secret(secret_key: Secret<&str>) -> Option<String> {
+ let secret: Secret<AsymmetricSecretKey<pasetors::version3::V3>> =
+ secret_key.map(|key| key.try_into()).transpose().ok()?;
+ let public: AsymmetricPublicKey<pasetors::version3::V3> = secret
+ .as_ref()
+ .map(|key| key.try_into())
+ .transpose()
+ .ok()?
+ .expose();
+ let mut paserk_pub_key = String::new();
+ FormatAsPaserk::fmt(&public, &mut paserk_pub_key).unwrap();
+ Some(paserk_pub_key)
+}
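+
+// Illustrative use (the actual call site is in the `cargo login` flow, not
+// shown here): validating a user-supplied secret key before it is saved.
+//
+//     let key: Secret<&str> = /* user-supplied PASERK secret key */;
+//     if paserk_public_from_paserk_secret(key).is_none() {
+//         // reject the key: it could not be parsed as a PASERK v3 secret key
+//     }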
+
+/// Removes the token for the given registry.
+pub fn logout(config: &Config, sid: &SourceId) -> CargoResult<()> {
+ match registry_credential_config(config, sid)? {
+ RegistryCredentialConfig::Process(process) => {
+ run_command(config, &process, sid, Action::Erase)?;
+ }
+ _ => {
+ config::save_credentials(config, None, &sid)?;
+ }
+ };
+ Ok(())
+}
+
+fn run_command(
+ config: &Config,
+ process: &(PathBuf, Vec<String>),
+ sid: &SourceId,
+ action: Action,
+) -> CargoResult<Option<(bool, String)>> {
+ let index_url = sid.url().as_str();
+ let cred_proc;
+ let (exe, args) = if process.0.to_str().unwrap_or("").starts_with("cargo:") {
+ cred_proc = sysroot_credential(config, process)?;
+ &cred_proc
+ } else {
+ process
+ };
+ if !args.iter().any(|arg| arg.contains("{action}")) {
+ let msg = |which| {
+ format!(
+ "credential process `{}` cannot be used to {}, \
+ the credential-process configuration value must pass the \
+ `{{action}}` argument in the config to support this command",
+ exe.display(),
+ which
+ )
+ };
+ match action {
+ Action::Get => {}
+ Action::Store(_) => bail!(msg("log in")),
+ Action::Erase => bail!(msg("log out")),
+ }
+ }
+ // todo: PASETO with process
+ let independent_of_endpoint = true;
+ let action_str = match action {
+ Action::Get => "get",
+ Action::Store(_) => "store",
+ Action::Erase => "erase",
+ };
+ let args: Vec<_> = args
+ .iter()
+ .map(|arg| {
+ arg.replace("{action}", action_str)
+ .replace("{index_url}", index_url)
+ })
+ .collect();
+
+ let mut cmd = Command::new(&exe);
+ cmd.args(args)
+ .env(crate::CARGO_ENV, config.cargo_exe()?)
+ .env("CARGO_REGISTRY_INDEX_URL", index_url);
+ if sid.is_crates_io() {
+ cmd.env("CARGO_REGISTRY_NAME_OPT", "crates-io");
+ } else if let Some(name) = sid.alt_registry_key() {
+ cmd.env("CARGO_REGISTRY_NAME_OPT", name);
+ }
+ match action {
+ Action::Get => {
+ cmd.stdout(Stdio::piped());
+ }
+ Action::Store(_) => {
+ cmd.stdin(Stdio::piped());
+ }
+ Action::Erase => {}
+ }
+ let mut child = cmd.spawn().with_context(|| {
+ let verb = match action {
+ Action::Get => "fetch",
+ Action::Store(_) => "store",
+ Action::Erase => "erase",
+ };
+ format!(
+ "failed to execute `{}` to {} authentication token for registry `{}`",
+ exe.display(),
+ verb,
+ sid.display_registry_name(),
+ )
+ })?;
+ let mut token = None;
+ match &action {
+ Action::Get => {
+ let mut buffer = String::new();
+ log::debug!("reading into buffer");
+ child
+ .stdout
+ .as_mut()
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .with_context(|| {
+ format!(
+ "failed to read token from registry credential process `{}`",
+ exe.display()
+ )
+ })?;
+ if let Some(end) = buffer.find('\n') {
+ if buffer.len() > end + 1 {
+ bail!(
+ "credential process `{}` returned more than one line of output; \
+ expected a single token",
+ exe.display()
+ );
+ }
+ buffer.truncate(end);
+ }
+ token = Some((independent_of_endpoint, buffer));
+ }
+ Action::Store(token) => {
+ writeln!(child.stdin.as_ref().unwrap(), "{}", token).with_context(|| {
+ format!(
+ "failed to send token to registry credential process `{}`",
+ exe.display()
+ )
+ })?;
+ }
+ Action::Erase => {}
+ }
+ let status = child.wait().with_context(|| {
+ format!(
+ "registry credential process `{}` exit failure",
+ exe.display()
+ )
+ })?;
+ if !status.success() {
+ let msg = match action {
+ Action::Get => "failed to authenticate to registry",
+ Action::Store(_) => "failed to store token to registry",
+ Action::Erase => "failed to erase token from registry",
+ };
+ return Err(ProcessError::new(
+ &format!(
+ "registry credential process `{}` {} `{}`",
+ exe.display(),
+ msg,
+ sid.display_registry_name()
+ ),
+ Some(status),
+ None,
+ )
+ .into());
+ }
+ Ok(token)
+}
+
+/// Gets the path to the libexec credential process in the sysroot.
+fn sysroot_credential(
+ config: &Config,
+ process: &(PathBuf, Vec<String>),
+) -> CargoResult<(PathBuf, Vec<String>)> {
+ let cred_name = process.0.to_str().unwrap().strip_prefix("cargo:").unwrap();
+ let cargo = config.cargo_exe()?;
+ let root = cargo
+ .parent()
+ .and_then(|p| p.parent())
+ .ok_or_else(|| format_err!("expected cargo path {}", cargo.display()))?;
+ let exe = root.join("libexec").join(format!(
+ "cargo-credential-{}{}",
+ cred_name,
+ std::env::consts::EXE_SUFFIX
+ ));
+ let mut args = process.1.clone();
+ if !args.iter().any(|arg| arg == "{action}") {
+ args.push("{action}".to_string());
+ }
+ Ok((exe, args))
+}
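+
+// For reference, a `credential-process` configuration that exercises the
+// `{action}` / `{index_url}` substitution in `run_command` might look like
+// this (the path and registry name are illustrative):
+//
+//     [registries.my-registry]
+//     credential-process = "/usr/libexec/my-credential-helper {action} {index_url}"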
diff --git a/src/tools/cargo/src/cargo/util/canonical_url.rs b/src/tools/cargo/src/cargo/util/canonical_url.rs
new file mode 100644
index 000000000..7516e0356
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/canonical_url.rs
@@ -0,0 +1,75 @@
+use crate::util::errors::CargoResult;
+use std::hash::{self, Hash};
+use url::Url;
+
+/// A newtype wrapper around `Url` which represents a "canonical" version of an
+/// original URL.
+///
+/// A "canonical" url is only intended for internal comparison purposes in
+/// Cargo. It's to help paper over mistakes such as depending on
+/// `github.com/foo/bar` vs `github.com/foo/bar.git`. This is **only** for
+/// internal purposes within Cargo and provides no means to actually read the
+/// underlying string value of the `Url` it contains. This is intentional,
+/// because all fetching should still happen within the context of the original
+/// URL.
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
+pub struct CanonicalUrl(Url);
+
+impl CanonicalUrl {
+ pub fn new(url: &Url) -> CargoResult<CanonicalUrl> {
+ let mut url = url.clone();
+
+ // cannot-be-a-base-urls (e.g., `github.com:rust-lang/rustfmt.git`)
+ // are not supported.
+ if url.cannot_be_a_base() {
+ anyhow::bail!(
+ "invalid url `{}`: cannot-be-a-base-URLs are not supported",
+ url
+ )
+ }
+
+ // Strip a trailing slash.
+ if url.path().ends_with('/') {
+ url.path_segments_mut().unwrap().pop_if_empty();
+ }
+
+ // For GitHub URLs specifically, just lower-case everything. GitHub
+ // treats both the same, but they hash differently, and we're gonna be
+ // hashing them. This wants a more general solution, and also we're
+ // almost certainly not using the same case conversion rules that GitHub
+ // does. (See issue #84)
+ if url.host_str() == Some("github.com") {
+ url = format!("https{}", &url[url::Position::AfterScheme..])
+ .parse()
+ .unwrap();
+ let path = url.path().to_lowercase();
+ url.set_path(&path);
+ }
+
+ // Repos can generally be accessed with or without `.git` extension.
+ let needs_chopping = url.path().ends_with(".git");
+ if needs_chopping {
+ let last = {
+ let last = url.path_segments().unwrap().next_back().unwrap();
+ last[..last.len() - 4].to_owned()
+ };
+ url.path_segments_mut().unwrap().pop().push(&last);
+ }
+
+ Ok(CanonicalUrl(url))
+ }
+
+    /// Returns the raw canonicalized URL. Beware that this should never be
+    /// displayed or used for fetching; it should only be used for internal
+    /// data structures, hashes, and the like.
+ pub fn raw_canonicalized_url(&self) -> &Url {
+ &self.0
+ }
+}
+
+// See comment in `source_id.rs` for why we explicitly use `as_str()` here.
+impl Hash for CanonicalUrl {
+ fn hash<S: hash::Hasher>(&self, into: &mut S) {
+ self.0.as_str().hash(into);
+ }
+}
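+
+// As an illustration of the transformations in `CanonicalUrl::new`, all of the
+// following URLs canonicalize to `https://github.com/foo/bar` and therefore
+// compare (and hash) equal:
+//
+//     https://github.com/foo/bar
+//     https://github.com/Foo/Bar.git
+//     https://github.com/foo/bar/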
diff --git a/src/tools/cargo/src/cargo/util/command_prelude.rs b/src/tools/cargo/src/cargo/util/command_prelude.rs
new file mode 100644
index 000000000..2145dbdee
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/command_prelude.rs
@@ -0,0 +1,801 @@
+use crate::core::compiler::{BuildConfig, MessageFormat, TimingOutput};
+use crate::core::resolver::CliFeatures;
+use crate::core::{Edition, Workspace};
+use crate::ops::{CompileFilter, CompileOptions, NewOptions, Packages, VersionControl};
+use crate::util::important_paths::find_root_manifest_for_wd;
+use crate::util::interning::InternedString;
+use crate::util::restricted_names::is_glob_pattern;
+use crate::util::toml::{StringOrVec, TomlProfile};
+use crate::util::validate_package_name;
+use crate::util::{
+ print_available_benches, print_available_binaries, print_available_examples,
+ print_available_packages, print_available_tests,
+};
+use crate::CargoResult;
+use anyhow::bail;
+use cargo_util::paths;
+use std::ffi::{OsStr, OsString};
+use std::path::PathBuf;
+
+pub use crate::core::compiler::CompileMode;
+pub use crate::{CliError, CliResult, Config};
+pub use clap::{value_parser, Arg, ArgAction, ArgMatches};
+
+pub use clap::Command;
+
+pub trait CommandExt: Sized {
+ fn _arg(self, arg: Arg) -> Self;
+
+    /// Do not use this method; it exists only for backwards compatibility.
+    /// Use `arg_package_spec_no_all` instead.
+ fn arg_package_spec(
+ self,
+ package: &'static str,
+ all: &'static str,
+ exclude: &'static str,
+ ) -> Self {
+ self.arg_package_spec_no_all(package, all, exclude)
+ ._arg(flag("all", "Alias for --workspace (deprecated)"))
+ }
+
+ /// Variant of arg_package_spec that does not include the `--all` flag
+ /// (but does include `--workspace`). Used to avoid confusion with
+ /// historical uses of `--all`.
+ fn arg_package_spec_no_all(
+ self,
+ package: &'static str,
+ all: &'static str,
+ exclude: &'static str,
+ ) -> Self {
+ self.arg_package_spec_simple(package)
+ ._arg(flag("workspace", all))
+ ._arg(multi_opt("exclude", "SPEC", exclude))
+ }
+
+ fn arg_package_spec_simple(self, package: &'static str) -> Self {
+ self._arg(optional_multi_opt("package", "SPEC", package).short('p'))
+ }
+
+ fn arg_package(self, package: &'static str) -> Self {
+ self._arg(
+ optional_opt("package", package)
+ .short('p')
+ .value_name("SPEC"),
+ )
+ }
+
+ fn arg_jobs(self) -> Self {
+ self._arg(
+ opt("jobs", "Number of parallel jobs, defaults to # of CPUs")
+ .short('j')
+ .value_name("N")
+ .allow_hyphen_values(true),
+ )
+ ._arg(flag(
+ "keep-going",
+ "Do not abort the build as soon as there is an error (unstable)",
+ ))
+ }
+
+ fn arg_targets_all(
+ self,
+ lib: &'static str,
+ bin: &'static str,
+ bins: &'static str,
+ example: &'static str,
+ examples: &'static str,
+ test: &'static str,
+ tests: &'static str,
+ bench: &'static str,
+ benches: &'static str,
+ all: &'static str,
+ ) -> Self {
+ self.arg_targets_lib_bin_example(lib, bin, bins, example, examples)
+ ._arg(optional_multi_opt("test", "NAME", test))
+ ._arg(flag("tests", tests))
+ ._arg(optional_multi_opt("bench", "NAME", bench))
+ ._arg(flag("benches", benches))
+ ._arg(flag("all-targets", all))
+ }
+
+ fn arg_targets_lib_bin_example(
+ self,
+ lib: &'static str,
+ bin: &'static str,
+ bins: &'static str,
+ example: &'static str,
+ examples: &'static str,
+ ) -> Self {
+ self._arg(flag("lib", lib))
+ ._arg(optional_multi_opt("bin", "NAME", bin))
+ ._arg(flag("bins", bins))
+ ._arg(optional_multi_opt("example", "NAME", example))
+ ._arg(flag("examples", examples))
+ }
+
+ fn arg_targets_bins_examples(
+ self,
+ bin: &'static str,
+ bins: &'static str,
+ example: &'static str,
+ examples: &'static str,
+ ) -> Self {
+ self._arg(optional_multi_opt("bin", "NAME", bin))
+ ._arg(flag("bins", bins))
+ ._arg(optional_multi_opt("example", "NAME", example))
+ ._arg(flag("examples", examples))
+ }
+
+ fn arg_targets_bin_example(self, bin: &'static str, example: &'static str) -> Self {
+ self._arg(optional_multi_opt("bin", "NAME", bin))
+ ._arg(optional_multi_opt("example", "NAME", example))
+ }
+
+ fn arg_features(self) -> Self {
+ self._arg(
+ multi_opt(
+ "features",
+ "FEATURES",
+ "Space or comma separated list of features to activate",
+ )
+ .short('F'),
+ )
+ ._arg(flag("all-features", "Activate all available features"))
+ ._arg(flag(
+ "no-default-features",
+ "Do not activate the `default` feature",
+ ))
+ }
+
+ fn arg_release(self, release: &'static str) -> Self {
+ self._arg(flag("release", release).short('r'))
+ }
+
+ fn arg_profile(self, profile: &'static str) -> Self {
+ self._arg(opt("profile", profile).value_name("PROFILE-NAME"))
+ }
+
+ fn arg_doc(self, doc: &'static str) -> Self {
+ self._arg(flag("doc", doc))
+ }
+
+ fn arg_target_triple(self, target: &'static str) -> Self {
+ self._arg(multi_opt("target", "TRIPLE", target))
+ }
+
+ fn arg_target_dir(self) -> Self {
+ self._arg(
+ opt("target-dir", "Directory for all generated artifacts").value_name("DIRECTORY"),
+ )
+ }
+
+ fn arg_manifest_path(self) -> Self {
+ self._arg(opt("manifest-path", "Path to Cargo.toml").value_name("PATH"))
+ }
+
+ fn arg_message_format(self) -> Self {
+ self._arg(multi_opt("message-format", "FMT", "Error format"))
+ }
+
+ fn arg_build_plan(self) -> Self {
+ self._arg(flag(
+ "build-plan",
+ "Output the build plan in JSON (unstable)",
+ ))
+ }
+
+ fn arg_unit_graph(self) -> Self {
+ self._arg(flag("unit-graph", "Output build graph in JSON (unstable)"))
+ }
+
+ fn arg_new_opts(self) -> Self {
+ self._arg(
+ opt(
+ "vcs",
+ "Initialize a new repository for the given version \
+ control system (git, hg, pijul, or fossil) or do not \
+ initialize any version control at all (none), overriding \
+ a global configuration.",
+ )
+ .value_name("VCS")
+ .value_parser(["git", "hg", "pijul", "fossil", "none"]),
+ )
+ ._arg(flag("bin", "Use a binary (application) template [default]"))
+ ._arg(flag("lib", "Use a library template"))
+ ._arg(
+ opt("edition", "Edition to set for the crate generated")
+ .value_parser(Edition::CLI_VALUES)
+ .value_name("YEAR"),
+ )
+ ._arg(
+ opt(
+ "name",
+ "Set the resulting package name, defaults to the directory name",
+ )
+ .value_name("NAME"),
+ )
+ }
+
+ fn arg_index(self) -> Self {
+ self._arg(opt("index", "Registry index URL to upload the package to").value_name("INDEX"))
+ }
+
+ fn arg_dry_run(self, dry_run: &'static str) -> Self {
+ self._arg(flag("dry-run", dry_run))
+ }
+
+ fn arg_ignore_rust_version(self) -> Self {
+ self._arg(flag(
+ "ignore-rust-version",
+ "Ignore `rust-version` specification in packages",
+ ))
+ }
+
+ fn arg_future_incompat_report(self) -> Self {
+ self._arg(flag(
+ "future-incompat-report",
+ "Outputs a future incompatibility report at the end of the build",
+ ))
+ }
+
+ fn arg_quiet(self) -> Self {
+ self._arg(flag("quiet", "Do not print cargo log messages").short('q'))
+ }
+
+ fn arg_timings(self) -> Self {
+ self._arg(
+ optional_opt(
+ "timings",
+ "Timing output formats (unstable) (comma separated): html, json",
+ )
+ .value_name("FMTS")
+ .require_equals(true),
+ )
+ }
+}
+
+impl CommandExt for Command {
+ fn _arg(self, arg: Arg) -> Self {
+ self.arg(arg)
+ }
+}
+
+pub fn flag(name: &'static str, help: &'static str) -> Arg {
+ Arg::new(name)
+ .long(name)
+ .help(help)
+ .action(ArgAction::SetTrue)
+}
+
+pub fn opt(name: &'static str, help: &'static str) -> Arg {
+ Arg::new(name).long(name).help(help).action(ArgAction::Set)
+}
+
+pub fn optional_opt(name: &'static str, help: &'static str) -> Arg {
+ opt(name, help).num_args(0..=1)
+}
+
+pub fn optional_multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Arg {
+ opt(name, help)
+ .value_name(value_name)
+ .num_args(0..=1)
+ .action(ArgAction::Append)
+}
+
+pub fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Arg {
+ opt(name, help)
+ .value_name(value_name)
+ .action(ArgAction::Append)
+}
+
+pub fn subcommand(name: &'static str) -> Command {
+ Command::new(name)
+}
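+
+// A minimal sketch of how these builder helpers compose (the subcommand name
+// and help strings are illustrative, not taken from a real cargo subcommand):
+//
+//     let cmd = subcommand("frobnicate")
+//         .arg_quiet()
+//         .arg_features()
+//         .arg_package_spec_simple("Package to frobnicate");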
+
+/// Determines whether or not to gate `--profile` as unstable when resolving it.
+pub enum ProfileChecking {
+ /// `cargo rustc` historically has allowed "test", "bench", and "check". This
+ /// variant explicitly allows those.
+ LegacyRustc,
+    /// `cargo check` and `cargo fix` historically have allowed "test". This variant
+    /// explicitly allows that on stable.
+ LegacyTestOnly,
+ /// All other commands, which allow any valid custom named profile.
+ Custom,
+}
+
+pub trait ArgMatchesExt {
+ fn value_of_u32(&self, name: &str) -> CargoResult<Option<u32>> {
+ let arg = match self._value_of(name) {
+ None => None,
+ Some(arg) => Some(arg.parse::<u32>().map_err(|_| {
+ clap::Error::raw(
+ clap::error::ErrorKind::ValueValidation,
+ format!("Invalid value: could not parse `{}` as a number", arg),
+ )
+ })?),
+ };
+ Ok(arg)
+ }
+
+ fn value_of_i32(&self, name: &str) -> CargoResult<Option<i32>> {
+ let arg = match self._value_of(name) {
+ None => None,
+ Some(arg) => Some(arg.parse::<i32>().map_err(|_| {
+ clap::Error::raw(
+ clap::error::ErrorKind::ValueValidation,
+ format!("Invalid value: could not parse `{}` as a number", arg),
+ )
+ })?),
+ };
+ Ok(arg)
+ }
+
+ /// Returns value of the `name` command-line argument as an absolute path
+ fn value_of_path(&self, name: &str, config: &Config) -> Option<PathBuf> {
+ self._value_of(name).map(|path| config.cwd().join(path))
+ }
+
+ fn root_manifest(&self, config: &Config) -> CargoResult<PathBuf> {
+ if let Some(path) = self.value_of_path("manifest-path", config) {
+ // In general, we try to avoid normalizing paths in Cargo,
+ // but in this particular case we need it to fix #3586.
+ let path = paths::normalize_path(&path);
+ if !path.ends_with("Cargo.toml") {
+ anyhow::bail!("the manifest-path must be a path to a Cargo.toml file")
+ }
+ if !path.exists() {
+ anyhow::bail!(
+ "manifest path `{}` does not exist",
+ self._value_of("manifest-path").unwrap()
+ )
+ }
+ return Ok(path);
+ }
+ find_root_manifest_for_wd(config.cwd())
+ }
+
+ fn workspace<'a>(&self, config: &'a Config) -> CargoResult<Workspace<'a>> {
+ let root = self.root_manifest(config)?;
+ let mut ws = Workspace::new(&root, config)?;
+ if config.cli_unstable().avoid_dev_deps {
+ ws.set_require_optional_deps(false);
+ }
+ Ok(ws)
+ }
+
+ fn jobs(&self) -> CargoResult<Option<i32>> {
+ self.value_of_i32("jobs")
+ }
+
+ fn verbose(&self) -> u32 {
+ self._count("verbose")
+ }
+
+ fn dry_run(&self) -> bool {
+ self.flag("dry-run")
+ }
+
+ fn keep_going(&self) -> bool {
+ self.flag("keep-going")
+ }
+
+ fn targets(&self) -> Vec<String> {
+ self._values_of("target")
+ }
+
+ fn get_profile_name(
+ &self,
+ config: &Config,
+ default: &str,
+ profile_checking: ProfileChecking,
+ ) -> CargoResult<InternedString> {
+ let specified_profile = self._value_of("profile");
+
+ // Check for allowed legacy names.
+ // This is an early exit, since it allows combination with `--release`.
+ match (specified_profile, profile_checking) {
+ // `cargo rustc` has legacy handling of these names
+ (Some(name @ ("dev" | "test" | "bench" | "check")), ProfileChecking::LegacyRustc)
+            // `cargo fix` and `cargo check` have legacy handling of this profile name
+ | (Some(name @ "test"), ProfileChecking::LegacyTestOnly) => {
+ if self.flag("release") {
+ config.shell().warn(
+ "the `--release` flag should not be specified with the `--profile` flag\n\
+ The `--release` flag will be ignored.\n\
+ This was historically accepted, but will become an error \
+ in a future release."
+ )?;
+ }
+ return Ok(InternedString::new(name));
+ }
+ _ => {}
+ }
+
+ let conflict = |flag: &str, equiv: &str, specified: &str| -> anyhow::Error {
+ anyhow::format_err!(
+ "conflicting usage of --profile={} and --{flag}\n\
+ The `--{flag}` flag is the same as `--profile={equiv}`.\n\
+ Remove one flag or the other to continue.",
+ specified,
+ flag = flag,
+ equiv = equiv
+ )
+ };
+
+ let name = match (self.flag("release"), self.flag("debug"), specified_profile) {
+ (false, false, None) => default,
+ (true, _, None | Some("release")) => "release",
+ (true, _, Some(name)) => return Err(conflict("release", "release", name)),
+ (_, true, None | Some("dev")) => "dev",
+ (_, true, Some(name)) => return Err(conflict("debug", "dev", name)),
+ // `doc` is separate from all the other reservations because
+ // [profile.doc] was historically allowed, but is deprecated and
+ // has no effect. To avoid potentially breaking projects, it is a
+ // warning in Cargo.toml, but since `--profile` is new, we can
+ // reject it completely here.
+ (_, _, Some("doc")) => {
+ bail!("profile `doc` is reserved and not allowed to be explicitly specified")
+ }
+ (_, _, Some(name)) => {
+ TomlProfile::validate_name(name)?;
+ name
+ }
+ };
+
+ Ok(InternedString::new(name))
+ }
+
+ fn packages_from_flags(&self) -> CargoResult<Packages> {
+ Packages::from_flags(
+ // TODO Integrate into 'workspace'
+ self.flag("workspace") || self.flag("all"),
+ self._values_of("exclude"),
+ self._values_of("package"),
+ )
+ }
+
+ fn compile_options(
+ &self,
+ config: &Config,
+ mode: CompileMode,
+ workspace: Option<&Workspace<'_>>,
+ profile_checking: ProfileChecking,
+ ) -> CargoResult<CompileOptions> {
+ let spec = self.packages_from_flags()?;
+ let mut message_format = None;
+ let default_json = MessageFormat::Json {
+ short: false,
+ ansi: false,
+ render_diagnostics: false,
+ };
+ let two_kinds_of_msg_format_err = "cannot specify two kinds of `message-format` arguments";
+ for fmt in self._values_of("message-format") {
+ for fmt in fmt.split(',') {
+ let fmt = fmt.to_ascii_lowercase();
+ match fmt.as_str() {
+ "json" => {
+ if message_format.is_some() {
+ bail!(two_kinds_of_msg_format_err);
+ }
+ message_format = Some(default_json);
+ }
+ "human" => {
+ if message_format.is_some() {
+ bail!(two_kinds_of_msg_format_err);
+ }
+ message_format = Some(MessageFormat::Human);
+ }
+ "short" => {
+ if message_format.is_some() {
+ bail!(two_kinds_of_msg_format_err);
+ }
+ message_format = Some(MessageFormat::Short);
+ }
+ "json-render-diagnostics" => {
+ if message_format.is_none() {
+ message_format = Some(default_json);
+ }
+ match &mut message_format {
+ Some(MessageFormat::Json {
+ render_diagnostics, ..
+ }) => *render_diagnostics = true,
+ _ => bail!(two_kinds_of_msg_format_err),
+ }
+ }
+ "json-diagnostic-short" => {
+ if message_format.is_none() {
+ message_format = Some(default_json);
+ }
+ match &mut message_format {
+ Some(MessageFormat::Json { short, .. }) => *short = true,
+ _ => bail!(two_kinds_of_msg_format_err),
+ }
+ }
+ "json-diagnostic-rendered-ansi" => {
+ if message_format.is_none() {
+ message_format = Some(default_json);
+ }
+ match &mut message_format {
+ Some(MessageFormat::Json { ansi, .. }) => *ansi = true,
+ _ => bail!(two_kinds_of_msg_format_err),
+ }
+ }
+ s => bail!("invalid message format specifier: `{}`", s),
+ }
+ }
+ }
+
+ let mut build_config = BuildConfig::new(
+ config,
+ self.jobs()?,
+ self.keep_going(),
+ &self.targets(),
+ mode,
+ )?;
+ build_config.message_format = message_format.unwrap_or(MessageFormat::Human);
+ build_config.requested_profile = self.get_profile_name(config, "dev", profile_checking)?;
+ build_config.build_plan = self.flag("build-plan");
+ build_config.unit_graph = self.flag("unit-graph");
+ build_config.future_incompat_report = self.flag("future-incompat-report");
+
+ if self._contains("timings") {
+ for timing_output in self._values_of("timings") {
+ for timing_output in timing_output.split(',') {
+ let timing_output = timing_output.to_ascii_lowercase();
+ let timing_output = match timing_output.as_str() {
+ "html" => {
+ config
+ .cli_unstable()
+ .fail_if_stable_opt("--timings=html", 7405)?;
+ TimingOutput::Html
+ }
+ "json" => {
+ config
+ .cli_unstable()
+ .fail_if_stable_opt("--timings=json", 7405)?;
+ TimingOutput::Json
+ }
+ s => bail!("invalid timings output specifier: `{}`", s),
+ };
+ build_config.timing_outputs.push(timing_output);
+ }
+ }
+ if build_config.timing_outputs.is_empty() {
+ build_config.timing_outputs.push(TimingOutput::Html);
+ }
+ }
+
+ if build_config.keep_going {
+ config
+ .cli_unstable()
+ .fail_if_stable_opt("--keep-going", 10496)?;
+ }
+ if build_config.build_plan {
+ config
+ .cli_unstable()
+ .fail_if_stable_opt("--build-plan", 5579)?;
+ };
+ if build_config.unit_graph {
+ config
+ .cli_unstable()
+ .fail_if_stable_opt("--unit-graph", 8002)?;
+ }
+
+ let opts = CompileOptions {
+ build_config,
+ cli_features: self.cli_features()?,
+ spec,
+ filter: CompileFilter::from_raw_arguments(
+ self.flag("lib"),
+ self._values_of("bin"),
+ self.flag("bins"),
+ self._values_of("test"),
+ self.flag("tests"),
+ self._values_of("example"),
+ self.flag("examples"),
+ self._values_of("bench"),
+ self.flag("benches"),
+ self.flag("all-targets"),
+ ),
+ target_rustdoc_args: None,
+ target_rustc_args: None,
+ target_rustc_crate_types: None,
+ rustdoc_document_private_items: false,
+ honor_rust_version: !self.flag("ignore-rust-version"),
+ };
+
+ if let Some(ws) = workspace {
+ self.check_optional_opts(ws, &opts)?;
+ } else if self.is_present_with_zero_values("package") {
+            // As of cargo 0.50.0, this won't occur, but if someone sneaks in
+            // we can still provide this informative message for them.
+ anyhow::bail!(
+ "\"--package <SPEC>\" requires a SPEC format value, \
+ which can be any package ID specifier in the dependency graph.\n\
+ Run `cargo help pkgid` for more information about SPEC format."
+ )
+ }
+
+ Ok(opts)
+ }
+
+ fn cli_features(&self) -> CargoResult<CliFeatures> {
+ CliFeatures::from_command_line(
+ &self._values_of("features"),
+ self.flag("all-features"),
+ !self.flag("no-default-features"),
+ )
+ }
+
+ fn compile_options_for_single_package(
+ &self,
+ config: &Config,
+ mode: CompileMode,
+ workspace: Option<&Workspace<'_>>,
+ profile_checking: ProfileChecking,
+ ) -> CargoResult<CompileOptions> {
+ let mut compile_opts = self.compile_options(config, mode, workspace, profile_checking)?;
+ let spec = self._values_of("package");
+ if spec.iter().any(is_glob_pattern) {
+ anyhow::bail!("Glob patterns on package selection are not supported.")
+ }
+ compile_opts.spec = Packages::Packages(spec);
+ Ok(compile_opts)
+ }
+
+ fn new_options(&self, config: &Config) -> CargoResult<NewOptions> {
+ let vcs = self._value_of("vcs").map(|vcs| match vcs {
+ "git" => VersionControl::Git,
+ "hg" => VersionControl::Hg,
+ "pijul" => VersionControl::Pijul,
+ "fossil" => VersionControl::Fossil,
+ "none" => VersionControl::NoVcs,
+ vcs => panic!("Impossible vcs: {:?}", vcs),
+ });
+ NewOptions::new(
+ vcs,
+ self.flag("bin"),
+ self.flag("lib"),
+ self.value_of_path("path", config).unwrap(),
+ self._value_of("name").map(|s| s.to_string()),
+ self._value_of("edition").map(|s| s.to_string()),
+ self.registry(config)?,
+ )
+ }
+
+ fn registry(&self, config: &Config) -> CargoResult<Option<String>> {
+ let registry = self._value_of("registry");
+ let index = self._value_of("index");
+ let result = match (registry, index) {
+ (None, None) => config.default_registry()?,
+ (None, Some(_)) => {
+ // If --index is set, then do not look at registry.default.
+ None
+ }
+ (Some(r), None) => {
+ validate_package_name(r, "registry name", "")?;
+ Some(r.to_string())
+ }
+ (Some(_), Some(_)) => {
+ bail!("both `--index` and `--registry` should not be set at the same time")
+ }
+ };
+ Ok(result)
+ }
+
+ fn index(&self) -> CargoResult<Option<String>> {
+ let index = self._value_of("index").map(|s| s.to_string());
+ Ok(index)
+ }
+
+ fn check_optional_opts(
+ &self,
+ workspace: &Workspace<'_>,
+ compile_opts: &CompileOptions,
+ ) -> CargoResult<()> {
+ if self.is_present_with_zero_values("package") {
+ print_available_packages(workspace)?
+ }
+
+ if self.is_present_with_zero_values("example") {
+ print_available_examples(workspace, compile_opts)?;
+ }
+
+ if self.is_present_with_zero_values("bin") {
+ print_available_binaries(workspace, compile_opts)?;
+ }
+
+ if self.is_present_with_zero_values("bench") {
+ print_available_benches(workspace, compile_opts)?;
+ }
+
+ if self.is_present_with_zero_values("test") {
+ print_available_tests(workspace, compile_opts)?;
+ }
+
+ Ok(())
+ }
+
+ fn is_present_with_zero_values(&self, name: &str) -> bool {
+ self._contains(name) && self._value_of(name).is_none()
+ }
+
+ fn flag(&self, name: &str) -> bool;
+
+ fn _value_of(&self, name: &str) -> Option<&str>;
+
+ fn _values_of(&self, name: &str) -> Vec<String>;
+
+ fn _value_of_os(&self, name: &str) -> Option<&OsStr>;
+
+ fn _values_of_os(&self, name: &str) -> Vec<OsString>;
+
+ fn _count(&self, name: &str) -> u32;
+
+ fn _contains(&self, name: &str) -> bool;
+}
+
+impl<'a> ArgMatchesExt for ArgMatches {
+ fn flag(&self, name: &str) -> bool {
+ ignore_unknown(self.try_get_one::<bool>(name))
+ .copied()
+ .unwrap_or(false)
+ }
+
+ fn _value_of(&self, name: &str) -> Option<&str> {
+ ignore_unknown(self.try_get_one::<String>(name)).map(String::as_str)
+ }
+
+ fn _value_of_os(&self, name: &str) -> Option<&OsStr> {
+ ignore_unknown(self.try_get_one::<OsString>(name)).map(OsString::as_os_str)
+ }
+
+ fn _values_of(&self, name: &str) -> Vec<String> {
+ ignore_unknown(self.try_get_many::<String>(name))
+ .unwrap_or_default()
+ .cloned()
+ .collect()
+ }
+
+ fn _values_of_os(&self, name: &str) -> Vec<OsString> {
+ ignore_unknown(self.try_get_many::<OsString>(name))
+ .unwrap_or_default()
+ .cloned()
+ .collect()
+ }
+
+ fn _count(&self, name: &str) -> u32 {
+ *ignore_unknown(self.try_get_one::<u8>(name)).expect("defaulted by clap") as u32
+ }
+
+ fn _contains(&self, name: &str) -> bool {
+ ignore_unknown(self.try_contains_id(name))
+ }
+}
+
+pub fn values(args: &ArgMatches, name: &str) -> Vec<String> {
+ args._values_of(name)
+}
+
+pub fn values_os(args: &ArgMatches, name: &str) -> Vec<OsString> {
+ args._values_of_os(name)
+}
+
+#[track_caller]
+pub fn ignore_unknown<T: Default>(r: Result<T, clap::parser::MatchesError>) -> T {
+ match r {
+ Ok(t) => t,
+ Err(clap::parser::MatchesError::UnknownArgument { .. }) => Default::default(),
+ Err(e) => {
+ panic!("Mismatch between definition and access: {}", e);
+ }
+ }
+}
+
+#[derive(PartialEq, Eq, PartialOrd, Ord)]
+pub enum CommandInfo {
+ BuiltIn { about: Option<String> },
+ External { path: PathBuf },
+ Alias { target: StringOrVec },
+}
diff --git a/src/tools/cargo/src/cargo/util/config/de.rs b/src/tools/cargo/src/cargo/util/config/de.rs
new file mode 100644
index 000000000..a9147ab03
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/config/de.rs
@@ -0,0 +1,633 @@
+//! Support for deserializing configuration via `serde`
+
+use crate::util::config::value;
+use crate::util::config::{Config, ConfigError, ConfigKey};
+use crate::util::config::{ConfigValue as CV, Definition, Value};
+use serde::{de, de::IntoDeserializer};
+use std::collections::HashSet;
+use std::vec;
+
+/// Serde deserializer used to convert config values to a target type using
+/// `Config::get`.
+#[derive(Clone)]
+pub(super) struct Deserializer<'config> {
+ pub(super) config: &'config Config,
+ /// The current key being deserialized.
+ pub(super) key: ConfigKey,
+ /// Whether or not this key part is allowed to be an inner table. For
+ /// example, `profile.dev.build-override` needs to check if
+ /// CARGO_PROFILE_DEV_BUILD_OVERRIDE_ prefixes exist. But
+ /// CARGO_BUILD_TARGET should not check for prefixes because it would
+ /// collide with CARGO_BUILD_TARGET_DIR. See `ConfigMapAccess` for
+ /// details.
+ pub(super) env_prefix_ok: bool,
+}
+
+macro_rules! deserialize_method {
+ ($method:ident, $visit:ident, $getter:ident) => {
+ fn $method<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ let v = self
+ .config
+ .$getter(&self.key)?
+ .ok_or_else(|| ConfigError::missing(&self.key))?;
+ let Value { val, definition } = v;
+ let res: Result<V::Value, ConfigError> = visitor.$visit(val);
+ res.map_err(|e| e.with_key_context(&self.key, definition))
+ }
+ };
+}
+
+impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> {
+ type Error = ConfigError;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ let cv = self.config.get_cv_with_env(&self.key)?;
+ if let Some(cv) = cv {
+ let res: (Result<V::Value, ConfigError>, Definition) = match cv {
+ CV::Integer(i, def) => (visitor.visit_i64(i), def),
+ CV::String(s, def) => (visitor.visit_string(s), def),
+ CV::List(_, def) => (visitor.visit_seq(ConfigSeqAccess::new(self.clone())?), def),
+ CV::Table(_, def) => (
+ visitor.visit_map(ConfigMapAccess::new_map(self.clone())?),
+ def,
+ ),
+ CV::Boolean(b, def) => (visitor.visit_bool(b), def),
+ };
+ let (res, def) = res;
+ return res.map_err(|e| e.with_key_context(&self.key, def));
+ }
+ Err(ConfigError::missing(&self.key))
+ }
+
+ deserialize_method!(deserialize_bool, visit_bool, get_bool);
+ deserialize_method!(deserialize_i8, visit_i64, get_integer);
+ deserialize_method!(deserialize_i16, visit_i64, get_integer);
+ deserialize_method!(deserialize_i32, visit_i64, get_integer);
+ deserialize_method!(deserialize_i64, visit_i64, get_integer);
+ deserialize_method!(deserialize_u8, visit_i64, get_integer);
+ deserialize_method!(deserialize_u16, visit_i64, get_integer);
+ deserialize_method!(deserialize_u32, visit_i64, get_integer);
+ deserialize_method!(deserialize_u64, visit_i64, get_integer);
+ deserialize_method!(deserialize_string, visit_string, get_string_priv);
+
+ fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ if self.config.has_key(&self.key, self.env_prefix_ok)? {
+ visitor.visit_some(self)
+ } else {
+ // Treat missing values as `None`.
+ visitor.visit_none()
+ }
+ }
+
+ fn deserialize_struct<V>(
+ self,
+ name: &'static str,
+ fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ // Match on the magical struct name/field names that are passed in to
+ // detect when we're deserializing `Value<T>`.
+ //
+ // See more comments in `value.rs` for the protocol used here.
+ if name == value::NAME && fields == value::FIELDS {
+ return visitor.visit_map(ValueDeserializer::new(self)?);
+ }
+ visitor.visit_map(ConfigMapAccess::new_struct(self, fields)?)
+ }
+
+ fn deserialize_map<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_map(ConfigMapAccess::new_map(self)?)
+ }
+
+ fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_seq(ConfigSeqAccess::new(self)?)
+ }
+
+ fn deserialize_tuple<V>(self, _len: usize, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_seq(ConfigSeqAccess::new(self)?)
+ }
+
+ fn deserialize_tuple_struct<V>(
+ self,
+ _name: &'static str,
+ _len: usize,
+ visitor: V,
+ ) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_seq(ConfigSeqAccess::new(self)?)
+ }
+
+ fn deserialize_newtype_struct<V>(
+ self,
+ name: &'static str,
+ visitor: V,
+ ) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ let merge = if name == "StringList" {
+ true
+ } else if name == "UnmergedStringList" {
+ false
+ } else {
+ return visitor.visit_newtype_struct(self);
+ };
+
+ let vals = self.config.get_list_or_string(&self.key, merge)?;
+ let vals: Vec<String> = vals.into_iter().map(|vd| vd.0).collect();
+ visitor.visit_newtype_struct(vals.into_deserializer())
+ }
+
+ fn deserialize_enum<V>(
+ self,
+ _name: &'static str,
+ _variants: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ let value = self
+ .config
+ .get_string_priv(&self.key)?
+ .ok_or_else(|| ConfigError::missing(&self.key))?;
+
+ let Value { val, definition } = value;
+ visitor
+ .visit_enum(val.into_deserializer())
+ .map_err(|e: ConfigError| e.with_key_context(&self.key, definition))
+ }
+
+ // These aren't really supported, yet.
+ serde::forward_to_deserialize_any! {
+ f32 f64 char str bytes
+ byte_buf unit unit_struct
+ identifier ignored_any
+ }
+}
+
+struct ConfigMapAccess<'config> {
+ de: Deserializer<'config>,
+ /// The fields that this map should deserialize.
+ fields: Vec<KeyKind>,
+ /// Current field being deserialized.
+ field_index: usize,
+}
+
+#[derive(Debug, PartialEq, Eq, Hash)]
+enum KeyKind {
+ Normal(String),
+ CaseSensitive(String),
+}
+
+impl<'config> ConfigMapAccess<'config> {
+ fn new_map(de: Deserializer<'config>) -> Result<ConfigMapAccess<'config>, ConfigError> {
+ let mut fields = Vec::new();
+ if let Some(mut v) = de.config.get_table(&de.key)? {
+ // `v: Value<HashMap<String, CV>>`
+ for (key, _value) in v.val.drain() {
+ fields.push(KeyKind::CaseSensitive(key));
+ }
+ }
+ if de.config.cli_unstable().advanced_env {
+ // `CARGO_PROFILE_DEV_PACKAGE_`
+ let env_prefix = format!("{}_", de.key.as_env_key());
+ for env_key in de.config.env_keys() {
+ if env_key.starts_with(&env_prefix) {
+ // `CARGO_PROFILE_DEV_PACKAGE_bar_OPT_LEVEL = 3`
+ let rest = &env_key[env_prefix.len()..];
+ // `rest = bar_OPT_LEVEL`
+ let part = rest.splitn(2, '_').next().unwrap();
+ // `part = "bar"`
+ fields.push(KeyKind::CaseSensitive(part.to_string()));
+ }
+ }
+ }
+ Ok(ConfigMapAccess {
+ de,
+ fields,
+ field_index: 0,
+ })
+ }
+
+ fn new_struct(
+ de: Deserializer<'config>,
+ given_fields: &'static [&'static str],
+ ) -> Result<ConfigMapAccess<'config>, ConfigError> {
+ let table = de.config.get_table(&de.key)?;
+
+        // Assume that if we're deserializing a struct, it exhaustively lists all
+        // possible fields on this key that we're *supposed* to use, so take
+        // this opportunity to warn about any keys that aren't recognized as
+        // fields.
+ if let Some(v) = table.as_ref() {
+ let unused_keys = v
+ .val
+ .iter()
+ .filter(|(k, _v)| !given_fields.iter().any(|gk| gk == k));
+ for (unused_key, unused_value) in unused_keys {
+ de.config.shell().warn(format!(
+ "unused config key `{}.{}` in `{}`",
+ de.key,
+ unused_key,
+ unused_value.definition()
+ ))?;
+ }
+ }
+
+ let mut fields = HashSet::new();
+
+ // If the caller is interested in a field which we can provide from
+ // the environment, get it from there.
+ for field in given_fields {
+ let mut field_key = de.key.clone();
+ field_key.push(field);
+ for env_key in de.config.env_keys() {
+ if env_key.starts_with(field_key.as_env_key()) {
+ fields.insert(KeyKind::Normal(field.to_string()));
+ }
+ }
+ }
+
+ // Add everything from the config table we're interested in that we
+ // haven't already provided via an environment variable
+ if let Some(v) = table {
+ for key in v.val.keys() {
+ fields.insert(KeyKind::Normal(key.clone()));
+ }
+ }
+
+ Ok(ConfigMapAccess {
+ de,
+ fields: fields.into_iter().collect(),
+ field_index: 0,
+ })
+ }
+}
+
+impl<'de, 'config> de::MapAccess<'de> for ConfigMapAccess<'config> {
+ type Error = ConfigError;
+
+ fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
+ where
+ K: de::DeserializeSeed<'de>,
+ {
+ if self.field_index >= self.fields.len() {
+ return Ok(None);
+ }
+ let field = match &self.fields[self.field_index] {
+ KeyKind::Normal(s) | KeyKind::CaseSensitive(s) => s.as_str(),
+ };
+ seed.deserialize(field.into_deserializer()).map(Some)
+ }
+
+ fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::DeserializeSeed<'de>,
+ {
+ let field = &self.fields[self.field_index];
+ self.field_index += 1;
+ // Set this as the current key in the deserializer.
+ let field = match field {
+ KeyKind::Normal(field) => {
+ self.de.key.push(field);
+ field
+ }
+ KeyKind::CaseSensitive(field) => {
+ self.de.key.push_sensitive(field);
+ field
+ }
+ };
+ // Env vars that are a prefix of another with a dash/underscore cannot
+ // be supported by our serde implementation, so check for them here.
+ // Example:
+ // CARGO_BUILD_TARGET
+ // CARGO_BUILD_TARGET_DIR
+ // or
+ // CARGO_PROFILE_DEV_DEBUG
+ // CARGO_PROFILE_DEV_DEBUG_ASSERTIONS
+ // The `deserialize_option` method does not know the type of the field.
+ // If the type is an Option<struct> (like
+ // `profile.dev.build-override`), then it needs to check for env vars
+ // starting with CARGO_FOO_BAR_. This is a problem for keys like
+ // CARGO_BUILD_TARGET because checking for a prefix would incorrectly
+ // match CARGO_BUILD_TARGET_DIR. `deserialize_option` would have no
+ // choice but to call `visit_some()` which would then fail if
+ // CARGO_BUILD_TARGET isn't set. So we check for these prefixes and
+ // disallow them here.
+ let env_prefix = format!("{}_", field).replace('-', "_");
+ let env_prefix_ok = !self.fields.iter().any(|field| {
+ let field = match field {
+ KeyKind::Normal(s) | KeyKind::CaseSensitive(s) => s.as_str(),
+ };
+ field.replace('-', "_").starts_with(&env_prefix)
+ });
+
+ let result = seed.deserialize(Deserializer {
+ config: self.de.config,
+ key: self.de.key.clone(),
+ env_prefix_ok,
+ });
+ self.de.key.pop();
+ result
+ }
+}
+
+struct ConfigSeqAccess {
+ list_iter: vec::IntoIter<(String, Definition)>,
+}
+
+impl ConfigSeqAccess {
+ fn new(de: Deserializer<'_>) -> Result<ConfigSeqAccess, ConfigError> {
+ let mut res = Vec::new();
+ if let Some(v) = de.config._get_list(&de.key)? {
+ res.extend(v.val);
+ }
+
+ de.config.get_env_list(&de.key, &mut res)?;
+
+ Ok(ConfigSeqAccess {
+ list_iter: res.into_iter(),
+ })
+ }
+}
+
+impl<'de> de::SeqAccess<'de> for ConfigSeqAccess {
+ type Error = ConfigError;
+
+ fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error>
+ where
+ T: de::DeserializeSeed<'de>,
+ {
+ match self.list_iter.next() {
+ // TODO: add `def` to error?
+ Some((value, def)) => {
+ // This might be a String or a Value<String>.
+ // ValueDeserializer will handle figuring out which one it is.
+ let maybe_value_de = ValueDeserializer::new_with_string(value, def);
+ seed.deserialize(maybe_value_de).map(Some)
+ }
+ None => Ok(None),
+ }
+ }
+}
+
+/// This is a deserializer that deserializes into a `Value<T>` for
+/// configuration.
+///
+/// This is a special deserializer because it deserializes one of its struct
+/// fields into the location that this configuration value was defined in.
+///
+/// See more comments in `value.rs` for the protocol used here.
+struct ValueDeserializer<'config> {
+ hits: u32,
+ definition: Definition,
+ /// The deserializer, used to actually deserialize a Value struct.
+ /// This is `None` if deserializing a string.
+ de: Option<Deserializer<'config>>,
+ /// A string value to deserialize.
+ ///
+ /// This is used for situations where you can't address a string via a
+ /// TOML key, such as a string inside an array. The `ConfigSeqAccess`
+ /// doesn't know if the type it should deserialize to is a `String` or
+ /// `Value<String>`, so `ValueDeserializer` needs to be able to handle
+ /// both.
+ str_value: Option<String>,
+}
+
+impl<'config> ValueDeserializer<'config> {
+ fn new(de: Deserializer<'config>) -> Result<ValueDeserializer<'config>, ConfigError> {
+ // Figure out where this key is defined.
+ let definition = {
+ let env = de.key.as_env_key();
+ let env_def = Definition::Environment(env.to_string());
+ match (de.config.env.contains_key(env), de.config.get_cv(&de.key)?) {
+ (true, Some(cv)) => {
+ // Both, pick highest priority.
+ if env_def.is_higher_priority(cv.definition()) {
+ env_def
+ } else {
+ cv.definition().clone()
+ }
+ }
+ (false, Some(cv)) => cv.definition().clone(),
+ // Assume it is an environment, even if the key is not set.
+ // This can happen for intermediate tables, like
+ // CARGO_FOO_BAR_* where `CARGO_FOO_BAR` is not set.
+ (_, None) => env_def,
+ }
+ };
+ Ok(ValueDeserializer {
+ hits: 0,
+ definition,
+ de: Some(de),
+ str_value: None,
+ })
+ }
+
+ fn new_with_string(s: String, definition: Definition) -> ValueDeserializer<'config> {
+ ValueDeserializer {
+ hits: 0,
+ definition,
+ de: None,
+ str_value: Some(s),
+ }
+ }
+}
+
+impl<'de, 'config> de::MapAccess<'de> for ValueDeserializer<'config> {
+ type Error = ConfigError;
+
+ fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
+ where
+ K: de::DeserializeSeed<'de>,
+ {
+ self.hits += 1;
+ match self.hits {
+ 1 => seed
+ .deserialize(value::VALUE_FIELD.into_deserializer())
+ .map(Some),
+ 2 => seed
+ .deserialize(value::DEFINITION_FIELD.into_deserializer())
+ .map(Some),
+ _ => Ok(None),
+ }
+ }
+
+ fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::DeserializeSeed<'de>,
+ {
+ // If this is the first time around we deserialize the `value` field
+ // which is the actual deserializer
+ if self.hits == 1 {
+ if let Some(de) = &self.de {
+ return seed
+ .deserialize(de.clone())
+ .map_err(|e| e.with_key_context(&de.key, self.definition.clone()));
+ } else {
+ return seed
+ .deserialize(self.str_value.as_ref().unwrap().clone().into_deserializer());
+ }
+ }
+
+        // ... otherwise we're deserializing the `definition` field, so we need
+        // to figure out where the field we just deserialized was defined.
+ match &self.definition {
+ Definition::Path(path) => {
+ seed.deserialize(Tuple2Deserializer(0i32, path.to_string_lossy()))
+ }
+ Definition::Environment(env) => {
+ seed.deserialize(Tuple2Deserializer(1i32, env.as_str()))
+ }
+ Definition::Cli(path) => {
+ let str = path
+ .as_ref()
+ .map(|p| p.to_string_lossy())
+ .unwrap_or_default();
+ seed.deserialize(Tuple2Deserializer(2i32, str))
+ }
+ }
+ }
+}
+
+// Deserializer is only implemented to handle deserializing a String inside a
+// sequence (like `Vec<String>` or `Vec<Value<String>>`). `Value<String>` is
+// handled by deserialize_struct, and the plain `String` is handled by all the
+// other functions here.
+impl<'de, 'config> de::Deserializer<'de> for ValueDeserializer<'config> {
+ type Error = ConfigError;
+
+ fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_str(&self.str_value.expect("string expected"))
+ }
+
+ fn deserialize_string<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_string(self.str_value.expect("string expected"))
+ }
+
+ fn deserialize_struct<V>(
+ self,
+ name: &'static str,
+ fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ // Match on the magical struct name/field names that are passed in to
+ // detect when we're deserializing `Value<T>`.
+ //
+ // See more comments in `value.rs` for the protocol used here.
+ if name == value::NAME && fields == value::FIELDS {
+ return visitor.visit_map(self);
+ }
+ unimplemented!("only strings and Value can be deserialized from a sequence");
+ }
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_string(self.str_value.expect("string expected"))
+ }
+
+ fn deserialize_ignored_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_unit()
+ }
+
+ serde::forward_to_deserialize_any! {
+ i8 i16 i32 i64
+ u8 u16 u32 u64
+ option
+ newtype_struct seq tuple tuple_struct map enum bool
+ f32 f64 char bytes
+ byte_buf unit unit_struct
+ identifier
+ }
+}
+
+/// A deserializer which takes two values and deserializes into a tuple of those
+/// two values. This is similar to types like `StrDeserializer` in upstream
+/// serde itself.
+struct Tuple2Deserializer<T, U>(T, U);
+
+impl<'de, T, U> de::Deserializer<'de> for Tuple2Deserializer<T, U>
+where
+ T: IntoDeserializer<'de, ConfigError>,
+ U: IntoDeserializer<'de, ConfigError>,
+{
+ type Error = ConfigError;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, ConfigError>
+ where
+ V: de::Visitor<'de>,
+ {
+ struct SeqVisitor<T, U> {
+ first: Option<T>,
+ second: Option<U>,
+ }
+ impl<'de, T, U> de::SeqAccess<'de> for SeqVisitor<T, U>
+ where
+ T: IntoDeserializer<'de, ConfigError>,
+ U: IntoDeserializer<'de, ConfigError>,
+ {
+ type Error = ConfigError;
+ fn next_element_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
+ where
+ K: de::DeserializeSeed<'de>,
+ {
+ if let Some(first) = self.first.take() {
+ return seed.deserialize(first.into_deserializer()).map(Some);
+ }
+ if let Some(second) = self.second.take() {
+ return seed.deserialize(second.into_deserializer()).map(Some);
+ }
+ Ok(None)
+ }
+ }
+
+ visitor.visit_seq(SeqVisitor {
+ first: Some(self.0),
+ second: Some(self.1),
+ })
+ }
+
+ serde::forward_to_deserialize_any! {
+ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
+ bytes byte_buf map struct option unit newtype_struct
+ ignored_any unit_struct tuple_struct tuple enum identifier
+ }
+}
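+
+// A hedged sketch of how this deserializer surfaces definition locations
+// through the typed API (the key name is illustrative):
+//
+//     let jobs: Value<i64> = config.get("build.jobs")?;
+//     // `jobs.val` holds the number; `jobs.definition` records whether it came
+//     // from a config file, an environment variable, or the command line.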
diff --git a/src/tools/cargo/src/cargo/util/config/environment.rs b/src/tools/cargo/src/cargo/util/config/environment.rs
new file mode 100644
index 000000000..0172c88c0
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/config/environment.rs
@@ -0,0 +1,189 @@
+//! Encapsulates snapshotting of environment variables.
+
+use std::collections::HashMap;
+use std::ffi::{OsStr, OsString};
+
+use crate::util::errors::CargoResult;
+use anyhow::{anyhow, bail};
+
+/// Generate `case_insensitive_env` and `normalized_env` from the `env`.
+fn make_case_insensitive_and_normalized_env(
+ env: &HashMap<OsString, OsString>,
+) -> (HashMap<String, String>, HashMap<String, String>) {
+ let case_insensitive_env: HashMap<_, _> = env
+ .keys()
+ .filter_map(|k| k.to_str())
+ .map(|k| (k.to_uppercase(), k.to_owned()))
+ .collect();
+ let normalized_env = env
+ .iter()
+ // Only keep entries where both the key and value are valid UTF-8,
+ // since the config env vars only support UTF-8 keys and values.
+ // Otherwise, the normalized map warning could incorrectly warn about entries that can't be
+ // read by the config system.
+ // Please see the docs for `Env` for more context.
+ .filter_map(|(k, v)| Some((k.to_str()?, v.to_str()?)))
+ .map(|(k, _)| (k.to_uppercase().replace("-", "_"), k.to_owned()))
+ .collect();
+ (case_insensitive_env, normalized_env)
+}
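+
+// For example (illustrative values), an environment containing `Path=C:\Windows`
+// and `cargo-build-jobs=4` would produce:
+//
+//     case_insensitive_env: { "PATH": "Path", "CARGO-BUILD-JOBS": "cargo-build-jobs" }
+//     normalized_env:       { "PATH": "Path", "CARGO_BUILD_JOBS": "cargo-build-jobs" }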
+
+/// A snapshot of the environment variables available to [`super::Config`].
+///
+/// Currently, the [`Config`](super::Config) supports lookup of environment variables
+/// through two different means:
+///
+/// - [`Config::get_env`](super::Config::get_env)
+/// and [`Config::get_env_os`](super::Config::get_env_os)
+/// for process environment variables (similar to [`std::env::var`] and [`std::env::var_os`]),
+/// - Typed Config Value API via [`Config::get`](super::Config::get).
+/// This is only available for `CARGO_` prefixed environment keys.
+///
+/// This type contains the env var snapshot and helper methods for both APIs.
+#[derive(Debug)]
+pub struct Env {
+ /// A snapshot of the process's environment variables.
+ env: HashMap<OsString, OsString>,
+ /// Used in the typed Config value API for warning messages when config keys are
+ /// given in the wrong format.
+ ///
+ /// Maps from "normalized" (upper case and with "-" replaced by "_") env keys
+ /// to the actual keys in the environment.
+ /// The normalized format is the one expected by Cargo.
+ ///
+ /// This only holds env keys that are valid UTF-8, since [`super::ConfigKey`] only supports UTF-8 keys.
+ /// In addition, this only holds env keys whose value in the environment is also valid UTF-8,
+ /// since the typed Config value API only supports UTF-8 values.
+ normalized_env: HashMap<String, String>,
+ /// Used to implement `get_env` and `get_env_os` on Windows, where env keys are case-insensitive.
+ ///
+ /// Maps from uppercased env keys to the actual key in the environment.
+ /// For example, this might hold a pair `("PATH", "Path")`.
+ /// Currently only supports UTF-8 keys and values.
+ case_insensitive_env: HashMap<String, String>,
+}
+
+impl Env {
+    /// Create a new `Env` from the process's environment variables.
+ pub fn new() -> Self {
+ // ALLOWED: This is the only permissible usage of `std::env::vars{_os}`
+ // within cargo. If you do need access to individual variables without
+ // interacting with `Config` system, please use `std::env::var{_os}`
+ // and justify the validity of the usage.
+ #[allow(clippy::disallowed_methods)]
+ let env: HashMap<_, _> = std::env::vars_os().collect();
+ let (case_insensitive_env, normalized_env) = make_case_insensitive_and_normalized_env(&env);
+ Self {
+ env,
+ case_insensitive_env,
+ normalized_env,
+ }
+ }
+
+ /// Set the env directly from a `HashMap`.
+ /// This should be used for debugging purposes only.
+ pub(super) fn from_map(env: HashMap<String, String>) -> Self {
+ let env = env.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
+ let (case_insensitive_env, normalized_env) = make_case_insensitive_and_normalized_env(&env);
+ Self {
+ env,
+ case_insensitive_env,
+ normalized_env,
+ }
+ }
+
+ /// Returns all environment variables as an iterator,
+ /// keeping only entries where both the key and value are valid UTF-8.
+ pub fn iter_str(&self) -> impl Iterator<Item = (&str, &str)> {
+ self.env
+ .iter()
+ .filter_map(|(k, v)| Some((k.to_str()?, v.to_str()?)))
+ }
+
+ /// Returns all environment variable keys, filtering out keys that are not valid UTF-8.
+ pub fn keys_str(&self) -> impl Iterator<Item = &str> {
+ self.env.keys().filter_map(|k| k.to_str())
+ }
+
+ /// Get the value of environment variable `key` through the `Config` snapshot.
+ ///
+ /// This can be used similarly to `std::env::var_os`.
+ /// On Windows, we check for case mismatch since environment keys are case-insensitive.
+ pub fn get_env_os(&self, key: impl AsRef<OsStr>) -> Option<OsString> {
+ match self.env.get(key.as_ref()) {
+ Some(s) => Some(s.clone()),
+ None => {
+ if cfg!(windows) {
+ self.get_env_case_insensitive(key).cloned()
+ } else {
+ None
+ }
+ }
+ }
+ }
+
+ /// Get the value of environment variable `key` through the `self.env` snapshot.
+ ///
+ /// This can be used similarly to `std::env::var`.
+ /// On Windows, we check for case mismatch since environment keys are case-insensitive.
+ pub fn get_env(&self, key: impl AsRef<OsStr>) -> CargoResult<String> {
+ let key = key.as_ref();
+ let s = self
+ .get_env_os(key)
+ .ok_or_else(|| anyhow!("{key:?} could not be found in the environment snapshot"))?;
+
+ match s.to_str() {
+ Some(s) => Ok(s.to_owned()),
+ None => bail!("environment variable value is not valid unicode: {s:?}"),
+ }
+ }
+
+ /// Performs a case-insensitive lookup of `key` in the environment.
+ ///
+ /// This is relevant on Windows, where environment variables are case-insensitive.
+ /// Note that this only works on keys that are valid UTF-8 and it uses Unicode uppercase,
+ /// which may differ from the OS's notion of uppercase.
+ fn get_env_case_insensitive(&self, key: impl AsRef<OsStr>) -> Option<&OsString> {
+ let upper_case_key = key.as_ref().to_str()?.to_uppercase();
+ let env_key: &OsStr = self.case_insensitive_env.get(&upper_case_key)?.as_ref();
+ self.env.get(env_key)
+ }
+
+ /// Get the value of environment variable `key` as a `&str`.
+ /// Returns `None` if `key` is not in `self.env` or if the value is not valid UTF-8.
+ ///
+ /// This is intended for use in private methods of `Config`,
+ /// and does not check for env key case mismatch.
+ ///
+ /// This is case-sensitive on Windows (even though environment keys on Windows are usually
+ /// case-insensitive) due to an unintended regression in 1.28 (via #5552).
+ /// This should only affect keys used for cargo's config-system env variables (`CARGO_`
+ /// prefixed ones), which are currently all uppercase.
+ /// We may want to consider rectifying it if users report issues.
+ /// One thing that adds a wrinkle here is the unstable advanced-env option that *requires*
+ /// case-sensitive keys.
+ ///
+ /// Do not use this for any other purposes.
+ /// Use [`Env::get_env_os`] or [`Env::get_env`] instead, which properly handle case
+ /// insensitivity on Windows.
+ pub(super) fn get_str(&self, key: impl AsRef<OsStr>) -> Option<&str> {
+ self.env.get(key.as_ref()).and_then(|s| s.to_str())
+ }
+
+ /// Check if the environment contains `key`.
+ ///
+ /// This is intended for use in private methods of `Config`,
+ /// and does not check for env key case mismatch.
+ /// See the docstring of [`Env::get_str`] for more context.
+ pub(super) fn contains_key(&self, key: impl AsRef<OsStr>) -> bool {
+ self.env.contains_key(key.as_ref())
+ }
+
+ /// Looks up a normalized `key` in the `normalized_env`.
+ /// Returns the corresponding (non-normalized) env key if it exists, else `None`.
+ ///
+ /// This is used by [`super::Config::check_environment_key_case_mismatch`].
+ pub(super) fn get_normalized(&self, key: &str) -> Option<&str> {
+ self.normalized_env.get(key).map(|s| s.as_ref())
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/config/key.rs b/src/tools/cargo/src/cargo/util/config/key.rs
new file mode 100644
index 000000000..228d85844
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/config/key.rs
@@ -0,0 +1,116 @@
+use std::borrow::Cow;
+use std::fmt;
+
+/// Key for a configuration variable.
+///
+/// This type represents a configuration variable that we're looking up in
+/// Cargo's configuration. This structure simultaneously keeps track of a
+/// corresponding environment variable name as well as a TOML config name. The
+/// intention here is that this is built up and torn down over time efficiently,
+/// avoiding clones and the like where possible.
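+///
+/// A minimal sketch of building up a key with the `push`/`as_env_key` methods
+/// defined below:
+///
+/// ```ignore
+/// let mut key = ConfigKey::new();
+/// key.push("build");
+/// key.push("target-dir");
+/// // The env form is uppercased with dashes replaced by underscores.
+/// assert_eq!(key.as_env_key(), "CARGO_BUILD_TARGET_DIR");
+/// // The display form joins the parts with `.`.
+/// assert_eq!(key.to_string(), "build.target-dir");
+/// ```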
+#[derive(Debug, Clone)]
+pub struct ConfigKey {
+ // The current environment variable this configuration key maps to. This is
+ // updated with `push` methods and looks like `CARGO_FOO_BAR` for pushing
+ // `foo` and then `bar`.
+ env: String,
+ // This is used to keep track of how many sub-keys have been pushed on
+ // this `ConfigKey`. Each element of this vector is a new sub-key pushed
+ // onto this `ConfigKey`. Each element is a pair where the first item is
+ // the key part as a string, and the second item is an index into `env`.
+ // The `env` index is used on `pop` to truncate `env` to rewind back to
+ // the previous `ConfigKey` state before a `push`.
+ parts: Vec<(String, usize)>,
+}
+
+impl ConfigKey {
+ /// Creates a new blank configuration key which is ready to get built up by
+ /// using `push` and `push_sensitive`.
+ pub fn new() -> ConfigKey {
+ ConfigKey {
+ env: "CARGO".to_string(),
+ parts: Vec::new(),
+ }
+ }
+
+ /// Creates a `ConfigKey` from the `key` specified.
+ ///
+ /// The `key` specified is expected to be a period-separated toml
+ /// configuration key.
+ pub fn from_str(key: &str) -> ConfigKey {
+ let mut cfg = ConfigKey::new();
+ for part in key.split('.') {
+ cfg.push(part);
+ }
+ cfg
+ }
+
+ /// Pushes a new sub-key on this `ConfigKey`. This sub-key should be
+ /// equivalent to accessing a sub-table in TOML.
+ ///
+ /// Note that this considers `name` to be case-insensitive, meaning that the
+ /// corresponding TOML key is appended with this `name` as-is, while the
+ /// corresponding env key is appended with `name` after transforming it to
+ /// uppercase characters (with dashes replaced by underscores).
+ pub fn push(&mut self, name: &str) {
+ let env = name.replace("-", "_").to_uppercase();
+ self._push(&env, name);
+ }
+
+ /// Performs the same function as `push` except that `name` is not
+ /// transformed to uppercase; instead, `name` is pushed raw onto the
+ /// corresponding environment variable key.
+ pub fn push_sensitive(&mut self, name: &str) {
+ self._push(name, name);
+ }
+
+ fn _push(&mut self, env: &str, config: &str) {
+ self.parts.push((config.to_string(), self.env.len()));
+ self.env.push('_');
+ self.env.push_str(env);
+ }
+
+ /// Rewinds this `ConfigKey` back to the state it was in before the last
+ /// call to `push`.
+ pub fn pop(&mut self) {
+ let (_part, env) = self.parts.pop().unwrap();
+ self.env.truncate(env);
+ }
+
+ /// Returns the corresponding environment variable key for this
+ /// configuration value.
+ pub fn as_env_key(&self) -> &str {
+ &self.env
+ }
+
+ /// Returns an iterator of the key parts as strings.
+ pub(crate) fn parts(&self) -> impl Iterator<Item = &str> {
+ self.parts.iter().map(|p| p.0.as_ref())
+ }
+
+ /// Returns whether or not this is a key for the root table.
+ pub fn is_root(&self) -> bool {
+ self.parts.is_empty()
+ }
+}
+
+impl fmt::Display for ConfigKey {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let parts: Vec<_> = self.parts().map(|part| escape_key_part(part)).collect();
+ parts.join(".").fmt(f)
+ }
+}
+
+fn escape_key_part<'a>(part: &'a str) -> Cow<'a, str> {
+ let ok = part.chars().all(|c| {
+ matches!(c,
+ 'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_')
+ });
+ if ok {
+ Cow::Borrowed(part)
+ } else {
+ // This is a bit messy, but toml doesn't expose a function to do this.
+ Cow::Owned(toml::Value::from(part).to_string())
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/config/mod.rs b/src/tools/cargo/src/cargo/util/config/mod.rs
new file mode 100644
index 000000000..dad7e9c72
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/config/mod.rs
@@ -0,0 +1,2647 @@
+//! Cargo's config system.
+//!
+//! The `Config` object contains general information about the environment,
+//! and provides access to Cargo's configuration files.
+//!
+//! ## Config value API
+//!
+//! The primary API for fetching user-defined config values is the
+//! `Config::get` method. It uses `serde` to translate config values to a
+//! target type.
+//!
+//! There are a variety of helper types for deserializing some common formats:
+//!
+//! - `value::Value`: This type provides access to the location where the
+//! config value was defined.
+//! - `ConfigRelativePath`: For a path that is relative to where it is
+//! defined.
+//! - `PathAndArgs`: Similar to `ConfigRelativePath`, but also supports a list
+//! of arguments, useful for programs to execute.
+//! - `StringList`: Get a value that is either a list or a whitespace split
+//! string.
+//!
+//! ## Map key recommendations
+//!
+//! Handling tables that have arbitrary keys can be tricky, particularly if they
+//! should support environment variables. In general, if possible, the caller
+//! should pass the full key path into the `get()` method so that the config
+//! deserializer can properly handle environment variables (which need to be
+//! uppercased, and dashes converted to underscores).
+//!
+//! A good example is the `[target]` table. The code will request
+//! `target.$TRIPLE` and the config system can then appropriately fetch
+//! environment variables like `CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER`.
+//! Conversely, it is not possible to do the same thing for the `cfg()` target
+//! tables (because Cargo must fetch all of them), so those do not support
+//! environment variables.
+//!
+//! Try to avoid keys where one is a prefix of another followed by a
+//! dash/underscore, for example `build.target` and `build.target-dir`. This is
+//! OK if these are not structs/maps, but if one of them is a struct or map,
+//! then the environment variable cannot be read due to the ambiguity. (See
+//! `ConfigMapAccess` for more details.)
+//!
+//! ## Internal API
+//!
+//! Internally config values are stored with the `ConfigValue` type after they
+//! have been loaded from disk. This is similar to the `toml::Value` type, but
+//! includes the definition location. The `get()` method uses serde to
+//! translate from `ConfigValue` and environment variables to the caller's
+//! desired type.
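+//!
+//! As a rough sketch (given some `config: &Config` in scope; the exact target
+//! types a caller requests will vary), a lookup might look like:
+//!
+//! ```ignore
+//! // Reads `build.jobs` from config files or from the
+//! // `CARGO_BUILD_JOBS` environment variable.
+//! let jobs: Option<i64> = config.get::<Option<i64>>("build.jobs")?;
+//! // `get_string` also reports where the value was defined.
+//! let default_registry = config.get_string("registry.default")?;
+//! ```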
+
+use std::borrow::Cow;
+use std::cell::{RefCell, RefMut};
+use std::collections::hash_map::Entry::{Occupied, Vacant};
+use std::collections::{HashMap, HashSet};
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fmt;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::io::{self, SeekFrom};
+use std::mem;
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+use std::sync::Once;
+use std::time::Instant;
+
+use self::ConfigValue as CV;
+use crate::core::compiler::rustdoc::RustdocExternMap;
+use crate::core::shell::Verbosity;
+use crate::core::{features, CliUnstable, Shell, SourceId, Workspace, WorkspaceRootConfig};
+use crate::ops::{self, RegistryCredentialConfig};
+use crate::util::auth::Secret;
+use crate::util::errors::CargoResult;
+use crate::util::CanonicalUrl;
+use crate::util::{internal, toml as cargo_toml};
+use crate::util::{try_canonicalize, validate_package_name};
+use crate::util::{FileLock, Filesystem, IntoUrl, IntoUrlWithBase, Rustc};
+use anyhow::{anyhow, bail, format_err, Context as _};
+use cargo_util::paths;
+use curl::easy::Easy;
+use lazycell::LazyCell;
+use serde::de::IntoDeserializer as _;
+use serde::Deserialize;
+use toml_edit::Item;
+use url::Url;
+
+mod de;
+use de::Deserializer;
+
+mod value;
+pub use value::{Definition, OptValue, Value};
+
+mod key;
+pub use key::ConfigKey;
+
+mod path;
+pub use path::{ConfigRelativePath, PathAndArgs};
+
+mod target;
+pub use target::{TargetCfgConfig, TargetConfig};
+
+mod environment;
+use environment::Env;
+
+// Helper macro for creating typed access methods.
+macro_rules! get_value_typed {
+ ($name:ident, $ty:ty, $variant:ident, $expected:expr) => {
+ /// Low-level private method for getting a config value as an OptValue.
+ fn $name(&self, key: &ConfigKey) -> Result<OptValue<$ty>, ConfigError> {
+ let cv = self.get_cv(key)?;
+ let env = self.get_config_env::<$ty>(key)?;
+ match (cv, env) {
+ (Some(CV::$variant(val, definition)), Some(env)) => {
+ if definition.is_higher_priority(&env.definition) {
+ Ok(Some(Value { val, definition }))
+ } else {
+ Ok(Some(env))
+ }
+ }
+ (Some(CV::$variant(val, definition)), None) => Ok(Some(Value { val, definition })),
+ (Some(cv), _) => Err(ConfigError::expected(key, $expected, &cv)),
+ (None, Some(env)) => Ok(Some(env)),
+ (None, None) => Ok(None),
+ }
+ }
+ };
+}
+
+/// Indicates why a config value is being loaded.
+#[derive(Clone, Copy, Debug)]
+enum WhyLoad {
+ /// Loaded due to a request from the global cli arg `--config`
+ ///
+ /// Indirect configs loaded via [`config-include`] are also seen as from cli args,
+ /// if the initial config is being loaded from cli.
+ ///
+ /// [`config-include`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#config-include
+ Cli,
+ /// Loaded due to config file discovery.
+ FileDiscovery,
+}
+
+/// A previously generated authentication token and the data needed to determine if it can be reused.
+#[derive(Debug)]
+pub struct CredentialCacheValue {
+ /// If the command line was used to override the token then it must always be reused,
+ /// even if reading the configuration files would lead to a different value.
+ pub from_commandline: bool,
+ /// If nothing depends on which endpoint is being hit, then we can reuse the token
+ /// for any future request even if some of the requests involve mutations.
+ pub independent_of_endpoint: bool,
+ pub token_value: Secret<String>,
+}
+
+/// Configuration information for cargo. This is not specific to a build; it is
+/// information relating to cargo itself.
+#[derive(Debug)]
+pub struct Config {
+ /// The location of the user's Cargo home directory. OS-dependent.
+ home_path: Filesystem,
+ /// Information about how to write messages to the shell
+ shell: RefCell<Shell>,
+ /// A collection of configuration options
+ values: LazyCell<HashMap<String, ConfigValue>>,
+ /// A collection of configuration options from the credentials file
+ credential_values: LazyCell<HashMap<String, ConfigValue>>,
+ /// CLI config values, passed in via `configure`.
+ cli_config: Option<Vec<String>>,
+ /// The current working directory of cargo
+ cwd: PathBuf,
+ /// Directory where config file searching should stop (inclusive).
+ search_stop_path: Option<PathBuf>,
+ /// The location of the cargo executable (path to current process)
+ cargo_exe: LazyCell<PathBuf>,
+ /// The location of the rustdoc executable
+ rustdoc: LazyCell<PathBuf>,
+ /// Whether we are printing extra verbose messages
+ extra_verbose: bool,
+ /// `frozen` is the same as `locked`, but additionally will not access the
+ /// network to determine if the lock file is out-of-date.
+ frozen: bool,
+ /// `locked` is set if we should not update lock files. If the lock file
+ /// is missing, or needs to be updated, an error is produced.
+ locked: bool,
+ /// `offline` is set if we should never access the network, but otherwise
+ /// continue operating if possible.
+ offline: bool,
+ /// A global static IPC control mechanism (used for managing parallel builds)
+ jobserver: Option<jobserver::Client>,
+ /// Cli flags of the form "-Z something" merged with config file values
+ unstable_flags: CliUnstable,
+ /// Cli flags of the form "-Z something"
+ unstable_flags_cli: Option<Vec<String>>,
+ /// A handle on curl easy mode for http calls
+ easy: LazyCell<RefCell<Easy>>,
+ /// Cache of the `SourceId` for crates.io
+ crates_io_source_id: LazyCell<SourceId>,
+ /// If false, don't cache `rustc --version --verbose` invocations
+ cache_rustc_info: bool,
+ /// Creation time of this config, used to output the total build time
+ creation_time: Instant,
+ /// Target Directory via resolved Cli parameter
+ target_dir: Option<Filesystem>,
+ /// Environment variable snapshot.
+ env: Env,
+ /// Tracks which sources have been updated to avoid multiple updates.
+ updated_sources: LazyCell<RefCell<HashSet<SourceId>>>,
+ /// Cache of credentials from configuration or credential providers.
+ /// Maps from url to credential value.
+ credential_cache: LazyCell<RefCell<HashMap<CanonicalUrl, CredentialCacheValue>>>,
+ /// Lock, if held, of the global package cache along with the number of
+ /// acquisitions so far.
+ package_cache_lock: RefCell<Option<(Option<FileLock>, usize)>>,
+ /// Cached configuration parsed by Cargo
+ http_config: LazyCell<CargoHttpConfig>,
+ future_incompat_config: LazyCell<CargoFutureIncompatConfig>,
+ net_config: LazyCell<CargoNetConfig>,
+ build_config: LazyCell<CargoBuildConfig>,
+ target_cfgs: LazyCell<Vec<(String, TargetCfgConfig)>>,
+ doc_extern_map: LazyCell<RustdocExternMap>,
+ progress_config: ProgressConfig,
+ env_config: LazyCell<EnvConfig>,
+ /// This should be false if:
+ /// - this is an artifact of the rustc distribution process for "stable" or for "beta"
+ /// - this is an `#[test]` that does not opt in with `enable_nightly_features`
+ /// - this is an integration test that uses `ProcessBuilder`
+ /// that does not opt in with `masquerade_as_nightly_cargo`
+ /// This should be true if:
+ /// - this is an artifact of the rustc distribution process for "nightly"
+ /// - this is being used in the rustc distribution process internally
+ /// - this is a cargo executable that was built from source
+ /// - this is an `#[test]` that called `enable_nightly_features`
+ /// - this is an integration test that uses `ProcessBuilder`
+ /// that called `masquerade_as_nightly_cargo`
+ /// It's public to allow tests to use nightly features.
+ /// NOTE: this should be set before `configure()`. If calling this from an integration test,
+ /// consider using `ConfigBuilder::enable_nightly_features` instead.
+ pub nightly_features_allowed: bool,
+ /// WorkspaceRootConfigs that have been found
+ pub ws_roots: RefCell<HashMap<PathBuf, WorkspaceRootConfig>>,
+}
+
+impl Config {
+ /// Creates a new config instance.
+ ///
+ /// This is typically used for tests or other special cases. `default` is
+ /// preferred otherwise.
+ ///
+ /// This does only minimal initialization. In particular, it does not load
+ /// any config files from disk. Those will be loaded lazily as-needed.
+ pub fn new(shell: Shell, cwd: PathBuf, homedir: PathBuf) -> Config {
+ static mut GLOBAL_JOBSERVER: *mut jobserver::Client = 0 as *mut _;
+ static INIT: Once = Once::new();
+
+ // This should be called early on in the process, so in theory the
+ // unsafety is ok here (the jobserver client takes ownership of the
+ // inherited file descriptors).
+ INIT.call_once(|| unsafe {
+ if let Some(client) = jobserver::Client::from_env() {
+ GLOBAL_JOBSERVER = Box::into_raw(Box::new(client));
+ }
+ });
+
+ let env = Env::new();
+
+ let cache_key = "CARGO_CACHE_RUSTC_INFO";
+ let cache_rustc_info = match env.get_env_os(cache_key) {
+ Some(cache) => cache != "0",
+ _ => true,
+ };
+
+ Config {
+ home_path: Filesystem::new(homedir),
+ shell: RefCell::new(shell),
+ cwd,
+ search_stop_path: None,
+ values: LazyCell::new(),
+ credential_values: LazyCell::new(),
+ cli_config: None,
+ cargo_exe: LazyCell::new(),
+ rustdoc: LazyCell::new(),
+ extra_verbose: false,
+ frozen: false,
+ locked: false,
+ offline: false,
+ jobserver: unsafe {
+ if GLOBAL_JOBSERVER.is_null() {
+ None
+ } else {
+ Some((*GLOBAL_JOBSERVER).clone())
+ }
+ },
+ unstable_flags: CliUnstable::default(),
+ unstable_flags_cli: None,
+ easy: LazyCell::new(),
+ crates_io_source_id: LazyCell::new(),
+ cache_rustc_info,
+ creation_time: Instant::now(),
+ target_dir: None,
+ env,
+ updated_sources: LazyCell::new(),
+ credential_cache: LazyCell::new(),
+ package_cache_lock: RefCell::new(None),
+ http_config: LazyCell::new(),
+ future_incompat_config: LazyCell::new(),
+ net_config: LazyCell::new(),
+ build_config: LazyCell::new(),
+ target_cfgs: LazyCell::new(),
+ doc_extern_map: LazyCell::new(),
+ progress_config: ProgressConfig::default(),
+ env_config: LazyCell::new(),
+ nightly_features_allowed: matches!(&*features::channel(), "nightly" | "dev"),
+ ws_roots: RefCell::new(HashMap::new()),
+ }
+ }
+
+ /// Creates a new Config instance, with all default settings.
+ ///
+ /// This does only minimal initialization. In particular, it does not load
+ /// any config files from disk. Those will be loaded lazily as-needed.
+ pub fn default() -> CargoResult<Config> {
+ let shell = Shell::new();
+ let cwd = env::current_dir()
+ .with_context(|| "couldn't get the current directory of the process")?;
+ let homedir = homedir(&cwd).ok_or_else(|| {
+ anyhow!(
+ "Cargo couldn't find your home directory. \
+ This probably means that $HOME was not set."
+ )
+ })?;
+ Ok(Config::new(shell, cwd, homedir))
+ }
+
+ /// Gets the user's Cargo home directory (OS-dependent).
+ pub fn home(&self) -> &Filesystem {
+ &self.home_path
+ }
+
+ /// Returns a path to display to the user with the location of their home
+ /// config file (to only be used for displaying a diagnostic suggestion,
+ /// such as recommending where to add a config value).
+ pub fn diagnostic_home_config(&self) -> String {
+ let home = self.home_path.as_path_unlocked();
+ let path = match self.get_file_path(home, "config", false) {
+ Ok(Some(existing_path)) => existing_path,
+ _ => home.join("config.toml"),
+ };
+ path.to_string_lossy().to_string()
+ }
+
+ /// Gets the Cargo Git directory (`<cargo_home>/git`).
+ pub fn git_path(&self) -> Filesystem {
+ self.home_path.join("git")
+ }
+
+ /// Gets the Cargo base directory for all registry information (`<cargo_home>/registry`).
+ pub fn registry_base_path(&self) -> Filesystem {
+ self.home_path.join("registry")
+ }
+
+ /// Gets the Cargo registry index directory (`<cargo_home>/registry/index`).
+ pub fn registry_index_path(&self) -> Filesystem {
+ self.registry_base_path().join("index")
+ }
+
+ /// Gets the Cargo registry cache directory (`<cargo_home>/registry/cache`).
+ pub fn registry_cache_path(&self) -> Filesystem {
+ self.registry_base_path().join("cache")
+ }
+
+ /// Gets the Cargo registry source directory (`<cargo_home>/registry/src`).
+ pub fn registry_source_path(&self) -> Filesystem {
+ self.registry_base_path().join("src")
+ }
+
+ /// Gets the default Cargo registry.
+ pub fn default_registry(&self) -> CargoResult<Option<String>> {
+ Ok(self
+ .get_string("registry.default")?
+ .map(|registry| registry.val))
+ }
+
+ /// Gets a reference to the shell, e.g., for writing error messages.
+ pub fn shell(&self) -> RefMut<'_, Shell> {
+ self.shell.borrow_mut()
+ }
+
+ /// Gets the path to the `rustdoc` executable.
+ pub fn rustdoc(&self) -> CargoResult<&Path> {
+ self.rustdoc
+ .try_borrow_with(|| Ok(self.get_tool("rustdoc", &self.build_config()?.rustdoc)))
+ .map(AsRef::as_ref)
+ }
+
+ /// Loads the global `Rustc` instance, resolving the path to the `rustc` executable to use.
+ pub fn load_global_rustc(&self, ws: Option<&Workspace<'_>>) -> CargoResult<Rustc> {
+ let cache_location = ws.map(|ws| {
+ ws.target_dir()
+ .join(".rustc_info.json")
+ .into_path_unlocked()
+ });
+ let wrapper = self.maybe_get_tool("rustc_wrapper", &self.build_config()?.rustc_wrapper);
+ let rustc_workspace_wrapper = self.maybe_get_tool(
+ "rustc_workspace_wrapper",
+ &self.build_config()?.rustc_workspace_wrapper,
+ );
+
+ Rustc::new(
+ self.get_tool("rustc", &self.build_config()?.rustc),
+ wrapper,
+ rustc_workspace_wrapper,
+ &self
+ .home()
+ .join("bin")
+ .join("rustc")
+ .into_path_unlocked()
+ .with_extension(env::consts::EXE_EXTENSION),
+ if self.cache_rustc_info {
+ cache_location
+ } else {
+ None
+ },
+ self,
+ )
+ }
+
+ /// Gets the path to the `cargo` executable.
+ pub fn cargo_exe(&self) -> CargoResult<&Path> {
+ self.cargo_exe
+ .try_borrow_with(|| {
+ let from_env = || -> CargoResult<PathBuf> {
+ // Try re-using the `cargo` set in the environment already. This allows
+ // commands that use Cargo as a library to inherit (via `cargo <subcommand>`)
+ // or set (by setting `$CARGO`) a correct path to `cargo` when the current exe
+ // is not actually cargo (e.g., `cargo-*` binaries, Valgrind, `ld.so`, etc.).
+ let exe = try_canonicalize(
+ self.get_env_os(crate::CARGO_ENV)
+ .map(PathBuf::from)
+ .ok_or_else(|| anyhow!("$CARGO not set"))?,
+ )?;
+ Ok(exe)
+ };
+
+ fn from_current_exe() -> CargoResult<PathBuf> {
+ // Try fetching the path to `cargo` using `env::current_exe()`.
+ // The method varies per operating system and might fail; in particular,
+ // it depends on `/proc` being mounted on Linux, and some environments
+ // (like containers or chroots) may not have that available.
+ let exe = try_canonicalize(env::current_exe()?)?;
+ Ok(exe)
+ }
+
+ fn from_argv() -> CargoResult<PathBuf> {
+ // Grab `argv[0]` and attempt to resolve it to an absolute path.
+ // If `argv[0]` has one component, it must have come from a `PATH` lookup,
+ // so probe `PATH` in that case.
+ // Otherwise, it has multiple components and is either:
+ // - a relative path (e.g., `./cargo`, `target/debug/cargo`), or
+ // - an absolute path (e.g., `/usr/local/bin/cargo`).
+ // In either case, `Path::canonicalize` will return the full absolute path
+ // to the target if it exists.
+ let argv0 = env::args_os()
+ .map(PathBuf::from)
+ .next()
+ .ok_or_else(|| anyhow!("no argv[0]"))?;
+ paths::resolve_executable(&argv0)
+ }
+
+ let exe = from_env()
+ .or_else(|_| from_current_exe())
+ .or_else(|_| from_argv())
+ .with_context(|| "couldn't get the path to cargo executable")?;
+ Ok(exe)
+ })
+ .map(AsRef::as_ref)
+ }
+
+ /// Which package sources have been updated, used to ensure it is only done once.
+ pub fn updated_sources(&self) -> RefMut<'_, HashSet<SourceId>> {
+ self.updated_sources
+ .borrow_with(|| RefCell::new(HashSet::new()))
+ .borrow_mut()
+ }
+
+ /// Cached credentials from credential providers or configuration.
+ pub fn credential_cache(&self) -> RefMut<'_, HashMap<CanonicalUrl, CredentialCacheValue>> {
+ self.credential_cache
+ .borrow_with(|| RefCell::new(HashMap::new()))
+ .borrow_mut()
+ }
+
+ /// Gets all config values from disk.
+ ///
+ /// This will lazy-load the values as necessary. Callers are responsible
+ /// for checking environment variables. Callers outside of the `config`
+ /// module should avoid using this.
+ pub fn values(&self) -> CargoResult<&HashMap<String, ConfigValue>> {
+ self.values.try_borrow_with(|| self.load_values())
+ }
+
+ /// Gets a mutable copy of the on-disk config values.
+ ///
+ /// This requires the config values to already have been loaded. This
+ /// currently only exists for `cargo vendor` to remove the `source`
+ /// entries. This doesn't respect environment variables. You should avoid
+ /// using this if possible.
+ pub fn values_mut(&mut self) -> CargoResult<&mut HashMap<String, ConfigValue>> {
+ let _ = self.values()?;
+ Ok(self
+ .values
+ .borrow_mut()
+ .expect("already loaded config values"))
+ }
+
+ // Note: this is used by RLS, not Cargo.
+ pub fn set_values(&self, values: HashMap<String, ConfigValue>) -> CargoResult<()> {
+ if self.values.borrow().is_some() {
+ bail!("config values already found")
+ }
+ match self.values.fill(values) {
+ Ok(()) => Ok(()),
+ Err(_) => bail!("could not fill values"),
+ }
+ }
+
+ /// Sets the path where ancestor config file searching will stop. The
+ /// given path is included, but its ancestors are not.
+ pub fn set_search_stop_path<P: Into<PathBuf>>(&mut self, path: P) {
+ let path = path.into();
+ debug_assert!(self.cwd.starts_with(&path));
+ self.search_stop_path = Some(path);
+ }
+
+ /// Reloads on-disk configuration values, starting at the given path and
+ /// walking up its ancestors.
+ pub fn reload_rooted_at<P: AsRef<Path>>(&mut self, path: P) -> CargoResult<()> {
+ let values = self.load_values_from(path.as_ref())?;
+ self.values.replace(values);
+ self.merge_cli_args()?;
+ self.load_unstable_flags_from_config()?;
+ Ok(())
+ }
+
+ /// The current working directory.
+ pub fn cwd(&self) -> &Path {
+ &self.cwd
+ }
+
+ /// The `target` output directory to use.
+ ///
+ /// Returns `None` if the user has not chosen an explicit directory.
+ ///
+ /// Callers should prefer `Workspace::target_dir` instead.
+ pub fn target_dir(&self) -> CargoResult<Option<Filesystem>> {
+ if let Some(dir) = &self.target_dir {
+ Ok(Some(dir.clone()))
+ } else if let Some(dir) = self.get_env_os("CARGO_TARGET_DIR") {
+ // Check if the CARGO_TARGET_DIR environment variable is set to an empty string.
+ if dir.is_empty() {
+ bail!(
+ "the target directory is set to an empty string in the \
+ `CARGO_TARGET_DIR` environment variable"
+ )
+ }
+
+ Ok(Some(Filesystem::new(self.cwd.join(dir))))
+ } else if let Some(val) = &self.build_config()?.target_dir {
+ let path = val.resolve_path(self);
+
+ // Check if the target directory is set to an empty string in the config.toml file.
+ if val.raw_value().is_empty() {
+ bail!(
+ "the target directory is set to an empty string in {}",
+ val.value().definition
+ )
+ }
+
+ Ok(Some(Filesystem::new(path)))
+ } else {
+ Ok(None)
+ }
+ }
+
+ /// Get a configuration value by key.
+ ///
+ /// This does NOT look at environment variables. See `get_cv_with_env` for
+ /// a variant that supports environment variables.
+ fn get_cv(&self, key: &ConfigKey) -> CargoResult<Option<ConfigValue>> {
+ if let Some(vals) = self.credential_values.borrow() {
+ let val = self.get_cv_helper(key, vals)?;
+ if val.is_some() {
+ return Ok(val);
+ }
+ }
+ self.get_cv_helper(key, self.values()?)
+ }
+
+ fn get_cv_helper(
+ &self,
+ key: &ConfigKey,
+ vals: &HashMap<String, ConfigValue>,
+ ) -> CargoResult<Option<ConfigValue>> {
+ log::trace!("get cv {:?}", key);
+ if key.is_root() {
+ // Returning the entire root table (for example `cargo config get`
+ // with no key). The definition here shouldn't matter.
+ return Ok(Some(CV::Table(
+ vals.clone(),
+ Definition::Path(PathBuf::new()),
+ )));
+ }
+ let mut parts = key.parts().enumerate();
+ let mut val = match vals.get(parts.next().unwrap().1) {
+ Some(val) => val,
+ None => return Ok(None),
+ };
+ for (i, part) in parts {
+ match val {
+ CV::Table(map, _) => {
+ val = match map.get(part) {
+ Some(val) => val,
+ None => return Ok(None),
+ }
+ }
+ CV::Integer(_, def)
+ | CV::String(_, def)
+ | CV::List(_, def)
+ | CV::Boolean(_, def) => {
+ let mut key_so_far = ConfigKey::new();
+ for part in key.parts().take(i) {
+ key_so_far.push(part);
+ }
+ bail!(
+ "expected table for configuration key `{}`, \
+ but found {} in {}",
+ key_so_far,
+ val.desc(),
+ def
+ )
+ }
+ }
+ }
+ Ok(Some(val.clone()))
+ }
+
+ /// This is a helper for getting a CV from a file or env var.
+ pub(crate) fn get_cv_with_env(&self, key: &ConfigKey) -> CargoResult<Option<CV>> {
+ // Determine if value comes from env, cli, or file, and merge env if
+ // possible.
+ let cv = self.get_cv(key)?;
+ if key.is_root() {
+ // Root table can't have env value.
+ return Ok(cv);
+ }
+ let env = self.env.get_str(key.as_env_key());
+ let env_def = Definition::Environment(key.as_env_key().to_string());
+ let use_env = match (&cv, env) {
+ // Lists are always merged.
+ (Some(CV::List(..)), Some(_)) => true,
+ (Some(cv), Some(_)) => env_def.is_higher_priority(cv.definition()),
+ (None, Some(_)) => true,
+ _ => false,
+ };
+
+ if !use_env {
+ return Ok(cv);
+ }
+
+ // Future note: If you ever need to deserialize a non-self describing
+ // map type, this should implement a starts_with check (similar to how
+ // ConfigMapAccess does).
+ let env = env.unwrap();
+ if env == "true" {
+ Ok(Some(CV::Boolean(true, env_def)))
+ } else if env == "false" {
+ Ok(Some(CV::Boolean(false, env_def)))
+ } else if let Ok(i) = env.parse::<i64>() {
+ Ok(Some(CV::Integer(i, env_def)))
+ } else if self.cli_unstable().advanced_env && env.starts_with('[') && env.ends_with(']') {
+ match cv {
+ Some(CV::List(mut cv_list, cv_def)) => {
+ // Merge with config file.
+ self.get_env_list(key, &mut cv_list)?;
+ Ok(Some(CV::List(cv_list, cv_def)))
+ }
+ Some(cv) => {
+ // This can't assume StringList or UnmergedStringList.
+ // Return an error, which is the behavior of merging
+ // multiple config.toml files with the same scenario.
+ bail!(
+ "unable to merge array env for config `{}`\n\
+ file: {:?}\n\
+ env: {}",
+ key,
+ cv,
+ env
+ );
+ }
+ None => {
+ let mut cv_list = Vec::new();
+ self.get_env_list(key, &mut cv_list)?;
+ Ok(Some(CV::List(cv_list, env_def)))
+ }
+ }
+ } else {
+ // Try to merge if possible.
+ match cv {
+ Some(CV::List(mut cv_list, cv_def)) => {
+ // Merge with config file.
+ self.get_env_list(key, &mut cv_list)?;
+ Ok(Some(CV::List(cv_list, cv_def)))
+ }
+ _ => {
+ // Note: CV::Table merging is not implemented, as env
+ // vars do not support table values. In the future, we
+ // could check for `{}`, and interpret it as TOML if
+ // that seems useful.
+ Ok(Some(CV::String(env.to_string(), env_def)))
+ }
+ }
+ }
+ }
+
+ /// Helper primarily for testing.
+ pub fn set_env(&mut self, env: HashMap<String, String>) {
+ self.env = Env::from_map(env);
+ }
+
+ /// Returns all environment variables as an iterator,
+ /// keeping only entries where both the key and value are valid UTF-8.
+ pub(crate) fn env(&self) -> impl Iterator<Item = (&str, &str)> {
+ self.env.iter_str()
+ }
+
+ /// Returns all environment variable keys, filtering out keys that are not valid UTF-8.
+ fn env_keys(&self) -> impl Iterator<Item = &str> {
+ self.env.keys_str()
+ }
+
+ fn get_config_env<T>(&self, key: &ConfigKey) -> Result<OptValue<T>, ConfigError>
+ where
+ T: FromStr,
+ <T as FromStr>::Err: fmt::Display,
+ {
+ match self.env.get_str(key.as_env_key()) {
+ Some(value) => {
+ let definition = Definition::Environment(key.as_env_key().to_string());
+ Ok(Some(Value {
+ val: value
+ .parse()
+ .map_err(|e| ConfigError::new(format!("{}", e), definition.clone()))?,
+ definition,
+ }))
+ }
+ None => {
+ self.check_environment_key_case_mismatch(key);
+ Ok(None)
+ }
+ }
+ }
+
+ /// Get the value of environment variable `key` through the `Config` snapshot.
+ ///
+ /// This can be used similarly to `std::env::var`.
+ pub fn get_env(&self, key: impl AsRef<OsStr>) -> CargoResult<String> {
+ self.env.get_env(key)
+ }
+
+ /// Get the value of environment variable `key` through the `Config` snapshot.
+ ///
+ /// This can be used similarly to `std::env::var_os`.
+ pub fn get_env_os(&self, key: impl AsRef<OsStr>) -> Option<OsString> {
+ self.env.get_env_os(key)
+ }
+
+ /// Check if the [`Config`] contains a given [`ConfigKey`].
+ ///
+ /// See `ConfigMapAccess` for a description of `env_prefix_ok`.
+ fn has_key(&self, key: &ConfigKey, env_prefix_ok: bool) -> CargoResult<bool> {
+ if self.env.contains_key(key.as_env_key()) {
+ return Ok(true);
+ }
+ if env_prefix_ok {
+ let env_prefix = format!("{}_", key.as_env_key());
+ if self.env_keys().any(|k| k.starts_with(&env_prefix)) {
+ return Ok(true);
+ }
+ }
+ if self.get_cv(key)?.is_some() {
+ return Ok(true);
+ }
+ self.check_environment_key_case_mismatch(key);
+
+ Ok(false)
+ }
+
+ fn check_environment_key_case_mismatch(&self, key: &ConfigKey) {
+ if let Some(env_key) = self.env.get_normalized(key.as_env_key()) {
+ let _ = self.shell().warn(format!(
+ "Environment variables are expected to use uppercase letters and underscores, \
+ the variable `{}` will be ignored and have no effect",
+ env_key
+ ));
+ }
+ }
+
+ /// Get a string config value.
+ ///
+ /// See `get` for more details.
+ pub fn get_string(&self, key: &str) -> CargoResult<OptValue<String>> {
+ self.get::<Option<Value<String>>>(key)
+ }
+
+ /// Get a config value that is expected to be a path.
+ ///
+ /// This returns a relative path if the value does not contain any
+ /// directory separators. See `ConfigRelativePath::resolve_program` for
+ /// more details.
+ pub fn get_path(&self, key: &str) -> CargoResult<OptValue<PathBuf>> {
+ self.get::<Option<Value<ConfigRelativePath>>>(key).map(|v| {
+ v.map(|v| Value {
+ val: v.val.resolve_program(self),
+ definition: v.definition,
+ })
+ })
+ }
+
+ fn string_to_path(&self, value: &str, definition: &Definition) -> PathBuf {
+ let is_path = value.contains('/') || (cfg!(windows) && value.contains('\\'));
+ if is_path {
+ definition.root(self).join(value)
+ } else {
+ // A pathless name.
+ PathBuf::from(value)
+ }
+ }
+
+ /// Get a list of strings.
+ ///
+ /// DO NOT USE outside of the config module. `pub` will be removed in the
+ /// future.
+ ///
+ /// NOTE: this does **not** support environment variables. Use `get` instead
+ /// if you want that.
+ pub fn get_list(&self, key: &str) -> CargoResult<OptValue<Vec<(String, Definition)>>> {
+ let key = ConfigKey::from_str(key);
+ self._get_list(&key)
+ }
+
+ fn _get_list(&self, key: &ConfigKey) -> CargoResult<OptValue<Vec<(String, Definition)>>> {
+ match self.get_cv(key)? {
+ Some(CV::List(val, definition)) => Ok(Some(Value { val, definition })),
+ Some(val) => self.expected("list", key, &val),
+ None => Ok(None),
+ }
+ }
+
+ /// Helper for StringList type to get something that is a string or list.
+ fn get_list_or_string(
+ &self,
+ key: &ConfigKey,
+ merge: bool,
+ ) -> CargoResult<Vec<(String, Definition)>> {
+ let mut res = Vec::new();
+
+ if !merge {
+ self.get_env_list(key, &mut res)?;
+
+ if !res.is_empty() {
+ return Ok(res);
+ }
+ }
+
+ match self.get_cv(key)? {
+ Some(CV::List(val, _def)) => res.extend(val),
+ Some(CV::String(val, def)) => {
+ let split_vs = val.split_whitespace().map(|s| (s.to_string(), def.clone()));
+ res.extend(split_vs);
+ }
+ Some(val) => {
+ return self.expected("string or array of strings", key, &val);
+ }
+ None => {}
+ }
+
+ self.get_env_list(key, &mut res)?;
+
+ Ok(res)
+ }
+
+ /// Internal method for getting an environment variable as a list.
+ fn get_env_list(
+ &self,
+ key: &ConfigKey,
+ output: &mut Vec<(String, Definition)>,
+ ) -> CargoResult<()> {
+ let env_val = match self.env.get_str(key.as_env_key()) {
+ Some(v) => v,
+ None => {
+ self.check_environment_key_case_mismatch(key);
+ return Ok(());
+ }
+ };
+
+ let def = Definition::Environment(key.as_env_key().to_string());
+ if self.cli_unstable().advanced_env && env_val.starts_with('[') && env_val.ends_with(']') {
+ // Parse an environment string as a TOML array.
+ let toml_v = toml::Value::deserialize(toml::de::ValueDeserializer::new(&env_val))
+ .map_err(|e| {
+ ConfigError::new(format!("could not parse TOML list: {}", e), def.clone())
+ })?;
+ let values = toml_v.as_array().expect("env var was not array");
+ for value in values {
+ // TODO: support other types.
+ let s = value.as_str().ok_or_else(|| {
+ ConfigError::new(
+ format!("expected string, found {}", value.type_str()),
+ def.clone(),
+ )
+ })?;
+ output.push((s.to_string(), def.clone()));
+ }
+ } else {
+ output.extend(
+ env_val
+ .split_whitespace()
+ .map(|s| (s.to_string(), def.clone())),
+ );
+ }
+ Ok(())
+ }
+
+ /// Low-level method for getting a config value as an `OptValue<HashMap<String, CV>>`.
+ ///
+ /// NOTE: This does not read from env. The caller is responsible for that.
+ fn get_table(&self, key: &ConfigKey) -> CargoResult<OptValue<HashMap<String, CV>>> {
+ match self.get_cv(key)? {
+ Some(CV::Table(val, definition)) => Ok(Some(Value { val, definition })),
+ Some(val) => self.expected("table", key, &val),
+ None => Ok(None),
+ }
+ }
+
+ get_value_typed! {get_integer, i64, Integer, "an integer"}
+ get_value_typed! {get_bool, bool, Boolean, "true/false"}
+ get_value_typed! {get_string_priv, String, String, "a string"}
+
+ /// Generate an error when the given value is the wrong type.
+ fn expected<T>(&self, ty: &str, key: &ConfigKey, val: &CV) -> CargoResult<T> {
+ val.expected(ty, &key.to_string())
+ .map_err(|e| anyhow!("invalid configuration for key `{}`\n{}", key, e))
+ }
+
+ /// Update the Config instance based on settings typically passed in on
+ /// the command-line.
+ ///
+ /// This may also load the config from disk if it hasn't already been
+ /// loaded.
+ pub fn configure(
+ &mut self,
+ verbose: u32,
+ quiet: bool,
+ color: Option<&str>,
+ frozen: bool,
+ locked: bool,
+ offline: bool,
+ target_dir: &Option<PathBuf>,
+ unstable_flags: &[String],
+ cli_config: &[String],
+ ) -> CargoResult<()> {
+ for warning in self
+ .unstable_flags
+ .parse(unstable_flags, self.nightly_features_allowed)?
+ {
+ self.shell().warn(warning)?;
+ }
+ if !unstable_flags.is_empty() {
+ // store a copy of the cli flags separately for `load_unstable_flags_from_config`
+ // (we might also need it again for `reload_rooted_at`)
+ self.unstable_flags_cli = Some(unstable_flags.to_vec());
+ }
+ if !cli_config.is_empty() {
+ self.cli_config = Some(cli_config.iter().map(|s| s.to_string()).collect());
+ self.merge_cli_args()?;
+ }
+ if self.unstable_flags.config_include {
+ // If the config was already loaded (like when fetching the
+ // `[alias]` table), it was loaded with includes disabled because
+ // the `unstable_flags` hadn't been set up, yet. Any values
+ // fetched before this step will not process includes, but that
+ // should be fine (`[alias]` is one of the only things loaded
+ // before configure). This can be removed when stabilized.
+ self.reload_rooted_at(self.cwd.clone())?;
+ }
+ let extra_verbose = verbose >= 2;
+ let verbose = verbose != 0;
+
+ // Ignore errors in the configuration files. We don't want basic
+ // commands like `cargo version` to error out due to config file
+ // problems.
+ let term = self.get::<TermConfig>("term").unwrap_or_default();
+
+ let color = color.or_else(|| term.color.as_deref());
+
+ // The command line takes precedence over configuration.
+ let verbosity = match (verbose, quiet) {
+ (true, true) => bail!("cannot set both --verbose and --quiet"),
+ (true, false) => Verbosity::Verbose,
+ (false, true) => Verbosity::Quiet,
+ (false, false) => match (term.verbose, term.quiet) {
+ (Some(true), Some(true)) => {
+ bail!("cannot set both `term.verbose` and `term.quiet`")
+ }
+ (Some(true), _) => Verbosity::Verbose,
+ (_, Some(true)) => Verbosity::Quiet,
+ _ => Verbosity::Normal,
+ },
+ };
+
+ let cli_target_dir = target_dir.as_ref().map(|dir| Filesystem::new(dir.clone()));
+
+ self.shell().set_verbosity(verbosity);
+ self.shell().set_color_choice(color)?;
+ self.progress_config = term.progress.unwrap_or_default();
+ self.extra_verbose = extra_verbose;
+ self.frozen = frozen;
+ self.locked = locked;
+ self.offline = offline
+ || self
+ .net_config()
+ .ok()
+ .and_then(|n| n.offline)
+ .unwrap_or(false);
+ self.target_dir = cli_target_dir;
+
+ self.load_unstable_flags_from_config()?;
+
+ Ok(())
+ }
+
+ fn load_unstable_flags_from_config(&mut self) -> CargoResult<()> {
+ // If nightly features are enabled, allow setting Z-flags from config
+ // using the `unstable` table. Ignore that block otherwise.
+ if self.nightly_features_allowed {
+ self.unstable_flags = self
+ .get::<Option<CliUnstable>>("unstable")?
+ .unwrap_or_default();
+ if let Some(unstable_flags_cli) = &self.unstable_flags_cli {
+ // NB. It's not ideal to parse these twice, but doing it again here
+ // allows the CLI to override config files for both enabling
+ // and disabling, and doing it up top allows CLI Zflags to
+ // control config parsing behavior.
+ self.unstable_flags.parse(unstable_flags_cli, true)?;
+ }
+ }
+
+ Ok(())
+ }
+
+ pub fn cli_unstable(&self) -> &CliUnstable {
+ &self.unstable_flags
+ }
+
+ pub fn extra_verbose(&self) -> bool {
+ self.extra_verbose
+ }
+
+ pub fn network_allowed(&self) -> bool {
+ !self.frozen() && !self.offline()
+ }
+
+ pub fn offline(&self) -> bool {
+ self.offline
+ }
+
+ pub fn frozen(&self) -> bool {
+ self.frozen
+ }
+
+ pub fn locked(&self) -> bool {
+ self.locked
+ }
+
+ pub fn lock_update_allowed(&self) -> bool {
+ !self.frozen && !self.locked
+ }
+
+ /// Loads configuration from the filesystem.
+ pub fn load_values(&self) -> CargoResult<HashMap<String, ConfigValue>> {
+ self.load_values_from(&self.cwd)
+ }
+
+ /// Like [`load_values`](Config::load_values) but without merging config values.
+ ///
+ /// This is primarily crafted for the `cargo config` command.
+ pub(crate) fn load_values_unmerged(&self) -> CargoResult<Vec<ConfigValue>> {
+ let mut result = Vec::new();
+ let mut seen = HashSet::new();
+ let home = self.home_path.clone().into_path_unlocked();
+ self.walk_tree(&self.cwd, &home, |path| {
+ let mut cv = self._load_file(path, &mut seen, false, WhyLoad::FileDiscovery)?;
+ if self.cli_unstable().config_include {
+ self.load_unmerged_include(&mut cv, &mut seen, &mut result)?;
+ }
+ result.push(cv);
+ Ok(())
+ })
+ .with_context(|| "could not load Cargo configuration")?;
+ Ok(result)
+ }
+
+ /// Like [`load_includes`](Config::load_includes) but without merging config values.
+ ///
+ /// This is primarily crafted for the `cargo config` command.
+ fn load_unmerged_include(
+ &self,
+ cv: &mut CV,
+ seen: &mut HashSet<PathBuf>,
+ output: &mut Vec<CV>,
+ ) -> CargoResult<()> {
+ let includes = self.include_paths(cv, false)?;
+ for (path, abs_path, def) in includes {
+ let mut cv = self
+ ._load_file(&abs_path, seen, false, WhyLoad::FileDiscovery)
+ .with_context(|| {
+ format!("failed to load config include `{}` from `{}`", path, def)
+ })?;
+ self.load_unmerged_include(&mut cv, seen, output)?;
+ output.push(cv);
+ }
+ Ok(())
+ }
+
+ /// Starts config file discovery from a path and merges all config values found.
+ fn load_values_from(&self, path: &Path) -> CargoResult<HashMap<String, ConfigValue>> {
+ // This definition path is ignored; this is just a temporary container
+ // representing the entire file.
+ let mut cfg = CV::Table(HashMap::new(), Definition::Path(PathBuf::from(".")));
+ let home = self.home_path.clone().into_path_unlocked();
+
+ self.walk_tree(path, &home, |path| {
+ let value = self.load_file(path)?;
+ cfg.merge(value, false).with_context(|| {
+ format!("failed to merge configuration at `{}`", path.display())
+ })?;
+ Ok(())
+ })
+ .with_context(|| "could not load Cargo configuration")?;
+
+ match cfg {
+ CV::Table(map, _) => Ok(map),
+ _ => unreachable!(),
+ }
+ }
+
+ /// Loads a config value from a path.
+ ///
+ /// This is used during config file discovery.
+ fn load_file(&self, path: &Path) -> CargoResult<ConfigValue> {
+ self._load_file(path, &mut HashSet::new(), true, WhyLoad::FileDiscovery)
+ }
+
+ /// Loads a config value from a path with options.
+ ///
+ /// This is the actual implementation of loading a config value from a path.
+ ///
+ /// * `includes` determines whether to load configs from [`config-include`].
+ /// * `seen` is used to check for cyclic includes.
+ /// * `why_load` tells why a config is being loaded.
+ ///
+ /// [`config-include`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#config-include
+ fn _load_file(
+ &self,
+ path: &Path,
+ seen: &mut HashSet<PathBuf>,
+ includes: bool,
+ why_load: WhyLoad,
+ ) -> CargoResult<ConfigValue> {
+ if !seen.insert(path.to_path_buf()) {
+ bail!(
+ "config `include` cycle detected with path `{}`",
+ path.display()
+ );
+ }
+ let contents = fs::read_to_string(path)
+ .with_context(|| format!("failed to read configuration file `{}`", path.display()))?;
+ let toml = cargo_toml::parse_document(&contents, path, self).with_context(|| {
+ format!("could not parse TOML configuration in `{}`", path.display())
+ })?;
+ let def = match why_load {
+ WhyLoad::Cli => Definition::Cli(Some(path.into())),
+ WhyLoad::FileDiscovery => Definition::Path(path.into()),
+ };
+ let value = CV::from_toml(def, toml::Value::Table(toml)).with_context(|| {
+ format!(
+ "failed to load TOML configuration from `{}`",
+ path.display()
+ )
+ })?;
+ if includes {
+ self.load_includes(value, seen, why_load)
+ } else {
+ Ok(value)
+ }
+ }
+
+ /// Load any `include` files listed in the given `value`.
+ ///
+ /// Returns `value` with the given include files merged into it.
+ ///
+ /// * `seen` is used to check for cyclic includes.
+ /// * `why_load` tells why a config is being loaded.
+ fn load_includes(
+ &self,
+ mut value: CV,
+ seen: &mut HashSet<PathBuf>,
+ why_load: WhyLoad,
+ ) -> CargoResult<CV> {
+ // Get the list of files to load.
+ let includes = self.include_paths(&mut value, true)?;
+ // Check unstable.
+ if !self.cli_unstable().config_include {
+ return Ok(value);
+ }
+ // Accumulate all values here.
+ let mut root = CV::Table(HashMap::new(), value.definition().clone());
+ for (path, abs_path, def) in includes {
+ self._load_file(&abs_path, seen, true, why_load)
+ .and_then(|include| root.merge(include, true))
+ .with_context(|| {
+ format!("failed to load config include `{}` from `{}`", path, def)
+ })?;
+ }
+ root.merge(value, true)?;
+ Ok(root)
+ }
+
+ /// Converts the `include` config value to a list of absolute paths.
+ fn include_paths(
+ &self,
+ cv: &mut CV,
+ remove: bool,
+ ) -> CargoResult<Vec<(String, PathBuf, Definition)>> {
+ let abs = |path: &str, def: &Definition| -> (String, PathBuf, Definition) {
+ let abs_path = match def {
+ Definition::Path(p) | Definition::Cli(Some(p)) => p.parent().unwrap().join(&path),
+ Definition::Environment(_) | Definition::Cli(None) => self.cwd().join(&path),
+ };
+ (path.to_string(), abs_path, def.clone())
+ };
+ let table = match cv {
+ CV::Table(table, _def) => table,
+ _ => unreachable!(),
+ };
+ let owned;
+ let include = if remove {
+ owned = table.remove("include");
+ owned.as_ref()
+ } else {
+ table.get("include")
+ };
+ let includes = match include {
+ Some(CV::String(s, def)) => {
+ vec![abs(s, def)]
+ }
+ Some(CV::List(list, _def)) => list.iter().map(|(s, def)| abs(s, def)).collect(),
+ Some(other) => bail!(
+ "`include` expected a string or list, but found {} in `{}`",
+ other.desc(),
+ other.definition()
+ ),
+ None => {
+ return Ok(Vec::new());
+ }
+ };
+ Ok(includes)
+ }
+
+ /// Parses the CLI config args and returns them as a table.
+ pub(crate) fn cli_args_as_table(&self) -> CargoResult<ConfigValue> {
+ let mut loaded_args = CV::Table(HashMap::new(), Definition::Cli(None));
+ let cli_args = match &self.cli_config {
+ Some(cli_args) => cli_args,
+ None => return Ok(loaded_args),
+ };
+ let mut seen = HashSet::new();
+ for arg in cli_args {
+ let arg_as_path = self.cwd.join(arg);
+ let tmp_table = if !arg.is_empty() && arg_as_path.exists() {
+ // --config path_to_file
+ let str_path = arg_as_path
+ .to_str()
+ .ok_or_else(|| {
+ anyhow::format_err!("config path {:?} is not utf-8", arg_as_path)
+ })?
+ .to_string();
+ self._load_file(&self.cwd().join(&str_path), &mut seen, true, WhyLoad::Cli)
+ .with_context(|| format!("failed to load config from `{}`", str_path))?
+ } else {
+ // We only want to allow "dotted key" (see https://toml.io/en/v1.0.0#keys)
+ // expressions followed by a value that's not an "inline table"
+ // (https://toml.io/en/v1.0.0#inline-table). Easiest way to check for that is to
+ // parse the value as a toml_edit::Document, and check that the (single)
+ // inner-most table is set via dotted keys.
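+ // For example, `build.jobs = 2` is accepted, while an inline table such
+ // as `build = { jobs = 2 }` or an array of tables is rejected by the
+ // checks below.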
+ let doc: toml_edit::Document = arg.parse().with_context(|| {
+ format!("failed to parse value from --config argument `{arg}` as a dotted key expression")
+ })?;
+ fn non_empty_decor(d: &toml_edit::Decor) -> bool {
+ d.prefix()
+ .map_or(false, |p| !p.as_str().unwrap_or_default().trim().is_empty())
+ || d.suffix()
+ .map_or(false, |s| !s.as_str().unwrap_or_default().trim().is_empty())
+ }
+ let ok = {
+ let mut got_to_value = false;
+ let mut table = doc.as_table();
+ let mut is_root = true;
+ while table.is_dotted() || is_root {
+ is_root = false;
+ if table.len() != 1 {
+ break;
+ }
+ let (k, n) = table.iter().next().expect("len() == 1 above");
+ match n {
+ Item::Table(nt) => {
+ if table.key_decor(k).map_or(false, non_empty_decor)
+ || non_empty_decor(nt.decor())
+ {
+ bail!(
+ "--config argument `{arg}` \
+ includes non-whitespace decoration"
+ )
+ }
+ table = nt;
+ }
+ Item::Value(v) if v.is_inline_table() => {
+ bail!(
+ "--config argument `{arg}` \
+ sets a value to an inline table, which is not accepted"
+ );
+ }
+ Item::Value(v) => {
+ if non_empty_decor(v.decor()) {
+ bail!(
+ "--config argument `{arg}` \
+ includes non-whitespace decoration"
+ )
+ }
+ got_to_value = true;
+ break;
+ }
+ Item::ArrayOfTables(_) => {
+ bail!(
+ "--config argument `{arg}` \
+ sets a value to an array of tables, which is not accepted"
+ );
+ }
+
+ Item::None => {
+ bail!("--config argument `{arg}` doesn't provide a value")
+ }
+ }
+ }
+ got_to_value
+ };
+ if !ok {
+ bail!(
+ "--config argument `{arg}` was not a TOML dotted key expression (such as `build.jobs = 2`)"
+ );
+ }
+
+ let toml_v: toml::Value = toml::Value::deserialize(doc.into_deserializer())
+ .with_context(|| {
+ format!("failed to parse value from --config argument `{arg}`")
+ })?;
+
+ if toml_v
+ .get("registry")
+ .and_then(|v| v.as_table())
+ .and_then(|t| t.get("token"))
+ .is_some()
+ {
+ bail!("registry.token cannot be set through --config for security reasons");
+ } else if let Some((k, _)) = toml_v
+ .get("registries")
+ .and_then(|v| v.as_table())
+ .and_then(|t| t.iter().find(|(_, v)| v.get("token").is_some()))
+ {
+ bail!(
+ "registries.{}.token cannot be set through --config for security reasons",
+ k
+ );
+ }
+
+ if toml_v
+ .get("registry")
+ .and_then(|v| v.as_table())
+ .and_then(|t| t.get("secret-key"))
+ .is_some()
+ {
+ bail!(
+ "registry.secret-key cannot be set through --config for security reasons"
+ );
+ } else if let Some((k, _)) = toml_v
+ .get("registries")
+ .and_then(|v| v.as_table())
+ .and_then(|t| t.iter().find(|(_, v)| v.get("secret-key").is_some()))
+ {
+ bail!(
+ "registries.{}.secret-key cannot be set through --config for security reasons",
+ k
+ );
+ }
+
+ CV::from_toml(Definition::Cli(None), toml_v)
+ .with_context(|| format!("failed to convert --config argument `{arg}`"))?
+ };
+ let tmp_table = self
+ .load_includes(tmp_table, &mut HashSet::new(), WhyLoad::Cli)
+ .with_context(|| "failed to load --config include".to_string())?;
+ loaded_args
+ .merge(tmp_table, true)
+ .with_context(|| format!("failed to merge --config argument `{arg}`"))?;
+ }
+ Ok(loaded_args)
+ }
+
+ /// Add config arguments passed on the command line.
+ fn merge_cli_args(&mut self) -> CargoResult<()> {
+ let loaded_map = match self.cli_args_as_table()? {
+ CV::Table(table, _def) => table,
+ _ => unreachable!(),
+ };
+ let values = self.values_mut()?;
+ for (key, value) in loaded_map.into_iter() {
+ match values.entry(key) {
+ Vacant(entry) => {
+ entry.insert(value);
+ }
+ Occupied(mut entry) => entry.get_mut().merge(value, true).with_context(|| {
+ format!(
+ "failed to merge --config key `{}` into `{}`",
+ entry.key(),
+ entry.get().definition(),
+ )
+ })?,
+ };
+ }
+ Ok(())
+ }
+
+ /// The purpose of this function is to aid in the transition to using
+ /// .toml extensions on Cargo's config files, which were historically not used.
+ /// Both 'config.toml' and 'credentials.toml' should be valid with or without extension.
+ /// When both exist, we want to prefer the one without an extension for
+ /// backwards compatibility, but warn the user appropriately.
+ fn get_file_path(
+ &self,
+ dir: &Path,
+ filename_without_extension: &str,
+ warn: bool,
+ ) -> CargoResult<Option<PathBuf>> {
+ let possible = dir.join(filename_without_extension);
+ let possible_with_extension = dir.join(format!("{}.toml", filename_without_extension));
+
+ if possible.exists() {
+ if warn && possible_with_extension.exists() {
+ // We don't want to print a warning if the version without the extension
+ // is just a symlink to the version WITH an extension, which people may
+ // do to support multiple Cargo versions at once without being warned.
+ let skip_warning = if let Ok(target_path) = fs::read_link(&possible) {
+ target_path == possible_with_extension
+ } else {
+ false
+ };
+
+ if !skip_warning {
+ self.shell().warn(format!(
+ "Both `{}` and `{}` exist. Using `{}`",
+ possible.display(),
+ possible_with_extension.display(),
+ possible.display()
+ ))?;
+ }
+ }
+
+ Ok(Some(possible))
+ } else if possible_with_extension.exists() {
+ Ok(Some(possible_with_extension))
+ } else {
+ Ok(None)
+ }
+ }
+
+ fn walk_tree<F>(&self, pwd: &Path, home: &Path, mut walk: F) -> CargoResult<()>
+ where
+ F: FnMut(&Path) -> CargoResult<()>,
+ {
+ let mut stash: HashSet<PathBuf> = HashSet::new();
+
+ for current in paths::ancestors(pwd, self.search_stop_path.as_deref()) {
+ if let Some(path) = self.get_file_path(&current.join(".cargo"), "config", true)? {
+ walk(&path)?;
+ stash.insert(path);
+ }
+ }
+
+ // Once we're done, also walk the home directory even if it's not in our
+ // history, to be sure we pick up that standard location for configuration
+ // information.
+ if let Some(path) = self.get_file_path(home, "config", true)? {
+ if !stash.contains(&path) {
+ walk(&path)?;
+ }
+ }
+
+ Ok(())
+ }
+
+ /// Gets the index for a registry.
+ pub fn get_registry_index(&self, registry: &str) -> CargoResult<Url> {
+ validate_package_name(registry, "registry name", "")?;
+ if let Some(index) = self.get_string(&format!("registries.{}.index", registry))? {
+ self.resolve_registry_index(&index).with_context(|| {
+ format!(
+ "invalid index URL for registry `{}` defined in {}",
+ registry, index.definition
+ )
+ })
+ } else {
+ bail!("no index found for registry: `{}`", registry);
+ }
+ }
+
+ /// Returns an error if `registry.index` is set.
+ pub fn check_registry_index_not_set(&self) -> CargoResult<()> {
+ if self.get_string("registry.index")?.is_some() {
+ bail!(
+ "the `registry.index` config value is no longer supported\n\
+ Use `[source]` replacement to alter the default index for crates.io."
+ );
+ }
+ Ok(())
+ }
+
+ fn resolve_registry_index(&self, index: &Value<String>) -> CargoResult<Url> {
+ // This handles relative file: URLs, relative to the config definition.
+ let base = index
+ .definition
+ .root(self)
+ .join("truncated-by-url_with_base");
+ // Parse val to check it is a URL, not a relative path without a protocol.
+ let _parsed = index.val.into_url()?;
+ let url = index.val.into_url_with_base(Some(&*base))?;
+ if url.password().is_some() {
+ bail!("registry URLs may not contain passwords");
+ }
+ Ok(url)
+ }
+
+ /// Loads credentials config from the credentials file, if present.
+ ///
+ /// The credentials are loaded into a separate field to enable them
+ /// to be lazy-loaded after the main configuration has been loaded,
+ /// without requiring `mut` access to the `Config`.
+ ///
+ /// If the credentials are already loaded, this function does nothing.
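+ ///
+ /// Backwards-compatibility sketch (illustrative TOML, not real credentials):
+ /// a top-level `token = "..."` from the old layout is folded into the new
+ /// `[registry]` table form:
+ ///
+ /// ```toml
+ /// [registry]
+ /// token = "..."
+ /// ```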
+ pub fn load_credentials(&self) -> CargoResult<()> {
+ if self.credential_values.filled() {
+ return Ok(());
+ }
+
+ let home_path = self.home_path.clone().into_path_unlocked();
+ let credentials = match self.get_file_path(&home_path, "credentials", true)? {
+ Some(credentials) => credentials,
+ None => return Ok(()),
+ };
+
+ let mut value = self.load_file(&credentials)?;
+ // Backwards compatibility for old `.cargo/credentials` layout.
+ {
+ let (value_map, def) = match value {
+ CV::Table(ref mut value, ref def) => (value, def),
+ _ => unreachable!(),
+ };
+
+ if let Some(token) = value_map.remove("token") {
+ if let Vacant(entry) = value_map.entry("registry".into()) {
+ let map = HashMap::from([("token".into(), token)]);
+ let table = CV::Table(map, def.clone());
+ entry.insert(table);
+ }
+ }
+ }
+
+ let mut credential_values = HashMap::new();
+ if let CV::Table(map, _) = value {
+ let base_map = self.values()?;
+ for (k, v) in map {
+ let entry = match base_map.get(&k) {
+ Some(base_entry) => {
+ let mut entry = base_entry.clone();
+ entry.merge(v, true)?;
+ entry
+ }
+ None => v,
+ };
+ credential_values.insert(k, entry);
+ }
+ }
+ self.credential_values
+ .fill(credential_values)
+ .expect("was not filled at beginning of the function");
+ Ok(())
+ }
+
+ /// Looks for a path for `tool` in an environment variable or the given config, and returns
+ /// `None` if it's not present.
+ fn maybe_get_tool(
+ &self,
+ tool: &str,
+ from_config: &Option<ConfigRelativePath>,
+ ) -> Option<PathBuf> {
+ let var = tool.to_uppercase();
+
+ match self.get_env_os(&var).as_ref().and_then(|s| s.to_str()) {
+ Some(tool_path) => {
+ let maybe_relative = tool_path.contains('/') || tool_path.contains('\\');
+ let path = if maybe_relative {
+ self.cwd.join(tool_path)
+ } else {
+ PathBuf::from(tool_path)
+ };
+ Some(path)
+ }
+
+ None => from_config.as_ref().map(|p| p.resolve_program(self)),
+ }
+ }
+
+ /// Looks for a path for `tool` in an environment variable or config path, defaulting to `tool`
+ /// as a path.
+ fn get_tool(&self, tool: &str, from_config: &Option<ConfigRelativePath>) -> PathBuf {
+ self.maybe_get_tool(tool, from_config)
+ .unwrap_or_else(|| PathBuf::from(tool))
+ }
+
+ pub fn jobserver_from_env(&self) -> Option<&jobserver::Client> {
+ self.jobserver.as_ref()
+ }
+
+ pub fn http(&self) -> CargoResult<&RefCell<Easy>> {
+ let http = self
+ .easy
+ .try_borrow_with(|| ops::http_handle(self).map(RefCell::new))?;
+ {
+ let mut http = http.borrow_mut();
+ http.reset();
+ let timeout = ops::configure_http_handle(self, &mut http)?;
+ timeout.configure(&mut http)?;
+ }
+ Ok(http)
+ }
+
+ pub fn http_config(&self) -> CargoResult<&CargoHttpConfig> {
+ self.http_config
+ .try_borrow_with(|| self.get::<CargoHttpConfig>("http"))
+ }
+
+ pub fn future_incompat_config(&self) -> CargoResult<&CargoFutureIncompatConfig> {
+ self.future_incompat_config
+ .try_borrow_with(|| self.get::<CargoFutureIncompatConfig>("future-incompat-report"))
+ }
+
+ pub fn net_config(&self) -> CargoResult<&CargoNetConfig> {
+ self.net_config
+ .try_borrow_with(|| self.get::<CargoNetConfig>("net"))
+ }
+
+ pub fn build_config(&self) -> CargoResult<&CargoBuildConfig> {
+ self.build_config
+ .try_borrow_with(|| self.get::<CargoBuildConfig>("build"))
+ }
+
+ pub fn progress_config(&self) -> &ProgressConfig {
+ &self.progress_config
+ }
+
+ pub fn env_config(&self) -> CargoResult<&EnvConfig> {
+ let env_config = self
+ .env_config
+ .try_borrow_with(|| self.get::<EnvConfig>("env"))?;
+
+ if env_config.get("CARGO_HOME").is_some() {
+ bail!("setting the `CARGO_HOME` environment variable is not supported in the `[env]` configuration table")
+ }
+
+ Ok(env_config)
+ }
+
+ /// This is used to validate that the `term` table has valid syntax.
+ ///
+ /// This is necessary because loading the term settings happens very
+ /// early, and in some situations (like `cargo version`) we don't want to
+ /// fail if there are problems with the config file.
+ pub fn validate_term_config(&self) -> CargoResult<()> {
+ drop(self.get::<TermConfig>("term")?);
+ Ok(())
+ }
+
+ /// Returns a list of [target.'cfg()'] tables.
+ ///
+ /// The list is sorted by the table name.
+ pub fn target_cfgs(&self) -> CargoResult<&Vec<(String, TargetCfgConfig)>> {
+ self.target_cfgs
+ .try_borrow_with(|| target::load_target_cfgs(self))
+ }
+
+ pub fn doc_extern_map(&self) -> CargoResult<&RustdocExternMap> {
+ // Note: This does not support environment variables. The `Unit`
+ // fundamentally does not have access to the registry name, so there is
+ // nothing to query. Plumbing the name into SourceId is quite challenging.
+ self.doc_extern_map
+ .try_borrow_with(|| self.get::<RustdocExternMap>("doc.extern-map"))
+ }
+
+ /// Returns true if the `[target]` table should be applied to host targets.
+ pub fn target_applies_to_host(&self) -> CargoResult<bool> {
+ target::get_target_applies_to_host(self)
+ }
+
+ /// Returns the `[host]` table definition for the given target triple.
+ pub fn host_cfg_triple(&self, target: &str) -> CargoResult<TargetConfig> {
+ target::load_host_triple(self, target)
+ }
+
+ /// Returns the `[target]` table definition for the given target triple.
+ pub fn target_cfg_triple(&self, target: &str) -> CargoResult<TargetConfig> {
+ target::load_target_triple(self, target)
+ }
+
+ pub fn crates_io_source_id<F>(&self, f: F) -> CargoResult<SourceId>
+ where
+ F: FnMut() -> CargoResult<SourceId>,
+ {
+ Ok(*(self.crates_io_source_id.try_borrow_with(f)?))
+ }
+
+ pub fn creation_time(&self) -> Instant {
+ self.creation_time
+ }
+
+ /// Retrieves a config variable.
+ ///
+ /// This supports most serde `Deserialize` types. Examples:
+ ///
+ /// ```rust,ignore
+ /// let v: Option<u32> = config.get("some.nested.key")?;
+ /// let v: Option<MyStruct> = config.get("some.key")?;
+ /// let v: Option<HashMap<String, MyStruct>> = config.get("foo")?;
+ /// ```
+ ///
+ /// The key may be a dotted key, but this does NOT support TOML key
+ /// quoting. Avoid key components that may have dots. For example,
+ /// `foo.'a.b'.bar` does not work if you try to fetch `foo.'a.b'`. You can
+ /// fetch `foo` if it is a map, though.
+ pub fn get<'de, T: serde::de::Deserialize<'de>>(&self, key: &str) -> CargoResult<T> {
+ let d = Deserializer {
+ config: self,
+ key: ConfigKey::from_str(key),
+ env_prefix_ok: true,
+ };
+ T::deserialize(d).map_err(|e| e.into())
+ }
+
+ pub fn assert_package_cache_locked<'a>(&self, f: &'a Filesystem) -> &'a Path {
+ let ret = f.as_path_unlocked();
+ assert!(
+ self.package_cache_lock.borrow().is_some(),
+ "package cache lock is not currently held, Cargo forgot to call \
+ `acquire_package_cache_lock` before we got to this stack frame",
+ );
+ assert!(ret.starts_with(self.home_path.as_path_unlocked()));
+ ret
+ }
+
+ /// Acquires an exclusive lock on the global "package cache"
+ ///
+ /// This lock is global per-process and can be acquired recursively. An RAII
+ /// structure is returned to release the lock, and if this process
+ /// abnormally terminates the lock is also released.
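+ ///
+ /// Illustrative usage (a sketch, assuming `config` is a `Config` in scope):
+ ///
+ /// ```rust,ignore
+ /// let _lock = config.acquire_package_cache_lock()?;
+ /// // the package cache may be mutated here; the lock is released on drop
+ /// ```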
+ pub fn acquire_package_cache_lock(&self) -> CargoResult<PackageCacheLock<'_>> {
+ let mut slot = self.package_cache_lock.borrow_mut();
+ match *slot {
+ // We've already acquired the lock in this process, so simply bump
+ // the count and continue.
+ Some((_, ref mut cnt)) => {
+ *cnt += 1;
+ }
+ None => {
+ let path = ".package-cache";
+ let desc = "package cache";
+
+ // First, attempt to open an exclusive lock which is in general
+ // the purpose of this lock!
+ //
+ // If that fails because of a readonly filesystem or a
+ // permission error, though, then we don't really want to fail
+ // just because of this. All files that this lock protects are
+ // in subfolders, so they're assumed by Cargo to also be
+ // readonly or have invalid permissions for us to write to. If
+ // that's the case, then we don't really need to grab a lock in
+ // the first place here.
+ //
+ // Despite this we attempt to grab a readonly lock. This means
+ // that if our read-only folder is shared read-write with
+ // someone else on the system we should synchronize with them,
+ // but if we can't even do that then we did our best and we just
+ // keep on chugging elsewhere.
+ match self.home_path.open_rw(path, self, desc) {
+ Ok(lock) => *slot = Some((Some(lock), 1)),
+ Err(e) => {
+ if maybe_readonly(&e) {
+ let lock = self.home_path.open_ro(path, self, desc).ok();
+ *slot = Some((lock, 1));
+ return Ok(PackageCacheLock(self));
+ }
+
+ Err(e).with_context(|| "failed to acquire package cache lock")?;
+ }
+ }
+ }
+ }
+ return Ok(PackageCacheLock(self));
+
+ fn maybe_readonly(err: &anyhow::Error) -> bool {
+ err.chain().any(|err| {
+ if let Some(io) = err.downcast_ref::<io::Error>() {
+ if io.kind() == io::ErrorKind::PermissionDenied {
+ return true;
+ }
+
+ #[cfg(unix)]
+ return io.raw_os_error() == Some(libc::EROFS);
+ }
+
+ false
+ })
+ }
+ }
+
+ pub fn release_package_cache_lock(&self) {}
+}
+
+/// Internal error for serde errors.
+#[derive(Debug)]
+pub struct ConfigError {
+ error: anyhow::Error,
+ definition: Option<Definition>,
+}
+
+impl ConfigError {
+ fn new(message: String, definition: Definition) -> ConfigError {
+ ConfigError {
+ error: anyhow::Error::msg(message),
+ definition: Some(definition),
+ }
+ }
+
+ fn expected(key: &ConfigKey, expected: &str, found: &ConfigValue) -> ConfigError {
+ ConfigError {
+ error: anyhow!(
+ "`{}` expected {}, but found a {}",
+ key,
+ expected,
+ found.desc()
+ ),
+ definition: Some(found.definition().clone()),
+ }
+ }
+
+ fn missing(key: &ConfigKey) -> ConfigError {
+ ConfigError {
+ error: anyhow!("missing config key `{}`", key),
+ definition: None,
+ }
+ }
+
+ fn with_key_context(self, key: &ConfigKey, definition: Definition) -> ConfigError {
+ ConfigError {
+ error: anyhow::Error::from(self)
+ .context(format!("could not load config key `{}`", key)),
+ definition: Some(definition),
+ }
+ }
+}
+
+impl std::error::Error for ConfigError {
+ fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+ self.error.source()
+ }
+}
+
+impl fmt::Display for ConfigError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if let Some(definition) = &self.definition {
+ write!(f, "error in {}: {}", definition, self.error)
+ } else {
+ self.error.fmt(f)
+ }
+ }
+}
+
+impl serde::de::Error for ConfigError {
+ fn custom<T: fmt::Display>(msg: T) -> Self {
+ ConfigError {
+ error: anyhow::Error::msg(msg.to_string()),
+ definition: None,
+ }
+ }
+}
+
+impl From<anyhow::Error> for ConfigError {
+ fn from(error: anyhow::Error) -> Self {
+ ConfigError {
+ error,
+ definition: None,
+ }
+ }
+}
+
+#[derive(Eq, PartialEq, Clone)]
+pub enum ConfigValue {
+ Integer(i64, Definition),
+ String(String, Definition),
+ List(Vec<(String, Definition)>, Definition),
+ Table(HashMap<String, ConfigValue>, Definition),
+ Boolean(bool, Definition),
+}
+
+impl fmt::Debug for ConfigValue {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ CV::Integer(i, def) => write!(f, "{} (from {})", i, def),
+ CV::Boolean(b, def) => write!(f, "{} (from {})", b, def),
+ CV::String(s, def) => write!(f, "{} (from {})", s, def),
+ CV::List(list, def) => {
+ write!(f, "[")?;
+ for (i, (s, def)) in list.iter().enumerate() {
+ if i > 0 {
+ write!(f, ", ")?;
+ }
+ write!(f, "{} (from {})", s, def)?;
+ }
+ write!(f, "] (from {})", def)
+ }
+ CV::Table(table, _) => write!(f, "{:?}", table),
+ }
+ }
+}
+
+impl ConfigValue {
+ fn from_toml(def: Definition, toml: toml::Value) -> CargoResult<ConfigValue> {
+ match toml {
+ toml::Value::String(val) => Ok(CV::String(val, def)),
+ toml::Value::Boolean(b) => Ok(CV::Boolean(b, def)),
+ toml::Value::Integer(i) => Ok(CV::Integer(i, def)),
+ toml::Value::Array(val) => Ok(CV::List(
+ val.into_iter()
+ .map(|toml| match toml {
+ toml::Value::String(val) => Ok((val, def.clone())),
+ v => bail!("expected string but found {} in list", v.type_str()),
+ })
+ .collect::<CargoResult<_>>()?,
+ def,
+ )),
+ toml::Value::Table(val) => Ok(CV::Table(
+ val.into_iter()
+ .map(|(key, value)| {
+ let value = CV::from_toml(def.clone(), value)
+ .with_context(|| format!("failed to parse key `{}`", key))?;
+ Ok((key, value))
+ })
+ .collect::<CargoResult<_>>()?,
+ def,
+ )),
+ v => bail!(
+ "found TOML configuration value of unknown type `{}`",
+ v.type_str()
+ ),
+ }
+ }
+
+ fn into_toml(self) -> toml::Value {
+ match self {
+ CV::Boolean(s, _) => toml::Value::Boolean(s),
+ CV::String(s, _) => toml::Value::String(s),
+ CV::Integer(i, _) => toml::Value::Integer(i),
+ CV::List(l, _) => {
+ toml::Value::Array(l.into_iter().map(|(s, _)| toml::Value::String(s)).collect())
+ }
+ CV::Table(l, _) => {
+ toml::Value::Table(l.into_iter().map(|(k, v)| (k, v.into_toml())).collect())
+ }
+ }
+ }
+
+ /// Merge the given value into self.
+ ///
+ /// If `force` is true, primitive (non-container) types will override existing values.
+ /// If false, the original will be kept and the new value ignored.
+ ///
+ /// Container types (tables and arrays) are merged with existing values.
+ ///
+ /// Container and non-container types cannot be mixed.
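+ ///
+ /// Sketch of the intended behavior (illustrative, not a doctest):
+ ///
+ /// ```rust,ignore
+ /// // lists concatenate: ['x'] merged with ['y'] becomes ['x', 'y'];
+ /// // scalars keep the original unless `force` is true or the incoming
+ /// // definition has higher priority (CLI > environment > file).
+ /// existing.merge(incoming, force)?;
+ /// ```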
+ fn merge(&mut self, from: ConfigValue, force: bool) -> CargoResult<()> {
+ match (self, from) {
+ (&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => {
+ old.extend(mem::take(new).into_iter());
+ }
+ (&mut CV::Table(ref mut old, _), CV::Table(ref mut new, _)) => {
+ for (key, value) in mem::take(new) {
+ match old.entry(key.clone()) {
+ Occupied(mut entry) => {
+ let new_def = value.definition().clone();
+ let entry = entry.get_mut();
+ entry.merge(value, force).with_context(|| {
+ format!(
+ "failed to merge key `{}` between \
+ {} and {}",
+ key,
+ entry.definition(),
+ new_def,
+ )
+ })?;
+ }
+ Vacant(entry) => {
+ entry.insert(value);
+ }
+ };
+ }
+ }
+ // Allow switching types except for tables or arrays.
+ (expected @ &mut CV::List(_, _), found)
+ | (expected @ &mut CV::Table(_, _), found)
+ | (expected, found @ CV::List(_, _))
+ | (expected, found @ CV::Table(_, _)) => {
+ return Err(anyhow!(
+ "failed to merge config value from `{}` into `{}`: expected {}, but found {}",
+ found.definition(),
+ expected.definition(),
+ expected.desc(),
+ found.desc()
+ ));
+ }
+ (old, mut new) => {
+ if force || new.definition().is_higher_priority(old.definition()) {
+ mem::swap(old, &mut new);
+ }
+ }
+ }
+
+ Ok(())
+ }
+
+ pub fn i64(&self, key: &str) -> CargoResult<(i64, &Definition)> {
+ match self {
+ CV::Integer(i, def) => Ok((*i, def)),
+ _ => self.expected("integer", key),
+ }
+ }
+
+ pub fn string(&self, key: &str) -> CargoResult<(&str, &Definition)> {
+ match self {
+ CV::String(s, def) => Ok((s, def)),
+ _ => self.expected("string", key),
+ }
+ }
+
+ pub fn table(&self, key: &str) -> CargoResult<(&HashMap<String, ConfigValue>, &Definition)> {
+ match self {
+ CV::Table(table, def) => Ok((table, def)),
+ _ => self.expected("table", key),
+ }
+ }
+
+ pub fn list(&self, key: &str) -> CargoResult<&[(String, Definition)]> {
+ match self {
+ CV::List(list, _) => Ok(list),
+ _ => self.expected("list", key),
+ }
+ }
+
+ pub fn boolean(&self, key: &str) -> CargoResult<(bool, &Definition)> {
+ match self {
+ CV::Boolean(b, def) => Ok((*b, def)),
+ _ => self.expected("bool", key),
+ }
+ }
+
+ pub fn desc(&self) -> &'static str {
+ match *self {
+ CV::Table(..) => "table",
+ CV::List(..) => "array",
+ CV::String(..) => "string",
+ CV::Boolean(..) => "boolean",
+ CV::Integer(..) => "integer",
+ }
+ }
+
+ pub fn definition(&self) -> &Definition {
+ match self {
+ CV::Boolean(_, def)
+ | CV::Integer(_, def)
+ | CV::String(_, def)
+ | CV::List(_, def)
+ | CV::Table(_, def) => def,
+ }
+ }
+
+ fn expected<T>(&self, wanted: &str, key: &str) -> CargoResult<T> {
+ bail!(
+ "expected a {}, but found a {} for `{}` in {}",
+ wanted,
+ self.desc(),
+ key,
+ self.definition()
+ )
+ }
+}
+
+pub fn homedir(cwd: &Path) -> Option<PathBuf> {
+ ::home::cargo_home_with_cwd(cwd).ok()
+}
+
+pub fn save_credentials(
+ cfg: &Config,
+ token: Option<RegistryCredentialConfig>,
+ registry: &SourceId,
+) -> CargoResult<()> {
+ let registry = if registry.is_crates_io() {
+ None
+ } else {
+ let name = registry
+ .alt_registry_key()
+ .ok_or_else(|| internal("can't save credentials for anonymous registry"))?;
+ Some(name)
+ };
+
+ // If 'credentials' exists, write to that for backward compatibility reasons.
+ // Otherwise write to 'credentials.toml'. There's no need to print the
+ // warning here, because it would already be printed at load time.
+ let home_path = cfg.home_path.clone().into_path_unlocked();
+ let filename = match cfg.get_file_path(&home_path, "credentials", false)? {
+ Some(path) => match path.file_name() {
+ Some(filename) => Path::new(filename).to_owned(),
+ None => Path::new("credentials.toml").to_owned(),
+ },
+ None => Path::new("credentials.toml").to_owned(),
+ };
+
+ let mut file = {
+ cfg.home_path.create_dir()?;
+ cfg.home_path
+ .open_rw(filename, cfg, "credentials' config file")?
+ };
+
+ let mut contents = String::new();
+ file.read_to_string(&mut contents).with_context(|| {
+ format!(
+ "failed to read configuration file `{}`",
+ file.path().display()
+ )
+ })?;
+
+ let mut toml = cargo_toml::parse_document(&contents, file.path(), cfg)?;
+
+ // Move the old token location to the new one.
+ if let Some(token) = toml.remove("token") {
+ let map = HashMap::from([("token".to_string(), token)]);
+ toml.insert("registry".into(), map.into());
+ }
+
+ if let Some(token) = token {
+ // login
+
+ let path_def = Definition::Path(file.path().to_path_buf());
+ let (key, mut value) = match token {
+ RegistryCredentialConfig::Token(token) => {
+ // login with token
+
+ let key = "token".to_string();
+ let value = ConfigValue::String(token.expose(), path_def.clone());
+ let map = HashMap::from([(key, value)]);
+ let table = CV::Table(map, path_def.clone());
+
+ if let Some(registry) = registry {
+ let map = HashMap::from([(registry.to_string(), table)]);
+ ("registries".into(), CV::Table(map, path_def.clone()))
+ } else {
+ ("registry".into(), table)
+ }
+ }
+ RegistryCredentialConfig::AsymmetricKey((secret_key, key_subject)) => {
+ // login with key
+
+ let key = "secret-key".to_string();
+ let value = ConfigValue::String(secret_key.expose(), path_def.clone());
+ let mut map = HashMap::from([(key, value)]);
+ if let Some(key_subject) = key_subject {
+ let key = "secret-key-subject".to_string();
+ let value = ConfigValue::String(key_subject, path_def.clone());
+ map.insert(key, value);
+ }
+ let table = CV::Table(map, path_def.clone());
+
+ if let Some(registry) = registry {
+ let map = HashMap::from([(registry.to_string(), table)]);
+ ("registries".into(), CV::Table(map, path_def.clone()))
+ } else {
+ ("registry".into(), table)
+ }
+ }
+ _ => unreachable!(),
+ };
+
+ if registry.is_some() {
+ if let Some(table) = toml.remove("registries") {
+ let v = CV::from_toml(path_def, table)?;
+ value.merge(v, false)?;
+ }
+ }
+ toml.insert(key, value.into_toml());
+ } else {
+ // logout
+ if let Some(registry) = registry {
+ if let Some(registries) = toml.get_mut("registries") {
+ if let Some(reg) = registries.get_mut(registry) {
+ let rtable = reg.as_table_mut().ok_or_else(|| {
+ format_err!("expected `[registries.{}]` to be a table", registry)
+ })?;
+ rtable.remove("token");
+ rtable.remove("secret-key");
+ rtable.remove("secret-key-subject");
+ }
+ }
+ } else if let Some(registry) = toml.get_mut("registry") {
+ let reg_table = registry
+ .as_table_mut()
+ .ok_or_else(|| format_err!("expected `[registry]` to be a table"))?;
+ reg_table.remove("token");
+ reg_table.remove("secret-key");
+ reg_table.remove("secret-key-subject");
+ }
+ }
+
+ let contents = toml.to_string();
+ file.seek(SeekFrom::Start(0))?;
+ file.write_all(contents.as_bytes())
+ .with_context(|| format!("failed to write to `{}`", file.path().display()))?;
+ file.file().set_len(contents.len() as u64)?;
+ set_permissions(file.file(), 0o600)
+ .with_context(|| format!("failed to set permissions of `{}`", file.path().display()))?;
+
+ return Ok(());
+
+ #[cfg(unix)]
+ fn set_permissions(file: &File, mode: u32) -> CargoResult<()> {
+ use std::os::unix::fs::PermissionsExt;
+
+ let mut perms = file.metadata()?.permissions();
+ perms.set_mode(mode);
+ file.set_permissions(perms)?;
+ Ok(())
+ }
+
+ #[cfg(not(unix))]
+ #[allow(unused)]
+ fn set_permissions(file: &File, mode: u32) -> CargoResult<()> {
+ Ok(())
+ }
+}
+
+pub struct PackageCacheLock<'a>(&'a Config);
+
+impl Drop for PackageCacheLock<'_> {
+ fn drop(&mut self) {
+ let mut slot = self.0.package_cache_lock.borrow_mut();
+ let (_, cnt) = slot.as_mut().unwrap();
+ *cnt -= 1;
+ if *cnt == 0 {
+ *slot = None;
+ }
+ }
+}
+
+#[derive(Debug, Default, Deserialize, PartialEq)]
+#[serde(rename_all = "kebab-case")]
+pub struct CargoHttpConfig {
+ pub proxy: Option<String>,
+ pub low_speed_limit: Option<u32>,
+ pub timeout: Option<u64>,
+ pub cainfo: Option<ConfigRelativePath>,
+ pub check_revoke: Option<bool>,
+ pub user_agent: Option<String>,
+ pub debug: Option<bool>,
+ pub multiplexing: Option<bool>,
+ pub ssl_version: Option<SslVersionConfig>,
+}
+
+#[derive(Debug, Default, Deserialize, PartialEq)]
+#[serde(rename_all = "kebab-case")]
+pub struct CargoFutureIncompatConfig {
+ frequency: Option<CargoFutureIncompatFrequencyConfig>,
+}
+
+#[derive(Debug, Default, Deserialize, PartialEq)]
+#[serde(rename_all = "kebab-case")]
+pub enum CargoFutureIncompatFrequencyConfig {
+ #[default]
+ Always,
+ Never,
+}
+
+impl CargoFutureIncompatConfig {
+ pub fn should_display_message(&self) -> bool {
+ use CargoFutureIncompatFrequencyConfig::*;
+
+ let frequency = self.frequency.as_ref().unwrap_or(&Always);
+ match frequency {
+ Always => true,
+ Never => false,
+ }
+ }
+}
+
+/// Configuration for `ssl-version` in `http` section
+/// There are two ways to configure:
+///
+/// ```text
+/// [http]
+/// ssl-version = "tlsv1.3"
+/// ```
+///
+/// ```text
+/// [http]
+/// ssl-version.min = "tlsv1.2"
+/// ssl-version.max = "tlsv1.3"
+/// ```
+#[derive(Clone, Debug, Deserialize, PartialEq)]
+#[serde(untagged)]
+pub enum SslVersionConfig {
+ Single(String),
+ Range(SslVersionConfigRange),
+}
+
+#[derive(Clone, Debug, Deserialize, PartialEq)]
+pub struct SslVersionConfigRange {
+ pub min: Option<String>,
+ pub max: Option<String>,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "kebab-case")]
+pub struct CargoNetConfig {
+ pub retry: Option<u32>,
+ pub offline: Option<bool>,
+ pub git_fetch_with_cli: Option<bool>,
+ pub ssh: Option<CargoSshConfig>,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "kebab-case")]
+pub struct CargoSshConfig {
+ pub known_hosts: Option<Vec<Value<String>>>,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "kebab-case")]
+pub struct CargoBuildConfig {
+ // deprecated, but preserved for compatibility
+ pub pipelining: Option<bool>,
+ pub dep_info_basedir: Option<ConfigRelativePath>,
+ pub target_dir: Option<ConfigRelativePath>,
+ pub incremental: Option<bool>,
+ pub target: Option<BuildTargetConfig>,
+ pub jobs: Option<i32>,
+ pub rustflags: Option<StringList>,
+ pub rustdocflags: Option<StringList>,
+ pub rustc_wrapper: Option<ConfigRelativePath>,
+ pub rustc_workspace_wrapper: Option<ConfigRelativePath>,
+ pub rustc: Option<ConfigRelativePath>,
+ pub rustdoc: Option<ConfigRelativePath>,
+ pub out_dir: Option<ConfigRelativePath>,
+}
+
+/// Configuration for `build.target`.
+///
+/// Accepts any of the following forms:
+///
+/// ```toml
+/// target = "a"
+/// target = ["a"]
+/// target = ["a", "b"]
+/// ```
+#[derive(Debug, Deserialize)]
+#[serde(transparent)]
+pub struct BuildTargetConfig {
+ inner: Value<BuildTargetConfigInner>,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(untagged)]
+enum BuildTargetConfigInner {
+ One(String),
+ Many(Vec<String>),
+}
+
+impl BuildTargetConfig {
+ /// Gets values of `build.target` as a list of strings.
+ pub fn values(&self, config: &Config) -> CargoResult<Vec<String>> {
+ let map = |s: &String| {
+ if s.ends_with(".json") {
+ // Path to a target specification file (in JSON).
+ // <https://doc.rust-lang.org/rustc/targets/custom.html>
+ self.inner
+ .definition
+ .root(config)
+ .join(s)
+ .to_str()
+ .expect("must be utf-8 in toml")
+ .to_string()
+ } else {
+ // A string. Probably a target triple.
+ s.to_string()
+ }
+ };
+ let values = match &self.inner.val {
+ BuildTargetConfigInner::One(s) => vec![map(s)],
+ BuildTargetConfigInner::Many(v) => v.iter().map(map).collect(),
+ };
+ Ok(values)
+ }
+}
+
+#[derive(Deserialize, Default)]
+struct TermConfig {
+ verbose: Option<bool>,
+ quiet: Option<bool>,
+ color: Option<String>,
+ #[serde(default)]
+ #[serde(deserialize_with = "progress_or_string")]
+ progress: Option<ProgressConfig>,
+}
+
+#[derive(Debug, Default, Deserialize)]
+pub struct ProgressConfig {
+ pub when: ProgressWhen,
+ pub width: Option<usize>,
+}
+
+#[derive(Debug, Default, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum ProgressWhen {
+ #[default]
+ Auto,
+ Never,
+ Always,
+}
+
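+// Deserializes `term.progress`, which may be a bare string or a table.
+// Illustrative forms (hypothetical width value):
+//
+//     progress = "never"
+//     progress = { when = "always", width = 80 }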
+fn progress_or_string<'de, D>(deserializer: D) -> Result<Option<ProgressConfig>, D::Error>
+where
+ D: serde::de::Deserializer<'de>,
+{
+ struct ProgressVisitor;
+
+ impl<'de> serde::de::Visitor<'de> for ProgressVisitor {
+ type Value = Option<ProgressConfig>;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("a string (\"auto\" or \"never\") or a table")
+ }
+
+ fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ match s {
+ "auto" => Ok(Some(ProgressConfig {
+ when: ProgressWhen::Auto,
+ width: None,
+ })),
+ "never" => Ok(Some(ProgressConfig {
+ when: ProgressWhen::Never,
+ width: None,
+ })),
+ "always" => Err(E::custom("\"always\" progress requires a `width` key")),
+ _ => Err(E::unknown_variant(s, &["auto", "never"])),
+ }
+ }
+
+ fn visit_none<E>(self) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(None)
+ }
+
+ fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
+ where
+ D: serde::de::Deserializer<'de>,
+ {
+ let pc = ProgressConfig::deserialize(deserializer)?;
+ if let ProgressConfig {
+ when: ProgressWhen::Always,
+ width: None,
+ } = pc
+ {
+ return Err(serde::de::Error::custom(
+ "\"always\" progress requires a `width` key",
+ ));
+ }
+ Ok(Some(pc))
+ }
+ }
+
+ deserializer.deserialize_option(ProgressVisitor)
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(untagged)]
+enum EnvConfigValueInner {
+ Simple(String),
+ WithOptions {
+ value: String,
+ #[serde(default)]
+ force: bool,
+ #[serde(default)]
+ relative: bool,
+ },
+}
+
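+/// A single entry in the `[env]` table: either a plain string or a table
+/// with options.
+///
+/// Illustrative configuration (hypothetical variable names and values):
+///
+/// ```toml
+/// [env]
+/// SIMPLE = "value"
+/// WITH_OPTIONS = { value = "relative/path", force = true, relative = true }
+/// ```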
+#[derive(Debug, Deserialize)]
+#[serde(transparent)]
+pub struct EnvConfigValue {
+ inner: Value<EnvConfigValueInner>,
+}
+
+impl EnvConfigValue {
+ pub fn is_force(&self) -> bool {
+ match self.inner.val {
+ EnvConfigValueInner::Simple(_) => false,
+ EnvConfigValueInner::WithOptions { force, .. } => force,
+ }
+ }
+
+ pub fn resolve<'a>(&'a self, config: &Config) -> Cow<'a, OsStr> {
+ match self.inner.val {
+ EnvConfigValueInner::Simple(ref s) => Cow::Borrowed(OsStr::new(s.as_str())),
+ EnvConfigValueInner::WithOptions {
+ ref value,
+ relative,
+ ..
+ } => {
+ if relative {
+ let p = self.inner.definition.root(config).join(&value);
+ Cow::Owned(p.into_os_string())
+ } else {
+ Cow::Borrowed(OsStr::new(value.as_str()))
+ }
+ }
+ }
+ }
+}
+
+pub type EnvConfig = HashMap<String, EnvConfigValue>;
+
+/// A type to deserialize a list of strings from a toml file.
+///
+/// Supports deserializing either a whitespace-separated list of arguments in a
+/// single string or a string list itself. For example these deserialize to
+/// equivalent values:
+///
+/// ```toml
+/// a = 'a b c'
+/// b = ['a', 'b', 'c']
+/// ```
+#[derive(Debug, Deserialize, Clone)]
+pub struct StringList(Vec<String>);
+
+impl StringList {
+ pub fn as_slice(&self) -> &[String] {
+ &self.0
+ }
+}
+
+/// Unlike `StringList`, which automatically merges config values with
+/// environment values, this type follows the precedence rules, so that e.g. a
+/// string list found in the environment will be used instead of one in a
+/// config file.
+///
+/// This is currently only used by `PathAndArgs`.
+#[derive(Debug, Deserialize)]
+pub struct UnmergedStringList(Vec<String>);
+
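+// The `drop_print!`, `drop_println!`, `drop_eprint!`, and `drop_eprintln!`
+// macros below print through the config's shell and ignore write errors.
+// Illustrative usage (a sketch; `config` is a `&Config`):
+//
+//     drop_println!(config, "compiled {} crates", 3);
+//     drop_eprintln!(config, "warning: {}", "something went wrong");
+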
+#[macro_export]
+macro_rules! __shell_print {
+ ($config:expr, $which:ident, $newline:literal, $($arg:tt)*) => ({
+ let mut shell = $config.shell();
+ let out = shell.$which();
+ drop(out.write_fmt(format_args!($($arg)*)));
+ if $newline {
+ drop(out.write_all(b"\n"));
+ }
+ });
+}
+
+#[macro_export]
+macro_rules! drop_println {
+ ($config:expr) => ( $crate::drop_print!($config, "\n") );
+ ($config:expr, $($arg:tt)*) => (
+ $crate::__shell_print!($config, out, true, $($arg)*)
+ );
+}
+
+#[macro_export]
+macro_rules! drop_eprintln {
+ ($config:expr) => ( $crate::drop_eprint!($config, "\n") );
+ ($config:expr, $($arg:tt)*) => (
+ $crate::__shell_print!($config, err, true, $($arg)*)
+ );
+}
+
+#[macro_export]
+macro_rules! drop_print {
+ ($config:expr, $($arg:tt)*) => (
+ $crate::__shell_print!($config, out, false, $($arg)*)
+ );
+}
+
+#[macro_export]
+macro_rules! drop_eprint {
+ ($config:expr, $($arg:tt)*) => (
+ $crate::__shell_print!($config, err, false, $($arg)*)
+ );
+}
diff --git a/src/tools/cargo/src/cargo/util/config/path.rs b/src/tools/cargo/src/cargo/util/config/path.rs
new file mode 100644
index 000000000..a90cab2b2
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/config/path.rs
@@ -0,0 +1,78 @@
+use super::{Config, UnmergedStringList, Value};
+use serde::{de::Error, Deserialize};
+use std::path::PathBuf;
+
+/// Use with the `get` API to fetch a string that will be converted to a
+/// `PathBuf`. Relative paths are converted to absolute paths based on the
+/// location of the config file.
+#[derive(Debug, Deserialize, PartialEq, Clone)]
+#[serde(transparent)]
+pub struct ConfigRelativePath(Value<String>);
+
+impl ConfigRelativePath {
+ /// Returns the underlying value.
+ pub fn value(&self) -> &Value<String> {
+ &self.0
+ }
+
+ /// Returns the raw underlying configuration value for this key.
+ pub fn raw_value(&self) -> &str {
+ &self.0.val
+ }
+
+ /// Resolves this configuration-relative path to an absolute path.
+ ///
+ /// This will always return an absolute path where it's relative to the
+ /// location for configuration for this value.
+ pub fn resolve_path(&self, config: &Config) -> PathBuf {
+ self.0.definition.root(config).join(&self.0.val)
+ }
+
+ /// Resolves this configuration-relative path to either an absolute path or
+ /// something appropriate to execute from `PATH`.
+ ///
+ /// Values which don't look like a filesystem path (don't contain `/` or
+ /// `\`) will be returned as-is, and everything else will fall through to an
+ /// absolute path.
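+ ///
+ /// For example (illustrative, assuming the value was defined in
+ /// `/home/user/.cargo/config.toml`): `"cc"` is returned as `cc`, while
+ /// `"./tools/cc"` is resolved relative to `/home/user`.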
+ pub fn resolve_program(&self, config: &Config) -> PathBuf {
+ config.string_to_path(&self.0.val, &self.0.definition)
+ }
+}
+
+/// A config type that is a program to run.
+///
+/// This supports a list of strings like `['/path/to/program', 'somearg']`
+/// or a space separated string like `'/path/to/program somearg'`.
+///
+/// This expects the first value to be the path to the program to run.
+/// Subsequent values are strings of arguments to pass to the program.
+///
+/// Typically you should use `ConfigRelativePath::resolve_program` on the path
+/// to get the actual program.
+#[derive(Debug, Clone)]
+pub struct PathAndArgs {
+ pub path: ConfigRelativePath,
+ pub args: Vec<String>,
+}
+
+impl<'de> serde::Deserialize<'de> for PathAndArgs {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ let vsl = Value::<UnmergedStringList>::deserialize(deserializer)?;
+ let mut strings = vsl.val.0;
+ if strings.is_empty() {
+ return Err(D::Error::invalid_length(0, &"at least one element"));
+ }
+ let first = strings.remove(0);
+ let crp = Value {
+ val: first,
+ definition: vsl.definition,
+ };
+ Ok(PathAndArgs {
+ path: ConfigRelativePath(crp),
+ args: strings,
+ })
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/config/target.rs b/src/tools/cargo/src/cargo/util/config/target.rs
new file mode 100644
index 000000000..a7f2f3ef2
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/config/target.rs
@@ -0,0 +1,246 @@
+use super::{Config, ConfigKey, ConfigRelativePath, OptValue, PathAndArgs, StringList, CV};
+use crate::core::compiler::{BuildOutput, LinkType};
+use crate::util::CargoResult;
+use serde::Deserialize;
+use std::collections::{BTreeMap, HashMap};
+use std::path::PathBuf;
+
+/// Config definition of a `[target.'cfg(…)']` table.
+///
+/// This is a subset of `TargetConfig`.
+#[derive(Debug, Deserialize)]
+pub struct TargetCfgConfig {
+ pub runner: OptValue<PathAndArgs>,
+ pub rustflags: OptValue<StringList>,
+ // This is here just to ignore fields from normal `TargetConfig` because
+ // all `[target]` tables are getting deserialized, whether they start with
+ // `cfg(` or not.
+ #[serde(flatten)]
+ pub other: BTreeMap<String, toml::Value>,
+}
+
+/// Config definition of a `[target]` table or `[host]`.
+#[derive(Debug, Clone)]
+pub struct TargetConfig {
+ /// Process to run as a wrapper for `cargo run`, `test`, and `bench` commands.
+ pub runner: OptValue<PathAndArgs>,
+ /// Additional rustc flags to pass.
+ pub rustflags: OptValue<StringList>,
+ /// The path of the linker for this target.
+ pub linker: OptValue<ConfigRelativePath>,
+ /// Build script override for the given library name.
+ ///
+ /// Any package with a `links` value for the given library name will skip
+ /// running its build script and instead use the given output from the
+ /// config file.
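+ ///
+ /// Illustrative configuration (hypothetical library name and path):
+ ///
+ /// ```toml
+ /// [target.x86_64-unknown-linux-gnu.foo]
+ /// rustc-link-lib = ["foo"]
+ /// rustc-link-search = ["/opt/foo/lib"]
+ /// ```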
+ pub links_overrides: BTreeMap<String, BuildOutput>,
+}
+
+/// Loads all of the `target.'cfg()'` tables.
+pub(super) fn load_target_cfgs(config: &Config) -> CargoResult<Vec<(String, TargetCfgConfig)>> {
+ // Load all [target] tables, filter out the cfg() entries.
+ let mut result = Vec::new();
+ // Use a BTreeMap so the keys are sorted. This is important for
+ // deterministic ordering of rustflags, which affects fingerprinting and
+ // rebuilds. We may one day wish to order the keys by the order in which
+ // they were defined in the config files instead.
+ let target: BTreeMap<String, TargetCfgConfig> = config.get("target")?;
+ log::debug!("Got all targets {:#?}", target);
+ for (key, cfg) in target {
+ if key.starts_with("cfg(") {
+ // Unfortunately this is not able to display the location of the
+ // unused key. Using config::Value<toml::Value> doesn't work. One
+ // solution might be to create a special "Any" type, but I think
+ // that will be quite difficult with the current design.
+ for other_key in cfg.other.keys() {
+ config.shell().warn(format!(
+ "unused key `{}` in [target] config table `{}`",
+ other_key, key
+ ))?;
+ }
+ result.push((key, cfg));
+ }
+ }
+ Ok(result)
+}
+
+/// Returns true if the `[target]` table should be applied to host targets.
+pub(super) fn get_target_applies_to_host(config: &Config) -> CargoResult<bool> {
+ if config.cli_unstable().target_applies_to_host {
+ if let Ok(target_applies_to_host) = config.get::<bool>("target-applies-to-host") {
+ Ok(target_applies_to_host)
+ } else {
+ Ok(!config.cli_unstable().host_config)
+ }
+ } else if config.cli_unstable().host_config {
+ anyhow::bail!(
+ "the -Zhost-config flag requires the -Ztarget-applies-to-host flag to be set"
+ );
+ } else {
+ Ok(true)
+ }
+}
+
+/// Loads a single `[host]` table for the given triple.
+pub(super) fn load_host_triple(config: &Config, triple: &str) -> CargoResult<TargetConfig> {
+ if config.cli_unstable().host_config {
+ let host_triple_prefix = format!("host.{}", triple);
+ let host_triple_key = ConfigKey::from_str(&host_triple_prefix);
+ let host_prefix = match config.get_cv(&host_triple_key)? {
+ Some(_) => host_triple_prefix,
+ None => "host".to_string(),
+ };
+ load_config_table(config, &host_prefix)
+ } else {
+ Ok(TargetConfig {
+ runner: None,
+ rustflags: None,
+ linker: None,
+ links_overrides: BTreeMap::new(),
+ })
+ }
+}
+
+/// Loads a single `[target]` table for the given triple.
+pub(super) fn load_target_triple(config: &Config, triple: &str) -> CargoResult<TargetConfig> {
+ load_config_table(config, &format!("target.{}", triple))
+}
+
+/// Loads a single table for the given prefix.
+fn load_config_table(config: &Config, prefix: &str) -> CargoResult<TargetConfig> {
+ // This needs to get each field individually because it cannot fetch the
+ // struct all at once due to `links_overrides`. Can't use `serde(flatten)`
+ // because it causes serde to use `deserialize_map` which means the config
+ // deserializer does not know which keys to deserialize, which means
+ // environment variables would not work.
+ let runner: OptValue<PathAndArgs> = config.get(&format!("{}.runner", prefix))?;
+ let rustflags: OptValue<StringList> = config.get(&format!("{}.rustflags", prefix))?;
+ let linker: OptValue<ConfigRelativePath> = config.get(&format!("{}.linker", prefix))?;
+ // Links do not support environment variables.
+ let target_key = ConfigKey::from_str(prefix);
+ let links_overrides = match config.get_table(&target_key)? {
+ Some(links) => parse_links_overrides(&target_key, links.val, config)?,
+ None => BTreeMap::new(),
+ };
+ Ok(TargetConfig {
+ runner,
+ rustflags,
+ linker,
+ links_overrides,
+ })
+}
+
+fn parse_links_overrides(
+ target_key: &ConfigKey,
+ links: HashMap<String, CV>,
+ config: &Config,
+) -> CargoResult<BTreeMap<String, BuildOutput>> {
+ let mut links_overrides = BTreeMap::new();
+ let extra_check_cfg = match config.cli_unstable().check_cfg {
+ Some((_, _, _, output)) => output,
+ None => false,
+ };
+
+ for (lib_name, value) in links {
+ // Skip these keys; the `links` table shares its namespace with `TargetConfig`.
+ match lib_name.as_str() {
+ // `ar` is a historical thing.
+ "ar" | "linker" | "runner" | "rustflags" => continue,
+ _ => {}
+ }
+ let mut output = BuildOutput::default();
+ let table = value.table(&format!("{}.{}", target_key, lib_name))?.0;
+ // We require deterministic order of evaluation, so we must sort the pairs by key first.
+ let mut pairs = Vec::new();
+ for (k, value) in table {
+ pairs.push((k, value));
+ }
+ pairs.sort_by_key(|p| p.0);
+ for (key, value) in pairs {
+ match key.as_str() {
+ "rustc-flags" => {
+ let flags = value.string(key)?;
+ let whence = format!("target config `{}.{}` (in {})", target_key, key, flags.1);
+ let (paths, links) = BuildOutput::parse_rustc_flags(flags.0, &whence)?;
+ output.library_paths.extend(paths);
+ output.library_links.extend(links);
+ }
+ "rustc-link-lib" => {
+ let list = value.list(key)?;
+ output
+ .library_links
+ .extend(list.iter().map(|v| v.0.clone()));
+ }
+ "rustc-link-search" => {
+ let list = value.list(key)?;
+ output
+ .library_paths
+ .extend(list.iter().map(|v| PathBuf::from(&v.0)));
+ }
+ "rustc-link-arg-cdylib" | "rustc-cdylib-link-arg" => {
+ let args = extra_link_args(LinkType::Cdylib, key, value)?;
+ output.linker_args.extend(args);
+ }
+ "rustc-link-arg-bins" => {
+ let args = extra_link_args(LinkType::Bin, key, value)?;
+ output.linker_args.extend(args);
+ }
+ "rustc-link-arg" => {
+ let args = extra_link_args(LinkType::All, key, value)?;
+ output.linker_args.extend(args);
+ }
+ "rustc-link-arg-tests" => {
+ let args = extra_link_args(LinkType::Test, key, value)?;
+ output.linker_args.extend(args);
+ }
+ "rustc-link-arg-benches" => {
+ let args = extra_link_args(LinkType::Bench, key, value)?;
+ output.linker_args.extend(args);
+ }
+ "rustc-link-arg-examples" => {
+ let args = extra_link_args(LinkType::Example, key, value)?;
+ output.linker_args.extend(args);
+ }
+ "rustc-cfg" => {
+ let list = value.list(key)?;
+ output.cfgs.extend(list.iter().map(|v| v.0.clone()));
+ }
+ "rustc-check-cfg" => {
+ if extra_check_cfg {
+ let list = value.list(key)?;
+ output.check_cfgs.extend(list.iter().map(|v| v.0.clone()));
+ } else {
+ config.shell().warn(format!(
+ "target config `{}.{}` requires -Zcheck-cfg=output flag",
+ target_key, key
+ ))?;
+ }
+ }
+ "rustc-env" => {
+ for (name, val) in value.table(key)?.0 {
+ let val = val.string(name)?.0;
+ output.env.push((name.clone(), val.to_string()));
+ }
+ }
+ "warning" | "rerun-if-changed" | "rerun-if-env-changed" => {
+ anyhow::bail!("`{}` is not supported in build script overrides", key);
+ }
+ _ => {
+ let val = value.string(key)?.0;
+ output.metadata.push((key.clone(), val.to_string()));
+ }
+ }
+ }
+ links_overrides.insert(lib_name, output);
+ }
+ Ok(links_overrides)
+}
+
+fn extra_link_args<'a>(
+ link_type: LinkType,
+ key: &str,
+ value: &'a CV,
+) -> CargoResult<impl Iterator<Item = (LinkType, String)> + 'a> {
+ let args = value.list(key)?;
+ Ok(args.iter().map(move |v| (link_type.clone(), v.0.clone())))
+}
diff --git a/src/tools/cargo/src/cargo/util/config/value.rs b/src/tools/cargo/src/cargo/util/config/value.rs
new file mode 100644
index 000000000..a70d75a07
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/config/value.rs
@@ -0,0 +1,229 @@
+//! Deserialization of a `Value<T>` type which tracks where it was deserialized
+//! from.
+//!
+//! Often Cargo wants to report semantic error information or other sorts of
+//! error information about configuration keys but it also may wish to indicate
+//! as an error context where the key was defined as well (to help user
+//! debugging). The `Value<T>` type here can be used to deserialize a `T` value
+//! from configuration, but also record where it was deserialized from when it
+//! was read.
+
+use crate::util::config::Config;
+use serde::de;
+use std::fmt;
+use std::marker;
+use std::mem;
+use std::path::{Path, PathBuf};
+
+/// A type which can be deserialized as a configuration value which records
+/// where it was deserialized from.
+#[derive(Debug, PartialEq, Clone)]
+pub struct Value<T> {
+ /// The inner value that was deserialized.
+ pub val: T,
+ /// The location where `val` was defined in configuration (e.g. file it was
+ /// defined in, env var etc).
+ pub definition: Definition,
+}
+
+pub type OptValue<T> = Option<Value<T>>;
+
+// Deserializing `Value<T>` is pretty special, and serde doesn't have built-in
+// support for this operation. To implement this we extend serde's "data model"
+// a bit. We configure deserialization of `Value<T>` to basically only work with
+// our one deserializer using configuration.
+//
+// We define that `Value<T>` deserialization asks the deserializer for a very
+// special struct name and struct field names. In doing so the deserializer will
+// recognize this and synthesize a magical value for the `definition` field when
+// we deserialize it. This protocol is how we're able to have a channel of
+// information flowing from the configuration deserializer into the
+// deserialization implementation here.
+//
+// You'll want to also check out the implementation of `ValueDeserializer` in
+// `de.rs`. Also note that the names below are intended to be invalid Rust
+// identifiers to avoid how they might conflict with other valid structures.
+// Finally the `definition` field is transmitted as a `(u32, String)` tuple,
+// which is effectively a tagged union of `Definition` itself.
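+//
+// Conceptually (illustrative only), deserializing a `Value<String>` behaves as
+// if the deserializer handed back a two-field struct along these lines:
+//
+//     $__cargo_private_value:      "some string"
+//     $__cargo_private_definition: (0, "/path/to/config.toml")   // 0 => Path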
+
+pub(crate) const VALUE_FIELD: &str = "$__cargo_private_value";
+pub(crate) const DEFINITION_FIELD: &str = "$__cargo_private_definition";
+pub(crate) const NAME: &str = "$__cargo_private_Value";
+pub(crate) static FIELDS: [&str; 2] = [VALUE_FIELD, DEFINITION_FIELD];
+
+/// Location where a config value is defined.
+#[derive(Clone, Debug, Eq)]
+pub enum Definition {
+ /// Defined in a `.cargo/config`, includes the path to the file.
+ Path(PathBuf),
+ /// Defined in an environment variable, includes the environment key.
+ Environment(String),
+ /// Passed in on the command line.
+ /// A path is attached when the config value is a path to a config file.
+ Cli(Option<PathBuf>),
+}
+
+impl Definition {
+ /// Root directory where this is defined.
+ ///
+ /// If from a file, it is the directory above `.cargo/config`.
+ /// CLI and env are the current working directory.
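+ ///
+ /// For example (illustrative): a value defined in
+ /// `/home/user/.cargo/config.toml` has a root of `/home/user`.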
+ pub fn root<'a>(&'a self, config: &'a Config) -> &'a Path {
+ match self {
+ Definition::Path(p) | Definition::Cli(Some(p)) => p.parent().unwrap().parent().unwrap(),
+ Definition::Environment(_) | Definition::Cli(None) => config.cwd(),
+ }
+ }
+
+ /// Returns true if self is a higher priority to other.
+ ///
+ /// CLI is preferred over environment, which is preferred over files.
+ pub fn is_higher_priority(&self, other: &Definition) -> bool {
+ matches!(
+ (self, other),
+ (Definition::Cli(_), Definition::Environment(_))
+ | (Definition::Cli(_), Definition::Path(_))
+ | (Definition::Environment(_), Definition::Path(_))
+ )
+ }
+}
+
+impl PartialEq for Definition {
+ fn eq(&self, other: &Definition) -> bool {
+ // Two definitions are equal if they come from the same kind of source
+ // (file, environment, or CLI), regardless of the exact path or key. A
+ // value defined in the environment is not equal to one defined in a file,
+ // because paths are interpreted differently in each case.
+ mem::discriminant(self) == mem::discriminant(other)
+ }
+}
+
+impl fmt::Display for Definition {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Definition::Path(p) | Definition::Cli(Some(p)) => p.display().fmt(f),
+ Definition::Environment(key) => write!(f, "environment variable `{}`", key),
+ Definition::Cli(None) => write!(f, "--config cli option"),
+ }
+ }
+}
+
+impl<'de, T> de::Deserialize<'de> for Value<T>
+where
+ T: de::Deserialize<'de>,
+{
+ fn deserialize<D>(deserializer: D) -> Result<Value<T>, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ struct ValueVisitor<T> {
+ _marker: marker::PhantomData<T>,
+ }
+
+ impl<'de, T> de::Visitor<'de> for ValueVisitor<T>
+ where
+ T: de::Deserialize<'de>,
+ {
+ type Value = Value<T>;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("a value")
+ }
+
+ fn visit_map<V>(self, mut visitor: V) -> Result<Value<T>, V::Error>
+ where
+ V: de::MapAccess<'de>,
+ {
+ let value = visitor.next_key::<ValueKey>()?;
+ if value.is_none() {
+ return Err(de::Error::custom("value not found"));
+ }
+ let val: T = visitor.next_value()?;
+
+ let definition = visitor.next_key::<DefinitionKey>()?;
+ if definition.is_none() {
+ return Err(de::Error::custom("definition not found"));
+ }
+ let definition: Definition = visitor.next_value()?;
+ Ok(Value { val, definition })
+ }
+ }
+
+ deserializer.deserialize_struct(
+ NAME,
+ &FIELDS,
+ ValueVisitor {
+ _marker: marker::PhantomData,
+ },
+ )
+ }
+}
+
+struct FieldVisitor {
+ expected: &'static str,
+}
+
+impl<'de> de::Visitor<'de> for FieldVisitor {
+ type Value = ();
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("a valid value field")
+ }
+
+ fn visit_str<E>(self, s: &str) -> Result<(), E>
+ where
+ E: de::Error,
+ {
+ if s == self.expected {
+ Ok(())
+ } else {
+ Err(de::Error::custom("expected field with custom name"))
+ }
+ }
+}
+
+struct ValueKey;
+
+impl<'de> de::Deserialize<'de> for ValueKey {
+ fn deserialize<D>(deserializer: D) -> Result<ValueKey, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ deserializer.deserialize_identifier(FieldVisitor {
+ expected: VALUE_FIELD,
+ })?;
+ Ok(ValueKey)
+ }
+}
+
+struct DefinitionKey;
+
+impl<'de> de::Deserialize<'de> for DefinitionKey {
+ fn deserialize<D>(deserializer: D) -> Result<DefinitionKey, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ deserializer.deserialize_identifier(FieldVisitor {
+ expected: DEFINITION_FIELD,
+ })?;
+ Ok(DefinitionKey)
+ }
+}
+
+impl<'de> de::Deserialize<'de> for Definition {
+ fn deserialize<D>(deserializer: D) -> Result<Definition, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ let (discr, value) = <(u32, String)>::deserialize(deserializer)?;
+ match discr {
+ 0 => Ok(Definition::Path(value.into())),
+ 1 => Ok(Definition::Environment(value)),
+ 2 => {
+ let path = (value.len() > 0).then_some(value.into());
+ Ok(Definition::Cli(path))
+ }
+ _ => panic!("unexpected discriminant {discr} value {value}"),
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/counter.rs b/src/tools/cargo/src/cargo/util/counter.rs
new file mode 100644
index 000000000..82e5addae
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/counter.rs
@@ -0,0 +1,67 @@
+use std::time::Instant;
+
+/// A metrics counter storing only latest `N` records.
+pub struct MetricsCounter<const N: usize> {
+ /// Slots to store metrics.
+ slots: [(usize, Instant); N],
+ /// The slot of the oldest record.
+ /// Also the next slot to store the new record.
+ index: usize,
+}
+
+impl<const N: usize> MetricsCounter<N> {
+ /// Creates a new counter with an initial value.
+ pub fn new(init: usize, init_at: Instant) -> Self {
+ assert!(N > 0, "number of slots must be greater than zero");
+ Self {
+ slots: [(init, init_at); N],
+ index: 0,
+ }
+ }
+
+ /// Adds record to the counter.
+ pub fn add(&mut self, data: usize, added_at: Instant) {
+ self.slots[self.index] = (data, added_at);
+ self.index = (self.index + 1) % N;
+ }
+
+ /// Calculates per-second average rate of all slots.
+ pub fn rate(&self) -> f32 {
+ let latest = self.slots[self.index.checked_sub(1).unwrap_or(N - 1)];
+ let oldest = self.slots[self.index];
+ let duration = (latest.1 - oldest.1).as_secs_f32();
+ let avg = (latest.0 - oldest.0) as f32 / duration;
+ if f32::is_nan(avg) {
+ 0f32
+ } else {
+ avg
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::MetricsCounter;
+ use std::time::{Duration, Instant};
+
+ #[test]
+ fn counter() {
+ let now = Instant::now();
+ let mut counter = MetricsCounter::<3>::new(0, now);
+ assert_eq!(counter.rate(), 0f32);
+ counter.add(1, now + Duration::from_secs(1));
+ assert_eq!(counter.rate(), 1f32);
+ counter.add(4, now + Duration::from_secs(2));
+ assert_eq!(counter.rate(), 2f32);
+ counter.add(7, now + Duration::from_secs(3));
+ assert_eq!(counter.rate(), 3f32);
+ counter.add(12, now + Duration::from_secs(4));
+ assert_eq!(counter.rate(), 4f32);
+ }
+
+ #[test]
+ #[should_panic(expected = "number of slots must be greater than zero")]
+ fn counter_zero_slot() {
+ let _counter = MetricsCounter::<0>::new(0, Instant::now());
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/cpu.rs b/src/tools/cargo/src/cargo/util/cpu.rs
new file mode 100644
index 000000000..6ebb01b8e
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/cpu.rs
@@ -0,0 +1,244 @@
+use std::io;
+
+pub struct State(imp::State);
+
+impl State {
+ /// Captures the current state of all CPUs on the system.
+ ///
+ /// The `State` returned here isn't too meaningful in terms of
+ /// interpretation across platforms, but it can be compared to previous
+ /// states to get a meaningful cross-platform number.
+ pub fn current() -> io::Result<State> {
+ imp::current().map(State)
+ }
+
+ /// Returns the amount of time the CPUs were idle from the previous state
+ /// to this one, as a percentage from 0.0 to 100.0.
+ ///
+ /// This function will return, as a percentage, the amount of time that the
+ /// entire system was idle between the `previous` state and this state.
+ /// This can be useful to compare two snapshots in time of CPU usage to see
+ /// how the CPU usage compares between the two.
+ pub fn idle_since(&self, previous: &State) -> f64 {
+ imp::pct_idle(&previous.0, &self.0)
+ }
+}
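+
+// Illustrative usage (a sketch): take two snapshots and compare them.
+//
+//     let before = State::current()?;
+//     std::thread::sleep(std::time::Duration::from_secs(1));
+//     let after = State::current()?;
+//     let idle_pct = after.idle_since(&before); // 0.0 ..= 100.0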
+
+#[cfg(target_os = "linux")]
+mod imp {
+ use std::{fs, io};
+
+ pub struct State {
+ user: u64,
+ nice: u64,
+ system: u64,
+ idle: u64,
+ iowait: u64,
+ irq: u64,
+ softirq: u64,
+ steal: u64,
+ guest: u64,
+ guest_nice: u64,
+ }
+
+ pub fn current() -> io::Result<State> {
+ let state = fs::read_to_string("/proc/stat")?;
+
+ (|| {
+ let mut parts = state.lines().next()?.split_whitespace();
+ if parts.next()? != "cpu" {
+ return None;
+ }
+ Some(State {
+ user: parts.next()?.parse::<u64>().ok()?,
+ nice: parts.next()?.parse::<u64>().ok()?,
+ system: parts.next()?.parse::<u64>().ok()?,
+ idle: parts.next()?.parse::<u64>().ok()?,
+ iowait: parts.next()?.parse::<u64>().ok()?,
+ irq: parts.next()?.parse::<u64>().ok()?,
+ softirq: parts.next()?.parse::<u64>().ok()?,
+ steal: parts.next()?.parse::<u64>().ok()?,
+ guest: parts.next()?.parse::<u64>().ok()?,
+ guest_nice: parts.next()?.parse::<u64>().ok()?,
+ })
+ })()
+ .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "first line of /proc/stat malformed"))
+ }
+
+ pub fn pct_idle(prev: &State, next: &State) -> f64 {
+ let user = next.user - prev.user;
+ let nice = next.nice - prev.nice;
+ let system = next.system - prev.system;
+ let idle = next.idle - prev.idle;
+ let iowait = next.iowait.saturating_sub(prev.iowait);
+ let irq = next.irq - prev.irq;
+ let softirq = next.softirq - prev.softirq;
+ let steal = next.steal - prev.steal;
+ let guest = next.guest - prev.guest;
+ let guest_nice = next.guest_nice - prev.guest_nice;
+ let total =
+ user + nice + system + idle + iowait + irq + softirq + steal + guest + guest_nice;
+
+ (idle as f64) / (total as f64) * 100.0
+ }
+}
+
+#[cfg(target_os = "macos")]
+#[allow(bad_style)]
+mod imp {
+ use std::io;
+ use std::ptr;
+
+ type host_t = u32;
+ type mach_port_t = u32;
+ type vm_map_t = mach_port_t;
+ type vm_offset_t = usize;
+ type vm_size_t = usize;
+ type vm_address_t = vm_offset_t;
+ type processor_flavor_t = i32;
+ type natural_t = u32;
+ type processor_info_array_t = *mut i32;
+ type mach_msg_type_number_t = i32;
+ type kern_return_t = i32;
+
+ const PROCESSOR_CPU_LOAD_INFO: processor_flavor_t = 2;
+ const CPU_STATE_USER: usize = 0;
+ const CPU_STATE_SYSTEM: usize = 1;
+ const CPU_STATE_IDLE: usize = 2;
+ const CPU_STATE_NICE: usize = 3;
+ const CPU_STATE_MAX: usize = 4;
+
+ extern "C" {
+ static mut mach_task_self_: mach_port_t;
+
+ fn mach_host_self() -> mach_port_t;
+ fn host_processor_info(
+ host: host_t,
+ flavor: processor_flavor_t,
+ out_processor_count: *mut natural_t,
+ out_processor_info: *mut processor_info_array_t,
+ out_processor_infoCnt: *mut mach_msg_type_number_t,
+ ) -> kern_return_t;
+ fn vm_deallocate(
+ target_task: vm_map_t,
+ address: vm_address_t,
+ size: vm_size_t,
+ ) -> kern_return_t;
+ }
+
+ pub struct State {
+ user: u64,
+ system: u64,
+ idle: u64,
+ nice: u64,
+ }
+
+ #[repr(C)]
+ struct processor_cpu_load_info_data_t {
+ cpu_ticks: [u32; CPU_STATE_MAX],
+ }
+
+ pub fn current() -> io::Result<State> {
+ // There's scant documentation on `host_processor_info` on the internet,
+ // so this is just modeled after what everyone else is doing; for now it
+ // follows libuv most closely.
+
+ unsafe {
+ let mut num_cpus_u = 0;
+ let mut cpu_info = ptr::null_mut();
+ let mut msg_type = 0;
+ let err = host_processor_info(
+ mach_host_self(),
+ PROCESSOR_CPU_LOAD_INFO,
+ &mut num_cpus_u,
+ &mut cpu_info,
+ &mut msg_type,
+ );
+ if err != 0 {
+ return Err(io::Error::last_os_error());
+ }
+ let mut ret = State {
+ user: 0,
+ system: 0,
+ idle: 0,
+ nice: 0,
+ };
+ let mut current = cpu_info as *const processor_cpu_load_info_data_t;
+ for _ in 0..num_cpus_u {
+ ret.user += (*current).cpu_ticks[CPU_STATE_USER] as u64;
+ ret.system += (*current).cpu_ticks[CPU_STATE_SYSTEM] as u64;
+ ret.idle += (*current).cpu_ticks[CPU_STATE_IDLE] as u64;
+ ret.nice += (*current).cpu_ticks[CPU_STATE_NICE] as u64;
+ current = current.offset(1);
+ }
+ vm_deallocate(mach_task_self_, cpu_info as vm_address_t, msg_type as usize);
+ Ok(ret)
+ }
+ }
+
+ pub fn pct_idle(prev: &State, next: &State) -> f64 {
+ let user = next.user - prev.user;
+ let system = next.system - prev.system;
+ let idle = next.idle - prev.idle;
+ let nice = next.nice - prev.nice;
+ let total = user + system + idle + nice;
+ (idle as f64) / (total as f64) * 100.0
+ }
+}
+
+#[cfg(windows)]
+mod imp {
+ use std::io;
+ use std::mem;
+
+ use windows_sys::Win32::Foundation::FILETIME;
+ use windows_sys::Win32::System::Threading::GetSystemTimes;
+
+ pub struct State {
+ idle: FILETIME,
+ kernel: FILETIME,
+ user: FILETIME,
+ }
+
+ pub fn current() -> io::Result<State> {
+ unsafe {
+ let mut ret = mem::zeroed::<State>();
+ let r = GetSystemTimes(&mut ret.idle, &mut ret.kernel, &mut ret.user);
+ if r != 0 {
+ Ok(ret)
+ } else {
+ Err(io::Error::last_os_error())
+ }
+ }
+ }
+
+ pub fn pct_idle(prev: &State, next: &State) -> f64 {
+ fn to_u64(a: &FILETIME) -> u64 {
+ ((a.dwHighDateTime as u64) << 32) | (a.dwLowDateTime as u64)
+ }
+
+ let idle = to_u64(&next.idle) - to_u64(&prev.idle);
+ let kernel = to_u64(&next.kernel) - to_u64(&prev.kernel);
+ let user = to_u64(&next.user) - to_u64(&prev.user);
+ let total = user + kernel;
+ (idle as f64) / (total as f64) * 100.0
+ }
+}
+
+#[cfg(not(any(target_os = "linux", target_os = "macos", windows)))]
+mod imp {
+ use std::io;
+
+ pub struct State;
+
+ pub fn current() -> io::Result<State> {
+ Err(io::Error::new(
+ io::ErrorKind::Other,
+ "unsupported platform to learn CPU state",
+ ))
+ }
+
+ pub fn pct_idle(_prev: &State, _next: &State) -> f64 {
+ unimplemented!()
+ }
+}
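
As a rough usage sketch (not part of the patch; it assumes it lives in the same file, since the `imp` modules are private, and the 500 ms sampling interval is arbitrary), two samples taken some time apart are enough to derive an idle percentage:

    // Sketch only, not from the patch above: sample the platform-specific
    // CPU counters twice and report the idle percentage in between.
    fn sample_idle_pct() -> std::io::Result<f64> {
        let prev = imp::current()?;
        std::thread::sleep(std::time::Duration::from_millis(500));
        let next = imp::current()?;
        Ok(imp::pct_idle(&prev, &next))
    }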
diff --git a/src/tools/cargo/src/cargo/util/dependency_queue.rs b/src/tools/cargo/src/cargo/util/dependency_queue.rs
new file mode 100644
index 000000000..33e8bf28e
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/dependency_queue.rs
@@ -0,0 +1,255 @@
+//! A graph-like structure used to represent a set of dependencies and in what
+//! order they should be built.
+//!
+//! This structure is used to store the dependency graph and dynamically update
+//! it to figure out when a dependency should be built.
+//!
+//! Dependencies in this queue are represented as a (node, edge) pair. This is
+//! used to model nodes that produce multiple outputs at different times;
+//! some dependents may only require one of those outputs and can start
+//! before the whole node is finished.
+
+use std::collections::{HashMap, HashSet};
+use std::hash::Hash;
+
+#[derive(Debug)]
+pub struct DependencyQueue<N: Hash + Eq, E: Hash + Eq, V> {
+ /// A list of all known keys to build.
+ ///
+    /// The value of the hash map is the set of dependencies which still need to be
+ /// built before the package can be built. Note that the set is dynamically
+ /// updated as more dependencies are built.
+ dep_map: HashMap<N, (HashSet<(N, E)>, V)>,
+
+ /// A reverse mapping of a package to all packages that depend on that
+ /// package.
+ ///
+ /// This map is statically known and does not get updated throughout the
+ /// lifecycle of the DependencyQueue.
+ ///
+ /// This is sort of like a `HashMap<(N, E), HashSet<N>>` map, but more
+    /// easily indexable with just an `N`.
+ reverse_dep_map: HashMap<N, HashMap<E, HashSet<N>>>,
+
+ /// The relative priority of this package. Higher values should be scheduled sooner.
+ priority: HashMap<N, usize>,
+
+ /// An expected cost for building this package. Used to determine priority.
+ cost: HashMap<N, usize>,
+}
+
+impl<N: Hash + Eq, E: Hash + Eq, V> Default for DependencyQueue<N, E, V> {
+ fn default() -> DependencyQueue<N, E, V> {
+ DependencyQueue::new()
+ }
+}
+
+impl<N: Hash + Eq, E: Hash + Eq, V> DependencyQueue<N, E, V> {
+ /// Creates a new dependency queue with 0 packages.
+ pub fn new() -> DependencyQueue<N, E, V> {
+ DependencyQueue {
+ dep_map: HashMap::new(),
+ reverse_dep_map: HashMap::new(),
+ priority: HashMap::new(),
+ cost: HashMap::new(),
+ }
+ }
+}
+
+impl<N: Hash + Eq + Clone, E: Eq + Hash + Clone, V> DependencyQueue<N, E, V> {
+ /// Adds a new node and its dependencies to this queue.
+ ///
+    /// The `key` specified is a new node in the dependency graph, and the node
+    /// depends on all the dependencies yielded by `dependencies`. Each
+    /// dependency is a node/edge pair, where edges can be thought of as
+    /// productions from nodes (i.e. if the edge is just `()`, the dependent is
+    /// simply waiting for the node to finish).
+ ///
+    /// A `value` can also be associated with `key`; it is handed back by
+    /// `dequeue` when the node is ready to be built.
+ ///
+    /// The cost parameter can be used to hint at the relative cost of building
+    /// this node. This implementation does not care about the units of this
+    /// value, so the calling code is free to use whatever it likes. In general,
+    /// higher-cost nodes are expected to take longer to build.
+ pub fn queue(
+ &mut self,
+ key: N,
+ value: V,
+ dependencies: impl IntoIterator<Item = (N, E)>,
+ cost: usize,
+ ) {
+ assert!(!self.dep_map.contains_key(&key));
+
+ let mut my_dependencies = HashSet::new();
+ for (dep, edge) in dependencies {
+ my_dependencies.insert((dep.clone(), edge.clone()));
+ self.reverse_dep_map
+ .entry(dep)
+ .or_insert_with(HashMap::new)
+ .entry(edge)
+ .or_insert_with(HashSet::new)
+ .insert(key.clone());
+ }
+ self.dep_map.insert(key.clone(), (my_dependencies, value));
+ self.cost.insert(key, cost);
+ }
+
+ /// All nodes have been added, calculate some internal metadata and prepare
+ /// for `dequeue`.
+ pub fn queue_finished(&mut self) {
+ let mut out = HashMap::new();
+ for key in self.dep_map.keys() {
+ depth(key, &self.reverse_dep_map, &mut out);
+ }
+ self.priority = out
+ .into_iter()
+ .map(|(n, set)| {
+ let total_cost =
+ self.cost[&n] + set.iter().map(|key| self.cost[key]).sum::<usize>();
+ (n, total_cost)
+ })
+ .collect();
+
+ /// Creates a flattened reverse dependency list. For a given key, finds the
+ /// set of nodes which depend on it, including transitively. This is different
+ /// from self.reverse_dep_map because self.reverse_dep_map only maps one level
+ /// of reverse dependencies.
+ fn depth<'a, N: Hash + Eq + Clone, E: Hash + Eq + Clone>(
+ key: &N,
+ map: &HashMap<N, HashMap<E, HashSet<N>>>,
+ results: &'a mut HashMap<N, HashSet<N>>,
+ ) -> &'a HashSet<N> {
+ if results.contains_key(key) {
+ let depth = &results[key];
+ assert!(!depth.is_empty(), "cycle in DependencyQueue");
+ return depth;
+ }
+ results.insert(key.clone(), HashSet::new());
+
+ let mut set = HashSet::new();
+ set.insert(key.clone());
+
+ for dep in map
+ .get(key)
+ .into_iter()
+ .flat_map(|it| it.values())
+ .flatten()
+ {
+ set.extend(depth(dep, map, results).iter().cloned())
+ }
+
+ let slot = results.get_mut(key).unwrap();
+ *slot = set;
+ &*slot
+ }
+ }
+
+ /// Dequeues a package that is ready to be built.
+ ///
+ /// A package is ready to be built when it has 0 un-built dependencies. If
+ /// `None` is returned then no packages are ready to be built.
+ pub fn dequeue(&mut self) -> Option<(N, V, usize)> {
+ let (key, priority) = self
+ .dep_map
+ .iter()
+ .filter(|(_, (deps, _))| deps.is_empty())
+ .map(|(key, _)| (key.clone(), self.priority[key]))
+ .max_by_key(|(_, priority)| *priority)?;
+ let (_, data) = self.dep_map.remove(&key).unwrap();
+ Some((key, data, priority))
+ }
+
+ /// Returns `true` if there are remaining packages to be built.
+ pub fn is_empty(&self) -> bool {
+ self.dep_map.is_empty()
+ }
+
+ /// Returns the number of remaining packages to be built.
+ pub fn len(&self) -> usize {
+ self.dep_map.len()
+ }
+
+ /// Indicate that something has finished.
+ ///
+ /// Calling this function indicates that the `node` has produced `edge`. All
+ /// remaining work items which only depend on this node/edge pair are now
+ /// candidates to start their job.
+ ///
+ /// Returns the nodes that are now allowed to be dequeued as a result of
+ /// finishing this node.
+ pub fn finish(&mut self, node: &N, edge: &E) -> Vec<&N> {
+        // The set of nodes that are waiting on this (node, edge) output.
+ let reverse_deps = self.reverse_dep_map.get(node).and_then(|map| map.get(edge));
+ let reverse_deps = match reverse_deps {
+ Some(deps) => deps,
+ None => return Vec::new(),
+ };
+ let key = (node.clone(), edge.clone());
+ let mut result = Vec::new();
+ for dep in reverse_deps.iter() {
+ let edges = &mut self.dep_map.get_mut(dep).unwrap().0;
+ assert!(edges.remove(&key));
+ if edges.is_empty() {
+ result.push(dep);
+ }
+ }
+ result
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::DependencyQueue;
+
+ #[test]
+ fn deep_first_equal_cost() {
+ let mut q = DependencyQueue::new();
+
+ q.queue(1, (), vec![], 1);
+ q.queue(2, (), vec![(1, ())], 1);
+ q.queue(3, (), vec![], 1);
+ q.queue(4, (), vec![(2, ()), (3, ())], 1);
+ q.queue(5, (), vec![(4, ()), (3, ())], 1);
+ q.queue_finished();
+
+ assert_eq!(q.dequeue(), Some((1, (), 5)));
+ assert_eq!(q.dequeue(), Some((3, (), 4)));
+ assert_eq!(q.dequeue(), None);
+ q.finish(&3, &());
+ assert_eq!(q.dequeue(), None);
+ q.finish(&1, &());
+ assert_eq!(q.dequeue(), Some((2, (), 4)));
+ assert_eq!(q.dequeue(), None);
+ q.finish(&2, &());
+ assert_eq!(q.dequeue(), Some((4, (), 3)));
+ assert_eq!(q.dequeue(), None);
+ q.finish(&4, &());
+ assert_eq!(q.dequeue(), Some((5, (), 2)));
+ }
+
+ #[test]
+ fn sort_by_highest_cost() {
+ let mut q = DependencyQueue::new();
+
+ q.queue(1, (), vec![], 1);
+ q.queue(2, (), vec![(1, ())], 1);
+ q.queue(3, (), vec![], 4);
+ q.queue(4, (), vec![(2, ()), (3, ())], 1);
+ q.queue_finished();
+
+ assert_eq!(q.dequeue(), Some((3, (), 9)));
+ assert_eq!(q.dequeue(), Some((1, (), 4)));
+ assert_eq!(q.dequeue(), None);
+ q.finish(&3, &());
+ assert_eq!(q.dequeue(), None);
+ q.finish(&1, &());
+ assert_eq!(q.dequeue(), Some((2, (), 3)));
+ assert_eq!(q.dequeue(), None);
+ q.finish(&2, &());
+ assert_eq!(q.dequeue(), Some((4, (), 2)));
+ assert_eq!(q.dequeue(), None);
+ q.finish(&4, &());
+ assert_eq!(q.dequeue(), None);
+ }
+}
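
Beyond the unit tests above, the intended driver shape is a dequeue/finish loop. A minimal synchronous sketch (not part of the patch; real callers build nodes concurrently and call `finish` as jobs complete) looks like this:

    // Sketch only, not from the patch above: drain a queue by repeatedly
    // dequeueing ready nodes and immediately marking them finished.
    fn drain(mut q: DependencyQueue<u32, (), ()>) {
        q.queue_finished();
        while let Some((node, _value, _priority)) = q.dequeue() {
            // ... "build" `node` synchronously here ...
            q.finish(&node, &());
        }
        assert!(q.is_empty(), "a dependency cycle would leave nodes stuck here");
    }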
diff --git a/src/tools/cargo/src/cargo/util/diagnostic_server.rs b/src/tools/cargo/src/cargo/util/diagnostic_server.rs
new file mode 100644
index 000000000..cc5314260
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/diagnostic_server.rs
@@ -0,0 +1,296 @@
+//! A small TCP server to handle collection of diagnostics information in a
+//! cross-platform way for the `cargo fix` command.
+
+use std::collections::HashSet;
+use std::io::{BufReader, Read, Write};
+use std::net::{Shutdown, SocketAddr, TcpListener, TcpStream};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::Arc;
+use std::thread::{self, JoinHandle};
+
+use anyhow::{Context, Error};
+use cargo_util::ProcessBuilder;
+use log::warn;
+use serde::{Deserialize, Serialize};
+
+use crate::core::Edition;
+use crate::util::errors::CargoResult;
+use crate::util::Config;
+
+const DIAGNOSTICS_SERVER_VAR: &str = "__CARGO_FIX_DIAGNOSTICS_SERVER";
+const PLEASE_REPORT_THIS_BUG: &str =
+ "This likely indicates a bug in either rustc or cargo itself,\n\
+ and we would appreciate a bug report! You're likely to see \n\
+ a number of compiler warnings after this message which cargo\n\
+ attempted to fix but failed. If you could open an issue at\n\
+ https://github.com/rust-lang/rust/issues\n\
+ quoting the full output of this command we'd be very appreciative!\n\
+     Note that you may be able to make some more progress in the near term\n\
+     by fixing code with the `--broken-code` flag\n\n\
+ ";
+
+#[derive(Deserialize, Serialize, Hash, Eq, PartialEq, Clone)]
+pub enum Message {
+ Migrating {
+ file: String,
+ from_edition: Edition,
+ to_edition: Edition,
+ },
+ Fixing {
+ file: String,
+ },
+ Fixed {
+ file: String,
+ fixes: u32,
+ },
+ FixFailed {
+ files: Vec<String>,
+ krate: Option<String>,
+ errors: Vec<String>,
+ abnormal_exit: Option<String>,
+ },
+ ReplaceFailed {
+ file: String,
+ message: String,
+ },
+ EditionAlreadyEnabled {
+ message: String,
+ edition: Edition,
+ },
+}
+
+impl Message {
+ pub fn post(&self, config: &Config) -> Result<(), Error> {
+ let addr = config
+ .get_env(DIAGNOSTICS_SERVER_VAR)
+ .context("diagnostics collector misconfigured")?;
+ let mut client =
+ TcpStream::connect(&addr).context("failed to connect to parent diagnostics target")?;
+
+ let s = serde_json::to_string(self).context("failed to serialize message")?;
+ client
+ .write_all(s.as_bytes())
+ .context("failed to write message to diagnostics target")?;
+ client
+ .shutdown(Shutdown::Write)
+ .context("failed to shutdown")?;
+
+ client
+ .read_to_end(&mut Vec::new())
+ .context("failed to receive a disconnect")?;
+
+ Ok(())
+ }
+}
+
+pub struct DiagnosticPrinter<'a> {
+ config: &'a Config,
+ dedupe: HashSet<Message>,
+}
+
+impl<'a> DiagnosticPrinter<'a> {
+ pub fn new(config: &'a Config) -> DiagnosticPrinter<'a> {
+ DiagnosticPrinter {
+ config,
+ dedupe: HashSet::new(),
+ }
+ }
+
+ pub fn print(&mut self, msg: &Message) -> CargoResult<()> {
+ match msg {
+ Message::Migrating {
+ file,
+ from_edition,
+ to_edition,
+ } => {
+ if !self.dedupe.insert(msg.clone()) {
+ return Ok(());
+ }
+ self.config.shell().status(
+ "Migrating",
+ &format!("{} from {} edition to {}", file, from_edition, to_edition),
+ )
+ }
+ Message::Fixing { file } => self
+ .config
+ .shell()
+ .verbose(|shell| shell.status("Fixing", file)),
+ Message::Fixed { file, fixes } => {
+ let msg = if *fixes == 1 { "fix" } else { "fixes" };
+ let msg = format!("{} ({} {})", file, fixes, msg);
+ self.config.shell().status("Fixed", msg)
+ }
+ Message::ReplaceFailed { file, message } => {
+ let msg = format!("error applying suggestions to `{}`\n", file);
+ self.config.shell().warn(&msg)?;
+ write!(
+ self.config.shell().err(),
+ "The full error message was:\n\n> {}\n\n",
+ message,
+ )?;
+ write!(self.config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?;
+ Ok(())
+ }
+ Message::FixFailed {
+ files,
+ krate,
+ errors,
+ abnormal_exit,
+ } => {
+ if let Some(ref krate) = *krate {
+ self.config.shell().warn(&format!(
+ "failed to automatically apply fixes suggested by rustc \
+ to crate `{}`",
+ krate,
+ ))?;
+ } else {
+ self.config
+ .shell()
+ .warn("failed to automatically apply fixes suggested by rustc")?;
+ }
+ if !files.is_empty() {
+ writeln!(
+ self.config.shell().err(),
+ "\nafter fixes were automatically applied the compiler \
+ reported errors within these files:\n"
+ )?;
+ for file in files {
+ writeln!(self.config.shell().err(), " * {}", file)?;
+ }
+ writeln!(self.config.shell().err())?;
+ }
+ write!(self.config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?;
+ if !errors.is_empty() {
+ writeln!(
+ self.config.shell().err(),
+ "The following errors were reported:"
+ )?;
+ for error in errors {
+ write!(self.config.shell().err(), "{}", error)?;
+ if !error.ends_with('\n') {
+ writeln!(self.config.shell().err())?;
+ }
+ }
+ }
+ if let Some(exit) = abnormal_exit {
+ writeln!(
+ self.config.shell().err(),
+ "rustc exited abnormally: {}",
+ exit
+ )?;
+ }
+ writeln!(
+ self.config.shell().err(),
+ "Original diagnostics will follow.\n"
+ )?;
+ Ok(())
+ }
+ Message::EditionAlreadyEnabled { message, edition } => {
+ if !self.dedupe.insert(msg.clone()) {
+ return Ok(());
+ }
+ // Don't give a really verbose warning if it has already been issued.
+ if self.dedupe.insert(Message::EditionAlreadyEnabled {
+ message: "".to_string(), // Dummy, so that this only long-warns once.
+ edition: *edition,
+ }) {
+ self.config.shell().warn(&format!("\
+{}
+
+If you are trying to migrate from the previous edition ({prev_edition}), the
+process requires following these steps:
+
+1. Start with `edition = \"{prev_edition}\"` in `Cargo.toml`
+2. Run `cargo fix --edition`
+3. Modify `Cargo.toml` to set `edition = \"{this_edition}\"`
+4. Run `cargo build` or `cargo test` to verify the fixes worked
+
+More details may be found at
+https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html
+",
+ message, this_edition=edition, prev_edition=edition.previous().unwrap()
+ ))
+ } else {
+ self.config.shell().warn(message)
+ }
+ }
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct RustfixDiagnosticServer {
+ listener: TcpListener,
+ addr: SocketAddr,
+}
+
+pub struct StartedServer {
+ addr: SocketAddr,
+ done: Arc<AtomicBool>,
+ thread: Option<JoinHandle<()>>,
+}
+
+impl RustfixDiagnosticServer {
+ pub fn new() -> Result<Self, Error> {
+ let listener = TcpListener::bind("127.0.0.1:0")
+ .with_context(|| "failed to bind TCP listener to manage locking")?;
+ let addr = listener.local_addr()?;
+
+ Ok(RustfixDiagnosticServer { listener, addr })
+ }
+
+ pub fn configure(&self, process: &mut ProcessBuilder) {
+ process.env(DIAGNOSTICS_SERVER_VAR, self.addr.to_string());
+ }
+
+ pub fn start<F>(self, on_message: F) -> Result<StartedServer, Error>
+ where
+ F: Fn(Message) + Send + 'static,
+ {
+ let addr = self.addr;
+ let done = Arc::new(AtomicBool::new(false));
+ let done2 = done.clone();
+ let thread = thread::spawn(move || {
+ self.run(&on_message, &done2);
+ });
+
+ Ok(StartedServer {
+ addr,
+ thread: Some(thread),
+ done,
+ })
+ }
+
+ fn run(self, on_message: &dyn Fn(Message), done: &AtomicBool) {
+ while let Ok((client, _)) = self.listener.accept() {
+ if done.load(Ordering::SeqCst) {
+ break;
+ }
+ let mut client = BufReader::new(client);
+ let mut s = String::new();
+ if let Err(e) = client.read_to_string(&mut s) {
+ warn!("diagnostic server failed to read: {}", e);
+ } else {
+ match serde_json::from_str(&s) {
+ Ok(message) => on_message(message),
+ Err(e) => warn!("invalid diagnostics message: {}", e),
+ }
+ }
+ // The client should be kept alive until after `on_message` is
+ // called to ensure that the client doesn't exit too soon (and
+ // Message::Finish getting posted before Message::FixDiagnostic).
+ drop(client);
+ }
+ }
+}
+
+impl Drop for StartedServer {
+ fn drop(&mut self) {
+ self.done.store(true, Ordering::SeqCst);
+ // Ignore errors here as this is largely best-effort
+ if TcpStream::connect(&self.addr).is_err() {
+ return;
+ }
+ drop(self.thread.take().unwrap().join());
+ }
+}
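
A minimal sketch of how the pieces fit together (not part of the patch; it assumes a `ProcessBuilder` for the child is already in hand and simply drops each message, where cargo proper would feed a `DiagnosticPrinter`):

    // Sketch only, not from the patch above: the parent `cargo fix` process
    // starts the server, exports its address to the child via `configure`,
    // and handles every incoming `Message` in the callback.
    fn start_diagnostics(
        process: &mut cargo_util::ProcessBuilder,
    ) -> anyhow::Result<StartedServer> {
        let server = RustfixDiagnosticServer::new()?;
        server.configure(process);
        server.start(move |msg: Message| {
            // A real caller would forward this to a DiagnosticPrinter.
            drop(msg);
        })
    }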
diff --git a/src/tools/cargo/src/cargo/util/errors.rs b/src/tools/cargo/src/cargo/util/errors.rs
new file mode 100644
index 000000000..5c7eebcdb
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/errors.rs
@@ -0,0 +1,335 @@
+#![allow(unknown_lints)]
+
+use anyhow::Error;
+use curl::easy::Easy;
+use std::fmt::{self, Write};
+use std::path::PathBuf;
+
+use super::truncate_with_ellipsis;
+
+pub type CargoResult<T> = anyhow::Result<T>;
+
+/// These are headers that are included in error messages to help with
+/// diagnosing issues.
+pub const DEBUG_HEADERS: &[&str] = &[
+ // This is the unique ID that identifies the request in CloudFront which
+ // can be used for looking at the AWS logs.
+ "x-amz-cf-id",
+ // This is the CloudFront POP (Point of Presence) that identifies the
+ // region where the request was routed. This can help identify if an issue
+ // is region-specific.
+ "x-amz-cf-pop",
+ // The unique token used for troubleshooting S3 requests via AWS logs or support.
+ "x-amz-request-id",
+ // Another token used in conjunction with x-amz-request-id.
+ "x-amz-id-2",
+ // Whether or not there was a cache hit or miss (both CloudFront and Fastly).
+ "x-cache",
+ // The cache server that processed the request (Fastly).
+ "x-served-by",
+];
+
+#[derive(Debug)]
+pub struct HttpNotSuccessful {
+ pub code: u32,
+ pub url: String,
+ pub ip: Option<String>,
+ pub body: Vec<u8>,
+ pub headers: Vec<String>,
+}
+
+impl HttpNotSuccessful {
+ pub fn new_from_handle(
+ handle: &mut Easy,
+ initial_url: &str,
+ body: Vec<u8>,
+ headers: Vec<String>,
+ ) -> HttpNotSuccessful {
+ let ip = handle.primary_ip().ok().flatten().map(|s| s.to_string());
+ let url = handle
+ .effective_url()
+ .ok()
+ .flatten()
+ .unwrap_or(initial_url)
+ .to_string();
+ HttpNotSuccessful {
+ code: handle.response_code().unwrap_or(0),
+ url,
+ ip,
+ body,
+ headers,
+ }
+ }
+
+ /// Renders the error in a compact form.
+ pub fn display_short(&self) -> String {
+ self.render(false)
+ }
+
+ fn render(&self, show_headers: bool) -> String {
+ let mut result = String::new();
+ let body = std::str::from_utf8(&self.body)
+ .map(|s| truncate_with_ellipsis(s, 512))
+ .unwrap_or_else(|_| format!("[{} non-utf8 bytes]", self.body.len()));
+
+ write!(
+ result,
+ "failed to get successful HTTP response from `{}`",
+ self.url
+ )
+ .unwrap();
+ if let Some(ip) = &self.ip {
+ write!(result, " ({ip})").unwrap();
+ }
+ write!(result, ", got {}\n", self.code).unwrap();
+ if show_headers {
+ if !self.headers.is_empty() {
+ write!(result, "debug headers:\n{}\n", self.headers.join("\n")).unwrap();
+ }
+ }
+ write!(result, "body:\n{body}").unwrap();
+ result
+ }
+}
+
+impl fmt::Display for HttpNotSuccessful {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.render(true))
+ }
+}
+
+impl std::error::Error for HttpNotSuccessful {}
+
+// =============================================================================
+// Verbose error
+
+/// An error wrapper for errors that should only be displayed with `--verbose`.
+///
+/// This should only be used in rare cases. When emitting this error, you
+/// should have a normal error higher up the error-cause chain (like "could
+/// not compile `foo`"), so at least *something* gets printed without
+/// `--verbose`.
+pub struct VerboseError {
+ inner: Error,
+}
+
+impl VerboseError {
+ pub fn new(inner: Error) -> VerboseError {
+ VerboseError { inner }
+ }
+}
+
+impl std::error::Error for VerboseError {
+ fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+ self.inner.source()
+ }
+}
+
+impl fmt::Debug for VerboseError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+impl fmt::Display for VerboseError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+// =============================================================================
+// Internal error
+
+/// An unexpected, internal error.
+///
+/// This should only be used for unexpected errors. It prints a message asking
+/// the user to file a bug report.
+pub struct InternalError {
+ inner: Error,
+}
+
+impl InternalError {
+ pub fn new(inner: Error) -> InternalError {
+ InternalError { inner }
+ }
+}
+
+impl std::error::Error for InternalError {
+ fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+ self.inner.source()
+ }
+}
+
+impl fmt::Debug for InternalError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+impl fmt::Display for InternalError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+// =============================================================================
+// Already printed error
+
+/// An error that does not need to be printed because it does not add any new
+/// information to what has already been printed.
+pub struct AlreadyPrintedError {
+ inner: Error,
+}
+
+impl AlreadyPrintedError {
+ pub fn new(inner: Error) -> Self {
+ AlreadyPrintedError { inner }
+ }
+}
+
+impl std::error::Error for AlreadyPrintedError {
+ fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+ self.inner.source()
+ }
+}
+
+impl fmt::Debug for AlreadyPrintedError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+impl fmt::Display for AlreadyPrintedError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+// =============================================================================
+// Manifest error
+
+/// Error wrapper related to a particular manifest and providing its path.
+///
+/// This error adds no displayable info of its own.
+pub struct ManifestError {
+ cause: Error,
+ manifest: PathBuf,
+}
+
+impl ManifestError {
+ pub fn new<E: Into<Error>>(cause: E, manifest: PathBuf) -> Self {
+ Self {
+ cause: cause.into(),
+ manifest,
+ }
+ }
+
+ pub fn manifest_path(&self) -> &PathBuf {
+ &self.manifest
+ }
+
+    /// Returns an iterator over the `ManifestError` chain of causes.
+    ///
+    /// If this error was not caused by another `ManifestError`, the iterator is empty.
+ pub fn manifest_causes(&self) -> ManifestCauses<'_> {
+ ManifestCauses { current: self }
+ }
+}
+
+impl std::error::Error for ManifestError {
+ fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+ self.cause.source()
+ }
+}
+
+impl fmt::Debug for ManifestError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.cause.fmt(f)
+ }
+}
+
+impl fmt::Display for ManifestError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.cause.fmt(f)
+ }
+}
+
+/// An iterator over the `ManifestError` chain of causes.
+pub struct ManifestCauses<'a> {
+ current: &'a ManifestError,
+}
+
+impl<'a> Iterator for ManifestCauses<'a> {
+ type Item = &'a ManifestError;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.current = self.current.cause.downcast_ref()?;
+ Some(self.current)
+ }
+}
+
+impl<'a> ::std::iter::FusedIterator for ManifestCauses<'a> {}
+
+// =============================================================================
+// CLI errors
+
+pub type CliResult = Result<(), CliError>;
+
+#[derive(Debug)]
+/// The CLI error is the error type used at Cargo's CLI-layer.
+///
+/// All errors from the lib side of Cargo will get wrapped with this error.
+/// Other errors (such as command-line argument validation) will create this
+/// directly.
+pub struct CliError {
+ /// The error to display. This can be `None` in rare cases to exit with a
+ /// code without displaying a message. For example `cargo run -q` where
+ /// the resulting process exits with a nonzero code (on Windows), or an
+ /// external subcommand that exits nonzero (we assume it printed its own
+ /// message).
+ pub error: Option<anyhow::Error>,
+ /// The process exit code.
+ pub exit_code: i32,
+}
+
+impl CliError {
+ pub fn new(error: anyhow::Error, code: i32) -> CliError {
+ CliError {
+ error: Some(error),
+ exit_code: code,
+ }
+ }
+
+ pub fn code(code: i32) -> CliError {
+ CliError {
+ error: None,
+ exit_code: code,
+ }
+ }
+}
+
+impl From<anyhow::Error> for CliError {
+ fn from(err: anyhow::Error) -> CliError {
+ CliError::new(err, 101)
+ }
+}
+
+impl From<clap::Error> for CliError {
+ fn from(err: clap::Error) -> CliError {
+ let code = if err.use_stderr() { 1 } else { 0 };
+ CliError::new(err.into(), code)
+ }
+}
+
+impl From<std::io::Error> for CliError {
+ fn from(err: std::io::Error) -> CliError {
+ CliError::new(err.into(), 1)
+ }
+}
+
+// =============================================================================
+// Construction helpers
+
+pub fn internal<S: fmt::Display>(error: S) -> anyhow::Error {
+ InternalError::new(anyhow::format_err!("{}", error)).into()
+}
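
A short sketch of how these types compose (not part of the patch; the condition and message are placeholders): an unexpected state is wrapped with `internal()` and surfaced through the CLI error type, which picks exit code 101 via the `From<anyhow::Error>` impl above.

    // Sketch only, not from the patch above.
    fn check_invariant(ok: bool) -> CliResult {
        if !ok {
            // `From<anyhow::Error> for CliError` selects exit code 101.
            return Err(internal("unexpected state while building").into());
        }
        Ok(())
    }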
diff --git a/src/tools/cargo/src/cargo/util/flock.rs b/src/tools/cargo/src/cargo/util/flock.rs
new file mode 100644
index 000000000..295eb1e14
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/flock.rs
@@ -0,0 +1,501 @@
+use std::fs::{File, OpenOptions};
+use std::io;
+use std::io::{Read, Seek, SeekFrom, Write};
+use std::path::{Display, Path, PathBuf};
+
+use crate::util::errors::CargoResult;
+use crate::util::Config;
+use anyhow::Context as _;
+use cargo_util::paths;
+use sys::*;
+use termcolor::Color::Cyan;
+
+#[derive(Debug)]
+pub struct FileLock {
+ f: Option<File>,
+ path: PathBuf,
+ state: State,
+}
+
+#[derive(PartialEq, Debug)]
+enum State {
+ Unlocked,
+ Shared,
+ Exclusive,
+}
+
+impl FileLock {
+ /// Returns the underlying file handle of this lock.
+ pub fn file(&self) -> &File {
+ self.f.as_ref().unwrap()
+ }
+
+ /// Returns the underlying path that this lock points to.
+ ///
+ /// Note that special care must be taken to ensure that the path is not
+ /// referenced outside the lifetime of this lock.
+ pub fn path(&self) -> &Path {
+ assert_ne!(self.state, State::Unlocked);
+ &self.path
+ }
+
+ /// Returns the parent path containing this file
+ pub fn parent(&self) -> &Path {
+ assert_ne!(self.state, State::Unlocked);
+ self.path.parent().unwrap()
+ }
+
+ /// Removes all sibling files to this locked file.
+ ///
+ /// This can be useful if a directory is locked with a sentinel file but it
+ /// needs to be cleared out as it may be corrupt.
+ pub fn remove_siblings(&self) -> CargoResult<()> {
+ let path = self.path();
+ for entry in path.parent().unwrap().read_dir()? {
+ let entry = entry?;
+ if Some(&entry.file_name()[..]) == path.file_name() {
+ continue;
+ }
+ let kind = entry.file_type()?;
+ if kind.is_dir() {
+ paths::remove_dir_all(entry.path())?;
+ } else {
+ paths::remove_file(entry.path())?;
+ }
+ }
+ Ok(())
+ }
+}
+
+impl Read for FileLock {
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ self.file().read(buf)
+ }
+}
+
+impl Seek for FileLock {
+ fn seek(&mut self, to: SeekFrom) -> io::Result<u64> {
+ self.file().seek(to)
+ }
+}
+
+impl Write for FileLock {
+ fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+ self.file().write(buf)
+ }
+
+ fn flush(&mut self) -> io::Result<()> {
+ self.file().flush()
+ }
+}
+
+impl Drop for FileLock {
+ fn drop(&mut self) {
+ if self.state != State::Unlocked {
+ if let Some(f) = self.f.take() {
+ let _ = unlock(&f);
+ }
+ }
+ }
+}
+
+/// A "filesystem" is intended to be a globally shared, hence locked, resource
+/// in Cargo.
+///
+/// The `Path` of a filesystem cannot be learned unless it's done in a locked
+/// fashion, and otherwise functions on this structure are prepared to handle
+/// concurrent invocations across multiple instances of Cargo.
+#[derive(Clone, Debug)]
+pub struct Filesystem {
+ root: PathBuf,
+}
+
+impl Filesystem {
+ /// Creates a new filesystem to be rooted at the given path.
+ pub fn new(path: PathBuf) -> Filesystem {
+ Filesystem { root: path }
+ }
+
+ /// Like `Path::join`, creates a new filesystem rooted at this filesystem
+ /// joined with the given path.
+ pub fn join<T: AsRef<Path>>(&self, other: T) -> Filesystem {
+ Filesystem::new(self.root.join(other))
+ }
+
+ /// Like `Path::push`, pushes a new path component onto this filesystem.
+ pub fn push<T: AsRef<Path>>(&mut self, other: T) {
+ self.root.push(other);
+ }
+
+ /// Consumes this filesystem and returns the underlying `PathBuf`.
+ ///
+ /// Note that this is a relatively dangerous operation and should be used
+    /// with great caution!
+ pub fn into_path_unlocked(self) -> PathBuf {
+ self.root
+ }
+
+ /// Returns the underlying `Path`.
+ ///
+ /// Note that this is a relatively dangerous operation and should be used
+    /// with great caution!
+ pub fn as_path_unlocked(&self) -> &Path {
+ &self.root
+ }
+
+ /// Creates the directory pointed to by this filesystem.
+ ///
+ /// Handles errors where other Cargo processes are also attempting to
+ /// concurrently create this directory.
+ pub fn create_dir(&self) -> CargoResult<()> {
+ paths::create_dir_all(&self.root)
+ }
+
+ /// Returns an adaptor that can be used to print the path of this
+ /// filesystem.
+ pub fn display(&self) -> Display<'_> {
+ self.root.display()
+ }
+
+ /// Opens exclusive access to a file, returning the locked version of a
+ /// file.
+ ///
+ /// This function will create a file at `path` if it doesn't already exist
+ /// (including intermediate directories), and then it will acquire an
+ /// exclusive lock on `path`. If the process must block waiting for the
+ /// lock, the `msg` is printed to `config`.
+ ///
+ /// The returned file can be accessed to look at the path and also has
+ /// read/write access to the underlying file.
+ pub fn open_rw<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
+ where
+ P: AsRef<Path>,
+ {
+ self.open(
+ path.as_ref(),
+ OpenOptions::new().read(true).write(true).create(true),
+ State::Exclusive,
+ config,
+ msg,
+ )
+ }
+
+ /// Opens shared access to a file, returning the locked version of a file.
+ ///
+ /// This function will fail if `path` doesn't already exist, but if it does
+ /// then it will acquire a shared lock on `path`. If the process must block
+ /// waiting for the lock, the `msg` is printed to `config`.
+ ///
+ /// The returned file can be accessed to look at the path and also has read
+ /// access to the underlying file. Any writes to the file will return an
+ /// error.
+ pub fn open_ro<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
+ where
+ P: AsRef<Path>,
+ {
+ self.open(
+ path.as_ref(),
+ OpenOptions::new().read(true),
+ State::Shared,
+ config,
+ msg,
+ )
+ }
+
+ fn open(
+ &self,
+ path: &Path,
+ opts: &OpenOptions,
+ state: State,
+ config: &Config,
+ msg: &str,
+ ) -> CargoResult<FileLock> {
+ let path = self.root.join(path);
+
+ // If we want an exclusive lock then if we fail because of NotFound it's
+ // likely because an intermediate directory didn't exist, so try to
+ // create the directory and then continue.
+ let f = opts
+ .open(&path)
+ .or_else(|e| {
+ if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
+ paths::create_dir_all(path.parent().unwrap())?;
+ Ok(opts.open(&path)?)
+ } else {
+ Err(anyhow::Error::from(e))
+ }
+ })
+ .with_context(|| format!("failed to open: {}", path.display()))?;
+ match state {
+ State::Exclusive => {
+ acquire(config, msg, &path, &|| try_lock_exclusive(&f), &|| {
+ lock_exclusive(&f)
+ })?;
+ }
+ State::Shared => {
+ acquire(config, msg, &path, &|| try_lock_shared(&f), &|| {
+ lock_shared(&f)
+ })?;
+ }
+ State::Unlocked => {}
+ }
+ Ok(FileLock {
+ f: Some(f),
+ path,
+ state,
+ })
+ }
+}
+
+impl PartialEq<Path> for Filesystem {
+ fn eq(&self, other: &Path) -> bool {
+ self.root == other
+ }
+}
+
+impl PartialEq<Filesystem> for Path {
+ fn eq(&self, other: &Filesystem) -> bool {
+ self == other.root
+ }
+}
+
+/// Acquires a lock on a file in a "nice" manner.
+///
+/// Almost all long-running blocking actions in Cargo have a status message
+/// associated with them as we're not sure how long they'll take. Whenever a
+/// conflicted file lock happens, this is the case (we're not sure when the lock
+/// will be released).
+///
+/// This function will acquire the lock on `path`, printing out a nice message
+/// to the console if we have to wait for it. It will first attempt to use
+/// `lock_try` to acquire the lock, and in the case of contention it will emit
+/// a status message based on `msg` to `config`'s shell, and then use
+/// `lock_block` to block waiting to acquire the lock.
+///
+/// Returns an error if the lock could not be acquired or if any error other
+/// than a contention error happens.
+fn acquire(
+ config: &Config,
+ msg: &str,
+ path: &Path,
+ lock_try: &dyn Fn() -> io::Result<()>,
+ lock_block: &dyn Fn() -> io::Result<()>,
+) -> CargoResult<()> {
+ // File locking on Unix is currently implemented via `flock`, which is known
+ // to be broken on NFS. We could in theory just ignore errors that happen on
+ // NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking
+ // forever**, even if the "non-blocking" flag is passed!
+ //
+ // As a result, we just skip all file locks entirely on NFS mounts. That
+ // should avoid calling any `flock` functions at all, and it wouldn't work
+ // there anyway.
+ //
+ // [1]: https://github.com/rust-lang/cargo/issues/2615
+ if is_on_nfs_mount(path) {
+ return Ok(());
+ }
+
+ match lock_try() {
+ Ok(()) => return Ok(()),
+
+ // In addition to ignoring NFS which is commonly not working we also
+ // just ignore locking on filesystems that look like they don't
+ // implement file locking.
+ Err(e) if error_unsupported(&e) => return Ok(()),
+
+ Err(e) => {
+ if !error_contended(&e) {
+ let e = anyhow::Error::from(e);
+ let cx = format!("failed to lock file: {}", path.display());
+ return Err(e.context(cx));
+ }
+ }
+ }
+ let msg = format!("waiting for file lock on {}", msg);
+ config.shell().status_with_color("Blocking", &msg, Cyan)?;
+
+ lock_block().with_context(|| format!("failed to lock file: {}", path.display()))?;
+ return Ok(());
+
+ #[cfg(all(target_os = "linux", not(target_env = "musl")))]
+ fn is_on_nfs_mount(path: &Path) -> bool {
+ use std::ffi::CString;
+ use std::mem;
+ use std::os::unix::prelude::*;
+
+ let path = match CString::new(path.as_os_str().as_bytes()) {
+ Ok(path) => path,
+ Err(_) => return false,
+ };
+
+ unsafe {
+ let mut buf: libc::statfs = mem::zeroed();
+ let r = libc::statfs(path.as_ptr(), &mut buf);
+
+ r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32
+ }
+ }
+
+ #[cfg(any(not(target_os = "linux"), target_env = "musl"))]
+ fn is_on_nfs_mount(_path: &Path) -> bool {
+ false
+ }
+}
+
+#[cfg(unix)]
+mod sys {
+ use std::fs::File;
+ use std::io::{Error, Result};
+ use std::os::unix::io::AsRawFd;
+
+ pub(super) fn lock_shared(file: &File) -> Result<()> {
+ flock(file, libc::LOCK_SH)
+ }
+
+ pub(super) fn lock_exclusive(file: &File) -> Result<()> {
+ flock(file, libc::LOCK_EX)
+ }
+
+ pub(super) fn try_lock_shared(file: &File) -> Result<()> {
+ flock(file, libc::LOCK_SH | libc::LOCK_NB)
+ }
+
+ pub(super) fn try_lock_exclusive(file: &File) -> Result<()> {
+ flock(file, libc::LOCK_EX | libc::LOCK_NB)
+ }
+
+ pub(super) fn unlock(file: &File) -> Result<()> {
+ flock(file, libc::LOCK_UN)
+ }
+
+ pub(super) fn error_contended(err: &Error) -> bool {
+ err.raw_os_error().map_or(false, |x| x == libc::EWOULDBLOCK)
+ }
+
+ pub(super) fn error_unsupported(err: &Error) -> bool {
+ match err.raw_os_error() {
+ // Unfortunately, depending on the target, these may or may not be the same.
+ // For targets in which they are the same, the duplicate pattern causes a warning.
+ #[allow(unreachable_patterns)]
+ Some(libc::ENOTSUP | libc::EOPNOTSUPP) => true,
+ Some(libc::ENOSYS) => true,
+ _ => false,
+ }
+ }
+
+ #[cfg(not(target_os = "solaris"))]
+ fn flock(file: &File, flag: libc::c_int) -> Result<()> {
+ let ret = unsafe { libc::flock(file.as_raw_fd(), flag) };
+ if ret < 0 {
+ Err(Error::last_os_error())
+ } else {
+ Ok(())
+ }
+ }
+
+ #[cfg(target_os = "solaris")]
+ fn flock(file: &File, flag: libc::c_int) -> Result<()> {
+ // Solaris lacks flock(), so try to emulate using fcntl()
+ let mut flock = libc::flock {
+ l_type: 0,
+ l_whence: 0,
+ l_start: 0,
+ l_len: 0,
+ l_sysid: 0,
+ l_pid: 0,
+ l_pad: [0, 0, 0, 0],
+ };
+ flock.l_type = if flag & libc::LOCK_UN != 0 {
+ libc::F_UNLCK
+ } else if flag & libc::LOCK_EX != 0 {
+ libc::F_WRLCK
+ } else if flag & libc::LOCK_SH != 0 {
+ libc::F_RDLCK
+ } else {
+ panic!("unexpected flock() operation")
+ };
+
+ let mut cmd = libc::F_SETLKW;
+ if (flag & libc::LOCK_NB) != 0 {
+ cmd = libc::F_SETLK;
+ }
+
+ let ret = unsafe { libc::fcntl(file.as_raw_fd(), cmd, &flock) };
+
+ if ret < 0 {
+ Err(Error::last_os_error())
+ } else {
+ Ok(())
+ }
+ }
+}
+
+#[cfg(windows)]
+mod sys {
+ use std::fs::File;
+ use std::io::{Error, Result};
+ use std::mem;
+ use std::os::windows::io::AsRawHandle;
+
+ use windows_sys::Win32::Foundation::HANDLE;
+ use windows_sys::Win32::Foundation::{ERROR_INVALID_FUNCTION, ERROR_LOCK_VIOLATION};
+ use windows_sys::Win32::Storage::FileSystem::{
+ LockFileEx, UnlockFile, LOCKFILE_EXCLUSIVE_LOCK, LOCKFILE_FAIL_IMMEDIATELY,
+ };
+
+ pub(super) fn lock_shared(file: &File) -> Result<()> {
+ lock_file(file, 0)
+ }
+
+ pub(super) fn lock_exclusive(file: &File) -> Result<()> {
+ lock_file(file, LOCKFILE_EXCLUSIVE_LOCK)
+ }
+
+ pub(super) fn try_lock_shared(file: &File) -> Result<()> {
+ lock_file(file, LOCKFILE_FAIL_IMMEDIATELY)
+ }
+
+ pub(super) fn try_lock_exclusive(file: &File) -> Result<()> {
+ lock_file(file, LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY)
+ }
+
+ pub(super) fn error_contended(err: &Error) -> bool {
+ err.raw_os_error()
+ .map_or(false, |x| x == ERROR_LOCK_VIOLATION as i32)
+ }
+
+ pub(super) fn error_unsupported(err: &Error) -> bool {
+ err.raw_os_error()
+ .map_or(false, |x| x == ERROR_INVALID_FUNCTION as i32)
+ }
+
+ pub(super) fn unlock(file: &File) -> Result<()> {
+ unsafe {
+ let ret = UnlockFile(file.as_raw_handle() as HANDLE, 0, 0, !0, !0);
+ if ret == 0 {
+ Err(Error::last_os_error())
+ } else {
+ Ok(())
+ }
+ }
+ }
+
+ fn lock_file(file: &File, flags: u32) -> Result<()> {
+ unsafe {
+ let mut overlapped = mem::zeroed();
+ let ret = LockFileEx(
+ file.as_raw_handle() as HANDLE,
+ flags,
+ 0,
+ !0,
+ !0,
+ &mut overlapped,
+ );
+ if ret == 0 {
+ Err(Error::last_os_error())
+ } else {
+ Ok(())
+ }
+ }
+ }
+}
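
A minimal usage sketch (not part of the patch; it assumes a `Config` is already available from the caller, and the `root`, `"cache"`, and `.cargo-lock` names are placeholders): take an exclusive lock on a sentinel file, creating it and its parent directories as needed.

    // Sketch only, not from the patch above. `open_rw` creates the file and
    // intermediate directories, locks it exclusively, and prints a
    // "Blocking: waiting for file lock on ..." status if contended.
    fn lock_cache(root: std::path::PathBuf, config: &Config) -> CargoResult<FileLock> {
        let fs = Filesystem::new(root).join("cache");
        fs.open_rw(".cargo-lock", config, "the cache")
    }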
diff --git a/src/tools/cargo/src/cargo/util/graph.rs b/src/tools/cargo/src/cargo/util/graph.rs
new file mode 100644
index 000000000..ff4018201
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/graph.rs
@@ -0,0 +1,178 @@
+use std::borrow::Borrow;
+use std::collections::BTreeSet;
+use std::fmt;
+
+pub struct Graph<N: Clone, E: Clone> {
+ nodes: im_rc::OrdMap<N, im_rc::OrdMap<N, E>>,
+}
+
+impl<N: Eq + Ord + Clone, E: Default + Clone> Graph<N, E> {
+ pub fn new() -> Graph<N, E> {
+ Graph {
+ nodes: im_rc::OrdMap::new(),
+ }
+ }
+
+ pub fn add(&mut self, node: N) {
+ self.nodes.entry(node).or_insert_with(im_rc::OrdMap::new);
+ }
+
+ pub fn link(&mut self, node: N, child: N) -> &mut E {
+ self.nodes
+ .entry(node)
+ .or_insert_with(im_rc::OrdMap::new)
+ .entry(child)
+ .or_insert_with(Default::default)
+ }
+
+ pub fn contains<Q: ?Sized>(&self, k: &Q) -> bool
+ where
+ N: Borrow<Q>,
+ Q: Ord + Eq,
+ {
+ self.nodes.contains_key(k)
+ }
+
+ pub fn edge(&self, from: &N, to: &N) -> Option<&E> {
+ self.nodes.get(from)?.get(to)
+ }
+
+ pub fn edges(&self, from: &N) -> impl Iterator<Item = (&N, &E)> {
+ self.nodes.get(from).into_iter().flat_map(|x| x.iter())
+ }
+
+ /// A topological sort of the `Graph`
+ pub fn sort(&self) -> Vec<N> {
+ let mut ret = Vec::new();
+ let mut marks = BTreeSet::new();
+
+ for node in self.nodes.keys() {
+ self.sort_inner_visit(node, &mut ret, &mut marks);
+ }
+
+ ret
+ }
+
+ fn sort_inner_visit(&self, node: &N, dst: &mut Vec<N>, marks: &mut BTreeSet<N>) {
+ if !marks.insert(node.clone()) {
+ return;
+ }
+
+ for child in self.nodes[node].keys() {
+ self.sort_inner_visit(child, dst, marks);
+ }
+
+ dst.push(node.clone());
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = &N> {
+ self.nodes.keys()
+ }
+
+ /// Checks if there is a path from `from` to `to`.
+ pub fn is_path_from_to<'a>(&'a self, from: &'a N, to: &'a N) -> bool {
+ let mut stack = vec![from];
+ let mut seen = BTreeSet::new();
+ seen.insert(from);
+ while let Some(iter) = stack.pop().and_then(|p| self.nodes.get(p)) {
+ for p in iter.keys() {
+ if p == to {
+ return true;
+ }
+ if seen.insert(p) {
+ stack.push(p);
+ }
+ }
+ }
+ false
+ }
+
+ /// Resolves one of the paths from the given dependent package down to
+ /// a leaf.
+ ///
+ /// Each element contains a node along with an edge except the first one.
+ /// The representation would look like:
+ ///
+ /// (Node0,) -> (Node1, Edge01) -> (Node2, Edge12)...
+ pub fn path_to_bottom<'a>(&'a self, mut pkg: &'a N) -> Vec<(&'a N, Option<&'a E>)> {
+ let mut result = vec![(pkg, None)];
+ while let Some(p) = self.nodes.get(pkg).and_then(|p| {
+ p.iter()
+ // Note that we can have "cycles" introduced through dev-dependency
+ // edges, so make sure we don't loop infinitely.
+ .find(|&(node, _)| result.iter().all(|p| p.0 != node))
+ .map(|(node, edge)| (node, Some(edge)))
+ }) {
+ result.push(p);
+ pkg = p.0;
+ }
+ result
+ }
+
+ /// Resolves one of the paths from the given dependent package up to
+ /// the root.
+ ///
+ /// Each element contains a node along with an edge except the first one.
+ /// The representation would look like:
+ ///
+ /// (Node0,) -> (Node1, Edge01) -> (Node2, Edge12)...
+ pub fn path_to_top<'a>(&'a self, mut pkg: &'a N) -> Vec<(&'a N, Option<&'a E>)> {
+        // Note that this implementation isn't the most robust per se; we'll
+        // likely have to tweak it over time. For now, though, it works for
+        // what it's used for.
+ let mut result = vec![(pkg, None)];
+ let first_pkg_depending_on = |pkg, res: &[(&N, Option<&E>)]| {
+ self.nodes
+ .iter()
+ .filter(|(_, adjacent)| adjacent.contains_key(pkg))
+ // Note that we can have "cycles" introduced through dev-dependency
+ // edges, so make sure we don't loop infinitely.
+ .find(|&(node, _)| !res.iter().any(|p| p.0 == node))
+ .map(|(p, adjacent)| (p, adjacent.get(pkg)))
+ };
+ while let Some(p) = first_pkg_depending_on(pkg, &result) {
+ result.push(p);
+ pkg = p.0;
+ }
+ result
+ }
+}
+
+impl<N: Eq + Ord + Clone, E: Default + Clone> Default for Graph<N, E> {
+ fn default() -> Graph<N, E> {
+ Graph::new()
+ }
+}
+
+impl<N: fmt::Display + Eq + Ord + Clone, E: Clone> fmt::Debug for Graph<N, E> {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ writeln!(fmt, "Graph {{")?;
+
+ for (n, e) in &self.nodes {
+ writeln!(fmt, " - {}", n)?;
+
+ for n in e.keys() {
+ writeln!(fmt, " - {}", n)?;
+ }
+ }
+
+ write!(fmt, "}}")?;
+
+ Ok(())
+ }
+}
+
+impl<N: Eq + Ord + Clone, E: Eq + Clone> PartialEq for Graph<N, E> {
+ fn eq(&self, other: &Graph<N, E>) -> bool {
+ self.nodes.eq(&other.nodes)
+ }
+}
+impl<N: Eq + Ord + Clone, E: Eq + Clone> Eq for Graph<N, E> {}
+
+impl<N: Eq + Ord + Clone, E: Clone> Clone for Graph<N, E> {
+ fn clone(&self) -> Graph<N, E> {
+ Graph {
+ nodes: self.nodes.clone(),
+ }
+ }
+}
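
A small sketch of the API (not part of the patch; the node names are made up): build a three-node chain, topologically sort it, and query reachability. Note that `sort` pushes leaves first, so dependencies come before their dependents.

    // Sketch only, not from the patch above.
    fn topo_demo() {
        let mut g: Graph<&str, ()> = Graph::new();
        g.add("core");
        g.link("lib", "core"); // "lib" depends on "core"
        g.link("app", "lib"); // "app" depends on "lib"
        let order = g.sort();
        assert_eq!(order, vec!["core", "lib", "app"]);
        assert!(g.is_path_from_to(&"app", &"core"));
    }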
diff --git a/src/tools/cargo/src/cargo/util/hasher.rs b/src/tools/cargo/src/cargo/util/hasher.rs
new file mode 100644
index 000000000..01e15ae2c
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/hasher.rs
@@ -0,0 +1,24 @@
+//! Implementation of a hasher that produces the same values across releases.
+//!
+//! The hasher should be fast and have a low chance of collisions (but is not
+//! sufficient for cryptographic purposes).
+#![allow(deprecated)]
+
+use std::hash::{Hasher, SipHasher};
+
+pub struct StableHasher(SipHasher);
+
+impl StableHasher {
+ pub fn new() -> StableHasher {
+ StableHasher(SipHasher::new())
+ }
+}
+
+impl Hasher for StableHasher {
+ fn finish(&self) -> u64 {
+ self.0.finish()
+ }
+ fn write(&mut self, bytes: &[u8]) {
+ self.0.write(bytes)
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/hex.rs b/src/tools/cargo/src/cargo/util/hex.rs
new file mode 100644
index 000000000..2d06d9b59
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/hex.rs
@@ -0,0 +1,31 @@
+use super::StableHasher;
+use std::fs::File;
+use std::hash::{Hash, Hasher};
+use std::io::Read;
+
+pub fn to_hex(num: u64) -> String {
+ hex::encode(num.to_le_bytes())
+}
+
+pub fn hash_u64<H: Hash>(hashable: H) -> u64 {
+ let mut hasher = StableHasher::new();
+ hashable.hash(&mut hasher);
+ hasher.finish()
+}
+
+pub fn hash_u64_file(mut file: &File) -> std::io::Result<u64> {
+ let mut hasher = StableHasher::new();
+ let mut buf = [0; 64 * 1024];
+ loop {
+ let n = file.read(&mut buf)?;
+ if n == 0 {
+ break;
+ }
+ hasher.write(&buf[..n]);
+ }
+ Ok(hasher.finish())
+}
+
+pub fn short_hash<H: Hash>(hashable: &H) -> String {
+ to_hex(hash_u64(hashable))
+}
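
Since the hasher is stable across cargo releases, the same value always produces the same short hex string, which is what the on-disk fingerprint names rely on. A tiny sketch (not part of the patch; the tuple is an arbitrary example value):

    // Sketch only, not from the patch above.
    fn fingerprint_demo() {
        let a = short_hash(&("serde", "1.0.0"));
        let b = short_hash(&("serde", "1.0.0"));
        assert_eq!(a, b);
        assert_eq!(a.len(), 16); // 8 little-endian bytes, hex-encoded
    }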
diff --git a/src/tools/cargo/src/cargo/util/important_paths.rs b/src/tools/cargo/src/cargo/util/important_paths.rs
new file mode 100644
index 000000000..224c4ab8b
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/important_paths.rs
@@ -0,0 +1,45 @@
+use crate::util::errors::CargoResult;
+use cargo_util::paths;
+use std::path::{Path, PathBuf};
+
+/// Finds the root `Cargo.toml`.
+pub fn find_root_manifest_for_wd(cwd: &Path) -> CargoResult<PathBuf> {
+ let valid_cargo_toml_file_name = "Cargo.toml";
+ let invalid_cargo_toml_file_name = "cargo.toml";
+ let mut invalid_cargo_toml_path_exists = false;
+
+ for current in paths::ancestors(cwd, None) {
+ let manifest = current.join(valid_cargo_toml_file_name);
+ if manifest.exists() {
+ return Ok(manifest);
+ }
+ if current.join(invalid_cargo_toml_file_name).exists() {
+ invalid_cargo_toml_path_exists = true;
+ }
+ }
+
+ if invalid_cargo_toml_path_exists {
+ anyhow::bail!(
+            "could not find `{}` in `{}` or any parent directory, but found cargo.toml; please try renaming it to Cargo.toml",
+ valid_cargo_toml_file_name,
+ cwd.display()
+ )
+ } else {
+ anyhow::bail!(
+ "could not find `{}` in `{}` or any parent directory",
+ valid_cargo_toml_file_name,
+ cwd.display()
+ )
+ }
+}
+
+/// Returns the path to the `file` in `pwd`, if it exists.
+pub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult<PathBuf> {
+ let manifest = pwd.join(file);
+
+ if manifest.exists() {
+ Ok(manifest)
+ } else {
+        anyhow::bail!("could not find `{}` in `{}`", file, pwd.display())
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/interning.rs b/src/tools/cargo/src/cargo/util/interning.rs
new file mode 100644
index 000000000..bbec12942
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/interning.rs
@@ -0,0 +1,182 @@
+use serde::{Serialize, Serializer};
+use std::borrow::Borrow;
+use std::cmp::Ordering;
+use std::collections::HashSet;
+use std::ffi::OsStr;
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::ops::Deref;
+use std::path::Path;
+use std::ptr;
+use std::str;
+use std::sync::Mutex;
+
+fn leak(s: String) -> &'static str {
+ Box::leak(s.into_boxed_str())
+}
+
+lazy_static::lazy_static! {
+ static ref STRING_CACHE: Mutex<HashSet<&'static str>> = Mutex::new(HashSet::new());
+}
+
+#[derive(Clone, Copy)]
+pub struct InternedString {
+ inner: &'static str,
+}
+
+impl<'a> From<&'a str> for InternedString {
+ fn from(item: &'a str) -> Self {
+ InternedString::new(item)
+ }
+}
+
+impl<'a> From<&'a String> for InternedString {
+ fn from(item: &'a String) -> Self {
+ InternedString::new(item)
+ }
+}
+
+impl From<String> for InternedString {
+ fn from(item: String) -> Self {
+ InternedString::new(&item)
+ }
+}
+
+impl PartialEq for InternedString {
+ fn eq(&self, other: &InternedString) -> bool {
+ ptr::eq(self.as_str(), other.as_str())
+ }
+}
+
+impl PartialEq<str> for InternedString {
+ fn eq(&self, other: &str) -> bool {
+ *self == other
+ }
+}
+
+impl<'a> PartialEq<&'a str> for InternedString {
+ fn eq(&self, other: &&str) -> bool {
+ **self == **other
+ }
+}
+
+impl Eq for InternedString {}
+
+impl InternedString {
+ pub fn new(str: &str) -> InternedString {
+ let mut cache = STRING_CACHE.lock().unwrap();
+ let s = cache.get(str).cloned().unwrap_or_else(|| {
+ let s = leak(str.to_string());
+ cache.insert(s);
+ s
+ });
+
+ InternedString { inner: s }
+ }
+
+ pub fn as_str(&self) -> &'static str {
+ self.inner
+ }
+}
+
+impl Deref for InternedString {
+ type Target = str;
+
+ fn deref(&self) -> &'static str {
+ self.as_str()
+ }
+}
+
+impl AsRef<str> for InternedString {
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl AsRef<OsStr> for InternedString {
+ fn as_ref(&self) -> &OsStr {
+ self.as_str().as_ref()
+ }
+}
+
+impl AsRef<Path> for InternedString {
+ fn as_ref(&self) -> &Path {
+ self.as_str().as_ref()
+ }
+}
+
+impl Hash for InternedString {
+ // N.B., we can't implement this as `identity(self).hash(state)`,
+ // because we use this for on-disk fingerprints and so need
+ // stability across Cargo invocations.
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.as_str().hash(state);
+ }
+}
+
+impl Borrow<str> for InternedString {
+ // If we implement Hash as `identity(self).hash(state)`,
+ // then this will need to be removed.
+ fn borrow(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl fmt::Debug for InternedString {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(self.as_str(), f)
+ }
+}
+
+impl fmt::Display for InternedString {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+
+impl Ord for InternedString {
+ fn cmp(&self, other: &InternedString) -> Ordering {
+ self.as_str().cmp(other.as_str())
+ }
+}
+
+impl PartialOrd for InternedString {
+ fn partial_cmp(&self, other: &InternedString) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Serialize for InternedString {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ serializer.serialize_str(self.inner)
+ }
+}
+
+struct InternedStringVisitor;
+
+impl<'de> serde::Deserialize<'de> for InternedString {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ deserializer.deserialize_str(InternedStringVisitor)
+ }
+}
+
+impl<'de> serde::de::Visitor<'de> for InternedStringVisitor {
+ type Value = InternedString;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+        formatter.write_str("an interned string")
+ }
+
+ fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(InternedString::new(v))
+ }
+}
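
Interning means equal strings share the same leaked `'static` allocation, so equality is effectively a pointer comparison. A brief sketch (not part of the patch; `"serde"` is an arbitrary example):

    // Sketch only, not from the patch above.
    fn interning_demo() {
        let a = InternedString::new("serde");
        let b: InternedString = String::from("serde").into();
        assert_eq!(a, b); // PartialEq compares pointers into the cache
        assert!(std::ptr::eq(a.as_str(), b.as_str()));
    }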
diff --git a/src/tools/cargo/src/cargo/util/into_url.rs b/src/tools/cargo/src/cargo/util/into_url.rs
new file mode 100644
index 000000000..26f365ee8
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/into_url.rs
@@ -0,0 +1,30 @@
+use std::path::{Path, PathBuf};
+
+use url::Url;
+
+use crate::util::CargoResult;
+
+/// A type that can be converted to a Url
+pub trait IntoUrl {
+ /// Performs the conversion
+ fn into_url(self) -> CargoResult<Url>;
+}
+
+impl<'a> IntoUrl for &'a str {
+ fn into_url(self) -> CargoResult<Url> {
+ Url::parse(self).map_err(|s| anyhow::format_err!("invalid url `{}`: {}", self, s))
+ }
+}
+
+impl<'a> IntoUrl for &'a Path {
+ fn into_url(self) -> CargoResult<Url> {
+ Url::from_file_path(self)
+ .map_err(|()| anyhow::format_err!("invalid path url `{}`", self.display()))
+ }
+}
+
+impl<'a> IntoUrl for &'a PathBuf {
+ fn into_url(self) -> CargoResult<Url> {
+ self.as_path().into_url()
+ }
+}
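
Both string and path forms funnel through the same trait; a short sketch (not part of the patch; it assumes `IntoUrl` is in scope and uses a Unix-style absolute path, since `Url::from_file_path` rejects relative paths):

    // Sketch only, not from the patch above.
    fn urls_demo() -> CargoResult<()> {
        let remote = "https://github.com/rust-lang/cargo".into_url()?;
        assert_eq!(remote.scheme(), "https");
        let local = std::path::Path::new("/tmp/registry").into_url()?;
        assert_eq!(local.scheme(), "file");
        Ok(())
    }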
diff --git a/src/tools/cargo/src/cargo/util/into_url_with_base.rs b/src/tools/cargo/src/cargo/util/into_url_with_base.rs
new file mode 100644
index 000000000..63037bdf6
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/into_url_with_base.rs
@@ -0,0 +1,50 @@
+use crate::util::{CargoResult, IntoUrl};
+
+use url::Url;
+
+/// A type that can be interpreted as a relative Url and converted to
+/// a Url.
+pub trait IntoUrlWithBase {
+ /// Performs the conversion
+ fn into_url_with_base<U: IntoUrl>(self, base: Option<U>) -> CargoResult<Url>;
+}
+
+impl<'a> IntoUrlWithBase for &'a str {
+ fn into_url_with_base<U: IntoUrl>(self, base: Option<U>) -> CargoResult<Url> {
+ let base_url = match base {
+ Some(base) => Some(
+ base.into_url()
+ .map_err(|s| anyhow::format_err!("invalid url `{}`: {}", self, s))?,
+ ),
+ None => None,
+ };
+
+ Url::options()
+ .base_url(base_url.as_ref())
+ .parse(self)
+ .map_err(|s| anyhow::format_err!("invalid url `{}`: {}", self, s))
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::util::IntoUrlWithBase;
+
+ #[test]
+ fn into_url_with_base() {
+ assert_eq!(
+ "rel/path"
+ .into_url_with_base(Some("file:///abs/path/"))
+ .unwrap()
+ .to_string(),
+ "file:///abs/path/rel/path"
+ );
+ assert_eq!(
+ "rel/path"
+ .into_url_with_base(Some("file:///abs/path/popped-file"))
+ .unwrap()
+ .to_string(),
+ "file:///abs/path/rel/path"
+ );
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/io.rs b/src/tools/cargo/src/cargo/util/io.rs
new file mode 100644
index 000000000..60f3ffe05
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/io.rs
@@ -0,0 +1,51 @@
+use std::io::{self, Read, Take};
+
+#[derive(Debug)]
+pub struct LimitErrorReader<R> {
+ inner: Take<R>,
+}
+
+impl<R: Read> LimitErrorReader<R> {
+ pub fn new(r: R, limit: u64) -> LimitErrorReader<R> {
+ LimitErrorReader {
+ inner: r.take(limit),
+ }
+ }
+}
+
+impl<R: Read> Read for LimitErrorReader<R> {
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ match self.inner.read(buf) {
+ Ok(0) if self.inner.limit() == 0 => Err(io::Error::new(
+ io::ErrorKind::Other,
+ "maximum limit reached when reading",
+ )),
+ e => e,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::LimitErrorReader;
+
+ use std::io::Read;
+
+ #[test]
+ fn under_the_limit() {
+ let buf = &[1; 7][..];
+ let mut r = LimitErrorReader::new(buf, 8);
+ let mut out = Vec::new();
+ assert!(matches!(r.read_to_end(&mut out), Ok(7)));
+ assert_eq!(buf, out.as_slice());
+ }
+
+ #[test]
+ #[should_panic = "maximum limit reached when reading"]
+ fn over_the_limit() {
+ let buf = &[1; 8][..];
+ let mut r = LimitErrorReader::new(buf, 8);
+ let mut out = Vec::new();
+ r.read_to_end(&mut out).unwrap();
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/job.rs b/src/tools/cargo/src/cargo/util/job.rs
new file mode 100644
index 000000000..f2bcf94a2
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/job.rs
@@ -0,0 +1,149 @@
+//! Job management (mostly for windows)
+//!
+//! Most of the time when you're running cargo you expect Ctrl-C to actually
+//! terminate the entire tree of processes in play, not just the one at the top
+//! (cargo). This currently works "by default" on Unix platforms because Ctrl-C
+//! actually sends a signal to the *process group* rather than the parent
+//! process, so everything will get torn down. On Windows, however, this does
+//! not happen and Ctrl-C just kills cargo.
+//!
+//! To achieve the same semantics on Windows we use Job Objects to ensure that
+//! all processes die at the same time. Job objects have a mode of operation
+//! where when all handles to the object are closed it causes all child
+//! processes associated with the object to be terminated immediately.
+//! Conveniently whenever a process in the job object spawns a new process the
+//! child will be associated with the job object as well. This means if we add
+//! ourselves to the job object we create then everything will get torn down!
+
+pub use self::imp::Setup;
+
+pub fn setup() -> Option<Setup> {
+ unsafe { imp::setup() }
+}
+
+#[cfg(unix)]
+mod imp {
+ use std::env;
+
+ pub type Setup = ();
+
+ pub unsafe fn setup() -> Option<()> {
+ // There's a test case for the behavior of
+ // when-cargo-is-killed-subprocesses-are-also-killed, but that requires
+ // one cargo spawned to become its own session leader, so we do that
+ // here.
+ //
+ // ALLOWED: For testing cargo itself only.
+ #[allow(clippy::disallowed_methods)]
+ if env::var("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE").is_ok() {
+ libc::setsid();
+ }
+ Some(())
+ }
+}
+
+#[cfg(windows)]
+mod imp {
+ use std::io;
+ use std::mem;
+ use std::ptr;
+ use std::ptr::addr_of;
+
+ use log::info;
+
+ use windows_sys::Win32::Foundation::CloseHandle;
+ use windows_sys::Win32::Foundation::HANDLE;
+ use windows_sys::Win32::Foundation::INVALID_HANDLE_VALUE;
+ use windows_sys::Win32::System::JobObjects::AssignProcessToJobObject;
+ use windows_sys::Win32::System::JobObjects::CreateJobObjectW;
+ use windows_sys::Win32::System::JobObjects::JobObjectExtendedLimitInformation;
+ use windows_sys::Win32::System::JobObjects::SetInformationJobObject;
+ use windows_sys::Win32::System::JobObjects::JOBOBJECT_EXTENDED_LIMIT_INFORMATION;
+ use windows_sys::Win32::System::JobObjects::JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
+ use windows_sys::Win32::System::Threading::GetCurrentProcess;
+
+ pub struct Setup {
+ job: Handle,
+ }
+
+ pub struct Handle {
+ inner: HANDLE,
+ }
+
+ fn last_err() -> io::Error {
+ io::Error::last_os_error()
+ }
+
+ pub unsafe fn setup() -> Option<Setup> {
+ // Creates a new job object for us to use and then adds ourselves to it.
+ // Note that all errors are basically ignored in this function,
+ // intentionally. Job objects are "relatively new" in Windows,
+ // particularly the ability to support nested job objects. Older
+ // Windows installs don't support this ability. We probably don't want
+ // to force Cargo to abort in this situation or force others to *not*
+ // use job objects, so we instead just ignore errors and assume that
+ // we're otherwise part of someone else's job object in this case.
+
+ let job = CreateJobObjectW(ptr::null_mut(), ptr::null());
+ if job == INVALID_HANDLE_VALUE {
+ return None;
+ }
+ let job = Handle { inner: job };
+
+ // Indicate that, when all handles to the job object are gone, all
+ // processes in the object should be killed. Note that this includes our
+ // entire process tree by default because we've added ourselves and
+ // our children will reside in the job once we spawn a process.
+ let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION = mem::zeroed();
+ info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
+ let r = SetInformationJobObject(
+ job.inner,
+ JobObjectExtendedLimitInformation,
+ addr_of!(info) as *const _,
+ mem::size_of_val(&info) as u32,
+ );
+ if r == 0 {
+ return None;
+ }
+
+ // Assign our process to this job object, meaning that our children will
+ // now live or die based on our existence.
+ let me = GetCurrentProcess();
+ let r = AssignProcessToJobObject(job.inner, me);
+ if r == 0 {
+ return None;
+ }
+
+ Some(Setup { job })
+ }
+
+ impl Drop for Setup {
+ fn drop(&mut self) {
+ // On normal exits (not ctrl-c), we don't want to kill any child
+ // processes. The destructor here configures our job object to
+ // **not** kill everything on close, then closes the job object.
+ unsafe {
+ let info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION = mem::zeroed();
+ let r = SetInformationJobObject(
+ self.job.inner,
+ JobObjectExtendedLimitInformation,
+ addr_of!(info) as *const _,
+ mem::size_of_val(&info) as u32,
+ );
+ if r == 0 {
+ info!("failed to configure job object to defaults: {}", last_err());
+ }
+ }
+ }
+ }
+
+ impl Drop for Handle {
+ fn drop(&mut self) {
+ unsafe {
+ CloseHandle(self.inner);
+ }
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/lev_distance.rs b/src/tools/cargo/src/cargo/util/lev_distance.rs
new file mode 100644
index 000000000..8dcef4a89
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/lev_distance.rs
@@ -0,0 +1,93 @@
+use std::cmp;
+
+pub fn lev_distance(me: &str, t: &str) -> usize {
+ // Comparing the strings lowercased means a difference in capitalization costs
+ // less distance than a completely different letter. Otherwise `CHECK` is as far
+ // away from `check` as it is from `build` (both at a distance of 5), and a
+ // single-letter shortcut (e.g. `b` or `c`) is equally far (distance 1) from
+ // every capital single-letter entry. By lowercasing first, `C` and `c` are
+ // closer than `C` and `b`, for example.
+ let me = me.to_lowercase();
+ let t = t.to_lowercase();
+
+ let t_len = t.chars().count();
+ if me.is_empty() {
+ return t_len;
+ }
+ if t.is_empty() {
+ return me.chars().count();
+ }
+
+ let mut dcol = (0..=t_len).collect::<Vec<_>>();
+ let mut t_last = 0;
+
+ for (i, sc) in me.chars().enumerate() {
+ let mut current = i;
+ dcol[0] = current + 1;
+
+ for (j, tc) in t.chars().enumerate() {
+ let next = dcol[j + 1];
+
+ if sc == tc {
+ dcol[j + 1] = current;
+ } else {
+ dcol[j + 1] = cmp::min(current, next);
+ dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1;
+ }
+
+ current = next;
+ t_last = j;
+ }
+ }
+
+ dcol[t_last + 1]
+}
+
+/// Find the closest element from `iter` matching `choice`. The `key` callback
+/// is used to select a `&str` from the iterator to compare against `choice`.
+pub fn closest<'a, T>(
+ choice: &str,
+ iter: impl Iterator<Item = T>,
+ key: impl Fn(&T) -> &'a str,
+) -> Option<T> {
+ // Only consider candidates with a lev_distance of 3 or less so we don't
+ // suggest out-of-the-blue options.
+ iter.map(|e| (lev_distance(choice, key(&e)), e))
+ .filter(|&(d, _)| d < 4)
+ .min_by_key(|t| t.0)
+ .map(|t| t.1)
+}
+
+/// Version of `closest` that returns a common "suggestion" that can be tacked
+/// onto the end of an error message.
+pub fn closest_msg<'a, T>(
+ choice: &str,
+ iter: impl Iterator<Item = T>,
+ key: impl Fn(&T) -> &'a str,
+) -> String {
+ match closest(choice, iter, &key) {
+ Some(e) => format!("\n\n\tDid you mean `{}`?", key(&e)),
+ None => String::new(),
+ }
+}
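+
+// A rough usage sketch (illustrative only; the candidate list is hypothetical):
+// suggesting a near-miss among known names.
+//
+//     let cmds = ["build", "check", "test"];
+//     assert_eq!(closest("biuld", cmds.iter().copied(), |s| *s), Some("build"));
+//     assert_eq!(closest_msg("xyzzy", cmds.iter().copied(), |s| *s), String::new());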
+
+#[test]
+fn test_lev_distance() {
+ use std::char::{from_u32, MAX};
+ // Test bytelength agnosticity
+ for c in (0u32..MAX as u32)
+ .filter_map(from_u32)
+ .map(|i| i.to_string())
+ {
+ assert_eq!(lev_distance(&c, &c), 0);
+ }
+
+ let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
+ let b = "\nMary häd ä little lämb\n\nLittle lämb\n";
+ let c = "Mary häd ä little lämb\n\nLittle lämb\n";
+ assert_eq!(lev_distance(a, b), 1);
+ assert_eq!(lev_distance(b, a), 1);
+ assert_eq!(lev_distance(a, c), 2);
+ assert_eq!(lev_distance(c, a), 2);
+ assert_eq!(lev_distance(b, c), 1);
+ assert_eq!(lev_distance(c, b), 1);
+}
diff --git a/src/tools/cargo/src/cargo/util/lockserver.rs b/src/tools/cargo/src/cargo/util/lockserver.rs
new file mode 100644
index 000000000..14911556a
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/lockserver.rs
@@ -0,0 +1,171 @@
+//! An implementation of IPC locks, guaranteed to be released if a process dies
+//!
+//! This module implements a locking server/client where the main `cargo fix`
+//! process will start up a server and then all the client processes will
+//! connect to it. The main purpose of this file is to ensure that each crate
+//! (aka file entry point) is only fixed by one process at a time; currently,
+//! concurrent fixes can't happen.
+//!
+//! The basic design here is to use a TCP server which is pretty portable across
+//! platforms. For simplicity it just uses threads as well. Each client connects
+//! to the main server, tells the server its name, and then waits for the
+//! server to grant it the lock (i.e., to write a byte).
+
+use std::collections::HashMap;
+use std::io::{BufRead, BufReader, Read, Write};
+use std::net::{SocketAddr, TcpListener, TcpStream};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::{Arc, Mutex};
+use std::thread::{self, JoinHandle};
+
+use anyhow::{Context, Error};
+
+pub struct LockServer {
+ listener: TcpListener,
+ addr: SocketAddr,
+ threads: HashMap<String, ServerClient>,
+ done: Arc<AtomicBool>,
+}
+
+pub struct LockServerStarted {
+ done: Arc<AtomicBool>,
+ addr: SocketAddr,
+ thread: Option<JoinHandle<()>>,
+}
+
+pub struct LockServerClient {
+ _socket: TcpStream,
+}
+
+struct ServerClient {
+ thread: Option<JoinHandle<()>>,
+ lock: Arc<Mutex<(bool, Vec<TcpStream>)>>,
+}
+
+impl LockServer {
+ pub fn new() -> Result<LockServer, Error> {
+ let listener = TcpListener::bind("127.0.0.1:0")
+ .with_context(|| "failed to bind TCP listener to manage locking")?;
+ let addr = listener.local_addr()?;
+ Ok(LockServer {
+ listener,
+ addr,
+ threads: HashMap::new(),
+ done: Arc::new(AtomicBool::new(false)),
+ })
+ }
+
+ pub fn addr(&self) -> &SocketAddr {
+ &self.addr
+ }
+
+ pub fn start(self) -> Result<LockServerStarted, Error> {
+ let addr = self.addr;
+ let done = self.done.clone();
+ let thread = thread::spawn(|| {
+ self.run();
+ });
+ Ok(LockServerStarted {
+ addr,
+ thread: Some(thread),
+ done,
+ })
+ }
+
+ fn run(mut self) {
+ while let Ok((client, _)) = self.listener.accept() {
+ if self.done.load(Ordering::SeqCst) {
+ break;
+ }
+
+ // Learn the name of our connected client to figure out if it needs
+ // to wait for another process to release the lock.
+ let mut client = BufReader::new(client);
+ let mut name = String::new();
+ if client.read_line(&mut name).is_err() {
+ continue;
+ }
+ let client = client.into_inner();
+
+ // If this "named mutex" is already registered and the thread is
+ // still going, put it on the queue. Otherwise wait on the previous
+ // thread and we'll replace it just below.
+ if let Some(t) = self.threads.get_mut(&name) {
+ let mut state = t.lock.lock().unwrap();
+ if state.0 {
+ state.1.push(client);
+ continue;
+ }
+ drop(t.thread.take().unwrap().join());
+ }
+
+ let lock = Arc::new(Mutex::new((true, vec![client])));
+ let lock2 = lock.clone();
+ let thread = thread::spawn(move || {
+ loop {
+ let mut client = {
+ let mut state = lock2.lock().unwrap();
+ if state.1.is_empty() {
+ state.0 = false;
+ break;
+ } else {
+ state.1.remove(0)
+ }
+ };
+ // Inform this client that it now has the lock and wait for
+ // it to disconnect by waiting for EOF.
+ if client.write_all(&[1]).is_err() {
+ continue;
+ }
+ let mut dst = Vec::new();
+ drop(client.read_to_end(&mut dst));
+ }
+ });
+
+ self.threads.insert(
+ name,
+ ServerClient {
+ thread: Some(thread),
+ lock,
+ },
+ );
+ }
+ }
+}
+
+impl Drop for LockServer {
+ fn drop(&mut self) {
+ for (_, mut client) in self.threads.drain() {
+ if let Some(thread) = client.thread.take() {
+ drop(thread.join());
+ }
+ }
+ }
+}
+
+impl Drop for LockServerStarted {
+ fn drop(&mut self) {
+ self.done.store(true, Ordering::SeqCst);
+ // Ignore errors here as this is largely best-effort
+ if TcpStream::connect(&self.addr).is_err() {
+ return;
+ }
+ drop(self.thread.take().unwrap().join());
+ }
+}
+
+impl LockServerClient {
+ pub fn lock(addr: &SocketAddr, name: impl AsRef<[u8]>) -> Result<LockServerClient, Error> {
+ let mut client =
+ TcpStream::connect(&addr).with_context(|| "failed to connect to parent lock server")?;
+ client
+ .write_all(name.as_ref())
+ .and_then(|_| client.write_all(b"\n"))
+ .with_context(|| "failed to write to lock server")?;
+ let mut buf = [0];
+ client
+ .read_exact(&mut buf)
+ .with_context(|| "failed to acquire lock")?;
+ Ok(LockServerClient { _socket: client })
+ }
+}
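+
+// A rough flow sketch (illustrative only): the parent process starts the
+// server and hands its address to child processes, each of which blocks in
+// `lock` until it owns the per-crate lock:
+//
+//     let server = LockServer::new()?;
+//     let addr = *server.addr();
+//     let _started = server.start()?;
+//     // in a child process:
+//     let _guard = LockServerClient::lock(&addr, "crate-name")?;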
diff --git a/src/tools/cargo/src/cargo/util/machine_message.rs b/src/tools/cargo/src/cargo/util/machine_message.rs
new file mode 100644
index 000000000..baef5167b
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/machine_message.rs
@@ -0,0 +1,105 @@
+use std::path::{Path, PathBuf};
+
+use serde::ser;
+use serde::Serialize;
+use serde_json::{self, json, value::RawValue};
+
+use crate::core::{compiler::CompileMode, PackageId, Target};
+
+pub trait Message: ser::Serialize {
+ fn reason(&self) -> &str;
+
+ fn to_json_string(&self) -> String {
+ let json = serde_json::to_string(self).unwrap();
+ assert!(json.starts_with("{\""));
+ let reason = json!(self.reason());
+ format!("{{\"reason\":{},{}", reason, &json[1..])
+ }
+}
+
+#[derive(Serialize)]
+pub struct FromCompiler<'a> {
+ pub package_id: PackageId,
+ pub manifest_path: &'a Path,
+ pub target: &'a Target,
+ pub message: Box<RawValue>,
+}
+
+impl<'a> Message for FromCompiler<'a> {
+ fn reason(&self) -> &str {
+ "compiler-message"
+ }
+}
+
+#[derive(Serialize)]
+pub struct Artifact<'a> {
+ pub package_id: PackageId,
+ pub manifest_path: PathBuf,
+ pub target: &'a Target,
+ pub profile: ArtifactProfile,
+ pub features: Vec<String>,
+ pub filenames: Vec<PathBuf>,
+ pub executable: Option<PathBuf>,
+ pub fresh: bool,
+}
+
+impl<'a> Message for Artifact<'a> {
+ fn reason(&self) -> &str {
+ "compiler-artifact"
+ }
+}
+
+/// This is different from the regular `Profile` to maintain backwards
+/// compatibility (in particular, `test` is no longer in `Profile`, but we
+/// still want it to be included here).
+#[derive(Serialize)]
+pub struct ArtifactProfile {
+ pub opt_level: &'static str,
+ pub debuginfo: Option<u32>,
+ pub debug_assertions: bool,
+ pub overflow_checks: bool,
+ pub test: bool,
+}
+
+#[derive(Serialize)]
+pub struct BuildScript<'a> {
+ pub package_id: PackageId,
+ pub linked_libs: &'a [String],
+ pub linked_paths: &'a [String],
+ pub cfgs: &'a [String],
+ pub env: &'a [(String, String)],
+ pub out_dir: &'a Path,
+}
+
+impl<'a> Message for BuildScript<'a> {
+ fn reason(&self) -> &str {
+ "build-script-executed"
+ }
+}
+
+#[derive(Serialize)]
+pub struct TimingInfo<'a> {
+ pub package_id: PackageId,
+ pub target: &'a Target,
+ pub mode: CompileMode,
+ pub duration: f64,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub rmeta_time: Option<f64>,
+}
+
+impl<'a> Message for TimingInfo<'a> {
+ fn reason(&self) -> &str {
+ "timing-info"
+ }
+}
+
+#[derive(Serialize)]
+pub struct BuildFinished {
+ pub success: bool,
+}
+
+impl Message for BuildFinished {
+ fn reason(&self) -> &str {
+ "build-finished"
+ }
+}
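+
+// A rough sketch of the emitted shape (illustrative only): `to_json_string`
+// splices the `reason` field in front of the serialized message, e.g.
+//
+//     let msg = BuildFinished { success: true };
+//     assert_eq!(
+//         msg.to_json_string(),
+//         r#"{"reason":"build-finished","success":true}"#
+//     );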
diff --git a/src/tools/cargo/src/cargo/util/mod.rs b/src/tools/cargo/src/cargo/util/mod.rs
new file mode 100644
index 000000000..12b152018
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/mod.rs
@@ -0,0 +1,242 @@
+use std::fmt;
+use std::path::{Path, PathBuf};
+use std::time::Duration;
+
+pub use self::canonical_url::CanonicalUrl;
+pub use self::config::{homedir, Config, ConfigValue};
+pub(crate) use self::counter::MetricsCounter;
+pub use self::dependency_queue::DependencyQueue;
+pub use self::diagnostic_server::RustfixDiagnosticServer;
+pub use self::errors::CliError;
+pub use self::errors::{internal, CargoResult, CliResult};
+pub use self::flock::{FileLock, Filesystem};
+pub use self::graph::Graph;
+pub use self::hasher::StableHasher;
+pub use self::hex::{hash_u64, short_hash, to_hex};
+pub use self::into_url::IntoUrl;
+pub use self::into_url_with_base::IntoUrlWithBase;
+pub(crate) use self::io::LimitErrorReader;
+pub use self::lev_distance::{closest, closest_msg, lev_distance};
+pub use self::lockserver::{LockServer, LockServerClient, LockServerStarted};
+pub use self::progress::{Progress, ProgressStyle};
+pub use self::queue::Queue;
+pub use self::restricted_names::validate_package_name;
+pub use self::rustc::Rustc;
+pub use self::semver_ext::{OptVersionReq, VersionExt, VersionReqExt};
+pub use self::to_semver::ToSemver;
+pub use self::vcs::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo};
+pub use self::workspace::{
+ add_path_args, path_args, print_available_benches, print_available_binaries,
+ print_available_examples, print_available_packages, print_available_tests,
+};
+
+pub mod auth;
+mod canonical_url;
+pub mod command_prelude;
+pub mod config;
+mod counter;
+pub mod cpu;
+mod dependency_queue;
+pub mod diagnostic_server;
+pub mod errors;
+mod flock;
+pub mod graph;
+mod hasher;
+pub mod hex;
+pub mod important_paths;
+pub mod interning;
+pub mod into_url;
+mod into_url_with_base;
+mod io;
+pub mod job;
+pub mod lev_distance;
+mod lockserver;
+pub mod machine_message;
+pub mod network;
+pub mod profile;
+mod progress;
+mod queue;
+pub mod restricted_names;
+pub mod rustc;
+mod semver_ext;
+pub mod to_semver;
+pub mod toml;
+pub mod toml_mut;
+mod vcs;
+mod workspace;
+
+pub fn elapsed(duration: Duration) -> String {
+ let secs = duration.as_secs();
+
+ if secs >= 60 {
+ format!("{}m {:02}s", secs / 60, secs % 60)
+ } else {
+ format!("{}.{:02}s", secs, duration.subsec_nanos() / 10_000_000)
+ }
+}
+
+/// Formats a number of bytes into a human readable size, using binary (IEC)
+/// prefixes (KiB, MiB, ...).
+/// Returns a tuple of `(quantity, units)`.
+pub fn human_readable_bytes(bytes: u64) -> (f32, &'static str) {
+ static UNITS: [&str; 7] = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB"];
+ let bytes = bytes as f32;
+ let i = ((bytes.log2() / 10.0) as usize).min(UNITS.len() - 1);
+ (bytes / 1024_f32.powi(i as i32), UNITS[i])
+}
+
+pub fn iter_join_onto<W, I, T>(mut w: W, iter: I, delim: &str) -> fmt::Result
+where
+ W: fmt::Write,
+ I: IntoIterator<Item = T>,
+ T: std::fmt::Display,
+{
+ let mut it = iter.into_iter().peekable();
+ while let Some(n) = it.next() {
+ write!(w, "{}", n)?;
+ if it.peek().is_some() {
+ write!(w, "{}", delim)?;
+ }
+ }
+ Ok(())
+}
+
+pub fn iter_join<I, T>(iter: I, delim: &str) -> String
+where
+ I: IntoIterator<Item = T>,
+ T: std::fmt::Display,
+{
+ let mut s = String::new();
+ let _ = iter_join_onto(&mut s, iter, delim);
+ s
+}
+
+pub fn indented_lines(text: &str) -> String {
+ text.lines()
+ .map(|line| {
+ if line.is_empty() {
+ String::from("\n")
+ } else {
+ format!(" {}\n", line)
+ }
+ })
+ .collect()
+}
+
+pub fn truncate_with_ellipsis(s: &str, max_width: usize) -> String {
+ // We should truncate at grapheme-boundary and compute character-widths,
+ // yet the dependencies on unicode-segmentation and unicode-width are
+ // not worth it.
+ let mut chars = s.chars();
+ let mut prefix = (&mut chars).take(max_width - 1).collect::<String>();
+ if chars.next().is_some() {
+ prefix.push('…');
+ }
+ prefix
+}
+
+#[cfg(not(windows))]
+#[inline]
+pub fn try_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
+ std::fs::canonicalize(&path)
+}
+
+#[cfg(windows)]
+#[inline]
+pub fn try_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
+ use std::ffi::OsString;
+ use std::io::Error;
+ use std::os::windows::ffi::{OsStrExt, OsStringExt};
+ use std::{io::ErrorKind, ptr};
+ use windows_sys::Win32::Foundation::{GetLastError, SetLastError};
+ use windows_sys::Win32::Storage::FileSystem::GetFullPathNameW;
+
+ // On Windows `canonicalize` may fail, so we fall back to getting an absolute path.
+ std::fs::canonicalize(&path).or_else(|_| {
+ // Return an error if the file does not exist, for better compatibility with `canonicalize`
+ if !path.as_ref().try_exists()? {
+ return Err(Error::new(ErrorKind::NotFound, "the path was not found"));
+ }
+
+ // This code is based on the unstable `std::path::absolute` and could be replaced with it
+ // if it's stabilized.
+
+ let path = path.as_ref().as_os_str();
+ let mut path_u16 = Vec::with_capacity(path.len() + 1);
+ path_u16.extend(path.encode_wide());
+ if path_u16.iter().find(|c| **c == 0).is_some() {
+ return Err(Error::new(
+ ErrorKind::InvalidInput,
+ "strings passed to WinAPI cannot contain NULs",
+ ));
+ }
+ path_u16.push(0);
+
+ loop {
+ unsafe {
+ SetLastError(0);
+ let len =
+ GetFullPathNameW(path_u16.as_ptr(), 0, &mut [] as *mut u16, ptr::null_mut());
+ if len == 0 {
+ let error = GetLastError();
+ if error != 0 {
+ return Err(Error::from_raw_os_error(error as i32));
+ }
+ }
+ let mut result = vec![0u16; len as usize];
+
+ let write_len = GetFullPathNameW(
+ path_u16.as_ptr(),
+ result.len().try_into().unwrap(),
+ result.as_mut_ptr().cast::<u16>(),
+ ptr::null_mut(),
+ );
+ if write_len == 0 {
+ let error = GetLastError();
+ if error != 0 {
+ return Err(Error::from_raw_os_error(error as i32));
+ }
+ }
+
+ if write_len <= len {
+ return Ok(PathBuf::from(OsString::from_wide(
+ &result[0..(write_len as usize)],
+ )));
+ }
+ }
+ }
+ })
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ #[test]
+ fn test_human_readable_bytes() {
+ assert_eq!(human_readable_bytes(0), (0., "B"));
+ assert_eq!(human_readable_bytes(8), (8., "B"));
+ assert_eq!(human_readable_bytes(1000), (1000., "B"));
+ assert_eq!(human_readable_bytes(1024), (1., "KiB"));
+ assert_eq!(human_readable_bytes(1024 * 420 + 512), (420.5, "KiB"));
+ assert_eq!(human_readable_bytes(1024 * 1024), (1., "MiB"));
+ assert_eq!(
+ human_readable_bytes(1024 * 1024 + 1024 * 256),
+ (1.25, "MiB")
+ );
+ assert_eq!(human_readable_bytes(1024 * 1024 * 1024), (1., "GiB"));
+ assert_eq!(
+ human_readable_bytes((1024. * 1024. * 1024. * 3.1415) as u64),
+ (3.1415, "GiB")
+ );
+ assert_eq!(human_readable_bytes(1024 * 1024 * 1024 * 1024), (1., "TiB"));
+ assert_eq!(
+ human_readable_bytes(1024 * 1024 * 1024 * 1024 * 1024),
+ (1., "PiB")
+ );
+ assert_eq!(
+ human_readable_bytes(1024 * 1024 * 1024 * 1024 * 1024 * 1024),
+ (1., "EiB")
+ );
+ assert_eq!(human_readable_bytes(u64::MAX), (16., "EiB"));
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/network/mod.rs b/src/tools/cargo/src/cargo/util/network/mod.rs
new file mode 100644
index 000000000..60a380343
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/network/mod.rs
@@ -0,0 +1,38 @@
+//! Utilities for networking.
+
+use std::task::Poll;
+
+pub mod retry;
+pub mod sleep;
+
+pub trait PollExt<T> {
+ fn expect(self, msg: &str) -> T;
+}
+
+impl<T> PollExt<T> for Poll<T> {
+ #[track_caller]
+ fn expect(self, msg: &str) -> T {
+ match self {
+ Poll::Ready(val) => val,
+ Poll::Pending => panic!("{}", msg),
+ }
+ }
+}
+
+// When dynamically linked against libcurl, we want to ignore some failures
+// when using old versions that don't support certain features.
+#[macro_export]
+macro_rules! try_old_curl {
+ ($e:expr, $msg:expr) => {
+ let result = $e;
+ if cfg!(target_os = "macos") {
+ if let Err(e) = result {
+ warn!("ignoring libcurl {} error: {}", $msg, e);
+ }
+ } else {
+ result.with_context(|| {
+ anyhow::format_err!("failed to enable {}, is curl not built right?", $msg)
+ })?;
+ }
+ };
+}
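+
+// A rough call-site sketch (illustrative only; the `handle` and the feature
+// name are hypothetical): on macOS the error is downgraded to a warning,
+// elsewhere it is propagated with context:
+//
+//     try_old_curl!(handle.http_version(curl::easy::HttpVersion::V2), "HTTP/2");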
diff --git a/src/tools/cargo/src/cargo/util/network/retry.rs b/src/tools/cargo/src/cargo/util/network/retry.rs
new file mode 100644
index 000000000..42c38ab9f
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/network/retry.rs
@@ -0,0 +1,249 @@
+//! Utilities for retrying a network operation.
+
+use crate::util::errors::HttpNotSuccessful;
+use crate::{CargoResult, Config};
+use anyhow::Error;
+use rand::Rng;
+use std::cmp::min;
+use std::time::Duration;
+
+pub struct Retry<'a> {
+ config: &'a Config,
+ retries: u64,
+ max_retries: u64,
+}
+
+pub enum RetryResult<T> {
+ Success(T),
+ Err(anyhow::Error),
+ Retry(u64),
+}
+
+/// Maximum amount of time a single retry can be delayed (milliseconds).
+const MAX_RETRY_SLEEP_MS: u64 = 10 * 1000;
+/// The minimum initial amount of time a retry will be delayed (milliseconds).
+///
+/// The actual amount of time will be a random value above this.
+const INITIAL_RETRY_SLEEP_BASE_MS: u64 = 500;
+/// The maximum amount of additional time the initial retry will take (milliseconds).
+///
+/// The initial delay will be [`INITIAL_RETRY_SLEEP_BASE_MS`] plus a random range
+/// from 0 to this value.
+const INITIAL_RETRY_JITTER_MS: u64 = 1000;
+
+impl<'a> Retry<'a> {
+ pub fn new(config: &'a Config) -> CargoResult<Retry<'a>> {
+ Ok(Retry {
+ config,
+ retries: 0,
+ max_retries: config.net_config()?.retry.unwrap_or(3) as u64,
+ })
+ }
+
+ /// Calls the callback once, returning [`RetryResult::Retry`] for spurious
+ /// errors that should be re-tried.
+ pub fn r#try<T>(&mut self, f: impl FnOnce() -> CargoResult<T>) -> RetryResult<T> {
+ match f() {
+ Err(ref e) if maybe_spurious(e) && self.retries < self.max_retries => {
+ let err_msg = e
+ .downcast_ref::<HttpNotSuccessful>()
+ .map(|http_err| http_err.display_short())
+ .unwrap_or_else(|| e.root_cause().to_string());
+ let msg = format!(
+ "spurious network error ({} tries remaining): {err_msg}",
+ self.max_retries - self.retries,
+ );
+ if let Err(e) = self.config.shell().warn(msg) {
+ return RetryResult::Err(e);
+ }
+ self.retries += 1;
+ let sleep = if self.retries == 1 {
+ let mut rng = rand::thread_rng();
+ INITIAL_RETRY_SLEEP_BASE_MS + rng.gen_range(0..INITIAL_RETRY_JITTER_MS)
+ } else {
+ min(
+ ((self.retries - 1) * 3) * 1000 + INITIAL_RETRY_SLEEP_BASE_MS,
+ MAX_RETRY_SLEEP_MS,
+ )
+ };
+ RetryResult::Retry(sleep)
+ }
+ Err(e) => RetryResult::Err(e),
+ Ok(r) => RetryResult::Success(r),
+ }
+ }
+}
+
+fn maybe_spurious(err: &Error) -> bool {
+ if let Some(git_err) = err.downcast_ref::<git2::Error>() {
+ match git_err.class() {
+ git2::ErrorClass::Net
+ | git2::ErrorClass::Os
+ | git2::ErrorClass::Zlib
+ | git2::ErrorClass::Http => return git_err.code() != git2::ErrorCode::Certificate,
+ _ => (),
+ }
+ }
+ if let Some(curl_err) = err.downcast_ref::<curl::Error>() {
+ if curl_err.is_couldnt_connect()
+ || curl_err.is_couldnt_resolve_proxy()
+ || curl_err.is_couldnt_resolve_host()
+ || curl_err.is_operation_timedout()
+ || curl_err.is_recv_error()
+ || curl_err.is_send_error()
+ || curl_err.is_http2_error()
+ || curl_err.is_http2_stream_error()
+ || curl_err.is_ssl_connect_error()
+ || curl_err.is_partial_file()
+ {
+ return true;
+ }
+ }
+ if let Some(not_200) = err.downcast_ref::<HttpNotSuccessful>() {
+ if 500 <= not_200.code && not_200.code < 600 {
+ return true;
+ }
+ }
+
+ use gix::protocol::transport::IsSpuriousError;
+
+ if let Some(err) = err.downcast_ref::<crate::sources::git::fetch::Error>() {
+ if err.is_spurious() {
+ return true;
+ }
+ }
+
+ false
+}
+
+/// Wrapper method for network call retry logic.
+///
+/// The retry count is taken from the `net.retry` config value. The config
+/// shell outputs a warning on each retry.
+///
+/// Closure must return a `CargoResult`.
+///
+/// # Examples
+///
+/// ```
+/// # use crate::cargo::util::{CargoResult, Config};
+/// # let download_something = || return Ok(());
+/// # let config = Config::default().unwrap();
+/// use cargo::util::network;
+/// let cargo_result = network::retry::with_retry(&config, || download_something());
+/// ```
+pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
+where
+ F: FnMut() -> CargoResult<T>,
+{
+ let mut retry = Retry::new(config)?;
+ loop {
+ match retry.r#try(&mut callback) {
+ RetryResult::Success(r) => return Ok(r),
+ RetryResult::Err(e) => return Err(e),
+ RetryResult::Retry(sleep) => std::thread::sleep(Duration::from_millis(sleep)),
+ }
+ }
+}
+
+#[test]
+fn with_retry_repeats_the_call_then_works() {
+ use crate::core::Shell;
+
+ //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
+ let error1 = HttpNotSuccessful {
+ code: 501,
+ url: "Uri".to_string(),
+ ip: None,
+ body: Vec::new(),
+ headers: Vec::new(),
+ }
+ .into();
+ let error2 = HttpNotSuccessful {
+ code: 502,
+ url: "Uri".to_string(),
+ ip: None,
+ body: Vec::new(),
+ headers: Vec::new(),
+ }
+ .into();
+ let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
+ let config = Config::default().unwrap();
+ *config.shell() = Shell::from_write(Box::new(Vec::new()));
+ let result = with_retry(&config, || results.pop().unwrap());
+ assert!(result.is_ok())
+}
+
+#[test]
+fn with_retry_finds_nested_spurious_errors() {
+ use crate::core::Shell;
+
+ //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
+ //String error messages are not considered spurious
+ let error1 = anyhow::Error::from(HttpNotSuccessful {
+ code: 501,
+ url: "Uri".to_string(),
+ ip: None,
+ body: Vec::new(),
+ headers: Vec::new(),
+ });
+ let error1 = anyhow::Error::from(error1.context("A non-spurious wrapping err"));
+ let error2 = anyhow::Error::from(HttpNotSuccessful {
+ code: 502,
+ url: "Uri".to_string(),
+ ip: None,
+ body: Vec::new(),
+ headers: Vec::new(),
+ });
+ let error2 = anyhow::Error::from(error2.context("A second chained error"));
+ let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
+ let config = Config::default().unwrap();
+ *config.shell() = Shell::from_write(Box::new(Vec::new()));
+ let result = with_retry(&config, || results.pop().unwrap());
+ assert!(result.is_ok())
+}
+
+#[test]
+fn default_retry_schedule() {
+ use crate::core::Shell;
+
+ let spurious = || -> CargoResult<()> {
+ Err(anyhow::Error::from(HttpNotSuccessful {
+ code: 500,
+ url: "Uri".to_string(),
+ ip: None,
+ body: Vec::new(),
+ headers: Vec::new(),
+ }))
+ };
+ let config = Config::default().unwrap();
+ *config.shell() = Shell::from_write(Box::new(Vec::new()));
+ let mut retry = Retry::new(&config).unwrap();
+ match retry.r#try(|| spurious()) {
+ RetryResult::Retry(sleep) => {
+ assert!(
+ sleep >= INITIAL_RETRY_SLEEP_BASE_MS
+ && sleep < INITIAL_RETRY_SLEEP_BASE_MS + INITIAL_RETRY_JITTER_MS
+ );
+ }
+ _ => panic!("unexpected non-retry"),
+ }
+ match retry.r#try(|| spurious()) {
+ RetryResult::Retry(sleep) => assert_eq!(sleep, 3500),
+ _ => panic!("unexpected non-retry"),
+ }
+ match retry.r#try(|| spurious()) {
+ RetryResult::Retry(sleep) => assert_eq!(sleep, 6500),
+ _ => panic!("unexpected non-retry"),
+ }
+ match retry.r#try(|| spurious()) {
+ RetryResult::Err(_) => {}
+ _ => panic!("unexpected non-retry"),
+ }
+}
+
+#[test]
+fn curle_http2_stream_is_spurious() {
+ let code = curl_sys::CURLE_HTTP2_STREAM;
+ let err = curl::Error::new(code);
+ assert!(maybe_spurious(&err.into()));
+}
diff --git a/src/tools/cargo/src/cargo/util/network/sleep.rs b/src/tools/cargo/src/cargo/util/network/sleep.rs
new file mode 100644
index 000000000..d4105065e
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/network/sleep.rs
@@ -0,0 +1,103 @@
+//! Utility for tracking network requests that will be retried in the future.
+
+use core::cmp::Ordering;
+use std::collections::BinaryHeap;
+use std::time::{Duration, Instant};
+
+/// A tracker for network requests that have failed and are waiting to be
+/// retried in the future.
+pub struct SleepTracker<T> {
+ /// This is a priority queue that tracks the time when the next sleeper
+ /// should awaken (based on the [`Sleeper::wakeup`] property).
+ heap: BinaryHeap<Sleeper<T>>,
+}
+
+/// An individual network request that is waiting to be retried in the future.
+struct Sleeper<T> {
+ /// The time when this request should be retried.
+ wakeup: Instant,
+ /// Information about the network request.
+ data: T,
+}
+
+impl<T> PartialEq for Sleeper<T> {
+ fn eq(&self, other: &Sleeper<T>) -> bool {
+ self.wakeup == other.wakeup
+ }
+}
+
+impl<T> PartialOrd for Sleeper<T> {
+ fn partial_cmp(&self, other: &Sleeper<T>) -> Option<Ordering> {
+ // This reverses the comparison so that the BinaryHeap tracks the
+ // entry with the *lowest* wakeup time.
+ Some(other.wakeup.cmp(&self.wakeup))
+ }
+}
+
+impl<T> Eq for Sleeper<T> {}
+
+impl<T> Ord for Sleeper<T> {
+ fn cmp(&self, other: &Sleeper<T>) -> Ordering {
+ self.wakeup.cmp(&other.wakeup)
+ }
+}
+
+impl<T> SleepTracker<T> {
+ pub fn new() -> SleepTracker<T> {
+ SleepTracker {
+ heap: BinaryHeap::new(),
+ }
+ }
+
+ /// Adds a new download that should be retried in the future.
+ pub fn push(&mut self, sleep: u64, data: T) {
+ self.heap.push(Sleeper {
+ wakeup: Instant::now()
+ .checked_add(Duration::from_millis(sleep))
+ .expect("instant should not wrap"),
+ data,
+ });
+ }
+
+ pub fn len(&self) -> usize {
+ self.heap.len()
+ }
+
+ /// Returns any downloads that are ready to go now.
+ pub fn to_retry(&mut self) -> Vec<T> {
+ let now = Instant::now();
+ let mut result = Vec::new();
+ while let Some(next) = self.heap.peek() {
+ log::debug!("sleep tracker: now={now:?} next={:?}", next.wakeup);
+ if next.wakeup < now {
+ result.push(self.heap.pop().unwrap().data);
+ } else {
+ break;
+ }
+ }
+ result
+ }
+
+ /// Returns the time when the next download is ready to go.
+ ///
+ /// Returns None if there are no sleepers remaining.
+ pub fn time_to_next(&self) -> Option<Duration> {
+ self.heap
+ .peek()
+ .map(|s| s.wakeup.saturating_duration_since(Instant::now()))
+ }
+}
+
+#[test]
+fn returns_in_order() {
+ let mut s = SleepTracker::new();
+ s.push(3, 3);
+ s.push(1, 1);
+ s.push(6, 6);
+ s.push(5, 5);
+ s.push(2, 2);
+ s.push(10000, 10000);
+ assert_eq!(s.len(), 6);
+ std::thread::sleep(Duration::from_millis(100));
+ assert_eq!(s.to_retry(), &[1, 2, 3, 5, 6]);
+}
diff --git a/src/tools/cargo/src/cargo/util/profile.rs b/src/tools/cargo/src/cargo/util/profile.rs
new file mode 100644
index 000000000..79b544d98
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/profile.rs
@@ -0,0 +1,93 @@
+//! # An internal profiler for Cargo itself
+//!
+//! > **Note**: This might not be the module you are looking for.
+//! > For information about how Cargo handles compiler flags with profiles,
+//! > please see the module [`cargo::core::profiles`](crate::core::profiles).
+
+use std::cell::RefCell;
+use std::env;
+use std::fmt;
+use std::io::{stdout, StdoutLock, Write};
+use std::iter::repeat;
+use std::mem;
+use std::time;
+
+thread_local!(static PROFILE_STACK: RefCell<Vec<time::Instant>> = RefCell::new(Vec::new()));
+thread_local!(static MESSAGES: RefCell<Vec<Message>> = RefCell::new(Vec::new()));
+
+type Message = (usize, u64, String);
+
+pub struct Profiler {
+ desc: String,
+}
+
+fn enabled_level() -> Option<usize> {
+ // ALLOWED: for profiling Cargo itself, not intended to be used beyond Cargo contributors.
+ #[allow(clippy::disallowed_methods)]
+ env::var("CARGO_PROFILE").ok().and_then(|s| s.parse().ok())
+}
+
+pub fn start<T: fmt::Display>(desc: T) -> Profiler {
+ if enabled_level().is_none() {
+ return Profiler {
+ desc: String::new(),
+ };
+ }
+
+ PROFILE_STACK.with(|stack| stack.borrow_mut().push(time::Instant::now()));
+
+ Profiler {
+ desc: desc.to_string(),
+ }
+}
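+
+// A rough usage sketch (illustrative only; the description string is
+// hypothetical): timing is recorded while the guard is alive, and printed only
+// when `CARGO_PROFILE=<max depth>` is set:
+//
+//     let _p = profile::start("resolving dependencies");
+//     // ... do the work; the timing line is emitted when `_p` drops ...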
+
+impl Drop for Profiler {
+ fn drop(&mut self) {
+ let enabled = match enabled_level() {
+ Some(i) => i,
+ None => return,
+ };
+
+ let (start, stack_len) = PROFILE_STACK.with(|stack| {
+ let mut stack = stack.borrow_mut();
+ let start = stack.pop().unwrap();
+ (start, stack.len())
+ });
+ let duration = start.elapsed();
+ let duration_ms = duration.as_secs() * 1000 + u64::from(duration.subsec_millis());
+
+ let msg = (stack_len, duration_ms, mem::take(&mut self.desc));
+ MESSAGES.with(|msgs| msgs.borrow_mut().push(msg));
+
+ if stack_len == 0 {
+ fn print(lvl: usize, msgs: &[Message], enabled: usize, stdout: &mut StdoutLock<'_>) {
+ if lvl > enabled {
+ return;
+ }
+ let mut last = 0;
+ for (i, &(l, time, ref msg)) in msgs.iter().enumerate() {
+ if l != lvl {
+ continue;
+ }
+ writeln!(
+ stdout,
+ "{} {:6}ms - {}",
+ repeat(" ").take(lvl + 1).collect::<String>(),
+ time,
+ msg
+ )
+ .expect("printing profiling info to stdout");
+
+ print(lvl + 1, &msgs[last..i], enabled, stdout);
+ last = i;
+ }
+ }
+ let stdout = stdout();
+ MESSAGES.with(|msgs| {
+ let mut msgs = msgs.borrow_mut();
+ print(0, &msgs, enabled, &mut stdout.lock());
+ msgs.clear();
+ });
+ }
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/progress.rs b/src/tools/cargo/src/cargo/util/progress.rs
new file mode 100644
index 000000000..bcbc1bc0e
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/progress.rs
@@ -0,0 +1,531 @@
+//! Support for CLI progress bars.
+
+use std::cmp;
+use std::time::{Duration, Instant};
+
+use crate::core::shell::Verbosity;
+use crate::util::config::ProgressWhen;
+use crate::util::{CargoResult, Config};
+use cargo_util::is_ci;
+use unicode_width::UnicodeWidthChar;
+
+/// CLI progress bar.
+///
+/// The `Progress` object can be in an enabled or disabled state. When
+/// disabled, calling any of the methods to update it will not display
+/// anything. Disabling is typically done by the user with options such as
+/// `--quiet` or the `term.progress` config option.
+///
+/// There are several methods to update the progress bar and to cause it to
+/// update its display.
+///
+/// The bar will be removed from the display when the `Progress` object is
+/// dropped or [`Progress::clear`] is called.
+///
+/// The progress bar has built-in rate limiting to avoid updating the display
+/// too fast. It should usually be fine to call [`Progress::tick`] as often as
+/// needed, though be cautious if the tick rate is very high or it is
+/// expensive to compute the progress value.
+pub struct Progress<'cfg> {
+ state: Option<State<'cfg>>,
+}
+
+/// Indicates the style of information for displaying the amount of progress.
+///
+/// See also [`Progress::print_now`] for displaying progress without a bar.
+pub enum ProgressStyle {
+ /// Displays progress as a percentage.
+ ///
+ /// Example: `Fetch [=====================> ] 88.15%`
+ ///
+ /// This is good for large values like number of bytes downloaded.
+ Percentage,
+ /// Displays progress as a ratio.
+ ///
+ /// Example: `Building [===> ] 35/222`
+ ///
+ /// This is good for smaller values where the exact number is useful to see.
+ Ratio,
+ /// Does not display an exact value of how far along it is.
+ ///
+ /// Example: `Fetch [===========> ]`
+ ///
+ /// This is good for situations where the exact value is an approximation,
+ /// and thus there isn't anything accurate to display to the user.
+ Indeterminate,
+}
+
+struct Throttle {
+ first: bool,
+ last_update: Instant,
+}
+
+struct State<'cfg> {
+ config: &'cfg Config,
+ format: Format,
+ name: String,
+ done: bool,
+ throttle: Throttle,
+ last_line: Option<String>,
+ fixed_width: Option<usize>,
+}
+
+struct Format {
+ style: ProgressStyle,
+ max_width: usize,
+ max_print: usize,
+}
+
+impl<'cfg> Progress<'cfg> {
+ /// Creates a new progress bar.
+ ///
+ /// The first parameter is the text displayed to the left of the bar, such
+ /// as "Fetching".
+ ///
+ /// The progress bar is not displayed until explicitly updated with one of
+ /// its methods.
+ ///
+ /// The progress bar may be created in a disabled state if the user has
+ /// disabled progress display (such as with the `--quiet` option).
+ pub fn with_style(name: &str, style: ProgressStyle, cfg: &'cfg Config) -> Progress<'cfg> {
+ // report no progress when -q (for quiet) or TERM=dumb are set
+ // or if running on a Continuous Integration service like Travis, where the
+ // output logs get mangled.
+ let dumb = match cfg.get_env("TERM") {
+ Ok(term) => term == "dumb",
+ Err(_) => false,
+ };
+ let progress_config = cfg.progress_config();
+ match progress_config.when {
+ ProgressWhen::Always => return Progress::new_priv(name, style, cfg),
+ ProgressWhen::Never => return Progress { state: None },
+ ProgressWhen::Auto => {}
+ }
+ if cfg.shell().verbosity() == Verbosity::Quiet || dumb || is_ci() {
+ return Progress { state: None };
+ }
+ Progress::new_priv(name, style, cfg)
+ }
+
+ fn new_priv(name: &str, style: ProgressStyle, cfg: &'cfg Config) -> Progress<'cfg> {
+ let progress_config = cfg.progress_config();
+ let width = progress_config
+ .width
+ .or_else(|| cfg.shell().err_width().progress_max_width());
+
+ Progress {
+ state: width.map(|n| State {
+ config: cfg,
+ format: Format {
+ style,
+ max_width: n,
+ // 50 gives some space for text after the progress bar,
+ // even on narrow (e.g. 80 char) terminals.
+ max_print: 50,
+ },
+ name: name.to_string(),
+ done: false,
+ throttle: Throttle::new(),
+ last_line: None,
+ fixed_width: progress_config.width,
+ }),
+ }
+ }
+
+ /// Disables the progress bar, ensuring it won't be displayed.
+ pub fn disable(&mut self) {
+ self.state = None;
+ }
+
+ /// Returns whether or not the progress bar is allowed to be displayed.
+ pub fn is_enabled(&self) -> bool {
+ self.state.is_some()
+ }
+
+ /// Creates a new `Progress` with the [`ProgressStyle::Percentage`] style.
+ ///
+ /// See [`Progress::with_style`] for more information.
+ pub fn new(name: &str, cfg: &'cfg Config) -> Progress<'cfg> {
+ Self::with_style(name, ProgressStyle::Percentage, cfg)
+ }
+
+ /// Updates the state of the progress bar.
+ ///
+ /// * `cur` should be how far along the progress is.
+ /// * `max` is the maximum value for the progress bar.
+ /// * `msg` is a small piece of text to display at the end of the progress
+ /// bar. It will be truncated with `...` if it does not fit on the
+ /// terminal.
+ ///
+ /// This may not actually update the display if `tick` is being called too
+ /// quickly.
+ pub fn tick(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
+ let s = match &mut self.state {
+ Some(s) => s,
+ None => return Ok(()),
+ };
+
+ // Don't update too often as it can cause excessive performance loss
+ // just putting stuff onto the terminal. We also want to avoid
+ // flickering by not drawing anything that goes away too quickly. As a
+ // result we've got two branches here:
+ //
+ // 1. If we haven't drawn anything, we wait for a period of time to
+ // actually start drawing to the console. This ensures that
+ // short-lived operations don't flicker on the console. Currently
+ // there's a 500ms delay to when we first draw something.
+ // 2. If we've drawn something, then we rate limit ourselves to only
+ // draw to the console every so often. Currently there's a 100ms
+ // delay between updates.
+ if !s.throttle.allowed() {
+ return Ok(());
+ }
+
+ s.tick(cur, max, msg)
+ }
+
+ /// Updates the state of the progress bar.
+ ///
+ /// This is the same as [`Progress::tick`], but ignores rate throttling
+ /// and forces the display to be updated immediately.
+ ///
+ /// This may be useful for situations where you know you aren't calling
+ /// `tick` too fast, and accurate information is more important than
+ /// limiting the console update rate.
+ pub fn tick_now(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
+ match self.state {
+ Some(ref mut s) => s.tick(cur, max, msg),
+ None => Ok(()),
+ }
+ }
+
+ /// Returns whether or not updates are currently being throttled.
+ ///
+ /// This can be useful if computing the values for calling the
+ /// [`Progress::tick`] function may require some expensive work.
+ pub fn update_allowed(&mut self) -> bool {
+ match &mut self.state {
+ Some(s) => s.throttle.allowed(),
+ None => false,
+ }
+ }
+
+ /// Displays progress without a bar.
+ ///
+ /// The given `msg` is the text to display after the status message.
+ ///
+ /// Example: `Downloading 61 crates, remaining bytes: 28.0 MB`
+ ///
+ /// This does not have any rate limit throttling, so be careful about
+ /// calling it too often.
+ pub fn print_now(&mut self, msg: &str) -> CargoResult<()> {
+ match &mut self.state {
+ Some(s) => s.print("", msg),
+ None => Ok(()),
+ }
+ }
+
+ /// Clears the progress bar from the console.
+ pub fn clear(&mut self) {
+ if let Some(ref mut s) = self.state {
+ s.clear();
+ }
+ }
+}
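+
+// A rough usage sketch (illustrative only; `config` and `items` are assumed to
+// exist in the caller):
+//
+//     let mut progress = Progress::new("Fetching", config);
+//     for (i, item) in items.iter().enumerate() {
+//         progress.tick(i, items.len(), &format!(": {item}"))?;
+//     }
+//     progress.clear();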
+
+impl Throttle {
+ fn new() -> Throttle {
+ Throttle {
+ first: true,
+ last_update: Instant::now(),
+ }
+ }
+
+ fn allowed(&mut self) -> bool {
+ if self.first {
+ let delay = Duration::from_millis(500);
+ if self.last_update.elapsed() < delay {
+ return false;
+ }
+ } else {
+ let interval = Duration::from_millis(100);
+ if self.last_update.elapsed() < interval {
+ return false;
+ }
+ }
+ self.update();
+ true
+ }
+
+ fn update(&mut self) {
+ self.first = false;
+ self.last_update = Instant::now();
+ }
+}
+
+impl<'cfg> State<'cfg> {
+ fn tick(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
+ if self.done {
+ return Ok(());
+ }
+
+ if max > 0 && cur == max {
+ self.done = true;
+ }
+
+ // Write out a pretty header, then the progress bar itself, and then
+ // return to the beginning of the line for the next print.
+ self.try_update_max_width();
+ if let Some(pbar) = self.format.progress(cur, max) {
+ self.print(&pbar, msg)?;
+ }
+ Ok(())
+ }
+
+ fn print(&mut self, prefix: &str, msg: &str) -> CargoResult<()> {
+ self.throttle.update();
+ self.try_update_max_width();
+
+ // make sure we have enough room for the header
+ if self.format.max_width < 15 {
+ return Ok(());
+ }
+
+ let mut line = prefix.to_string();
+ self.format.render(&mut line, msg);
+ while line.len() < self.format.max_width - 15 {
+ line.push(' ');
+ }
+
+ // Only update if the line has changed.
+ if self.config.shell().is_cleared() || self.last_line.as_ref() != Some(&line) {
+ let mut shell = self.config.shell();
+ shell.set_needs_clear(false);
+ shell.status_header(&self.name)?;
+ write!(shell.err(), "{}\r", line)?;
+ self.last_line = Some(line);
+ shell.set_needs_clear(true);
+ }
+
+ Ok(())
+ }
+
+ fn clear(&mut self) {
+ // No need to clear if the progress is not currently being displayed.
+ if self.last_line.is_some() && !self.config.shell().is_cleared() {
+ self.config.shell().err_erase_line();
+ self.last_line = None;
+ }
+ }
+
+ fn try_update_max_width(&mut self) {
+ if self.fixed_width.is_none() {
+ if let Some(n) = self.config.shell().err_width().progress_max_width() {
+ self.format.max_width = n;
+ }
+ }
+ }
+}
+
+impl Format {
+ fn progress(&self, cur: usize, max: usize) -> Option<String> {
+ assert!(cur <= max);
+ // Render the percentage at the far right and then figure how long the
+ // progress bar is
+ let pct = (cur as f64) / (max as f64);
+ let pct = if !pct.is_finite() { 0.0 } else { pct };
+ let stats = match self.style {
+ ProgressStyle::Percentage => format!(" {:6.02}%", pct * 100.0),
+ ProgressStyle::Ratio => format!(" {}/{}", cur, max),
+ ProgressStyle::Indeterminate => String::new(),
+ };
+ let extra_len = stats.len() + 2 /* [ and ] */ + 15 /* status header */;
+ let display_width = match self.width().checked_sub(extra_len) {
+ Some(n) => n,
+ None => return None,
+ };
+
+ let mut string = String::with_capacity(self.max_width);
+ string.push('[');
+ let hashes = display_width as f64 * pct;
+ let hashes = hashes as usize;
+
+ // Draw the `===>`
+ if hashes > 0 {
+ for _ in 0..hashes - 1 {
+ string.push('=');
+ }
+ if cur == max {
+ string.push('=');
+ } else {
+ string.push('>');
+ }
+ }
+
+ // Draw the empty space we have left to do
+ for _ in 0..(display_width - hashes) {
+ string.push(' ');
+ }
+ string.push(']');
+ string.push_str(&stats);
+
+ Some(string)
+ }
+
+ fn render(&self, string: &mut String, msg: &str) {
+ let mut avail_msg_len = self.max_width - string.len() - 15;
+ let mut ellipsis_pos = 0;
+ if avail_msg_len <= 3 {
+ return;
+ }
+ for c in msg.chars() {
+ let display_width = c.width().unwrap_or(0);
+ if avail_msg_len >= display_width {
+ avail_msg_len -= display_width;
+ string.push(c);
+ if avail_msg_len >= 3 {
+ ellipsis_pos = string.len();
+ }
+ } else {
+ string.truncate(ellipsis_pos);
+ string.push_str("...");
+ break;
+ }
+ }
+ }
+
+ #[cfg(test)]
+ fn progress_status(&self, cur: usize, max: usize, msg: &str) -> Option<String> {
+ let mut ret = self.progress(cur, max)?;
+ self.render(&mut ret, msg);
+ Some(ret)
+ }
+
+ fn width(&self) -> usize {
+ cmp::min(self.max_width, self.max_print)
+ }
+}
+
+impl<'cfg> Drop for State<'cfg> {
+ fn drop(&mut self) {
+ self.clear();
+ }
+}
+
+#[test]
+fn test_progress_status() {
+ let format = Format {
+ style: ProgressStyle::Ratio,
+ max_print: 40,
+ max_width: 60,
+ };
+ assert_eq!(
+ format.progress_status(0, 4, ""),
+ Some("[ ] 0/4".to_string())
+ );
+ assert_eq!(
+ format.progress_status(1, 4, ""),
+ Some("[===> ] 1/4".to_string())
+ );
+ assert_eq!(
+ format.progress_status(2, 4, ""),
+ Some("[========> ] 2/4".to_string())
+ );
+ assert_eq!(
+ format.progress_status(3, 4, ""),
+ Some("[=============> ] 3/4".to_string())
+ );
+ assert_eq!(
+ format.progress_status(4, 4, ""),
+ Some("[===================] 4/4".to_string())
+ );
+
+ assert_eq!(
+ format.progress_status(3999, 4000, ""),
+ Some("[===========> ] 3999/4000".to_string())
+ );
+ assert_eq!(
+ format.progress_status(4000, 4000, ""),
+ Some("[=============] 4000/4000".to_string())
+ );
+
+ assert_eq!(
+ format.progress_status(3, 4, ": short message"),
+ Some("[=============> ] 3/4: short message".to_string())
+ );
+ assert_eq!(
+ format.progress_status(3, 4, ": msg thats just fit"),
+ Some("[=============> ] 3/4: msg thats just fit".to_string())
+ );
+ assert_eq!(
+ format.progress_status(3, 4, ": msg that's just fit"),
+ Some("[=============> ] 3/4: msg that's just...".to_string())
+ );
+
+ // combining diacritics have width zero and thus can fit max_width.
+ let zalgo_msg = "z̸̧̢̗͉̝̦͍̱ͧͦͨ̑̅̌ͥ́͢a̢ͬͨ̽ͯ̅̑ͥ͋̏̑ͫ̄͢͏̫̝̪̤͎̱̣͍̭̞̙̱͙͍̘̭͚l̶̡̛̥̝̰̭̹̯̯̞̪͇̱̦͙͔̘̼͇͓̈ͨ͗ͧ̓͒ͦ̀̇ͣ̈ͭ͊͛̃̑͒̿̕͜g̸̷̢̩̻̻͚̠͓̞̥͐ͩ͌̑ͥ̊̽͋͐̐͌͛̐̇̑ͨ́ͅo͙̳̣͔̰̠̜͕͕̞̦̙̭̜̯̹̬̻̓͑ͦ͋̈̉͌̃ͯ̀̂͠ͅ ̸̡͎̦̲̖̤̺̜̮̱̰̥͔̯̅̏ͬ̂ͨ̋̃̽̈́̾̔̇ͣ̚͜͜h̡ͫ̐̅̿̍̀͜҉̛͇̭̹̰̠͙̞ẽ̶̙̹̳̖͉͎̦͂̋̓ͮ̔ͬ̐̀͂̌͑̒͆̚͜͠ ͓͓̟͍̮̬̝̝̰͓͎̼̻ͦ͐̾̔͒̃̓͟͟c̮̦͍̺͈͚̯͕̄̒͐̂͊̊͗͊ͤͣ̀͘̕͝͞o̶͍͚͍̣̮͌ͦ̽̑ͩ̅ͮ̐̽̏͗́͂̅ͪ͠m̷̧͖̻͔̥̪̭͉͉̤̻͖̩̤͖̘ͦ̂͌̆̂ͦ̒͊ͯͬ͊̉̌ͬ͝͡e̵̹̣͍̜̺̤̤̯̫̹̠̮͎͙̯͚̰̼͗͐̀̒͂̉̀̚͝͞s̵̲͍͙͖̪͓͓̺̱̭̩̣͖̣ͤͤ͂̎̈͗͆ͨͪ̆̈͗͝͠";
+ assert_eq!(
+ format.progress_status(3, 4, zalgo_msg),
+ Some("[=============> ] 3/4".to_string() + zalgo_msg)
+ );
+
+ // some non-ASCII ellipsize test
+ assert_eq!(
+ format.progress_status(3, 4, "_123456789123456e\u{301}\u{301}8\u{301}90a"),
+ Some("[=============> ] 3/4_123456789123456e\u{301}\u{301}...".to_string())
+ );
+ assert_eq!(
+ format.progress_status(3, 4, ":每個漢字佔據了兩個字元"),
+ Some("[=============> ] 3/4:每個漢字佔據了...".to_string())
+ );
+ assert_eq!(
+ // handle breaking at middle of character
+ format.progress_status(3, 4, ":-每個漢字佔據了兩個字元"),
+ Some("[=============> ] 3/4:-每個漢字佔據了...".to_string())
+ );
+}
+
+#[test]
+fn test_progress_status_percentage() {
+ let format = Format {
+ style: ProgressStyle::Percentage,
+ max_print: 40,
+ max_width: 60,
+ };
+ assert_eq!(
+ format.progress_status(0, 77, ""),
+ Some("[ ] 0.00%".to_string())
+ );
+ assert_eq!(
+ format.progress_status(1, 77, ""),
+ Some("[ ] 1.30%".to_string())
+ );
+ assert_eq!(
+ format.progress_status(76, 77, ""),
+ Some("[=============> ] 98.70%".to_string())
+ );
+ assert_eq!(
+ format.progress_status(77, 77, ""),
+ Some("[===============] 100.00%".to_string())
+ );
+}
+
+#[test]
+fn test_progress_status_too_short() {
+ let format = Format {
+ style: ProgressStyle::Percentage,
+ max_print: 25,
+ max_width: 25,
+ };
+ assert_eq!(
+ format.progress_status(1, 1, ""),
+ Some("[] 100.00%".to_string())
+ );
+
+ let format = Format {
+ style: ProgressStyle::Percentage,
+ max_print: 24,
+ max_width: 24,
+ };
+ assert_eq!(format.progress_status(1, 1, ""), None);
+}
diff --git a/src/tools/cargo/src/cargo/util/queue.rs b/src/tools/cargo/src/cargo/util/queue.rs
new file mode 100644
index 000000000..bbc68f603
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/queue.rs
@@ -0,0 +1,82 @@
+use std::collections::VecDeque;
+use std::sync::{Condvar, Mutex};
+use std::time::Duration;
+
+/// A simple, thread-safe queue of items of type `T`
+///
+/// This is a sort of channel where any thread can push to a queue and any
+/// thread can pop from a queue.
+///
+/// This supports both bounded and unbounded operations. [`push`] will never block,
+/// and allows the queue to grow without bounds. [`push_bounded`] will block if
+/// the queue is over capacity, and will resume once there is enough capacity.
+///
+/// [`push`]: Self::push
+/// [`push_bounded`]: Self::push_bounded
+pub struct Queue<T> {
+ state: Mutex<State<T>>,
+ popper_cv: Condvar,
+ bounded_cv: Condvar,
+ bound: usize,
+}
+
+struct State<T> {
+ items: VecDeque<T>,
+}
+
+impl<T> Queue<T> {
+ /// Creates a queue with a given bound.
+ pub fn new(bound: usize) -> Queue<T> {
+ Queue {
+ state: Mutex::new(State {
+ items: VecDeque::new(),
+ }),
+ popper_cv: Condvar::new(),
+ bounded_cv: Condvar::new(),
+ bound,
+ }
+ }
+
+ /// Pushes an item onto the queue, regardless of the capacity of the queue.
+ pub fn push(&self, item: T) {
+ self.state.lock().unwrap().items.push_back(item);
+ self.popper_cv.notify_one();
+ }
+
+ /// Pushes an item onto the queue, blocking if the queue is full.
+ pub fn push_bounded(&self, item: T) {
+ let locked_state = self.state.lock().unwrap();
+ let mut state = self
+ .bounded_cv
+ .wait_while(locked_state, |s| s.items.len() >= self.bound)
+ .unwrap();
+ state.items.push_back(item);
+ self.popper_cv.notify_one();
+ }
+
+ /// Pops an item from the queue, blocking if the queue is empty.
+ pub fn pop(&self, timeout: Duration) -> Option<T> {
+ let (mut state, result) = self
+ .popper_cv
+ .wait_timeout_while(self.state.lock().unwrap(), timeout, |s| s.items.is_empty())
+ .unwrap();
+ if result.timed_out() {
+ None
+ } else {
+ let value = state.items.pop_front()?;
+ if state.items.len() < self.bound {
+ // Assumes threads cannot be canceled.
+ self.bounded_cv.notify_one();
+ }
+ Some(value)
+ }
+ }
+
+ /// Pops all items from the queue without blocking.
+ pub fn try_pop_all(&self) -> Vec<T> {
+ let mut state = self.state.lock().unwrap();
+ let result = state.items.drain(..).collect();
+ self.bounded_cv.notify_all();
+ result
+ }
+}
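+
+// A rough usage sketch (illustrative only): producers push while consumers pop
+// with a timeout, typically sharing the queue behind an `Arc`:
+//
+//     let queue = std::sync::Arc::new(Queue::new(10));
+//     queue.push("job");
+//     assert_eq!(queue.pop(std::time::Duration::from_millis(50)), Some("job"));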
diff --git a/src/tools/cargo/src/cargo/util/restricted_names.rs b/src/tools/cargo/src/cargo/util/restricted_names.rs
new file mode 100644
index 000000000..650ae2330
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/restricted_names.rs
@@ -0,0 +1,99 @@
+//! Helpers for validating and checking names like package and crate names.
+
+use crate::util::CargoResult;
+use anyhow::bail;
+use std::path::Path;
+
+/// Returns `true` if the name contains non-ASCII characters.
+pub fn is_non_ascii_name(name: &str) -> bool {
+ name.chars().any(|ch| ch > '\x7f')
+}
+
+/// Returns `true` if the name is a Rust keyword.
+pub fn is_keyword(name: &str) -> bool {
+ // See https://doc.rust-lang.org/reference/keywords.html
+ [
+ "Self", "abstract", "as", "async", "await", "become", "box", "break", "const", "continue",
+ "crate", "do", "dyn", "else", "enum", "extern", "false", "final", "fn", "for", "if",
+ "impl", "in", "let", "loop", "macro", "match", "mod", "move", "mut", "override", "priv",
+ "pub", "ref", "return", "self", "static", "struct", "super", "trait", "true", "try",
+ "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
+ ]
+ .contains(&name)
+}
+
+/// These names cannot be used on Windows, even with an extension.
+pub fn is_windows_reserved(name: &str) -> bool {
+ [
+ "con", "prn", "aux", "nul", "com1", "com2", "com3", "com4", "com5", "com6", "com7", "com8",
+ "com9", "lpt1", "lpt2", "lpt3", "lpt4", "lpt5", "lpt6", "lpt7", "lpt8", "lpt9",
+ ]
+ .contains(&name.to_ascii_lowercase().as_str())
+}
+
+/// An artifact with this name will conflict with one of Cargo's build directories.
+pub fn is_conflicting_artifact_name(name: &str) -> bool {
+ ["deps", "examples", "build", "incremental"].contains(&name)
+}
+
+/// Check the base requirements for a package name.
+///
+/// This can be used for things other than package names, to enforce some
+/// level of sanity. Note that package names have other restrictions
+/// elsewhere. `cargo new` has a few restrictions, such as checking for
+/// reserved names. crates.io has even more restrictions.
+pub fn validate_package_name(name: &str, what: &str, help: &str) -> CargoResult<()> {
+ let mut chars = name.chars();
+ if let Some(ch) = chars.next() {
+ if ch.is_digit(10) {
+ // A specific error for a potentially common case.
+ bail!(
+ "the name `{}` cannot be used as a {}, \
+ the name cannot start with a digit{}",
+ name,
+ what,
+ help
+ );
+ }
+ if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_') {
+ bail!(
+ "invalid character `{}` in {}: `{}`, \
+ the first character must be a Unicode XID start character \
+ (most letters or `_`){}",
+ ch,
+ what,
+ name,
+ help
+ );
+ }
+ }
+ for ch in chars {
+ if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-') {
+ bail!(
+ "invalid character `{}` in {}: `{}`, \
+ characters must be Unicode XID characters \
+ (numbers, `-`, `_`, or most letters){}",
+ ch,
+ what,
+ name,
+ help
+ );
+ }
+ }
+ Ok(())
+}
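+
+// A rough sketch of accepted and rejected shapes (illustrative only):
+//
+//     assert!(validate_package_name("my-crate", "package name", "").is_ok());
+//     assert!(validate_package_name("1password", "package name", "").is_err());
+//     assert!(validate_package_name("has space", "package name", "").is_err());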
+
+/// Check the entire path for names reserved in Windows.
+pub fn is_windows_reserved_path(path: &Path) -> bool {
+ path.iter()
+ .filter_map(|component| component.to_str())
+ .any(|component| {
+ let stem = component.split('.').next().unwrap();
+ is_windows_reserved(stem)
+ })
+}
+
+/// Returns `true` if the name contains any glob pattern wildcards.
+pub fn is_glob_pattern<T: AsRef<str>>(name: T) -> bool {
+ name.as_ref().contains(&['*', '?', '[', ']'][..])
+}
diff --git a/src/tools/cargo/src/cargo/util/rustc.rs b/src/tools/cargo/src/cargo/util/rustc.rs
new file mode 100644
index 000000000..3f1da64d4
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/rustc.rs
@@ -0,0 +1,366 @@
+use std::collections::hash_map::HashMap;
+use std::env;
+use std::hash::{Hash, Hasher};
+use std::path::{Path, PathBuf};
+use std::sync::Mutex;
+
+use anyhow::Context as _;
+use cargo_util::{paths, ProcessBuilder, ProcessError};
+use log::{debug, info, warn};
+use serde::{Deserialize, Serialize};
+
+use crate::util::interning::InternedString;
+use crate::util::{profile, CargoResult, Config, StableHasher};
+
+/// Information on the `rustc` executable
+#[derive(Debug)]
+pub struct Rustc {
+ /// The location of the exe
+ pub path: PathBuf,
+ /// An optional wrapper program that will be passed the path of the rustc
+ /// executable as its first argument, followed by the rustc args.
+ pub wrapper: Option<PathBuf>,
+ /// An optional wrapper to be used in addition to `rustc.wrapper` for workspace crates
+ pub workspace_wrapper: Option<PathBuf>,
+ /// Verbose version information (the output of `rustc -vV`)
+ pub verbose_version: String,
+ /// The rustc version (`1.23.4-beta.2`); this comes from `verbose_version`.
+ pub version: semver::Version,
+ /// The host triple (arch-platform-OS); this comes from `verbose_version`.
+ pub host: InternedString,
+ cache: Mutex<Cache>,
+}
+
+impl Rustc {
+ /// Runs the compiler at `path` to learn various pieces of information about
+ /// it, with an optional wrapper.
+ ///
+ /// If successful this function returns a description of the compiler along
+ /// with a list of its capabilities.
+ pub fn new(
+ path: PathBuf,
+ wrapper: Option<PathBuf>,
+ workspace_wrapper: Option<PathBuf>,
+ rustup_rustc: &Path,
+ cache_location: Option<PathBuf>,
+ config: &Config,
+ ) -> CargoResult<Rustc> {
+ let _p = profile::start("Rustc::new");
+
+ let mut cache = Cache::load(
+ wrapper.as_deref(),
+ workspace_wrapper.as_deref(),
+ &path,
+ rustup_rustc,
+ cache_location,
+ config,
+ );
+
+ let mut cmd = ProcessBuilder::new(&path);
+ cmd.arg("-vV");
+ let verbose_version = cache.cached_output(&cmd, 0)?.0;
+
+ let extract = |field: &str| -> CargoResult<&str> {
+ verbose_version
+ .lines()
+ .find(|l| l.starts_with(field))
+ .map(|l| &l[field.len()..])
+ .ok_or_else(|| {
+ anyhow::format_err!(
+ "`rustc -vV` didn't have a line for `{}`, got:\n{}",
+ field.trim(),
+ verbose_version
+ )
+ })
+ };
+
+ let host = InternedString::new(extract("host: ")?);
+ let version = semver::Version::parse(extract("release: ")?).with_context(|| {
+ format!(
+ "rustc version does not appear to be a valid semver version, from:\n{}",
+ verbose_version
+ )
+ })?;
+
+ Ok(Rustc {
+ path,
+ wrapper,
+ workspace_wrapper,
+ verbose_version,
+ version,
+ host,
+ cache: Mutex::new(cache),
+ })
+ }
+
+ /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`.
+ pub fn process(&self) -> ProcessBuilder {
+ let mut cmd = ProcessBuilder::new(self.path.as_path()).wrapped(self.wrapper.as_ref());
+ cmd.retry_with_argfile(true);
+ cmd
+ }
+
+ /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`.
+ pub fn workspace_process(&self) -> ProcessBuilder {
+ let mut cmd = ProcessBuilder::new(self.path.as_path())
+ .wrapped(self.workspace_wrapper.as_ref())
+ .wrapped(self.wrapper.as_ref());
+ cmd.retry_with_argfile(true);
+ cmd
+ }
+
+ pub fn process_no_wrapper(&self) -> ProcessBuilder {
+ let mut cmd = ProcessBuilder::new(&self.path);
+ cmd.retry_with_argfile(true);
+ cmd
+ }
+
+ /// Gets the output for the given command.
+ ///
+ /// This will return the cached value if available, otherwise it will run
+ /// the command and cache the output.
+ ///
+ /// `extra_fingerprint` is extra data to include in the cache fingerprint.
+ /// Use this if there is other information about the environment that may
+ /// affect the output that is not part of `cmd`.
+ ///
+ /// Returns a tuple of strings `(stdout, stderr)`.
+ pub fn cached_output(
+ &self,
+ cmd: &ProcessBuilder,
+ extra_fingerprint: u64,
+ ) -> CargoResult<(String, String)> {
+ self.cache
+ .lock()
+ .unwrap()
+ .cached_output(cmd, extra_fingerprint)
+ }
+}
+
+/// It is a well known fact that `rustc` is not the fastest compiler in the
+/// world. What is less known is that even `rustc --version --verbose` takes
+/// about a hundred milliseconds! Because we need compiler version info even
+/// for no-op builds, we cache it here, based on compiler's mtime and rustup's
+/// current toolchain.
+///
+/// <https://github.com/rust-lang/cargo/issues/5315>
+/// <https://github.com/rust-lang/rust/issues/49761>
+#[derive(Debug)]
+struct Cache {
+ cache_location: Option<PathBuf>,
+ dirty: bool,
+ data: CacheData,
+}
+
+#[derive(Serialize, Deserialize, Debug, Default)]
+struct CacheData {
+ rustc_fingerprint: u64,
+ outputs: HashMap<u64, Output>,
+ successes: HashMap<u64, bool>,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+struct Output {
+ success: bool,
+ status: String,
+ code: Option<i32>,
+ stdout: String,
+ stderr: String,
+}
+
+impl Cache {
+ fn load(
+ wrapper: Option<&Path>,
+ workspace_wrapper: Option<&Path>,
+ rustc: &Path,
+ rustup_rustc: &Path,
+ cache_location: Option<PathBuf>,
+ config: &Config,
+ ) -> Cache {
+ match (
+ cache_location,
+ rustc_fingerprint(wrapper, workspace_wrapper, rustc, rustup_rustc, config),
+ ) {
+ (Some(cache_location), Ok(rustc_fingerprint)) => {
+ let empty = CacheData {
+ rustc_fingerprint,
+ outputs: HashMap::new(),
+ successes: HashMap::new(),
+ };
+ let mut dirty = true;
+ let data = match read(&cache_location) {
+ Ok(data) => {
+ if data.rustc_fingerprint == rustc_fingerprint {
+ debug!("reusing existing rustc info cache");
+ dirty = false;
+ data
+ } else {
+ debug!("different compiler, creating new rustc info cache");
+ empty
+ }
+ }
+ Err(e) => {
+ debug!("failed to read rustc info cache: {}", e);
+ empty
+ }
+ };
+ return Cache {
+ cache_location: Some(cache_location),
+ dirty,
+ data,
+ };
+
+ fn read(path: &Path) -> CargoResult<CacheData> {
+ let json = paths::read(path)?;
+ Ok(serde_json::from_str(&json)?)
+ }
+ }
+ (_, fingerprint) => {
+ if let Err(e) = fingerprint {
+ warn!("failed to calculate rustc fingerprint: {}", e);
+ }
+ debug!("rustc info cache disabled");
+ Cache {
+ cache_location: None,
+ dirty: false,
+ data: CacheData::default(),
+ }
+ }
+ }
+ }
+
+ fn cached_output(
+ &mut self,
+ cmd: &ProcessBuilder,
+ extra_fingerprint: u64,
+ ) -> CargoResult<(String, String)> {
+ let key = process_fingerprint(cmd, extra_fingerprint);
+ if self.data.outputs.contains_key(&key) {
+ debug!("rustc info cache hit");
+ } else {
+ debug!("rustc info cache miss");
+ debug!("running {}", cmd);
+ let output = cmd.output()?;
+ let stdout = String::from_utf8(output.stdout)
+ .map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes()))
+ .with_context(|| format!("`{}` didn't return utf8 output", cmd))?;
+ let stderr = String::from_utf8(output.stderr)
+ .map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes()))
+ .with_context(|| format!("`{}` didn't return utf8 output", cmd))?;
+ self.data.outputs.insert(
+ key,
+ Output {
+ success: output.status.success(),
+ status: if output.status.success() {
+ String::new()
+ } else {
+ cargo_util::exit_status_to_string(output.status)
+ },
+ code: output.status.code(),
+ stdout,
+ stderr,
+ },
+ );
+ self.dirty = true;
+ }
+ let output = &self.data.outputs[&key];
+ if output.success {
+ Ok((output.stdout.clone(), output.stderr.clone()))
+ } else {
+ Err(ProcessError::new_raw(
+ &format!("process didn't exit successfully: {}", cmd),
+ output.code,
+ &output.status,
+ Some(output.stdout.as_ref()),
+ Some(output.stderr.as_ref()),
+ )
+ .into())
+ }
+ }
+}
+
+impl Drop for Cache {
+ fn drop(&mut self) {
+ if !self.dirty {
+ return;
+ }
+ if let Some(ref path) = self.cache_location {
+ let json = serde_json::to_string(&self.data).unwrap();
+ match paths::write(path, json.as_bytes()) {
+ Ok(()) => info!("updated rustc info cache"),
+ Err(e) => warn!("failed to update rustc info cache: {}", e),
+ }
+ }
+ }
+}
+
+fn rustc_fingerprint(
+ wrapper: Option<&Path>,
+ workspace_wrapper: Option<&Path>,
+ rustc: &Path,
+ rustup_rustc: &Path,
+ config: &Config,
+) -> CargoResult<u64> {
+ let mut hasher = StableHasher::new();
+
+ let hash_exe = |hasher: &mut _, path| -> CargoResult<()> {
+ let path = paths::resolve_executable(path)?;
+ path.hash(hasher);
+
+ paths::mtime(&path)?.hash(hasher);
+ Ok(())
+ };
+
+ hash_exe(&mut hasher, rustc)?;
+ if let Some(wrapper) = wrapper {
+ hash_exe(&mut hasher, wrapper)?;
+ }
+ if let Some(workspace_wrapper) = workspace_wrapper {
+ hash_exe(&mut hasher, workspace_wrapper)?;
+ }
+
+ // Rustup can change the effective compiler without touching
+ // the `rustc` binary, so we try to account for this here.
+ // If we see rustup's env vars, we mix them into the fingerprint,
+ // but we also mix in the mtime of the actual compiler (and not
+ // the rustup shim at `~/.cargo/bin/rustup`), because `RUSTUP_TOOLCHAIN`
+ // could be just `stable-x86_64-unknown-linux-gnu`, i.e., it might
+ // not mention the version of Rust at all, which changes after
+ // `rustup update`.
+ //
+ // If we don't see rustup env vars, but it looks like the compiler
+ // is managed by rustup, we conservatively bail out.
+ let maybe_rustup = rustup_rustc == rustc;
+ match (
+ maybe_rustup,
+ config.get_env("RUSTUP_HOME"),
+ config.get_env("RUSTUP_TOOLCHAIN"),
+ ) {
+ (_, Ok(rustup_home), Ok(rustup_toolchain)) => {
+ debug!("adding rustup info to rustc fingerprint");
+ rustup_toolchain.hash(&mut hasher);
+ rustup_home.hash(&mut hasher);
+ let real_rustc = Path::new(&rustup_home)
+ .join("toolchains")
+ .join(rustup_toolchain)
+ .join("bin")
+ .join("rustc")
+ .with_extension(env::consts::EXE_EXTENSION);
+ paths::mtime(&real_rustc)?.hash(&mut hasher);
+ }
+ (true, _, _) => anyhow::bail!("probably rustup rustc, but without rustup's env vars"),
+ _ => (),
+ }
+
+ Ok(hasher.finish())
+}
+
+fn process_fingerprint(cmd: &ProcessBuilder, extra_fingerprint: u64) -> u64 {
+ let mut hasher = StableHasher::new();
+ extra_fingerprint.hash(&mut hasher);
+ cmd.get_args().for_each(|arg| arg.hash(&mut hasher));
+ let mut env = cmd.get_envs().iter().collect::<Vec<_>>();
+ env.sort_unstable();
+ env.hash(&mut hasher);
+ hasher.finish()
+}
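
For intuition, the field extraction in `Rustc::new` above amounts to scanning the captured `rustc -vV` output for the `host: ` and `release: ` prefixes. A minimal sketch with an illustrative (made-up) `-vV` capture:

```rust
// Sketch of the `extract` closure in `Rustc::new` above, run against a
// hard-coded sample; the sample text is illustrative, not a real toolchain.
fn main() {
    let verbose_version = "rustc 1.70.0 (abcdef123 2023-06-01)\n\
                           binary: rustc\n\
                           release: 1.70.0\n\
                           host: x86_64-unknown-linux-gnu\n";
    let extract = |field: &str| -> Option<&str> {
        verbose_version
            .lines()
            .find(|l| l.starts_with(field))
            .map(|l| &l[field.len()..])
    };
    assert_eq!(extract("release: "), Some("1.70.0"));
    assert_eq!(extract("host: "), Some("x86_64-unknown-linux-gnu"));
}
```

The real implementation additionally parses the `release` value with `semver` and memoizes the whole invocation through the mtime/rustup-based fingerprint described in `Cache`.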
diff --git a/src/tools/cargo/src/cargo/util/semver_ext.rs b/src/tools/cargo/src/cargo/util/semver_ext.rs
new file mode 100644
index 000000000..de6d68e16
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/semver_ext.rs
@@ -0,0 +1,146 @@
+use semver::{Comparator, Op, Version, VersionReq};
+use std::fmt::{self, Display};
+
+#[derive(PartialEq, Eq, Hash, Clone, Debug)]
+pub enum OptVersionReq {
+ Any,
+ Req(VersionReq),
+ /// The exact locked version and the original version requirement.
+ Locked(Version, VersionReq),
+}
+
+pub trait VersionExt {
+ fn is_prerelease(&self) -> bool;
+}
+
+pub trait VersionReqExt {
+ fn exact(version: &Version) -> Self;
+}
+
+impl VersionExt for Version {
+ fn is_prerelease(&self) -> bool {
+ !self.pre.is_empty()
+ }
+}
+
+impl VersionReqExt for VersionReq {
+ fn exact(version: &Version) -> Self {
+ VersionReq {
+ comparators: vec![Comparator {
+ op: Op::Exact,
+ major: version.major,
+ minor: Some(version.minor),
+ patch: Some(version.patch),
+ pre: version.pre.clone(),
+ }],
+ }
+ }
+}
+
+impl OptVersionReq {
+ pub fn exact(version: &Version) -> Self {
+ OptVersionReq::Req(VersionReq::exact(version))
+ }
+
+ pub fn is_exact(&self) -> bool {
+ match self {
+ OptVersionReq::Any => false,
+ OptVersionReq::Req(req) => {
+ req.comparators.len() == 1 && {
+ let cmp = &req.comparators[0];
+ cmp.op == Op::Exact && cmp.minor.is_some() && cmp.patch.is_some()
+ }
+ }
+ OptVersionReq::Locked(..) => true,
+ }
+ }
+
+ pub fn lock_to(&mut self, version: &Version) {
+ assert!(self.matches(version), "cannot lock {} to {}", self, version);
+ use OptVersionReq::*;
+ let version = version.clone();
+ *self = match self {
+ Any => Locked(version, VersionReq::STAR),
+ Req(req) => Locked(version, req.clone()),
+ Locked(_, req) => Locked(version, req.clone()),
+ };
+ }
+
+ pub fn is_locked(&self) -> bool {
+ matches!(self, OptVersionReq::Locked(..))
+ }
+
+ /// Gets the version to which this req is locked, if any.
+ pub fn locked_version(&self) -> Option<&Version> {
+ match self {
+ OptVersionReq::Locked(version, _) => Some(version),
+ _ => None,
+ }
+ }
+
+ pub fn matches(&self, version: &Version) -> bool {
+ match self {
+ OptVersionReq::Any => true,
+ OptVersionReq::Req(req) => req.matches(version),
+ OptVersionReq::Locked(v, _) => {
+ v.major == version.major
+ && v.minor == version.minor
+ && v.patch == version.patch
+ && v.pre == version.pre
+ }
+ }
+ }
+}
+
+impl Display for OptVersionReq {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ OptVersionReq::Any => f.write_str("*"),
+ OptVersionReq::Req(req) => Display::fmt(req, f),
+ OptVersionReq::Locked(_, req) => Display::fmt(req, f),
+ }
+ }
+}
+
+impl From<VersionReq> for OptVersionReq {
+ fn from(req: VersionReq) -> Self {
+ OptVersionReq::Req(req)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn locked_has_the_same_with_exact() {
+ fn test_versions(target_ver: &str, vers: &[&str]) {
+ let ver = Version::parse(target_ver).unwrap();
+ let exact = OptVersionReq::exact(&ver);
+ let mut locked = exact.clone();
+ locked.lock_to(&ver);
+ for v in vers {
+ let v = Version::parse(v).unwrap();
+ assert_eq!(exact.matches(&v), locked.matches(&v));
+ }
+ }
+
+ test_versions(
+ "1.0.0",
+ &["1.0.0", "1.0.1", "0.9.9", "0.10.0", "0.1.0", "1.0.0-pre"],
+ );
+ test_versions("0.9.0", &["0.9.0", "0.9.1", "1.9.0", "0.0.9", "0.9.0-pre"]);
+ test_versions("0.0.2", &["0.0.2", "0.0.1", "0.0.3", "0.0.2-pre"]);
+ test_versions(
+ "0.1.0-beta2.a",
+ &[
+ "0.1.0-beta2.a",
+ "0.9.1",
+ "0.1.0",
+ "0.1.1-beta2.a",
+ "0.1.0-beta2",
+ ],
+ );
+ test_versions("0.1.0+meta", &["0.1.0", "0.1.0+meta", "0.1.0+any"]);
+ }
+}
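
As a quick illustration of the exact-requirement construction used by `VersionReqExt::exact` above, here is a hedged sketch that depends only on the `semver` crate; note that build metadata does not participate in matching, which is why the unit test above treats `0.1.0+meta` and `0.1.0+any` alike:

```rust
// Sketch (requires the `semver` crate): build an `=x.y.z` requirement the way
// `VersionReqExt::exact` does above, then probe it against a few versions.
use semver::{Comparator, Op, Version, VersionReq};

fn exact(version: &Version) -> VersionReq {
    VersionReq {
        comparators: vec![Comparator {
            op: Op::Exact,
            major: version.major,
            minor: Some(version.minor),
            patch: Some(version.patch),
            pre: version.pre.clone(),
        }],
    }
}

fn main() {
    let locked = Version::parse("1.2.3").unwrap();
    let req = exact(&locked);
    assert!(req.matches(&Version::parse("1.2.3").unwrap()));
    assert!(!req.matches(&Version::parse("1.2.4").unwrap()));
    // Build metadata has no precedence, so `=1.2.3` still matches here.
    assert!(req.matches(&Version::parse("1.2.3+build.5").unwrap()));
}
```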
diff --git a/src/tools/cargo/src/cargo/util/to_semver.rs b/src/tools/cargo/src/cargo/util/to_semver.rs
new file mode 100644
index 000000000..25da9dfb9
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/to_semver.rs
@@ -0,0 +1,33 @@
+use crate::util::errors::CargoResult;
+use semver::Version;
+
+pub trait ToSemver {
+ fn to_semver(self) -> CargoResult<Version>;
+}
+
+impl ToSemver for Version {
+ fn to_semver(self) -> CargoResult<Version> {
+ Ok(self)
+ }
+}
+
+impl<'a> ToSemver for &'a str {
+ fn to_semver(self) -> CargoResult<Version> {
+ match Version::parse(self.trim()) {
+ Ok(v) => Ok(v),
+ Err(..) => Err(anyhow::format_err!("cannot parse '{}' as a semver", self)),
+ }
+ }
+}
+
+impl<'a> ToSemver for &'a String {
+ fn to_semver(self) -> CargoResult<Version> {
+ (**self).to_semver()
+ }
+}
+
+impl<'a> ToSemver for &'a Version {
+ fn to_semver(self) -> CargoResult<Version> {
+ Ok(self.clone())
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/toml/mod.rs b/src/tools/cargo/src/cargo/util/toml/mod.rs
new file mode 100644
index 000000000..9e7c6f63e
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/toml/mod.rs
@@ -0,0 +1,3303 @@
+use std::collections::{BTreeMap, BTreeSet, HashMap};
+use std::fmt;
+use std::marker::PhantomData;
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
+use std::str::{self, FromStr};
+
+use anyhow::{anyhow, bail, Context as _};
+use cargo_platform::Platform;
+use cargo_util::paths;
+use itertools::Itertools;
+use lazycell::LazyCell;
+use log::{debug, trace};
+use semver::{self, VersionReq};
+use serde::de;
+use serde::de::IntoDeserializer as _;
+use serde::ser;
+use serde::{Deserialize, Serialize};
+use url::Url;
+
+use crate::core::compiler::{CompileKind, CompileTarget};
+use crate::core::dependency::{Artifact, ArtifactTarget, DepKind};
+use crate::core::manifest::{ManifestMetadata, TargetSourcePath, Warnings};
+use crate::core::resolver::ResolveBehavior;
+use crate::core::{find_workspace_root, resolve_relative_path, CliUnstable};
+use crate::core::{Dependency, Manifest, PackageId, Summary, Target};
+use crate::core::{Edition, EitherManifest, Feature, Features, VirtualManifest, Workspace};
+use crate::core::{GitReference, PackageIdSpec, SourceId, WorkspaceConfig, WorkspaceRootConfig};
+use crate::sources::{CRATES_IO_INDEX, CRATES_IO_REGISTRY};
+use crate::util::errors::{CargoResult, ManifestError};
+use crate::util::interning::InternedString;
+use crate::util::{
+ self, config::ConfigRelativePath, validate_package_name, Config, IntoUrl, VersionReqExt,
+};
+
+mod targets;
+use self::targets::targets;
+
+/// Loads a `Cargo.toml` from a file on disk.
+///
+/// This could result in a real or virtual manifest being returned.
+///
+/// A list of nested paths is also returned, one for each path dependency
+/// within the manifest. For virtual manifests, these paths can only
+/// come from patched or replaced dependencies. These paths are not
+/// canonicalized.
+pub fn read_manifest(
+ path: &Path,
+ source_id: SourceId,
+ config: &Config,
+) -> Result<(EitherManifest, Vec<PathBuf>), ManifestError> {
+ trace!(
+ "read_manifest; path={}; source-id={}",
+ path.display(),
+ source_id
+ );
+ let contents = paths::read(path).map_err(|err| ManifestError::new(err, path.into()))?;
+
+ read_manifest_from_str(&contents, path, source_id, config)
+ .with_context(|| format!("failed to parse manifest at `{}`", path.display()))
+ .map_err(|err| ManifestError::new(err, path.into()))
+}
+
+/// Parse an already-loaded `Cargo.toml` as a Cargo manifest.
+///
+/// This could result in a real or virtual manifest being returned.
+///
+/// A list of nested paths is also returned, one for each path dependency
+/// within the manifest. For virtual manifests, these paths can only
+/// come from patched or replaced dependencies. These paths are not
+/// canonicalized.
+pub fn read_manifest_from_str(
+ contents: &str,
+ manifest_file: &Path,
+ source_id: SourceId,
+ config: &Config,
+) -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
+ let package_root = manifest_file.parent().unwrap();
+
+ let toml = {
+ let pretty_filename = manifest_file
+ .strip_prefix(config.cwd())
+ .unwrap_or(manifest_file);
+ parse_document(contents, pretty_filename, config)?
+ };
+
+ // Provide a helpful error message for a common user error.
+ if let Some(package) = toml.get("package").or_else(|| toml.get("project")) {
+ if let Some(feats) = package.get("cargo-features") {
+ bail!(
+ "cargo-features = {} was found in the wrong location: it \
+ should be set at the top of Cargo.toml before any tables",
+ feats
+ );
+ }
+ }
+
+ let mut unused = BTreeSet::new();
+ let manifest: TomlManifest = serde_ignored::deserialize(toml.into_deserializer(), |path| {
+ let mut key = String::new();
+ stringify(&mut key, &path);
+ unused.insert(key);
+ })?;
+ let add_unused = |warnings: &mut Warnings| {
+ for key in unused {
+ warnings.add_warning(format!("unused manifest key: {}", key));
+ if key == "profiles.debug" {
+ warnings.add_warning("use `[profile.dev]` to configure debug builds".to_string());
+ }
+ }
+ };
+
+ let manifest = Rc::new(manifest);
+ if let Some(deps) = manifest
+ .workspace
+ .as_ref()
+ .and_then(|ws| ws.dependencies.as_ref())
+ {
+ for (name, dep) in deps {
+ if dep.is_optional() {
+ bail!(
+ "{} is optional, but workspace dependencies cannot be optional",
+ name
+ );
+ }
+ }
+ }
+ return if manifest.project.is_some() || manifest.package.is_some() {
+ let (mut manifest, paths) =
+ TomlManifest::to_real_manifest(&manifest, source_id, package_root, config)?;
+ add_unused(manifest.warnings_mut());
+ if manifest.targets().iter().all(|t| t.is_custom_build()) {
+ bail!(
+ "no targets specified in the manifest\n\
+ either src/lib.rs, src/main.rs, a [lib] section, or \
+ [[bin]] section must be present"
+ )
+ }
+ Ok((EitherManifest::Real(manifest), paths))
+ } else {
+ let (mut m, paths) =
+ TomlManifest::to_virtual_manifest(&manifest, source_id, package_root, config)?;
+ add_unused(m.warnings_mut());
+ Ok((EitherManifest::Virtual(m), paths))
+ };
+
+ fn stringify(dst: &mut String, path: &serde_ignored::Path<'_>) {
+ use serde_ignored::Path;
+
+ match *path {
+ Path::Root => {}
+ Path::Seq { parent, index } => {
+ stringify(dst, parent);
+ if !dst.is_empty() {
+ dst.push('.');
+ }
+ dst.push_str(&index.to_string());
+ }
+ Path::Map { parent, ref key } => {
+ stringify(dst, parent);
+ if !dst.is_empty() {
+ dst.push('.');
+ }
+ dst.push_str(key);
+ }
+ Path::Some { parent }
+ | Path::NewtypeVariant { parent }
+ | Path::NewtypeStruct { parent } => stringify(dst, parent),
+ }
+ }
+}
+
+pub fn parse_document(toml: &str, _file: &Path, _config: &Config) -> CargoResult<toml::Table> {
+ // At the moment, no compatibility checks are needed.
+ toml.parse()
+ .map_err(|e| anyhow::Error::from(e).context("could not parse input as TOML"))
+}
+
+/// Warn about paths that have been deprecated and may conflict.
+fn warn_on_deprecated(new_path: &str, name: &str, kind: &str, warnings: &mut Vec<String>) {
+ let old_path = new_path.replace("-", "_");
+ warnings.push(format!(
+ "conflicting between `{new_path}` and `{old_path}` in the `{name}` {kind}.\n
+ `{old_path}` is ignored and not recommended for use in the future"
+ ))
+}
+
+type TomlLibTarget = TomlTarget;
+type TomlBinTarget = TomlTarget;
+type TomlExampleTarget = TomlTarget;
+type TomlTestTarget = TomlTarget;
+type TomlBenchTarget = TomlTarget;
+
+#[derive(Clone, Debug, Serialize)]
+#[serde(untagged)]
+pub enum TomlDependency<P: Clone = String> {
+ /// In the simple format, only a version is specified, e.g.
+ /// `package = "<version>"`
+ Simple(String),
+ /// The simple format is equivalent to a detailed dependency
+ /// specifying only a version, e.g.
+ /// `package = { version = "<version>" }`
+ Detailed(DetailedTomlDependency<P>),
+}
+
+impl<'de, P: Deserialize<'de> + Clone> de::Deserialize<'de> for TomlDependency<P> {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ struct TomlDependencyVisitor<P>(PhantomData<P>);
+
+ impl<'de, P: Deserialize<'de> + Clone> de::Visitor<'de> for TomlDependencyVisitor<P> {
+ type Value = TomlDependency<P>;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str(
+ "a version string like \"0.9.8\" or a \
+ detailed dependency like { version = \"0.9.8\" }",
+ )
+ }
+
+ fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ Ok(TomlDependency::Simple(s.to_owned()))
+ }
+
+ fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
+ where
+ V: de::MapAccess<'de>,
+ {
+ let mvd = de::value::MapAccessDeserializer::new(map);
+ DetailedTomlDependency::deserialize(mvd).map(TomlDependency::Detailed)
+ }
+ }
+ deserializer.deserialize_any(TomlDependencyVisitor(PhantomData))
+ }
+}
+
+impl TomlDependency {
+ fn unused_keys(&self) -> Vec<String> {
+ match self {
+ TomlDependency::Simple(_) => vec![],
+ TomlDependency::Detailed(detailed) => detailed.other.keys().cloned().collect(),
+ }
+ }
+}
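
The two dependency shapes described above (a "simple" version string versus a "detailed" table) can also be sketched with a plain untagged serde enum; Cargo uses the hand-written visitor above instead, largely for the better error message. The crate and type names below are illustrative and assume the `serde` (with `derive`) and `toml` crates:

```rust
// Illustrative stand-in for TomlDependency: an untagged enum accepts either
// `name = "1.0"` or `name = { version = "1.0", features = [...] }`.
use std::collections::BTreeMap;

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum Dep {
    Simple(String),
    Detailed {
        version: Option<String>,
        features: Option<Vec<String>>,
    },
}

fn main() {
    let deps_table = r#"
        anyhow = "1.0"
        serde = { version = "1.0", features = ["derive"] }
    "#;
    let deps: BTreeMap<String, Dep> = toml::from_str(deps_table).unwrap();
    println!("{deps:#?}");
}
```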
+
+pub trait ResolveToPath {
+ fn resolve(&self, config: &Config) -> PathBuf;
+}
+
+impl ResolveToPath for String {
+ fn resolve(&self, _: &Config) -> PathBuf {
+ self.into()
+ }
+}
+
+impl ResolveToPath for ConfigRelativePath {
+ fn resolve(&self, c: &Config) -> PathBuf {
+ self.resolve_path(c)
+ }
+}
+
+#[derive(Deserialize, Serialize, Clone, Debug)]
+#[serde(rename_all = "kebab-case")]
+pub struct DetailedTomlDependency<P: Clone = String> {
+ version: Option<String>,
+ registry: Option<String>,
+ /// The URL of the `registry` field.
+ /// This is an internal implementation detail. When Cargo creates a
+ /// package, it replaces `registry` with `registry-index` so that the
+ /// manifest contains the correct URL. Not all users will have the same
+ /// registry names configured, so Cargo can't rely on the name alone for
+ /// crates published by other users.
+ registry_index: Option<String>,
+ // `path` is relative to the file it appears in. If that's a `Cargo.toml`, it'll be relative to
+ // that TOML file, and if it's a `.cargo/config` file, it'll be relative to that file.
+ path: Option<P>,
+ git: Option<String>,
+ branch: Option<String>,
+ tag: Option<String>,
+ rev: Option<String>,
+ features: Option<Vec<String>>,
+ optional: Option<bool>,
+ default_features: Option<bool>,
+ #[serde(rename = "default_features")]
+ default_features2: Option<bool>,
+ package: Option<String>,
+ public: Option<bool>,
+
+ /// One or more of `bin`, `cdylib`, `staticlib`, `bin:<name>`.
+ artifact: Option<StringOrVec>,
+ /// If set, the artifact should also be a dependency
+ lib: Option<bool>,
+ /// A platform name, like `x86_64-apple-darwin`
+ target: Option<String>,
+ /// This is here to provide a way to see the "unused manifest keys" when deserializing
+ #[serde(skip_serializing)]
+ #[serde(flatten)]
+ other: BTreeMap<String, toml::Value>,
+}
+
+// Explicit implementation so we avoid pulling in P: Default
+impl<P: Clone> Default for DetailedTomlDependency<P> {
+ fn default() -> Self {
+ Self {
+ version: Default::default(),
+ registry: Default::default(),
+ registry_index: Default::default(),
+ path: Default::default(),
+ git: Default::default(),
+ branch: Default::default(),
+ tag: Default::default(),
+ rev: Default::default(),
+ features: Default::default(),
+ optional: Default::default(),
+ default_features: Default::default(),
+ default_features2: Default::default(),
+ package: Default::default(),
+ public: Default::default(),
+ artifact: Default::default(),
+ lib: Default::default(),
+ target: Default::default(),
+ other: Default::default(),
+ }
+ }
+}
+
+/// This type is used to deserialize `Cargo.toml` files.
+#[derive(Debug, Deserialize, Serialize)]
+#[serde(rename_all = "kebab-case")]
+pub struct TomlManifest {
+ cargo_features: Option<Vec<String>>,
+ package: Option<Box<TomlPackage>>,
+ project: Option<Box<TomlPackage>>,
+ profile: Option<TomlProfiles>,
+ lib: Option<TomlLibTarget>,
+ bin: Option<Vec<TomlBinTarget>>,
+ example: Option<Vec<TomlExampleTarget>>,
+ test: Option<Vec<TomlTestTarget>>,
+ bench: Option<Vec<TomlTestTarget>>,
+ dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ dev_dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ #[serde(rename = "dev_dependencies")]
+ dev_dependencies2: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ build_dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ #[serde(rename = "build_dependencies")]
+ build_dependencies2: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ features: Option<BTreeMap<InternedString, Vec<InternedString>>>,
+ target: Option<BTreeMap<String, TomlPlatform>>,
+ replace: Option<BTreeMap<String, TomlDependency>>,
+ patch: Option<BTreeMap<String, BTreeMap<String, TomlDependency>>>,
+ workspace: Option<TomlWorkspace>,
+ badges: Option<MaybeWorkspaceBtreeMap>,
+}
+
+#[derive(Deserialize, Serialize, Clone, Debug, Default)]
+pub struct TomlProfiles(BTreeMap<InternedString, TomlProfile>);
+
+impl TomlProfiles {
+ pub fn get_all(&self) -> &BTreeMap<InternedString, TomlProfile> {
+ &self.0
+ }
+
+ pub fn get(&self, name: &str) -> Option<&TomlProfile> {
+ self.0.get(name)
+ }
+
+ /// Checks syntax validity and unstable feature gate for each profile.
+ ///
+ /// It's a bit unfortunate that both `-Z` flags and `cargo-features` are required,
+ /// because profiles can now be set in either `Cargo.toml` or `config.toml`.
+ pub fn validate(
+ &self,
+ cli_unstable: &CliUnstable,
+ features: &Features,
+ warnings: &mut Vec<String>,
+ ) -> CargoResult<()> {
+ for (name, profile) in &self.0 {
+ profile.validate(name, cli_unstable, features, warnings)?;
+ }
+ Ok(())
+ }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct TomlOptLevel(pub String);
+
+impl<'de> de::Deserialize<'de> for TomlOptLevel {
+ fn deserialize<D>(d: D) -> Result<TomlOptLevel, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ struct Visitor;
+
+ impl<'de> de::Visitor<'de> for Visitor {
+ type Value = TomlOptLevel;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("an optimization level")
+ }
+
+ fn visit_i64<E>(self, value: i64) -> Result<TomlOptLevel, E>
+ where
+ E: de::Error,
+ {
+ Ok(TomlOptLevel(value.to_string()))
+ }
+
+ fn visit_str<E>(self, value: &str) -> Result<TomlOptLevel, E>
+ where
+ E: de::Error,
+ {
+ if value == "s" || value == "z" {
+ Ok(TomlOptLevel(value.to_string()))
+ } else {
+ Err(E::custom(format!(
+ "must be `0`, `1`, `2`, `3`, `s` or `z`, \
+ but found the string: \"{}\"",
+ value
+ )))
+ }
+ }
+ }
+
+ d.deserialize_any(Visitor)
+ }
+}
+
+impl ser::Serialize for TomlOptLevel {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ match self.0.parse::<u32>() {
+ Ok(n) => n.serialize(serializer),
+ Err(_) => self.0.serialize(serializer),
+ }
+ }
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(untagged, expecting = "expected a boolean or an integer")]
+pub enum U32OrBool {
+ U32(u32),
+ Bool(bool),
+}
+
+#[derive(Deserialize, Serialize, Clone, Debug, Default, Eq, PartialEq)]
+#[serde(default, rename_all = "kebab-case")]
+pub struct TomlProfile {
+ pub opt_level: Option<TomlOptLevel>,
+ pub lto: Option<StringOrBool>,
+ pub codegen_backend: Option<InternedString>,
+ pub codegen_units: Option<u32>,
+ pub debug: Option<U32OrBool>,
+ pub split_debuginfo: Option<String>,
+ pub debug_assertions: Option<bool>,
+ pub rpath: Option<bool>,
+ pub panic: Option<String>,
+ pub overflow_checks: Option<bool>,
+ pub incremental: Option<bool>,
+ pub dir_name: Option<InternedString>,
+ pub inherits: Option<InternedString>,
+ pub strip: Option<StringOrBool>,
+ // Note that `rustflags` is used for the cargo-feature `profile_rustflags`
+ pub rustflags: Option<Vec<InternedString>>,
+ // These two fields must be last because they are sub-tables, and TOML
+ // requires all non-tables to be listed first.
+ pub package: Option<BTreeMap<ProfilePackageSpec, TomlProfile>>,
+ pub build_override: Option<Box<TomlProfile>>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub enum ProfilePackageSpec {
+ Spec(PackageIdSpec),
+ All,
+}
+
+impl ser::Serialize for ProfilePackageSpec {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ self.to_string().serialize(s)
+ }
+}
+
+impl<'de> de::Deserialize<'de> for ProfilePackageSpec {
+ fn deserialize<D>(d: D) -> Result<ProfilePackageSpec, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ let string = String::deserialize(d)?;
+ if string == "*" {
+ Ok(ProfilePackageSpec::All)
+ } else {
+ PackageIdSpec::parse(&string)
+ .map_err(de::Error::custom)
+ .map(ProfilePackageSpec::Spec)
+ }
+ }
+}
+
+impl fmt::Display for ProfilePackageSpec {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ProfilePackageSpec::Spec(spec) => spec.fmt(f),
+ ProfilePackageSpec::All => f.write_str("*"),
+ }
+ }
+}
+
+impl TomlProfile {
+ /// Checks syntax validity and unstable feature gate for a given profile.
+ pub fn validate(
+ &self,
+ name: &str,
+ cli_unstable: &CliUnstable,
+ features: &Features,
+ warnings: &mut Vec<String>,
+ ) -> CargoResult<()> {
+ self.validate_profile(name, cli_unstable, features)?;
+ if let Some(ref profile) = self.build_override {
+ profile.validate_override("build-override")?;
+ profile.validate_profile(&format!("{name}.build-override"), cli_unstable, features)?;
+ }
+ if let Some(ref packages) = self.package {
+ for (override_name, profile) in packages {
+ profile.validate_override("package")?;
+ profile.validate_profile(
+ &format!("{name}.package.{override_name}"),
+ cli_unstable,
+ features,
+ )?;
+ }
+ }
+
+ // Profile name validation
+ Self::validate_name(name)?;
+
+ if let Some(dir_name) = self.dir_name {
+ // This is disabled for now, as we would like to stabilize named
+ // profiles without this, and then decide in the future if it is
+ // needed. This helps simplify the UI a little.
+ bail!(
+ "dir-name=\"{}\" in profile `{}` is not currently allowed, \
+ directory names are tied to the profile name for custom profiles",
+ dir_name,
+ name
+ );
+ }
+
+ // `inherits` validation
+ if matches!(self.inherits.map(|s| s.as_str()), Some("debug")) {
+ bail!(
+ "profile.{}.inherits=\"debug\" should be profile.{}.inherits=\"dev\"",
+ name,
+ name
+ );
+ }
+
+ match name {
+ "doc" => {
+ warnings.push("profile `doc` is deprecated and has no effect".to_string());
+ }
+ "test" | "bench" => {
+ if self.panic.is_some() {
+ warnings.push(format!("`panic` setting is ignored for `{}` profile", name))
+ }
+ }
+ _ => {}
+ }
+
+ if let Some(panic) = &self.panic {
+ if panic != "unwind" && panic != "abort" {
+ bail!(
+ "`panic` setting of `{}` is not a valid setting, \
+ must be `unwind` or `abort`",
+ panic
+ );
+ }
+ }
+
+ if let Some(StringOrBool::String(arg)) = &self.lto {
+ if arg == "true" || arg == "false" {
+ bail!(
+ "`lto` setting of string `\"{arg}\"` for `{name}` profile is not \
+ a valid setting, must be a boolean (`true`/`false`) or a string \
+ (`\"thin\"`/`\"fat\"`/`\"off\"`) or omitted.",
+ );
+ }
+ }
+
+ Ok(())
+ }
+
+ /// Validate dir-names and profile names according to RFC 2678.
+ pub fn validate_name(name: &str) -> CargoResult<()> {
+ if let Some(ch) = name
+ .chars()
+ .find(|ch| !ch.is_alphanumeric() && *ch != '_' && *ch != '-')
+ {
+ bail!(
+ "invalid character `{}` in profile name `{}`\n\
+ Allowed characters are letters, numbers, underscore, and hyphen.",
+ ch,
+ name
+ );
+ }
+
+ const SEE_DOCS: &str = "See https://doc.rust-lang.org/cargo/reference/profiles.html \
+ for more on configuring profiles.";
+
+ let lower_name = name.to_lowercase();
+ if lower_name == "debug" {
+ bail!(
+ "profile name `{}` is reserved\n\
+ To configure the default development profile, use the name `dev` \
+ as in [profile.dev]\n\
+ {}",
+ name,
+ SEE_DOCS
+ );
+ }
+ if lower_name == "build-override" {
+ bail!(
+ "profile name `{}` is reserved\n\
+ To configure build dependency settings, use [profile.dev.build-override] \
+ and [profile.release.build-override]\n\
+ {}",
+ name,
+ SEE_DOCS
+ );
+ }
+
+ // These are some arbitrary reservations. We have no plans to use
+ // these, but it seems safer to reserve a few just in case we want to
+ // add more built-in profiles in the future. We can also use special
+ // syntax like cargo:foo if needed. But it is unlikely these will ever
+ // be used.
+ if matches!(
+ lower_name.as_str(),
+ "build"
+ | "check"
+ | "clean"
+ | "config"
+ | "fetch"
+ | "fix"
+ | "install"
+ | "metadata"
+ | "package"
+ | "publish"
+ | "report"
+ | "root"
+ | "run"
+ | "rust"
+ | "rustc"
+ | "rustdoc"
+ | "target"
+ | "tmp"
+ | "uninstall"
+ ) || lower_name.starts_with("cargo")
+ {
+ bail!(
+ "profile name `{}` is reserved\n\
+ Please choose a different name.\n\
+ {}",
+ name,
+ SEE_DOCS
+ );
+ }
+
+ Ok(())
+ }
+
+ /// Validates a profile.
+ ///
+ /// This is a shallow check, which is reused for the profile itself and any overrides.
+ fn validate_profile(
+ &self,
+ name: &str,
+ cli_unstable: &CliUnstable,
+ features: &Features,
+ ) -> CargoResult<()> {
+ if let Some(codegen_backend) = &self.codegen_backend {
+ match (
+ features.require(Feature::codegen_backend()),
+ cli_unstable.codegen_backend,
+ ) {
+ (Err(e), false) => return Err(e),
+ _ => {}
+ }
+
+ if codegen_backend.contains(|c: char| !c.is_ascii_alphanumeric() && c != '_') {
+ bail!(
+ "`profile.{}.codegen-backend` setting of `{}` is not a valid backend name.",
+ name,
+ codegen_backend,
+ );
+ }
+ }
+ if self.rustflags.is_some() {
+ match (
+ features.require(Feature::profile_rustflags()),
+ cli_unstable.profile_rustflags,
+ ) {
+ (Err(e), false) => return Err(e),
+ _ => {}
+ }
+ }
+ Ok(())
+ }
+
+ /// Validation that is specific to an override.
+ fn validate_override(&self, which: &str) -> CargoResult<()> {
+ if self.package.is_some() {
+ bail!("package-specific profiles cannot be nested");
+ }
+ if self.build_override.is_some() {
+ bail!("build-override profiles cannot be nested");
+ }
+ if self.panic.is_some() {
+ bail!("`panic` may not be specified in a `{}` profile", which)
+ }
+ if self.lto.is_some() {
+ bail!("`lto` may not be specified in a `{}` profile", which)
+ }
+ if self.rpath.is_some() {
+ bail!("`rpath` may not be specified in a `{}` profile", which)
+ }
+ Ok(())
+ }
+
+ /// Overwrite self's values with the given profile.
+ pub fn merge(&mut self, profile: &TomlProfile) {
+ if let Some(v) = &profile.opt_level {
+ self.opt_level = Some(v.clone());
+ }
+
+ if let Some(v) = &profile.lto {
+ self.lto = Some(v.clone());
+ }
+
+ if let Some(v) = profile.codegen_backend {
+ self.codegen_backend = Some(v);
+ }
+
+ if let Some(v) = profile.codegen_units {
+ self.codegen_units = Some(v);
+ }
+
+ if let Some(v) = &profile.debug {
+ self.debug = Some(v.clone());
+ }
+
+ if let Some(v) = profile.debug_assertions {
+ self.debug_assertions = Some(v);
+ }
+
+ if let Some(v) = &profile.split_debuginfo {
+ self.split_debuginfo = Some(v.clone());
+ }
+
+ if let Some(v) = profile.rpath {
+ self.rpath = Some(v);
+ }
+
+ if let Some(v) = &profile.panic {
+ self.panic = Some(v.clone());
+ }
+
+ if let Some(v) = profile.overflow_checks {
+ self.overflow_checks = Some(v);
+ }
+
+ if let Some(v) = profile.incremental {
+ self.incremental = Some(v);
+ }
+
+ if let Some(v) = &profile.rustflags {
+ self.rustflags = Some(v.clone());
+ }
+
+ if let Some(other_package) = &profile.package {
+ match &mut self.package {
+ Some(self_package) => {
+ for (spec, other_pkg_profile) in other_package {
+ match self_package.get_mut(spec) {
+ Some(p) => p.merge(other_pkg_profile),
+ None => {
+ self_package.insert(spec.clone(), other_pkg_profile.clone());
+ }
+ }
+ }
+ }
+ None => self.package = Some(other_package.clone()),
+ }
+ }
+
+ if let Some(other_bo) = &profile.build_override {
+ match &mut self.build_override {
+ Some(self_bo) => self_bo.merge(other_bo),
+ None => self.build_override = Some(other_bo.clone()),
+ }
+ }
+
+ if let Some(v) = &profile.inherits {
+ self.inherits = Some(*v);
+ }
+
+ if let Some(v) = &profile.dir_name {
+ self.dir_name = Some(*v);
+ }
+
+ if let Some(v) = &profile.strip {
+ self.strip = Some(v.clone());
+ }
+ }
+}
+
+/// A StringOrVec can be parsed from either a TOML string or array,
+/// but is always stored as a vector.
+#[derive(Clone, Debug, Serialize, Eq, PartialEq, PartialOrd, Ord)]
+pub struct StringOrVec(Vec<String>);
+
+impl<'de> de::Deserialize<'de> for StringOrVec {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ struct Visitor;
+
+ impl<'de> de::Visitor<'de> for Visitor {
+ type Value = StringOrVec;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("string or list of strings")
+ }
+
+ fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ Ok(StringOrVec(vec![s.to_string()]))
+ }
+
+ fn visit_seq<V>(self, v: V) -> Result<Self::Value, V::Error>
+ where
+ V: de::SeqAccess<'de>,
+ {
+ let seq = de::value::SeqAccessDeserializer::new(v);
+ Vec::deserialize(seq).map(StringOrVec)
+ }
+ }
+
+ deserializer.deserialize_any(Visitor)
+ }
+}
+
+impl StringOrVec {
+ pub fn iter<'a>(&'a self) -> std::slice::Iter<'a, String> {
+ self.0.iter()
+ }
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(untagged, expecting = "expected a boolean or a string")]
+pub enum StringOrBool {
+ String(String),
+ Bool(bool),
+}
+
+#[derive(PartialEq, Clone, Debug, Serialize)]
+#[serde(untagged)]
+pub enum VecStringOrBool {
+ VecString(Vec<String>),
+ Bool(bool),
+}
+
+impl<'de> de::Deserialize<'de> for VecStringOrBool {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ struct Visitor;
+
+ impl<'de> de::Visitor<'de> for Visitor {
+ type Value = VecStringOrBool;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("a boolean or vector of strings")
+ }
+
+ fn visit_seq<V>(self, v: V) -> Result<Self::Value, V::Error>
+ where
+ V: de::SeqAccess<'de>,
+ {
+ let seq = de::value::SeqAccessDeserializer::new(v);
+ Vec::deserialize(seq).map(VecStringOrBool::VecString)
+ }
+
+ fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ Ok(VecStringOrBool::Bool(b))
+ }
+ }
+
+ deserializer.deserialize_any(Visitor)
+ }
+}
+
+fn version_trim_whitespace<'de, D>(deserializer: D) -> Result<MaybeWorkspaceSemverVersion, D::Error>
+where
+ D: de::Deserializer<'de>,
+{
+ struct Visitor;
+
+ impl<'de> de::Visitor<'de> for Visitor {
+ type Value = MaybeWorkspaceSemverVersion;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("SemVer version")
+ }
+
+ fn visit_str<E>(self, string: &str) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ match string.trim().parse().map_err(de::Error::custom) {
+ Ok(parsed) => Ok(MaybeWorkspace::Defined(parsed)),
+ Err(e) => Err(e),
+ }
+ }
+
+ fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
+ where
+ V: de::MapAccess<'de>,
+ {
+ let mvd = de::value::MapAccessDeserializer::new(map);
+ TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)
+ }
+ }
+
+ deserializer.deserialize_any(Visitor)
+}
+
+/// This trait exists to make [`MaybeWorkspace::Workspace`] generic. It makes deserialization of
+/// [`MaybeWorkspace`] much easier, as well as making error messages for
+/// [`MaybeWorkspace::resolve`] much nicer.
+///
+/// Implementors should have a `workspace` field of type `bool`. It is used to ensure
+/// `workspace` is not `false` in a `Cargo.toml`.
+pub trait WorkspaceInherit {
+ /// This is the workspace table that is being inherited from.
+ /// For example, `[workspace.dependencies]` would be the table "dependencies".
+ fn inherit_toml_table(&self) -> &str;
+
+ /// This is used to output the value of the implementor's `workspace` field.
+ fn workspace(&self) -> bool;
+}
+
+/// An enum that allows for inheriting keys from a workspace in a Cargo.toml.
+#[derive(Serialize, Clone, Debug)]
+#[serde(untagged)]
+pub enum MaybeWorkspace<T, W: WorkspaceInherit> {
+ /// The "defined" type, or the type that that is used when not inheriting from a workspace.
+ Defined(T),
+ /// The type when inheriting from a workspace.
+ Workspace(W),
+}
+
+impl<T, W: WorkspaceInherit> MaybeWorkspace<T, W> {
+ fn resolve<'a>(
+ self,
+ label: &str,
+ get_ws_inheritable: impl FnOnce() -> CargoResult<T>,
+ ) -> CargoResult<T> {
+ match self {
+ MaybeWorkspace::Defined(value) => Ok(value),
+ MaybeWorkspace::Workspace(w) => get_ws_inheritable().with_context(|| {
+ format!(
+ "error inheriting `{label}` from workspace root manifest's `workspace.{}.{label}`",
+ w.inherit_toml_table(),
+ )
+ }),
+ }
+ }
+
+ fn resolve_with_self<'a>(
+ self,
+ label: &str,
+ get_ws_inheritable: impl FnOnce(&W) -> CargoResult<T>,
+ ) -> CargoResult<T> {
+ match self {
+ MaybeWorkspace::Defined(value) => Ok(value),
+ MaybeWorkspace::Workspace(w) => get_ws_inheritable(&w).with_context(|| {
+ format!(
+ "error inheriting `{label}` from workspace root manifest's `workspace.{}.{label}`",
+ w.inherit_toml_table(),
+ )
+ }),
+ }
+ }
+
+ fn as_defined(&self) -> Option<&T> {
+ match self {
+ MaybeWorkspace::Workspace(_) => None,
+ MaybeWorkspace::Defined(defined) => Some(defined),
+ }
+ }
+}
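
A greatly simplified sketch of the resolution flow that `MaybeWorkspace::resolve` implements above: a member manifest either defines a value directly or defers to a workspace-level lookup, and the failure message names the inherited key. The names and error strings here are illustrative only:

```rust
// Illustrative, cut-down version of the MaybeWorkspace::resolve flow above.
enum MaybeWs<T> {
    Defined(T),
    Workspace, // stands in for `{ workspace = true }` in a member manifest
}

impl<T> MaybeWs<T> {
    fn resolve(
        self,
        label: &str,
        from_workspace: impl FnOnce() -> Result<T, String>,
    ) -> Result<T, String> {
        match self {
            MaybeWs::Defined(value) => Ok(value),
            MaybeWs::Workspace => from_workspace()
                .map_err(|e| format!("error inheriting `{label}` from the workspace root: {e}")),
        }
    }
}

fn main() {
    // The workspace root defines `edition`; the member says `workspace = true`.
    let ws_edition = Some("2021".to_string());
    let inherited: MaybeWs<String> = MaybeWs::Workspace;
    let edition = inherited.resolve("edition", || {
        ws_edition
            .clone()
            .ok_or_else(|| "`workspace.package.edition` was not defined".to_string())
    });
    assert_eq!(edition.as_deref(), Ok("2021"));

    // A value defined directly in the member manifest is used as-is.
    let defined = MaybeWs::Defined("2018".to_string());
    assert_eq!(
        defined.resolve("edition", || Err("unused".to_string())),
        Ok("2018".to_string())
    );
}
```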
+
+type MaybeWorkspaceDependency = MaybeWorkspace<TomlDependency, TomlWorkspaceDependency>;
+
+impl<'de> de::Deserialize<'de> for MaybeWorkspaceDependency {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ let value = serde_value::Value::deserialize(deserializer)?;
+
+ if let Ok(w) = TomlWorkspaceDependency::deserialize(serde_value::ValueDeserializer::<
+ D::Error,
+ >::new(value.clone()))
+ {
+ return if w.workspace() {
+ Ok(MaybeWorkspace::Workspace(w))
+ } else {
+ Err(de::Error::custom("`workspace` cannot be false"))
+ };
+ }
+ TomlDependency::deserialize(serde_value::ValueDeserializer::<D::Error>::new(value))
+ .map(MaybeWorkspace::Defined)
+ }
+}
+
+impl MaybeWorkspaceDependency {
+ fn unused_keys(&self) -> Vec<String> {
+ match self {
+ MaybeWorkspaceDependency::Defined(d) => d.unused_keys(),
+ MaybeWorkspaceDependency::Workspace(w) => w.other.keys().cloned().collect(),
+ }
+ }
+}
+
+#[derive(Deserialize, Serialize, Clone, Debug)]
+#[serde(rename_all = "kebab-case")]
+pub struct TomlWorkspaceDependency {
+ workspace: bool,
+ features: Option<Vec<String>>,
+ default_features: Option<bool>,
+ #[serde(rename = "default_features")]
+ default_features2: Option<bool>,
+ optional: Option<bool>,
+ /// This is here to provide a way to see the "unused manifest keys" when deserializing
+ #[serde(skip_serializing)]
+ #[serde(flatten)]
+ other: BTreeMap<String, toml::Value>,
+}
+
+impl WorkspaceInherit for TomlWorkspaceDependency {
+ fn inherit_toml_table(&self) -> &str {
+ "dependencies"
+ }
+
+ fn workspace(&self) -> bool {
+ self.workspace
+ }
+}
+
+impl TomlWorkspaceDependency {
+ fn resolve<'a>(
+ &self,
+ name: &str,
+ inheritable: impl FnOnce() -> CargoResult<&'a InheritableFields>,
+ cx: &mut Context<'_, '_>,
+ ) -> CargoResult<TomlDependency> {
+ fn default_features_msg(label: &str, ws_def_feat: Option<bool>, cx: &mut Context<'_, '_>) {
+ let ws_def_feat = match ws_def_feat {
+ Some(true) => "true",
+ Some(false) => "false",
+ None => "not specified",
+ };
+ cx.warnings.push(format!(
+ "`default-features` is ignored for {label}, since `default-features` was \
+ {ws_def_feat} for `workspace.dependencies.{label}`, \
+ this could become a hard error in the future"
+ ))
+ }
+ if self.default_features.is_some() && self.default_features2.is_some() {
+ warn_on_deprecated("default-features", name, "dependency", cx.warnings);
+ }
+ inheritable()?.get_dependency(name, cx.root).map(|d| {
+ match d {
+ TomlDependency::Simple(s) => {
+ if let Some(false) = self.default_features.or(self.default_features2) {
+ default_features_msg(name, None, cx);
+ }
+ if self.optional.is_some() || self.features.is_some() {
+ TomlDependency::Detailed(DetailedTomlDependency {
+ version: Some(s),
+ optional: self.optional,
+ features: self.features.clone(),
+ ..Default::default()
+ })
+ } else {
+ TomlDependency::Simple(s)
+ }
+ }
+ TomlDependency::Detailed(d) => {
+ let mut d = d.clone();
+ match (
+ self.default_features.or(self.default_features2),
+ d.default_features.or(d.default_features2),
+ ) {
+ // member: default-features = true and
+ // workspace: default-features = false should turn on
+ // default-features
+ (Some(true), Some(false)) => {
+ d.default_features = Some(true);
+ }
+ // member: default-features = false and
+ // workspace: default-features = true should ignore member
+ // default-features
+ (Some(false), Some(true)) => {
+ default_features_msg(name, Some(true), cx);
+ }
+ // member: default-features = false and
+ // workspace: dep = "1.0" should ignore member default-features
+ (Some(false), None) => {
+ default_features_msg(name, None, cx);
+ }
+ _ => {}
+ }
+ d.add_features(self.features.clone());
+ d.update_optional(self.optional);
+ TomlDependency::Detailed(d)
+ }
+ }
+ })
+ }
+}
+
+// This already has a `Deserialize` impl from `version_trim_whitespace`.
+type MaybeWorkspaceSemverVersion = MaybeWorkspace<semver::Version, TomlWorkspaceField>;
+
+type MaybeWorkspaceString = MaybeWorkspace<String, TomlWorkspaceField>;
+impl<'de> de::Deserialize<'de> for MaybeWorkspaceString {
+ fn deserialize<D>(d: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ struct Visitor;
+
+ impl<'de> de::Visitor<'de> for Visitor {
+ type Value = MaybeWorkspaceString;
+
+ fn expecting(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
+ f.write_str("a string or workspace")
+ }
+
+ fn visit_string<E>(self, value: String) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ Ok(MaybeWorkspaceString::Defined(value))
+ }
+
+ fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
+ where
+ V: de::MapAccess<'de>,
+ {
+ let mvd = de::value::MapAccessDeserializer::new(map);
+ TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)
+ }
+ }
+
+ d.deserialize_any(Visitor)
+ }
+}
+
+type MaybeWorkspaceVecString = MaybeWorkspace<Vec<String>, TomlWorkspaceField>;
+impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecString {
+ fn deserialize<D>(d: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ struct Visitor;
+
+ impl<'de> de::Visitor<'de> for Visitor {
+ type Value = MaybeWorkspaceVecString;
+
+ fn expecting(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
+ f.write_str("a vector of strings or workspace")
+ }
+ fn visit_seq<A>(self, v: A) -> Result<Self::Value, A::Error>
+ where
+ A: de::SeqAccess<'de>,
+ {
+ let seq = de::value::SeqAccessDeserializer::new(v);
+ Vec::deserialize(seq).map(MaybeWorkspace::Defined)
+ }
+
+ fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
+ where
+ V: de::MapAccess<'de>,
+ {
+ let mvd = de::value::MapAccessDeserializer::new(map);
+ TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)
+ }
+ }
+
+ d.deserialize_any(Visitor)
+ }
+}
+
+type MaybeWorkspaceStringOrBool = MaybeWorkspace<StringOrBool, TomlWorkspaceField>;
+impl<'de> de::Deserialize<'de> for MaybeWorkspaceStringOrBool {
+ fn deserialize<D>(d: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ struct Visitor;
+
+ impl<'de> de::Visitor<'de> for Visitor {
+ type Value = MaybeWorkspaceStringOrBool;
+
+ fn expecting(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
+ f.write_str("a string, a bool, or workspace")
+ }
+
+ fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ let b = de::value::BoolDeserializer::new(v);
+ StringOrBool::deserialize(b).map(MaybeWorkspace::Defined)
+ }
+
+ fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ let string = de::value::StringDeserializer::new(v);
+ StringOrBool::deserialize(string).map(MaybeWorkspace::Defined)
+ }
+
+ fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
+ where
+ V: de::MapAccess<'de>,
+ {
+ let mvd = de::value::MapAccessDeserializer::new(map);
+ TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)
+ }
+ }
+
+ d.deserialize_any(Visitor)
+ }
+}
+
+type MaybeWorkspaceVecStringOrBool = MaybeWorkspace<VecStringOrBool, TomlWorkspaceField>;
+impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecStringOrBool {
+ fn deserialize<D>(d: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ struct Visitor;
+
+ impl<'de> de::Visitor<'de> for Visitor {
+ type Value = MaybeWorkspaceVecStringOrBool;
+
+ fn expecting(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
+ f.write_str("a boolean, a vector of strings, or workspace")
+ }
+
+ fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ let b = de::value::BoolDeserializer::new(v);
+ VecStringOrBool::deserialize(b).map(MaybeWorkspace::Defined)
+ }
+
+ fn visit_seq<A>(self, v: A) -> Result<Self::Value, A::Error>
+ where
+ A: de::SeqAccess<'de>,
+ {
+ let seq = de::value::SeqAccessDeserializer::new(v);
+ VecStringOrBool::deserialize(seq).map(MaybeWorkspace::Defined)
+ }
+
+ fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
+ where
+ V: de::MapAccess<'de>,
+ {
+ let mvd = de::value::MapAccessDeserializer::new(map);
+ TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)
+ }
+ }
+
+ d.deserialize_any(Visitor)
+ }
+}
+
+type MaybeWorkspaceBtreeMap =
+ MaybeWorkspace<BTreeMap<String, BTreeMap<String, String>>, TomlWorkspaceField>;
+
+impl<'de> de::Deserialize<'de> for MaybeWorkspaceBtreeMap {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ let value = serde_value::Value::deserialize(deserializer)?;
+
+ if let Ok(w) = TomlWorkspaceField::deserialize(
+ serde_value::ValueDeserializer::<D::Error>::new(value.clone()),
+ ) {
+ return if w.workspace() {
+ Ok(MaybeWorkspace::Workspace(w))
+ } else {
+ Err(de::Error::custom("`workspace` cannot be false"))
+ };
+ }
+ BTreeMap::deserialize(serde_value::ValueDeserializer::<D::Error>::new(value))
+ .map(MaybeWorkspace::Defined)
+ }
+}
+
+#[derive(Deserialize, Serialize, Clone, Debug)]
+pub struct TomlWorkspaceField {
+ #[serde(deserialize_with = "bool_no_false")]
+ workspace: bool,
+}
+
+fn bool_no_false<'de, D: de::Deserializer<'de>>(deserializer: D) -> Result<bool, D::Error> {
+ let b: bool = Deserialize::deserialize(deserializer)?;
+ if b {
+ Ok(b)
+ } else {
+ Err(de::Error::custom("`workspace` cannot be false"))
+ }
+}
+
+impl WorkspaceInherit for TomlWorkspaceField {
+ fn inherit_toml_table(&self) -> &str {
+ "package"
+ }
+
+ fn workspace(&self) -> bool {
+ self.workspace
+ }
+}
+
+/// Represents the `package`/`project` sections of a `Cargo.toml`.
+///
+/// Note that the order of the fields matters, since this is the order they
+/// are serialized to a TOML file. For example, you cannot have values after
+/// the field `metadata`, since it is a table and values cannot appear after
+/// tables.
+#[derive(Deserialize, Serialize, Clone, Debug)]
+#[serde(rename_all = "kebab-case")]
+pub struct TomlPackage {
+ edition: Option<MaybeWorkspaceString>,
+ rust_version: Option<MaybeWorkspaceString>,
+ name: InternedString,
+ #[serde(deserialize_with = "version_trim_whitespace")]
+ version: MaybeWorkspaceSemverVersion,
+ authors: Option<MaybeWorkspaceVecString>,
+ build: Option<StringOrBool>,
+ metabuild: Option<StringOrVec>,
+ #[serde(rename = "default-target")]
+ default_target: Option<String>,
+ #[serde(rename = "forced-target")]
+ forced_target: Option<String>,
+ links: Option<String>,
+ exclude: Option<MaybeWorkspaceVecString>,
+ include: Option<MaybeWorkspaceVecString>,
+ publish: Option<MaybeWorkspaceVecStringOrBool>,
+ workspace: Option<String>,
+ im_a_teapot: Option<bool>,
+ autobins: Option<bool>,
+ autoexamples: Option<bool>,
+ autotests: Option<bool>,
+ autobenches: Option<bool>,
+ default_run: Option<String>,
+
+ // Package metadata.
+ description: Option<MaybeWorkspaceString>,
+ homepage: Option<MaybeWorkspaceString>,
+ documentation: Option<MaybeWorkspaceString>,
+ readme: Option<MaybeWorkspaceStringOrBool>,
+ keywords: Option<MaybeWorkspaceVecString>,
+ categories: Option<MaybeWorkspaceVecString>,
+ license: Option<MaybeWorkspaceString>,
+ license_file: Option<MaybeWorkspaceString>,
+ repository: Option<MaybeWorkspaceString>,
+ resolver: Option<String>,
+
+ // Note that this field must come last due to the way toml serialization
+ // works which requires tables to be emitted after all values.
+ metadata: Option<toml::Value>,
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone)]
+pub struct TomlWorkspace {
+ members: Option<Vec<String>>,
+ #[serde(rename = "default-members")]
+ default_members: Option<Vec<String>>,
+ exclude: Option<Vec<String>>,
+ resolver: Option<String>,
+
+ // Properties that can be inherited by members.
+ package: Option<InheritableFields>,
+ dependencies: Option<BTreeMap<String, TomlDependency>>,
+
+ // Note that this field must come last due to the way toml serialization
+ // works which requires tables to be emitted after all values.
+ metadata: Option<toml::Value>,
+}
+
+/// A group of fields that are inheritable by members of the workspace
+#[derive(Clone, Debug, Default, Deserialize, Serialize)]
+pub struct InheritableFields {
+ // We use skip here since it will never be present when deserializing
+ // and we don't want it present when serializing
+ #[serde(skip)]
+ dependencies: Option<BTreeMap<String, TomlDependency>>,
+ version: Option<semver::Version>,
+ authors: Option<Vec<String>>,
+ description: Option<String>,
+ homepage: Option<String>,
+ documentation: Option<String>,
+ readme: Option<StringOrBool>,
+ keywords: Option<Vec<String>>,
+ categories: Option<Vec<String>>,
+ license: Option<String>,
+ #[serde(rename = "license-file")]
+ license_file: Option<String>,
+ repository: Option<String>,
+ publish: Option<VecStringOrBool>,
+ edition: Option<String>,
+ badges: Option<BTreeMap<String, BTreeMap<String, String>>>,
+ exclude: Option<Vec<String>>,
+ include: Option<Vec<String>>,
+ #[serde(rename = "rust-version")]
+ rust_version: Option<String>,
+ // We use skip here since it will never be present when deserializing
+ // and we don't want it present when serializing
+ #[serde(skip)]
+ ws_root: PathBuf,
+}
+
+impl InheritableFields {
+ pub fn update_deps(&mut self, deps: Option<BTreeMap<String, TomlDependency>>) {
+ self.dependencies = deps;
+ }
+
+ pub fn update_ws_path(&mut self, ws_root: PathBuf) {
+ self.ws_root = ws_root;
+ }
+
+ pub fn dependencies(&self) -> CargoResult<BTreeMap<String, TomlDependency>> {
+ self.dependencies.clone().map_or(
+ Err(anyhow!("`workspace.dependencies` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn get_dependency(&self, name: &str, package_root: &Path) -> CargoResult<TomlDependency> {
+ self.dependencies.clone().map_or(
+ Err(anyhow!("`workspace.dependencies` was not defined")),
+ |deps| {
+ deps.get(name).map_or(
+ Err(anyhow!(
+ "`dependency.{}` was not found in `workspace.dependencies`",
+ name
+ )),
+ |dep| {
+ let mut dep = dep.clone();
+ if let TomlDependency::Detailed(detailed) = &mut dep {
+ detailed.resolve_path(name, self.ws_root(), package_root)?
+ }
+ Ok(dep)
+ },
+ )
+ },
+ )
+ }
+
+ pub fn version(&self) -> CargoResult<semver::Version> {
+ self.version.clone().map_or(
+ Err(anyhow!("`workspace.package.version` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn authors(&self) -> CargoResult<Vec<String>> {
+ self.authors.clone().map_or(
+ Err(anyhow!("`workspace.package.authors` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn description(&self) -> CargoResult<String> {
+ self.description.clone().map_or(
+ Err(anyhow!("`workspace.package.description` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn homepage(&self) -> CargoResult<String> {
+ self.homepage.clone().map_or(
+ Err(anyhow!("`workspace.package.homepage` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn documentation(&self) -> CargoResult<String> {
+ self.documentation.clone().map_or(
+ Err(anyhow!("`workspace.package.documentation` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn readme(&self, package_root: &Path) -> CargoResult<StringOrBool> {
+ readme_for_package(self.ws_root.as_path(), self.readme.clone()).map_or(
+ Err(anyhow!("`workspace.package.readme` was not defined")),
+ |readme| {
+ let rel_path =
+ resolve_relative_path("readme", &self.ws_root, package_root, &readme)?;
+ Ok(StringOrBool::String(rel_path))
+ },
+ )
+ }
+
+ pub fn keywords(&self) -> CargoResult<Vec<String>> {
+ self.keywords.clone().map_or(
+ Err(anyhow!("`workspace.package.keywords` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn categories(&self) -> CargoResult<Vec<String>> {
+ self.categories.clone().map_or(
+ Err(anyhow!("`workspace.package.categories` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn license(&self) -> CargoResult<String> {
+ self.license.clone().map_or(
+ Err(anyhow!("`workspace.package.license` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn license_file(&self, package_root: &Path) -> CargoResult<String> {
+ self.license_file.clone().map_or(
+ Err(anyhow!("`workspace.package.license_file` was not defined")),
+ |d| resolve_relative_path("license-file", &self.ws_root, package_root, &d),
+ )
+ }
+
+ pub fn repository(&self) -> CargoResult<String> {
+ self.repository.clone().map_or(
+ Err(anyhow!("`workspace.package.repository` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn publish(&self) -> CargoResult<VecStringOrBool> {
+ self.publish.clone().map_or(
+ Err(anyhow!("`workspace.package.publish` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn edition(&self) -> CargoResult<String> {
+ self.edition.clone().map_or(
+ Err(anyhow!("`workspace.package.edition` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn rust_version(&self) -> CargoResult<String> {
+ self.rust_version.clone().map_or(
+ Err(anyhow!("`workspace.package.rust-version` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn badges(&self) -> CargoResult<BTreeMap<String, BTreeMap<String, String>>> {
+ self.badges.clone().map_or(
+ Err(anyhow!("`workspace.package.badges` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn exclude(&self) -> CargoResult<Vec<String>> {
+ self.exclude.clone().map_or(
+ Err(anyhow!("`workspace.package.exclude` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn include(&self) -> CargoResult<Vec<String>> {
+ self.include.clone().map_or(
+ Err(anyhow!("`workspace.package.include` was not defined")),
+ |d| Ok(d),
+ )
+ }
+
+ pub fn ws_root(&self) -> &PathBuf {
+ &self.ws_root
+ }
+}
+
+impl TomlPackage {
+ pub fn to_package_id(
+ &self,
+ source_id: SourceId,
+ version: semver::Version,
+ ) -> CargoResult<PackageId> {
+ PackageId::new(self.name, version, source_id)
+ }
+}
+
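+/// Scratch state threaded through dependency conversion: the growing list of
+/// resolved `Dependency`s plus the source, config, warning sink, and current
+/// platform needed to interpret each TOML dependency entry.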
+struct Context<'a, 'b> {
+ deps: &'a mut Vec<Dependency>,
+ source_id: SourceId,
+ nested_paths: &'a mut Vec<PathBuf>,
+ config: &'b Config,
+ warnings: &'a mut Vec<String>,
+ platform: Option<Platform>,
+ root: &'a Path,
+ features: &'a Features,
+}
+
+impl TomlManifest {
+ /// Prepares the manifest for publishing.
+ // - Path and git components of dependency specifications are removed.
+ // - License path is updated to point within the package.
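+ //
+ // For example (hypothetical entry), a dependency published from
+ // `example-dep = { path = "../example-dep", version = "1.0" }` is rewritten
+ // to `example-dep = { version = "1.0" }` so the registry copy no longer
+ // refers to local paths.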
+ pub fn prepare_for_publish(
+ &self,
+ ws: &Workspace<'_>,
+ package_root: &Path,
+ ) -> CargoResult<TomlManifest> {
+ let config = ws.config();
+ let mut package = self
+ .package
+ .as_ref()
+ .or_else(|| self.project.as_ref())
+ .unwrap()
+ .clone();
+ package.workspace = None;
+ let current_resolver = package
+ .resolver
+ .as_ref()
+ .map(|r| ResolveBehavior::from_manifest(r))
+ .unwrap_or_else(|| {
+ package
+ .edition
+ .as_ref()
+ .and_then(|e| e.as_defined())
+ .map(|e| Edition::from_str(e))
+ .unwrap_or(Ok(Edition::Edition2015))
+ .map(|e| e.default_resolve_behavior())
+ })?;
+ if ws.resolve_behavior() != current_resolver {
+ // This ensures that the published crate, if built as a root (e.g. via
+ // `cargo install`), will use the same resolver behavior it was tested with
+ // in the workspace. To avoid forcing a higher MSRV we don't set this
+ // explicitly when it would implicitly resolve to the same thing.
+ package.resolver = Some(ws.resolve_behavior().to_manifest());
+ }
+ if let Some(license_file) = &package.license_file {
+ let license_file = license_file
+ .as_defined()
+ .context("license file should have been resolved before `prepare_for_publish()`")?;
+ let license_path = Path::new(&license_file);
+ let abs_license_path = paths::normalize_path(&package_root.join(license_path));
+ if abs_license_path.strip_prefix(package_root).is_err() {
+ // This path points outside of the package root. `cargo package`
+ // will copy it into the root, so adjust the path to this location.
+ package.license_file = Some(MaybeWorkspace::Defined(
+ license_path
+ .file_name()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ .to_string(),
+ ));
+ }
+ }
+
+ if let Some(readme) = &package.readme {
+ let readme = readme
+ .as_defined()
+ .context("readme should have been resolved before `prepare_for_publish()`")?;
+ match readme {
+ StringOrBool::String(readme) => {
+ let readme_path = Path::new(&readme);
+ let abs_readme_path = paths::normalize_path(&package_root.join(readme_path));
+ if abs_readme_path.strip_prefix(package_root).is_err() {
+ // This path points outside of the package root. `cargo package`
+ // will copy it into the root, so adjust the path to this location.
+ package.readme = Some(MaybeWorkspace::Defined(StringOrBool::String(
+ readme_path
+ .file_name()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ .to_string(),
+ )));
+ }
+ }
+ StringOrBool::Bool(_) => {}
+ }
+ }
+ let all = |_d: &TomlDependency| true;
+ return Ok(TomlManifest {
+ package: Some(package),
+ project: None,
+ profile: self.profile.clone(),
+ lib: self.lib.clone(),
+ bin: self.bin.clone(),
+ example: self.example.clone(),
+ test: self.test.clone(),
+ bench: self.bench.clone(),
+ dependencies: map_deps(config, self.dependencies.as_ref(), all)?,
+ dev_dependencies: map_deps(
+ config,
+ self.dev_dependencies
+ .as_ref()
+ .or_else(|| self.dev_dependencies2.as_ref()),
+ TomlDependency::is_version_specified,
+ )?,
+ dev_dependencies2: None,
+ build_dependencies: map_deps(
+ config,
+ self.build_dependencies
+ .as_ref()
+ .or_else(|| self.build_dependencies2.as_ref()),
+ all,
+ )?,
+ build_dependencies2: None,
+ features: self.features.clone(),
+ target: match self.target.as_ref().map(|target_map| {
+ target_map
+ .iter()
+ .map(|(k, v)| {
+ Ok((
+ k.clone(),
+ TomlPlatform {
+ dependencies: map_deps(config, v.dependencies.as_ref(), all)?,
+ dev_dependencies: map_deps(
+ config,
+ v.dev_dependencies
+ .as_ref()
+ .or_else(|| v.dev_dependencies2.as_ref()),
+ TomlDependency::is_version_specified,
+ )?,
+ dev_dependencies2: None,
+ build_dependencies: map_deps(
+ config,
+ v.build_dependencies
+ .as_ref()
+ .or_else(|| v.build_dependencies2.as_ref()),
+ all,
+ )?,
+ build_dependencies2: None,
+ },
+ ))
+ })
+ .collect()
+ }) {
+ Some(Ok(v)) => Some(v),
+ Some(Err(e)) => return Err(e),
+ None => None,
+ },
+ replace: None,
+ patch: None,
+ workspace: None,
+ badges: self.badges.clone(),
+ cargo_features: self.cargo_features.clone(),
+ });
+
+ fn map_deps(
+ config: &Config,
+ deps: Option<&BTreeMap<String, MaybeWorkspaceDependency>>,
+ filter: impl Fn(&TomlDependency) -> bool,
+ ) -> CargoResult<Option<BTreeMap<String, MaybeWorkspaceDependency>>> {
+ let deps = match deps {
+ Some(deps) => deps,
+ None => return Ok(None),
+ };
+ let deps = deps
+ .iter()
+ .filter(|(_k, v)| {
+ if let MaybeWorkspace::Defined(def) = v {
+ filter(def)
+ } else {
+ false
+ }
+ })
+ .map(|(k, v)| Ok((k.clone(), map_dependency(config, v)?)))
+ .collect::<CargoResult<BTreeMap<_, _>>>()?;
+ Ok(Some(deps))
+ }
+
+ fn map_dependency(
+ config: &Config,
+ dep: &MaybeWorkspaceDependency,
+ ) -> CargoResult<MaybeWorkspaceDependency> {
+ let dep = match dep {
+ MaybeWorkspace::Defined(TomlDependency::Detailed(d)) => {
+ let mut d = d.clone();
+ // Path dependencies become crates.io deps.
+ d.path.take();
+ // Same with git dependencies.
+ d.git.take();
+ d.branch.take();
+ d.tag.take();
+ d.rev.take();
+ // registry specifications are elaborated to the index URL
+ if let Some(registry) = d.registry.take() {
+ d.registry_index = Some(config.get_registry_index(&registry)?.to_string());
+ }
+ Ok(d)
+ }
+ MaybeWorkspace::Defined(TomlDependency::Simple(s)) => Ok(DetailedTomlDependency {
+ version: Some(s.clone()),
+ ..Default::default()
+ }),
+ _ => unreachable!(),
+ };
+ dep.map(TomlDependency::Detailed)
+ .map(MaybeWorkspace::Defined)
+ }
+ }
+
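+ /// Converts this parsed `TomlManifest` into a real `Manifest`, resolving any
+ /// `workspace = true` fields against the owning workspace along the way.
+ ///
+ /// A member manifest might contain (hypothetical crate name):
+ ///
+ /// ```toml
+ /// [package]
+ /// name = "example-member"
+ /// version.workspace = true
+ /// edition.workspace = true
+ /// ```
+ ///
+ /// and the corresponding values are pulled from the workspace root via
+ /// `InheritableFields`.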
+ pub fn to_real_manifest(
+ me: &Rc<TomlManifest>,
+ source_id: SourceId,
+ package_root: &Path,
+ config: &Config,
+ ) -> CargoResult<(Manifest, Vec<PathBuf>)> {
+ fn get_ws(
+ config: &Config,
+ resolved_path: &Path,
+ workspace_config: &WorkspaceConfig,
+ ) -> CargoResult<InheritableFields> {
+ match workspace_config {
+ WorkspaceConfig::Root(root) => Ok(root.inheritable().clone()),
+ WorkspaceConfig::Member {
+ root: Some(ref path_to_root),
+ } => {
+ let path = resolved_path
+ .parent()
+ .unwrap()
+ .join(path_to_root)
+ .join("Cargo.toml");
+ let root_path = paths::normalize_path(&path);
+ inheritable_from_path(config, root_path)
+ }
+ WorkspaceConfig::Member { root: None } => {
+ match find_workspace_root(&resolved_path, config)? {
+ Some(path_to_root) => inheritable_from_path(config, path_to_root),
+ None => Err(anyhow!("failed to find a workspace root")),
+ }
+ }
+ }
+ }
+
+ let mut nested_paths = vec![];
+ let mut warnings = vec![];
+ let mut errors = vec![];
+
+ // Parse features first so they will be available when parsing other parts of the TOML.
+ let empty = Vec::new();
+ let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty);
+ let features = Features::new(cargo_features, config, &mut warnings, source_id.is_path())?;
+
+ let mut package = match (&me.package, &me.project) {
+ (Some(_), Some(project)) => {
+ if source_id.is_path() {
+ config.shell().warn(format!(
+ "manifest at `{}` contains both `project` and `package`, \
+ this could become a hard error in the future",
+ package_root.display()
+ ))?;
+ }
+ project.clone()
+ }
+ (Some(package), None) => package.clone(),
+ (None, Some(project)) => {
+ if source_id.is_path() {
+ config.shell().warn(format!(
+ "manifest at `{}` contains `[project]` instead of `[package]`, \
+ this could become a hard error in the future",
+ package_root.display()
+ ))?;
+ }
+ project.clone()
+ }
+ (None, None) => bail!("no `package` section found"),
+ };
+
+ let workspace_config = match (me.workspace.as_ref(), package.workspace.as_ref()) {
+ (Some(toml_config), None) => {
+ let mut inheritable = toml_config.package.clone().unwrap_or_default();
+ inheritable.update_ws_path(package_root.to_path_buf());
+ inheritable.update_deps(toml_config.dependencies.clone());
+ if let Some(ws_deps) = &inheritable.dependencies {
+ for (name, dep) in ws_deps {
+ unused_dep_keys(
+ name,
+ "workspace.dependencies",
+ dep.unused_keys(),
+ &mut warnings,
+ );
+ }
+ }
+ let ws_root_config = WorkspaceRootConfig::new(
+ package_root,
+ &toml_config.members,
+ &toml_config.default_members,
+ &toml_config.exclude,
+ &Some(inheritable),
+ &toml_config.metadata,
+ );
+ config
+ .ws_roots
+ .borrow_mut()
+ .insert(package_root.to_path_buf(), ws_root_config.clone());
+ WorkspaceConfig::Root(ws_root_config)
+ }
+ (None, root) => WorkspaceConfig::Member {
+ root: root.cloned(),
+ },
+ (Some(..), Some(..)) => bail!(
+ "cannot configure both `package.workspace` and \
+ `[workspace]`, only one can be specified"
+ ),
+ };
+
+ let package_name = package.name.trim();
+ if package_name.is_empty() {
+ bail!("package name cannot be an empty string")
+ }
+
+ validate_package_name(package_name, "package name", "")?;
+
+ let resolved_path = package_root.join("Cargo.toml");
+
+ let inherit_cell: LazyCell<InheritableFields> = LazyCell::new();
+ let inherit =
+ || inherit_cell.try_borrow_with(|| get_ws(config, &resolved_path, &workspace_config));
+
+ let version = package
+ .version
+ .clone()
+ .resolve("version", || inherit()?.version())?;
+
+ package.version = MaybeWorkspace::Defined(version.clone());
+
+ let pkgid = package.to_package_id(source_id, version)?;
+
+ let edition = if let Some(edition) = package.edition.clone() {
+ let edition: Edition = edition
+ .resolve("edition", || inherit()?.edition())?
+ .parse()
+ .with_context(|| "failed to parse the `edition` key")?;
+ package.edition = Some(MaybeWorkspace::Defined(edition.to_string()));
+ edition
+ } else {
+ Edition::Edition2015
+ };
+ // Add these lines when starting a new unstable edition.
+ // ```
+ // if edition == Edition::Edition20xx {
+ // features.require(Feature::edition20xx))?;
+ // }
+ // ```
+ if !edition.is_stable() {
+ // Guard in case someone forgets to add .require()
+ return Err(util::errors::internal(format!(
+ "edition {} should be gated",
+ edition
+ )));
+ }
+
+ let rust_version = if let Some(rust_version) = &package.rust_version {
+ let rust_version = rust_version
+ .clone()
+ .resolve("rust_version", || inherit()?.rust_version())?;
+ let req = match semver::VersionReq::parse(&rust_version) {
+ // Exclude semver operators like `^` and pre-release identifiers
+ Ok(req) if rust_version.chars().all(|c| c.is_ascii_digit() || c == '.') => req,
+ _ => bail!("`rust-version` must be a value like \"1.32\""),
+ };
+ if let Some(first_version) = edition.first_version() {
+ let unsupported =
+ semver::Version::new(first_version.major, first_version.minor - 1, 9999);
+ if req.matches(&unsupported) {
+ bail!(
+ "rust-version {} is older than first version ({}) required by \
+ the specified edition ({})",
+ rust_version,
+ first_version,
+ edition,
+ )
+ }
+ }
+ Some(rust_version.clone())
+ } else {
+ None
+ };
+
+ if package.metabuild.is_some() {
+ features.require(Feature::metabuild())?;
+ }
+
+ let resolve_behavior = match (
+ package.resolver.as_ref(),
+ me.workspace.as_ref().and_then(|ws| ws.resolver.as_ref()),
+ ) {
+ (None, None) => None,
+ (Some(s), None) | (None, Some(s)) => Some(ResolveBehavior::from_manifest(s)?),
+ (Some(_), Some(_)) => {
+ bail!("cannot specify `resolver` field in both `[workspace]` and `[package]`")
+ }
+ };
+
+ // If we have no lib at all, use the inferred lib, if available.
+ // If we have a lib with a path, we're done.
+ // If we have a lib with no path, use the inferred lib or else the package name.
+ let targets = targets(
+ &features,
+ me,
+ package_name,
+ package_root,
+ edition,
+ &package.build,
+ &package.metabuild,
+ &mut warnings,
+ &mut errors,
+ )?;
+
+ if targets.is_empty() {
+ debug!("manifest has no build targets");
+ }
+
+ if let Err(conflict_targets) = unique_build_targets(&targets, package_root) {
+ conflict_targets
+ .iter()
+ .for_each(|(target_path, conflicts)| {
+ warnings.push(format!(
+ "file `{}` found to be present in multiple \
+ build targets:\n{}",
+ target_path.display().to_string(),
+ conflicts
+ .iter()
+ .map(|t| format!(
+ " * `{}` target `{}`",
+ t.kind().description(),
+ t.name(),
+ ))
+ .join("\n")
+ ));
+ })
+ }
+
+ if let Some(links) = &package.links {
+ if !targets.iter().any(|t| t.is_custom_build()) {
+ bail!(
+ "package `{}` specifies that it links to `{}` but does not \
+ have a custom build script",
+ pkgid,
+ links
+ )
+ }
+ }
+
+ let mut deps = Vec::new();
+
+ let mut cx = Context {
+ deps: &mut deps,
+ source_id,
+ nested_paths: &mut nested_paths,
+ config,
+ warnings: &mut warnings,
+ features: &features,
+ platform: None,
+ root: package_root,
+ };
+
+ fn process_dependencies(
+ cx: &mut Context<'_, '_>,
+ new_deps: Option<&BTreeMap<String, MaybeWorkspaceDependency>>,
+ kind: Option<DepKind>,
+ workspace_config: &WorkspaceConfig,
+ inherit_cell: &LazyCell<InheritableFields>,
+ ) -> CargoResult<Option<BTreeMap<String, MaybeWorkspaceDependency>>> {
+ let dependencies = match new_deps {
+ Some(dependencies) => dependencies,
+ None => return Ok(None),
+ };
+
+ let inheritable = || {
+ inherit_cell.try_borrow_with(|| {
+ get_ws(cx.config, &cx.root.join("Cargo.toml"), &workspace_config)
+ })
+ };
+
+ let mut deps: BTreeMap<String, MaybeWorkspaceDependency> = BTreeMap::new();
+ for (n, v) in dependencies.iter() {
+ let resolved = v
+ .clone()
+ .resolve_with_self(n, |dep| dep.resolve(n, inheritable, cx))?;
+ let dep = resolved.to_dependency(n, cx, kind)?;
+ let name_in_toml = dep.name_in_toml().as_str();
+ validate_package_name(name_in_toml, "dependency name", "")?;
+ let kind_name = match kind {
+ Some(k) => k.kind_table(),
+ None => "dependencies",
+ };
+ let table_in_toml = if let Some(platform) = &cx.platform {
+ format!("target.{}.{kind_name}", platform.to_string())
+ } else {
+ kind_name.to_string()
+ };
+ unused_dep_keys(name_in_toml, &table_in_toml, v.unused_keys(), cx.warnings);
+ cx.deps.push(dep);
+ deps.insert(n.to_string(), MaybeWorkspace::Defined(resolved.clone()));
+ }
+ Ok(Some(deps))
+ }
+
+ // Collect the dependencies.
+ let dependencies = process_dependencies(
+ &mut cx,
+ me.dependencies.as_ref(),
+ None,
+ &workspace_config,
+ &inherit_cell,
+ )?;
+ if me.dev_dependencies.is_some() && me.dev_dependencies2.is_some() {
+ warn_on_deprecated("dev-dependencies", package_name, "package", cx.warnings);
+ }
+ let dev_deps = me
+ .dev_dependencies
+ .as_ref()
+ .or_else(|| me.dev_dependencies2.as_ref());
+ let dev_deps = process_dependencies(
+ &mut cx,
+ dev_deps,
+ Some(DepKind::Development),
+ &workspace_config,
+ &inherit_cell,
+ )?;
+ if me.build_dependencies.is_some() && me.build_dependencies2.is_some() {
+ warn_on_deprecated("build-dependencies", package_name, "package", cx.warnings);
+ }
+ let build_deps = me
+ .build_dependencies
+ .as_ref()
+ .or_else(|| me.build_dependencies2.as_ref());
+ let build_deps = process_dependencies(
+ &mut cx,
+ build_deps,
+ Some(DepKind::Build),
+ &workspace_config,
+ &inherit_cell,
+ )?;
+
+ let mut target: BTreeMap<String, TomlPlatform> = BTreeMap::new();
+ for (name, platform) in me.target.iter().flatten() {
+ cx.platform = {
+ let platform: Platform = name.parse()?;
+ platform.check_cfg_attributes(cx.warnings);
+ Some(platform)
+ };
+ let deps = process_dependencies(
+ &mut cx,
+ platform.dependencies.as_ref(),
+ None,
+ &workspace_config,
+ &inherit_cell,
+ )?;
+ if platform.build_dependencies.is_some() && platform.build_dependencies2.is_some() {
+ warn_on_deprecated("build-dependencies", name, "platform target", cx.warnings);
+ }
+ let build_deps = platform
+ .build_dependencies
+ .as_ref()
+ .or_else(|| platform.build_dependencies2.as_ref());
+ let build_deps = process_dependencies(
+ &mut cx,
+ build_deps,
+ Some(DepKind::Build),
+ &workspace_config,
+ &inherit_cell,
+ )?;
+ if platform.dev_dependencies.is_some() && platform.dev_dependencies2.is_some() {
+ warn_on_deprecated("dev-dependencies", name, "platform target", cx.warnings);
+ }
+ let dev_deps = platform
+ .dev_dependencies
+ .as_ref()
+ .or_else(|| platform.dev_dependencies2.as_ref());
+ let dev_deps = process_dependencies(
+ &mut cx,
+ dev_deps,
+ Some(DepKind::Development),
+ &workspace_config,
+ &inherit_cell,
+ )?;
+ target.insert(
+ name.clone(),
+ TomlPlatform {
+ dependencies: deps,
+ build_dependencies: build_deps,
+ build_dependencies2: None,
+ dev_dependencies: dev_deps,
+ dev_dependencies2: None,
+ },
+ );
+ }
+
+ let target = if target.is_empty() {
+ None
+ } else {
+ Some(target)
+ };
+ let replace = me.replace(&mut cx)?;
+ let patch = me.patch(&mut cx)?;
+
+ {
+ let mut names_sources = BTreeMap::new();
+ for dep in &deps {
+ let name = dep.name_in_toml();
+ let prev = names_sources.insert(name.to_string(), dep.source_id());
+ if prev.is_some() && prev != Some(dep.source_id()) {
+ bail!(
+ "Dependency '{}' has different source paths depending on the build \
+ target. Each dependency must have a single canonical source path \
+ irrespective of build target.",
+ name
+ );
+ }
+ }
+ }
+
+ let exclude = package
+ .exclude
+ .clone()
+ .map(|mw| mw.resolve("exclude", || inherit()?.exclude()))
+ .transpose()?
+ .unwrap_or_default();
+ let include = package
+ .include
+ .clone()
+ .map(|mw| mw.resolve("include", || inherit()?.include()))
+ .transpose()?
+ .unwrap_or_default();
+ let empty_features = BTreeMap::new();
+
+ let summary = Summary::new(
+ config,
+ pkgid,
+ deps,
+ me.features.as_ref().unwrap_or(&empty_features),
+ package.links.as_deref(),
+ )?;
+
+ let metadata = ManifestMetadata {
+ description: package
+ .description
+ .clone()
+ .map(|mw| mw.resolve("description", || inherit()?.description()))
+ .transpose()?,
+ homepage: package
+ .homepage
+ .clone()
+ .map(|mw| mw.resolve("homepage", || inherit()?.homepage()))
+ .transpose()?,
+ documentation: package
+ .documentation
+ .clone()
+ .map(|mw| mw.resolve("documentation", || inherit()?.documentation()))
+ .transpose()?,
+ readme: readme_for_package(
+ package_root,
+ package
+ .readme
+ .clone()
+ .map(|mw| mw.resolve("readme", || inherit()?.readme(package_root)))
+ .transpose()?,
+ ),
+ authors: package
+ .authors
+ .clone()
+ .map(|mw| mw.resolve("authors", || inherit()?.authors()))
+ .transpose()?
+ .unwrap_or_default(),
+ license: package
+ .license
+ .clone()
+ .map(|mw| mw.resolve("license", || inherit()?.license()))
+ .transpose()?,
+ license_file: package
+ .license_file
+ .clone()
+ .map(|mw| mw.resolve("license", || inherit()?.license_file(package_root)))
+ .transpose()?,
+ repository: package
+ .repository
+ .clone()
+ .map(|mw| mw.resolve("repository", || inherit()?.repository()))
+ .transpose()?,
+ keywords: package
+ .keywords
+ .clone()
+ .map(|mw| mw.resolve("keywords", || inherit()?.keywords()))
+ .transpose()?
+ .unwrap_or_default(),
+ categories: package
+ .categories
+ .clone()
+ .map(|mw| mw.resolve("categories", || inherit()?.categories()))
+ .transpose()?
+ .unwrap_or_default(),
+ badges: me
+ .badges
+ .clone()
+ .map(|mw| mw.resolve("badges", || inherit()?.badges()))
+ .transpose()?
+ .unwrap_or_default(),
+ links: package.links.clone(),
+ };
+ package.description = metadata
+ .description
+ .clone()
+ .map(|description| MaybeWorkspace::Defined(description));
+ package.homepage = metadata
+ .homepage
+ .clone()
+ .map(|homepage| MaybeWorkspace::Defined(homepage));
+ package.documentation = metadata
+ .documentation
+ .clone()
+ .map(|documentation| MaybeWorkspace::Defined(documentation));
+ package.readme = metadata
+ .readme
+ .clone()
+ .map(|readme| MaybeWorkspace::Defined(StringOrBool::String(readme)));
+ package.authors = package
+ .authors
+ .as_ref()
+ .map(|_| MaybeWorkspace::Defined(metadata.authors.clone()));
+ package.license = metadata
+ .license
+ .clone()
+ .map(|license| MaybeWorkspace::Defined(license));
+ package.license_file = metadata
+ .license_file
+ .clone()
+ .map(|license_file| MaybeWorkspace::Defined(license_file));
+ package.repository = metadata
+ .repository
+ .clone()
+ .map(|repository| MaybeWorkspace::Defined(repository));
+ package.keywords = package
+ .keywords
+ .as_ref()
+ .map(|_| MaybeWorkspace::Defined(metadata.keywords.clone()));
+ package.categories = package
+ .categories
+ .as_ref()
+ .map(|_| MaybeWorkspace::Defined(metadata.categories.clone()));
+ package.rust_version = rust_version.clone().map(|rv| MaybeWorkspace::Defined(rv));
+ package.exclude = package
+ .exclude
+ .as_ref()
+ .map(|_| MaybeWorkspace::Defined(exclude.clone()));
+ package.include = package
+ .include
+ .as_ref()
+ .map(|_| MaybeWorkspace::Defined(include.clone()));
+
+ let profiles = me.profile.clone();
+ if let Some(profiles) = &profiles {
+ let cli_unstable = config.cli_unstable();
+ profiles.validate(cli_unstable, &features, &mut warnings)?;
+ }
+
+ let publish = package
+ .publish
+ .clone()
+ .map(|publish| publish.resolve("publish", || inherit()?.publish()).unwrap());
+
+ package.publish = publish.clone().map(|p| MaybeWorkspace::Defined(p));
+
+ let publish = match publish {
+ Some(VecStringOrBool::VecString(ref vecstring)) => Some(vecstring.clone()),
+ Some(VecStringOrBool::Bool(false)) => Some(vec![]),
+ None | Some(VecStringOrBool::Bool(true)) => None,
+ };
+
+ if summary.features().contains_key("default-features") {
+ warnings.push(
+ "`default-features = [\"..\"]` was found in [features]. \
+ Did you mean to use `default = [\"..\"]`?"
+ .to_string(),
+ )
+ }
+
+ if let Some(run) = &package.default_run {
+ if !targets
+ .iter()
+ .filter(|t| t.is_bin())
+ .any(|t| t.name() == run)
+ {
+ let suggestion =
+ util::closest_msg(run, targets.iter().filter(|t| t.is_bin()), |t| t.name());
+ bail!("default-run target `{}` not found{}", run, suggestion);
+ }
+ }
+
+ let default_kind = package
+ .default_target
+ .as_ref()
+ .map(|t| CompileTarget::new(&*t))
+ .transpose()?
+ .map(CompileKind::Target);
+ let forced_kind = package
+ .forced_target
+ .as_ref()
+ .map(|t| CompileTarget::new(&*t))
+ .transpose()?
+ .map(CompileKind::Target);
+ let custom_metadata = package.metadata.clone();
+ let resolved_toml = TomlManifest {
+ cargo_features: me.cargo_features.clone(),
+ package: Some(package.clone()),
+ project: None,
+ profile: me.profile.clone(),
+ lib: me.lib.clone(),
+ bin: me.bin.clone(),
+ example: me.example.clone(),
+ test: me.test.clone(),
+ bench: me.bench.clone(),
+ dependencies,
+ dev_dependencies: dev_deps,
+ dev_dependencies2: None,
+ build_dependencies: build_deps,
+ build_dependencies2: None,
+ features: me.features.clone(),
+ target,
+ replace: me.replace.clone(),
+ patch: me.patch.clone(),
+ workspace: me.workspace.clone(),
+ badges: me
+ .badges
+ .as_ref()
+ .map(|_| MaybeWorkspace::Defined(metadata.badges.clone())),
+ };
+ let mut manifest = Manifest::new(
+ summary,
+ default_kind,
+ forced_kind,
+ targets,
+ exclude,
+ include,
+ package.links.clone(),
+ metadata,
+ custom_metadata,
+ profiles,
+ publish,
+ replace,
+ patch,
+ workspace_config,
+ features,
+ edition,
+ rust_version,
+ package.im_a_teapot,
+ package.default_run.clone(),
+ Rc::new(resolved_toml),
+ package.metabuild.clone().map(|sov| sov.0),
+ resolve_behavior,
+ );
+ if package.license_file.is_some() && package.license.is_some() {
+ manifest.warnings_mut().add_warning(
+ "only one of `license` or `license-file` is necessary\n\
+ `license` should be used if the package license can be expressed \
+ with a standard SPDX expression.\n\
+ `license-file` should be used if the package uses a non-standard license.\n\
+ See https://doc.rust-lang.org/cargo/reference/manifest.html#the-license-and-license-file-fields \
+ for more information."
+ .to_string(),
+ );
+ }
+ for warning in warnings {
+ manifest.warnings_mut().add_warning(warning);
+ }
+ for error in errors {
+ manifest.warnings_mut().add_critical_warning(error);
+ }
+
+ manifest.feature_gate()?;
+
+ Ok((manifest, nested_paths))
+ }
+
+ fn to_virtual_manifest(
+ me: &Rc<TomlManifest>,
+ source_id: SourceId,
+ root: &Path,
+ config: &Config,
+ ) -> CargoResult<(VirtualManifest, Vec<PathBuf>)> {
+ if me.project.is_some() {
+ bail!("this virtual manifest specifies a [project] section, which is not allowed");
+ }
+ if me.package.is_some() {
+ bail!("this virtual manifest specifies a [package] section, which is not allowed");
+ }
+ if me.lib.is_some() {
+ bail!("this virtual manifest specifies a [lib] section, which is not allowed");
+ }
+ if me.bin.is_some() {
+ bail!("this virtual manifest specifies a [[bin]] section, which is not allowed");
+ }
+ if me.example.is_some() {
+ bail!("this virtual manifest specifies a [[example]] section, which is not allowed");
+ }
+ if me.test.is_some() {
+ bail!("this virtual manifest specifies a [[test]] section, which is not allowed");
+ }
+ if me.bench.is_some() {
+ bail!("this virtual manifest specifies a [[bench]] section, which is not allowed");
+ }
+ if me.dependencies.is_some() {
+ bail!("this virtual manifest specifies a [dependencies] section, which is not allowed");
+ }
+ if me.dev_dependencies.is_some() || me.dev_dependencies2.is_some() {
+ bail!("this virtual manifest specifies a [dev-dependencies] section, which is not allowed");
+ }
+ if me.build_dependencies.is_some() || me.build_dependencies2.is_some() {
+ bail!("this virtual manifest specifies a [build-dependencies] section, which is not allowed");
+ }
+ if me.features.is_some() {
+ bail!("this virtual manifest specifies a [features] section, which is not allowed");
+ }
+ if me.target.is_some() {
+ bail!("this virtual manifest specifies a [target] section, which is not allowed");
+ }
+ if me.badges.is_some() {
+ bail!("this virtual manifest specifies a [badges] section, which is not allowed");
+ }
+
+ let mut nested_paths = Vec::new();
+ let mut warnings = Vec::new();
+ let mut deps = Vec::new();
+ let empty = Vec::new();
+ let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty);
+ let features = Features::new(cargo_features, config, &mut warnings, source_id.is_path())?;
+
+ let (replace, patch) = {
+ let mut cx = Context {
+ deps: &mut deps,
+ source_id,
+ nested_paths: &mut nested_paths,
+ config,
+ warnings: &mut warnings,
+ platform: None,
+ features: &features,
+ root,
+ };
+ (me.replace(&mut cx)?, me.patch(&mut cx)?)
+ };
+ let profiles = me.profile.clone();
+ if let Some(profiles) = &profiles {
+ profiles.validate(config.cli_unstable(), &features, &mut warnings)?;
+ }
+ let resolve_behavior = me
+ .workspace
+ .as_ref()
+ .and_then(|ws| ws.resolver.as_deref())
+ .map(|r| ResolveBehavior::from_manifest(r))
+ .transpose()?;
+ let workspace_config = match me.workspace {
+ Some(ref toml_config) => {
+ let mut inheritable = toml_config.package.clone().unwrap_or_default();
+ inheritable.update_ws_path(root.to_path_buf());
+ inheritable.update_deps(toml_config.dependencies.clone());
+ let ws_root_config = WorkspaceRootConfig::new(
+ root,
+ &toml_config.members,
+ &toml_config.default_members,
+ &toml_config.exclude,
+ &Some(inheritable),
+ &toml_config.metadata,
+ );
+ config
+ .ws_roots
+ .borrow_mut()
+ .insert(root.to_path_buf(), ws_root_config.clone());
+ WorkspaceConfig::Root(ws_root_config)
+ }
+ None => {
+ bail!("virtual manifests must be configured with [workspace]");
+ }
+ };
+ Ok((
+ VirtualManifest::new(
+ replace,
+ patch,
+ workspace_config,
+ profiles,
+ features,
+ resolve_behavior,
+ ),
+ nested_paths,
+ ))
+ }
+
+ fn replace(&self, cx: &mut Context<'_, '_>) -> CargoResult<Vec<(PackageIdSpec, Dependency)>> {
+ if self.patch.is_some() && self.replace.is_some() {
+ bail!("cannot specify both [replace] and [patch]");
+ }
+ let mut replace = Vec::new();
+ for (spec, replacement) in self.replace.iter().flatten() {
+ let mut spec = PackageIdSpec::parse(spec).with_context(|| {
+ format!(
+ "replacements must specify a valid semver \
+ version to replace, but `{}` does not",
+ spec
+ )
+ })?;
+ if spec.url().is_none() {
+ spec.set_url(CRATES_IO_INDEX.parse().unwrap());
+ }
+
+ if replacement.is_version_specified() {
+ bail!(
+ "replacements cannot specify a version \
+ requirement, but found one for `{}`",
+ spec
+ );
+ }
+
+ let mut dep = replacement.to_dependency(spec.name().as_str(), cx, None)?;
+ let version = spec.version().ok_or_else(|| {
+ anyhow!(
+ "replacements must specify a version \
+ to replace, but `{}` does not",
+ spec
+ )
+ })?;
+ unused_dep_keys(
+ dep.name_in_toml().as_str(),
+ "replace",
+ replacement.unused_keys(),
+ &mut cx.warnings,
+ );
+ dep.set_version_req(VersionReq::exact(version))
+ .lock_version(version);
+ replace.push((spec, dep));
+ }
+ Ok(replace)
+ }
+
+ fn patch(&self, cx: &mut Context<'_, '_>) -> CargoResult<HashMap<Url, Vec<Dependency>>> {
+ let mut patch = HashMap::new();
+ for (toml_url, deps) in self.patch.iter().flatten() {
+ let url = match &toml_url[..] {
+ CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(),
+ _ => cx
+ .config
+ .get_registry_index(toml_url)
+ .or_else(|_| toml_url.into_url())
+ .with_context(|| {
+ format!(
+ "[patch] entry `{}` should be a URL or registry name",
+ toml_url
+ )
+ })?,
+ };
+ patch.insert(
+ url,
+ deps.iter()
+ .map(|(name, dep)| {
+ unused_dep_keys(
+ name,
+ &format!("patch.{toml_url}",),
+ dep.unused_keys(),
+ &mut cx.warnings,
+ );
+ dep.to_dependency(name, cx, None)
+ })
+ .collect::<CargoResult<Vec<_>>>()?,
+ );
+ }
+ Ok(patch)
+ }
+
+ /// Returns the path to the build script if one exists for this crate.
+ fn maybe_custom_build(
+ &self,
+ build: &Option<StringOrBool>,
+ package_root: &Path,
+ ) -> Option<PathBuf> {
+ let build_rs = package_root.join("build.rs");
+ match *build {
+ // Explicitly no build script.
+ Some(StringOrBool::Bool(false)) => None,
+ Some(StringOrBool::Bool(true)) => Some(build_rs),
+ Some(StringOrBool::String(ref s)) => Some(PathBuf::from(s)),
+ None => {
+ // If there is a `build.rs` file next to the `Cargo.toml`, assume it is
+ // a build script.
+ if build_rs.is_file() {
+ Some(build_rs)
+ } else {
+ None
+ }
+ }
+ }
+ }
+
+ pub fn has_profiles(&self) -> bool {
+ self.profile.is_some()
+ }
+
+ pub fn features(&self) -> Option<&BTreeMap<InternedString, Vec<InternedString>>> {
+ self.features.as_ref()
+ }
+}
+
+fn unused_dep_keys(
+ dep_name: &str,
+ kind: &str,
+ unused_keys: Vec<String>,
+ warnings: &mut Vec<String>,
+) {
+ for unused in unused_keys {
+ let key = format!("unused manifest key: {kind}.{dep_name}.{unused}");
+ warnings.push(key);
+ }
+}
+
+fn inheritable_from_path(
+ config: &Config,
+ workspace_path: PathBuf,
+) -> CargoResult<InheritableFields> {
+ // The workspace path should end in `Cargo.toml`.
+ let workspace_path_root = workspace_path.parent().unwrap();
+
+ // Scope this borrow so it is released before we potentially need to
+ // read a manifest below (which may borrow `ws_roots` again).
+ if let Some(ws_root) = config.ws_roots.borrow().get(workspace_path_root) {
+ return Ok(ws_root.inheritable().clone());
+ };
+
+ let source_id = SourceId::for_path(workspace_path_root)?;
+ let (man, _) = read_manifest(&workspace_path, source_id, config)?;
+ match man.workspace_config() {
+ WorkspaceConfig::Root(root) => {
+ config
+ .ws_roots
+ .borrow_mut()
+ .insert(workspace_path, root.clone());
+ Ok(root.inheritable().clone())
+ }
+ _ => bail!(
+ "root of a workspace inferred but wasn't a root: {}",
+ workspace_path.display()
+ ),
+ }
+}
+
+/// Returns the name of the README file for a [`TomlPackage`].
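+///
+/// Rough behavior (mirroring the match below): `readme = false` yields `None`,
+/// `readme = true` yields `Some("README.md")`, an explicit string is returned
+/// as-is, and an omitted key falls back to the default README file names found
+/// in the package root.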
+pub fn readme_for_package(package_root: &Path, readme: Option<StringOrBool>) -> Option<String> {
+ match &readme {
+ None => default_readme_from_package_root(package_root),
+ Some(value) => match value {
+ StringOrBool::Bool(false) => None,
+ StringOrBool::Bool(true) => Some("README.md".to_string()),
+ StringOrBool::String(v) => Some(v.clone()),
+ },
+ }
+}
+
+const DEFAULT_README_FILES: [&str; 3] = ["README.md", "README.txt", "README"];
+
+/// Checks if a file with any of the default README file names exists in the package root.
+/// If so, returns a `String` representing that name.
+fn default_readme_from_package_root(package_root: &Path) -> Option<String> {
+ for &readme_filename in DEFAULT_README_FILES.iter() {
+ if package_root.join(readme_filename).is_file() {
+ return Some(readme_filename.to_string());
+ }
+ }
+
+ None
+}
+
+/// Checks a list of build targets, and ensures the source file paths are unique within the package.
+/// If not, a map from each conflicting source path to the targets that use it is returned.
+fn unique_build_targets(
+ targets: &[Target],
+ package_root: &Path,
+) -> Result<(), HashMap<PathBuf, Vec<Target>>> {
+ let mut source_targets = HashMap::<_, Vec<_>>::new();
+ for target in targets {
+ if let TargetSourcePath::Path(path) = target.src_path() {
+ let full = package_root.join(path);
+ source_targets.entry(full).or_default().push(target.clone());
+ }
+ }
+
+ let conflict_targets = source_targets
+ .into_iter()
+ .filter(|(_, targets)| targets.len() > 1)
+ .collect::<HashMap<_, _>>();
+
+ if !conflict_targets.is_empty() {
+ return Err(conflict_targets);
+ }
+
+ Ok(())
+}
+
+impl<P: ResolveToPath + Clone> TomlDependency<P> {
+ pub(crate) fn to_dependency_split(
+ &self,
+ name: &str,
+ source_id: SourceId,
+ nested_paths: &mut Vec<PathBuf>,
+ config: &Config,
+ warnings: &mut Vec<String>,
+ platform: Option<Platform>,
+ root: &Path,
+ features: &Features,
+ kind: Option<DepKind>,
+ ) -> CargoResult<Dependency> {
+ self.to_dependency(
+ name,
+ &mut Context {
+ deps: &mut Vec::new(),
+ source_id,
+ nested_paths,
+ config,
+ warnings,
+ platform,
+ root,
+ features,
+ },
+ kind,
+ )
+ }
+
+ fn to_dependency(
+ &self,
+ name: &str,
+ cx: &mut Context<'_, '_>,
+ kind: Option<DepKind>,
+ ) -> CargoResult<Dependency> {
+ match *self {
+ TomlDependency::Simple(ref version) => DetailedTomlDependency::<P> {
+ version: Some(version.clone()),
+ ..Default::default()
+ }
+ .to_dependency(name, cx, kind),
+ TomlDependency::Detailed(ref details) => details.to_dependency(name, cx, kind),
+ }
+ }
+
+ fn is_version_specified(&self) -> bool {
+ match self {
+ TomlDependency::Detailed(d) => d.version.is_some(),
+ TomlDependency::Simple(..) => true,
+ }
+ }
+
+ fn is_optional(&self) -> bool {
+ match self {
+ TomlDependency::Detailed(d) => d.optional.unwrap_or(false),
+ TomlDependency::Simple(..) => false,
+ }
+ }
+}
+
+impl<P: ResolveToPath + Clone> DetailedTomlDependency<P> {
+ fn to_dependency(
+ &self,
+ name_in_toml: &str,
+ cx: &mut Context<'_, '_>,
+ kind: Option<DepKind>,
+ ) -> CargoResult<Dependency> {
+ if self.version.is_none() && self.path.is_none() && self.git.is_none() {
+ let msg = format!(
+ "dependency ({}) specified without \
+ providing a local path, Git repository, or \
+ version to use. This will be considered an \
+ error in future versions",
+ name_in_toml
+ );
+ cx.warnings.push(msg);
+ }
+
+ if let Some(version) = &self.version {
+ if version.contains('+') {
+ cx.warnings.push(format!(
+ "version requirement `{}` for dependency `{}` \
+ includes semver metadata which will be ignored, removing the \
+ metadata is recommended to avoid confusion",
+ version, name_in_toml
+ ));
+ }
+ }
+
+ if self.git.is_none() {
+ let git_only_keys = [
+ (&self.branch, "branch"),
+ (&self.tag, "tag"),
+ (&self.rev, "rev"),
+ ];
+
+ for &(key, key_name) in &git_only_keys {
+ if key.is_some() {
+ bail!(
+ "key `{}` is ignored for dependency ({}).",
+ key_name,
+ name_in_toml
+ );
+ }
+ }
+ }
+
+ // Early detection of potentially misused feature syntax
+ // instead of generating a "feature not found" error.
+ if let Some(features) = &self.features {
+ for feature in features {
+ if feature.contains('/') {
+ bail!(
+ "feature `{}` in dependency `{}` is not allowed to contain slashes\n\
+ If you want to enable features of a transitive dependency, \
+ the direct dependency needs to re-export those features from \
+ the `[features]` table.",
+ feature,
+ name_in_toml
+ );
+ }
+ if feature.starts_with("dep:") {
+ bail!(
+ "feature `{}` in dependency `{}` is not allowed to use explicit \
+ `dep:` syntax\n\
+ If you want to enable an optional dependency, specify the name \
+ of the optional dependency without the `dep:` prefix, or specify \
+ a feature from the dependency's `[features]` table that enables \
+ the optional dependency.",
+ feature,
+ name_in_toml
+ );
+ }
+ }
+ }
+
+ let new_source_id = match (
+ self.git.as_ref(),
+ self.path.as_ref(),
+ self.registry.as_ref(),
+ self.registry_index.as_ref(),
+ ) {
+ (Some(_), _, Some(_), _) | (Some(_), _, _, Some(_)) => bail!(
+ "dependency ({}) specification is ambiguous. \
+ Only one of `git` or `registry` is allowed.",
+ name_in_toml
+ ),
+ (_, _, Some(_), Some(_)) => bail!(
+ "dependency ({}) specification is ambiguous. \
+ Only one of `registry` or `registry-index` is allowed.",
+ name_in_toml
+ ),
+ (Some(git), maybe_path, _, _) => {
+ if maybe_path.is_some() {
+ bail!(
+ "dependency ({}) specification is ambiguous. \
+ Only one of `git` or `path` is allowed.",
+ name_in_toml
+ );
+ }
+
+ let n_details = [&self.branch, &self.tag, &self.rev]
+ .iter()
+ .filter(|d| d.is_some())
+ .count();
+
+ if n_details > 1 {
+ bail!(
+ "dependency ({}) specification is ambiguous. \
+ Only one of `branch`, `tag` or `rev` is allowed.",
+ name_in_toml
+ );
+ }
+
+ let reference = self
+ .branch
+ .clone()
+ .map(GitReference::Branch)
+ .or_else(|| self.tag.clone().map(GitReference::Tag))
+ .or_else(|| self.rev.clone().map(GitReference::Rev))
+ .unwrap_or(GitReference::DefaultBranch);
+ let loc = git.into_url()?;
+
+ if let Some(fragment) = loc.fragment() {
+ let msg = format!(
+ "URL fragment `#{}` in git URL is ignored for dependency ({}). \
+ If you were trying to specify a specific git revision, \
+ use `rev = \"{}\"` in the dependency declaration.",
+ fragment, name_in_toml, fragment
+ );
+ cx.warnings.push(msg)
+ }
+
+ SourceId::for_git(&loc, reference)?
+ }
+ (None, Some(path), _, _) => {
+ let path = path.resolve(cx.config);
+ cx.nested_paths.push(path.clone());
+ // If the source ID for the package we're parsing is a path
+ // source, then we normalize the path here to get rid of
+ // components like `..`.
+ //
+ // The purpose of this is to get a canonical ID for the package
+ // that we're depending on to ensure that builds of this package
+ // always end up hashing to the same value no matter where it's
+ // built from.
+ if cx.source_id.is_path() {
+ let path = cx.root.join(path);
+ let path = paths::normalize_path(&path);
+ SourceId::for_path(&path)?
+ } else {
+ cx.source_id
+ }
+ }
+ (None, None, Some(registry), None) => SourceId::alt_registry(cx.config, registry)?,
+ (None, None, None, Some(registry_index)) => {
+ let url = registry_index.into_url()?;
+ SourceId::for_registry(&url)?
+ }
+ (None, None, None, None) => SourceId::crates_io(cx.config)?,
+ };
+
+ let (pkg_name, explicit_name_in_toml) = match self.package {
+ Some(ref s) => (&s[..], Some(name_in_toml)),
+ None => (name_in_toml, None),
+ };
+
+ let version = self.version.as_deref();
+ let mut dep = Dependency::parse(pkg_name, version, new_source_id)?;
+ if self.default_features.is_some() && self.default_features2.is_some() {
+ warn_on_deprecated("default-features", name_in_toml, "dependency", cx.warnings);
+ }
+ dep.set_features(self.features.iter().flatten())
+ .set_default_features(
+ self.default_features
+ .or(self.default_features2)
+ .unwrap_or(true),
+ )
+ .set_optional(self.optional.unwrap_or(false))
+ .set_platform(cx.platform.clone());
+ if let Some(registry) = &self.registry {
+ let registry_id = SourceId::alt_registry(cx.config, registry)?;
+ dep.set_registry_id(registry_id);
+ }
+ if let Some(registry_index) = &self.registry_index {
+ let url = registry_index.into_url()?;
+ let registry_id = SourceId::for_registry(&url)?;
+ dep.set_registry_id(registry_id);
+ }
+
+ if let Some(kind) = kind {
+ dep.set_kind(kind);
+ }
+ if let Some(name_in_toml) = explicit_name_in_toml {
+ dep.set_explicit_name_in_toml(name_in_toml);
+ }
+
+ if let Some(p) = self.public {
+ cx.features.require(Feature::public_dependency())?;
+
+ if dep.kind() != DepKind::Normal {
+ bail!("'public' specifier can only be used on regular dependencies, not {:?} dependencies", dep.kind());
+ }
+
+ dep.set_public(p);
+ }
+
+ if let (Some(artifact), is_lib, target) = (
+ self.artifact.as_ref(),
+ self.lib.unwrap_or(false),
+ self.target.as_deref(),
+ ) {
+ if cx.config.cli_unstable().bindeps {
+ let artifact = Artifact::parse(artifact, is_lib, target)?;
+ if dep.kind() != DepKind::Build
+ && artifact.target() == Some(ArtifactTarget::BuildDependencyAssumeTarget)
+ {
+ bail!(
+ r#"`target = "target"` in normal- or dev-dependencies has no effect ({})"#,
+ name_in_toml
+ );
+ }
+ dep.set_artifact(artifact)
+ } else {
+ bail!("`artifact = …` requires `-Z bindeps` ({})", name_in_toml);
+ }
+ } else if self.lib.is_some() || self.target.is_some() {
+ for (is_set, specifier) in [
+ (self.lib.is_some(), "lib"),
+ (self.target.is_some(), "target"),
+ ] {
+ if !is_set {
+ continue;
+ }
+ bail!(
+ "'{}' specifier cannot be used without an 'artifact = …' value ({})",
+ specifier,
+ name_in_toml
+ )
+ }
+ }
+ Ok(dep)
+ }
+}
+
+impl DetailedTomlDependency {
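+ /// Merges features inherited from a `[workspace.dependencies]` entry into
+ /// this dependency's own feature list.
+ ///
+ /// Sketch (hypothetical names): a member writing
+ /// `example-dep = { workspace = true, features = ["extra"] }` while the
+ /// workspace defines `example-dep = { version = "1", features = ["std"] }`
+ /// ends up with both `"extra"` and `"std"` enabled.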
+ fn add_features(&mut self, features: Option<Vec<String>>) {
+ self.features = match (self.features.clone(), features.clone()) {
+ (Some(dep_feat), Some(inherit_feat)) => Some(
+ dep_feat
+ .into_iter()
+ .chain(inherit_feat)
+ .collect::<Vec<String>>(),
+ ),
+ (Some(dep_feat), None) => Some(dep_feat),
+ (None, Some(inherit_feat)) => Some(inherit_feat),
+ (None, None) => None,
+ };
+ }
+
+ fn update_optional(&mut self, optional: Option<bool>) {
+ self.optional = optional;
+ }
+
+ fn resolve_path(
+ &mut self,
+ name: &str,
+ root_path: &Path,
+ package_root: &Path,
+ ) -> CargoResult<()> {
+ if let Some(rel_path) = &self.path {
+ self.path = Some(resolve_relative_path(
+ name,
+ root_path,
+ package_root,
+ rel_path,
+ )?)
+ }
+ Ok(())
+ }
+}
+
+#[derive(Default, Serialize, Deserialize, Debug, Clone)]
+#[serde(rename_all = "kebab-case")]
+struct TomlTarget {
+ name: Option<String>,
+
+ // The intention was to only accept `crate-type` here but historical
+ // versions of Cargo also accepted `crate_type`, so look for both.
+ crate_type: Option<Vec<String>>,
+ #[serde(rename = "crate_type")]
+ crate_type2: Option<Vec<String>>,
+
+ path: Option<PathValue>,
+ // Note that `filename` is used for the cargo-feature `different_binary_name`
+ filename: Option<String>,
+ test: Option<bool>,
+ doctest: Option<bool>,
+ bench: Option<bool>,
+ doc: Option<bool>,
+ plugin: Option<bool>,
+ doc_scrape_examples: Option<bool>,
+ #[serde(rename = "proc-macro")]
+ proc_macro_raw: Option<bool>,
+ #[serde(rename = "proc_macro")]
+ proc_macro_raw2: Option<bool>,
+ harness: Option<bool>,
+ required_features: Option<Vec<String>>,
+ edition: Option<String>,
+}
+
+#[derive(Clone)]
+struct PathValue(PathBuf);
+
+impl<'de> de::Deserialize<'de> for PathValue {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ Ok(PathValue(String::deserialize(deserializer)?.into()))
+ }
+}
+
+impl ser::Serialize for PathValue {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ self.0.serialize(serializer)
+ }
+}
+
+/// Corresponds to a `target` entry, but `TomlTarget` is already used.
+#[derive(Serialize, Deserialize, Debug, Clone)]
+struct TomlPlatform {
+ dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ #[serde(rename = "build-dependencies")]
+ build_dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ #[serde(rename = "build_dependencies")]
+ build_dependencies2: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ #[serde(rename = "dev-dependencies")]
+ dev_dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ #[serde(rename = "dev_dependencies")]
+ dev_dependencies2: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+}
+
+impl TomlTarget {
+ fn new() -> TomlTarget {
+ TomlTarget::default()
+ }
+
+ fn name(&self) -> String {
+ match self.name {
+ Some(ref name) => name.clone(),
+ None => panic!("target name is required"),
+ }
+ }
+
+ fn validate_proc_macro(&self, warnings: &mut Vec<String>) {
+ if self.proc_macro_raw.is_some() && self.proc_macro_raw2.is_some() {
+ warn_on_deprecated(
+ "proc-macro",
+ self.name().as_str(),
+ "library target",
+ warnings,
+ );
+ }
+ }
+
+ fn proc_macro(&self) -> Option<bool> {
+ self.proc_macro_raw.or(self.proc_macro_raw2).or_else(|| {
+ if let Some(types) = self.crate_types() {
+ if types.contains(&"proc-macro".to_string()) {
+ return Some(true);
+ }
+ }
+ None
+ })
+ }
+
+ fn validate_crate_types(&self, target_kind_human: &str, warnings: &mut Vec<String>) {
+ if self.crate_type.is_some() && self.crate_type2.is_some() {
+ warn_on_deprecated(
+ "crate-type",
+ self.name().as_str(),
+ format!("{target_kind_human} target").as_str(),
+ warnings,
+ );
+ }
+ }
+
+ fn crate_types(&self) -> Option<&Vec<String>> {
+ self.crate_type
+ .as_ref()
+ .or_else(|| self.crate_type2.as_ref())
+ }
+}
+
+impl fmt::Debug for PathValue {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/toml/targets.rs b/src/tools/cargo/src/cargo/util/toml/targets.rs
new file mode 100644
index 000000000..a7e30c61b
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/toml/targets.rs
@@ -0,0 +1,969 @@
+//! This module implements Cargo conventions for directory layout:
+//!
+//! * `src/lib.rs` is a library
+//! * `src/main.rs` is a binary
+//! * `src/bin/*.rs` are binaries
+//! * `examples/*.rs` are examples
+//! * `tests/*.rs` are integration tests
+//! * `benches/*.rs` are benchmarks
+//!
+//! It is a bit tricky because we need to match explicit information from `Cargo.toml`
+//! with implicit info in directory layout.
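+//!
+//! For illustration, a package following these conventions (file names are
+//! hypothetical) might contain `src/lib.rs`, `src/main.rs`, `src/bin/extra.rs`,
+//! `examples/demo.rs`, `tests/smoke.rs`, and `benches/throughput.rs`, all of
+//! which are discovered here without being listed in `Cargo.toml`.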
+
+use std::collections::HashSet;
+use std::fs::{self, DirEntry};
+use std::path::{Path, PathBuf};
+
+use super::{
+ PathValue, StringOrBool, StringOrVec, TomlBenchTarget, TomlBinTarget, TomlExampleTarget,
+ TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget,
+};
+use crate::core::compiler::rustdoc::RustdocScrapeExamples;
+use crate::core::compiler::CrateType;
+use crate::core::{Edition, Feature, Features, Target};
+use crate::util::errors::CargoResult;
+use crate::util::restricted_names;
+
+use anyhow::Context as _;
+
+const DEFAULT_TEST_DIR_NAME: &'static str = "tests";
+const DEFAULT_BENCH_DIR_NAME: &'static str = "benches";
+const DEFAULT_EXAMPLE_DIR_NAME: &'static str = "examples";
+const DEFAULT_BIN_DIR_NAME: &'static str = "bin";
+
+pub fn targets(
+ features: &Features,
+ manifest: &TomlManifest,
+ package_name: &str,
+ package_root: &Path,
+ edition: Edition,
+ custom_build: &Option<StringOrBool>,
+ metabuild: &Option<StringOrVec>,
+ warnings: &mut Vec<String>,
+ errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
+ let mut targets = Vec::new();
+
+ let has_lib;
+
+ if let Some(target) = clean_lib(
+ manifest.lib.as_ref(),
+ package_root,
+ package_name,
+ edition,
+ warnings,
+ )? {
+ targets.push(target);
+ has_lib = true;
+ } else {
+ has_lib = false;
+ }
+
+ let package = manifest
+ .package
+ .as_ref()
+ .or_else(|| manifest.project.as_ref())
+ .ok_or_else(|| anyhow::format_err!("manifest has no `package` (or `project`)"))?;
+
+ targets.extend(clean_bins(
+ features,
+ manifest.bin.as_ref(),
+ package_root,
+ package_name,
+ edition,
+ package.autobins,
+ warnings,
+ errors,
+ has_lib,
+ )?);
+
+ targets.extend(clean_examples(
+ manifest.example.as_ref(),
+ package_root,
+ edition,
+ package.autoexamples,
+ warnings,
+ errors,
+ )?);
+
+ targets.extend(clean_tests(
+ manifest.test.as_ref(),
+ package_root,
+ edition,
+ package.autotests,
+ warnings,
+ errors,
+ )?);
+
+ targets.extend(clean_benches(
+ manifest.bench.as_ref(),
+ package_root,
+ edition,
+ package.autobenches,
+ warnings,
+ errors,
+ )?);
+
+ // Process the custom build script.
+ if let Some(custom_build) = manifest.maybe_custom_build(custom_build, package_root) {
+ if metabuild.is_some() {
+ anyhow::bail!("cannot specify both `metabuild` and `build`");
+ }
+ let name = format!(
+ "build-script-{}",
+ custom_build
+ .file_stem()
+ .and_then(|s| s.to_str())
+ .unwrap_or("")
+ );
+ targets.push(Target::custom_build_target(
+ &name,
+ package_root.join(custom_build),
+ edition,
+ ));
+ }
+ if let Some(metabuild) = metabuild {
+ // Verify names match available build deps.
+ let bdeps = manifest.build_dependencies.as_ref();
+ for name in &metabuild.0 {
+ if !bdeps.map_or(false, |bd| bd.contains_key(name)) {
+ anyhow::bail!(
+ "metabuild package `{}` must be specified in `build-dependencies`",
+ name
+ );
+ }
+ }
+
+ targets.push(Target::metabuild_target(&format!(
+ "metabuild-{}",
+ package.name
+ )));
+ }
+
+ Ok(targets)
+}
+
+fn clean_lib(
+ toml_lib: Option<&TomlLibTarget>,
+ package_root: &Path,
+ package_name: &str,
+ edition: Edition,
+ warnings: &mut Vec<String>,
+) -> CargoResult<Option<Target>> {
+ let inferred = inferred_lib(package_root);
+ let lib = match toml_lib {
+ Some(lib) => {
+ if let Some(ref name) = lib.name {
+ // XXX: other code paths dodge this validation
+ if name.contains('-') {
+ anyhow::bail!("library target names cannot contain hyphens: {}", name)
+ }
+ }
+ Some(TomlTarget {
+ name: lib.name.clone().or_else(|| Some(package_name.to_owned())),
+ ..lib.clone()
+ })
+ }
+ None => inferred.as_ref().map(|lib| TomlTarget {
+ name: Some(package_name.to_string()),
+ path: Some(PathValue(lib.clone())),
+ ..TomlTarget::new()
+ }),
+ };
+
+ let lib = match lib {
+ Some(ref lib) => lib,
+ None => return Ok(None),
+ };
+ lib.validate_proc_macro(warnings);
+ lib.validate_crate_types("library", warnings);
+
+ validate_target_name(lib, "library", "lib", warnings)?;
+
+ let path = match (lib.path.as_ref(), inferred) {
+ (Some(path), _) => package_root.join(&path.0),
+ (None, Some(path)) => path,
+ (None, None) => {
+ let legacy_path = package_root.join("src").join(format!("{}.rs", lib.name()));
+ if edition == Edition::Edition2015 && legacy_path.exists() {
+ warnings.push(format!(
+ "path `{}` was erroneously implicitly accepted for library `{}`,\n\
+ please rename the file to `src/lib.rs` or set lib.path in Cargo.toml",
+ legacy_path.display(),
+ lib.name()
+ ));
+ legacy_path
+ } else {
+ anyhow::bail!(
+ "can't find library `{}`, \
+ rename file to `src/lib.rs` or specify lib.path",
+ lib.name()
+ )
+ }
+ }
+ };
+
+ // Per the Macros 1.1 RFC:
+ //
+ // > Initially if a crate is compiled with the `proc-macro` crate type
+ // > (and possibly others) it will forbid exporting any items in the
+ // > crate other than those functions tagged #[proc_macro_derive] and
+ // > those functions must also be placed at the crate root.
+ //
+ // A plugin requires exporting plugin_registrar so a crate cannot be
+ // both at once.
+ let crate_types = match (lib.crate_types(), lib.plugin, lib.proc_macro()) {
+ (Some(kinds), _, _)
+ if kinds.contains(&CrateType::Dylib.as_str().to_owned())
+ && kinds.contains(&CrateType::Cdylib.as_str().to_owned()) =>
+ {
+ anyhow::bail!(format!(
+ "library `{}` cannot set the crate type of both `dylib` and `cdylib`",
+ lib.name()
+ ));
+ }
+ (Some(kinds), _, _) if kinds.contains(&"proc-macro".to_string()) => {
+ if let Some(true) = lib.plugin {
+ // This is a warning to retain backwards compatibility.
+ warnings.push(format!(
+ "proc-macro library `{}` should not specify `plugin = true`",
+ lib.name()
+ ));
+ }
+ warnings.push(format!(
+ "library `{}` should only specify `proc-macro = true` instead of setting `crate-type`",
+ lib.name()
+ ));
+ if kinds.len() > 1 {
+ anyhow::bail!("cannot mix `proc-macro` crate type with others");
+ }
+ vec![CrateType::ProcMacro]
+ }
+ (_, Some(true), Some(true)) => {
+ anyhow::bail!("`lib.plugin` and `lib.proc-macro` cannot both be `true`")
+ }
+ (Some(kinds), _, _) => kinds.iter().map(|s| s.into()).collect(),
+ (None, Some(true), _) => vec![CrateType::Dylib],
+ (None, _, Some(true)) => vec![CrateType::ProcMacro],
+ (None, _, _) => vec![CrateType::Lib],
+ };
+
+ let mut target = Target::lib_target(&lib.name(), crate_types, path, edition);
+ configure(lib, &mut target)?;
+ Ok(Some(target))
+}
+
+fn clean_bins(
+ features: &Features,
+ toml_bins: Option<&Vec<TomlBinTarget>>,
+ package_root: &Path,
+ package_name: &str,
+ edition: Edition,
+ autodiscover: Option<bool>,
+ warnings: &mut Vec<String>,
+ errors: &mut Vec<String>,
+ has_lib: bool,
+) -> CargoResult<Vec<Target>> {
+ let inferred = inferred_bins(package_root, package_name);
+
+ let bins = toml_targets_and_inferred(
+ toml_bins,
+ &inferred,
+ package_root,
+ autodiscover,
+ edition,
+ warnings,
+ "binary",
+ "bin",
+ "autobins",
+ );
+
+ // This loop performs basic checks on each `TomlTarget` in `bins`.
+ for bin in &bins {
+ // For each binary, check if the `filename` parameter is populated. If it is,
+ // check if the corresponding cargo feature has been activated.
+ if bin.filename.is_some() {
+ features.require(Feature::different_binary_name())?;
+ }
+
+ validate_target_name(bin, "binary", "bin", warnings)?;
+
+ let name = bin.name();
+
+ if let Some(crate_types) = bin.crate_types() {
+ if !crate_types.is_empty() {
+ errors.push(format!(
+ "the target `{}` is a binary and can't have any \
+ crate-types set (currently \"{}\")",
+ name,
+ crate_types.join(", ")
+ ));
+ }
+ }
+
+ if bin.proc_macro() == Some(true) {
+ errors.push(format!(
+ "the target `{}` is a binary and can't have `proc-macro` \
+ set `true`",
+ name
+ ));
+ }
+
+ if restricted_names::is_conflicting_artifact_name(&name) {
+ anyhow::bail!(
+ "the binary target name `{}` is forbidden, \
+ it conflicts with with cargo's build directory names",
+ name
+ )
+ }
+ }
+
+ validate_unique_names(&bins, "binary")?;
+
+ let mut result = Vec::new();
+ for bin in &bins {
+ let path = target_path(bin, &inferred, "bin", package_root, edition, &mut |_| {
+ if let Some(legacy_path) = legacy_bin_path(package_root, &bin.name(), has_lib) {
+ warnings.push(format!(
+ "path `{}` was erroneously implicitly accepted for binary `{}`,\n\
+ please set bin.path in Cargo.toml",
+ legacy_path.display(),
+ bin.name()
+ ));
+ Some(legacy_path)
+ } else {
+ None
+ }
+ });
+ let path = match path {
+ Ok(path) => path,
+ Err(e) => anyhow::bail!("{}", e),
+ };
+
+ let mut target = Target::bin_target(
+ &bin.name(),
+ bin.filename.clone(),
+ path,
+ bin.required_features.clone(),
+ edition,
+ );
+
+ configure(bin, &mut target)?;
+ result.push(target);
+ }
+ return Ok(result);
+
+ fn legacy_bin_path(package_root: &Path, name: &str, has_lib: bool) -> Option<PathBuf> {
+ if !has_lib {
+ let path = package_root.join("src").join(format!("{}.rs", name));
+ if path.exists() {
+ return Some(path);
+ }
+ }
+ let path = package_root.join("src").join("main.rs");
+ if path.exists() {
+ return Some(path);
+ }
+
+ let path = package_root
+ .join("src")
+ .join(DEFAULT_BIN_DIR_NAME)
+ .join("main.rs");
+ if path.exists() {
+ return Some(path);
+ }
+ None
+ }
+}
+
+fn clean_examples(
+ toml_examples: Option<&Vec<TomlExampleTarget>>,
+ package_root: &Path,
+ edition: Edition,
+ autodiscover: Option<bool>,
+ warnings: &mut Vec<String>,
+ errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
+ let inferred = infer_from_directory(&package_root.join(DEFAULT_EXAMPLE_DIR_NAME));
+
+ let targets = clean_targets(
+ "example",
+ "example",
+ toml_examples,
+ &inferred,
+ package_root,
+ edition,
+ autodiscover,
+ warnings,
+ errors,
+ "autoexamples",
+ )?;
+
+ let mut result = Vec::new();
+ for (path, toml) in targets {
+ toml.validate_crate_types("example", warnings);
+ let crate_types = match toml.crate_types() {
+ Some(kinds) => kinds.iter().map(|s| s.into()).collect(),
+ None => Vec::new(),
+ };
+
+ let mut target = Target::example_target(
+ &toml.name(),
+ crate_types,
+ path,
+ toml.required_features.clone(),
+ edition,
+ );
+ configure(&toml, &mut target)?;
+ result.push(target);
+ }
+
+ Ok(result)
+}
+
+fn clean_tests(
+ toml_tests: Option<&Vec<TomlTestTarget>>,
+ package_root: &Path,
+ edition: Edition,
+ autodiscover: Option<bool>,
+ warnings: &mut Vec<String>,
+ errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
+ let inferred = infer_from_directory(&package_root.join(DEFAULT_TEST_DIR_NAME));
+
+ let targets = clean_targets(
+ "test",
+ "test",
+ toml_tests,
+ &inferred,
+ package_root,
+ edition,
+ autodiscover,
+ warnings,
+ errors,
+ "autotests",
+ )?;
+
+ let mut result = Vec::new();
+ for (path, toml) in targets {
+ let mut target =
+ Target::test_target(&toml.name(), path, toml.required_features.clone(), edition);
+ configure(&toml, &mut target)?;
+ result.push(target);
+ }
+ Ok(result)
+}
+
+fn clean_benches(
+ toml_benches: Option<&Vec<TomlBenchTarget>>,
+ package_root: &Path,
+ edition: Edition,
+ autodiscover: Option<bool>,
+ warnings: &mut Vec<String>,
+ errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
+ let mut legacy_warnings = vec![];
+
+ let targets = {
+ let mut legacy_bench_path = |bench: &TomlTarget| {
+ let legacy_path = package_root.join("src").join("bench.rs");
+ if !(bench.name() == "bench" && legacy_path.exists()) {
+ return None;
+ }
+ legacy_warnings.push(format!(
+ "path `{}` was erroneously implicitly accepted for benchmark `{}`,\n\
+ please set bench.path in Cargo.toml",
+ legacy_path.display(),
+ bench.name()
+ ));
+ Some(legacy_path)
+ };
+
+ let inferred = infer_from_directory(&package_root.join("benches"));
+
+ clean_targets_with_legacy_path(
+ "benchmark",
+ "bench",
+ toml_benches,
+ &inferred,
+ package_root,
+ edition,
+ autodiscover,
+ warnings,
+ errors,
+ &mut legacy_bench_path,
+ "autobenches",
+ )?
+ };
+
+ warnings.append(&mut legacy_warnings);
+
+ let mut result = Vec::new();
+ for (path, toml) in targets {
+ let mut target =
+ Target::bench_target(&toml.name(), path, toml.required_features.clone(), edition);
+ configure(&toml, &mut target)?;
+ result.push(target);
+ }
+
+ Ok(result)
+}
+
+fn clean_targets(
+ target_kind_human: &str,
+ target_kind: &str,
+ toml_targets: Option<&Vec<TomlTarget>>,
+ inferred: &[(String, PathBuf)],
+ package_root: &Path,
+ edition: Edition,
+ autodiscover: Option<bool>,
+ warnings: &mut Vec<String>,
+ errors: &mut Vec<String>,
+ autodiscover_flag_name: &str,
+) -> CargoResult<Vec<(PathBuf, TomlTarget)>> {
+ clean_targets_with_legacy_path(
+ target_kind_human,
+ target_kind,
+ toml_targets,
+ inferred,
+ package_root,
+ edition,
+ autodiscover,
+ warnings,
+ errors,
+ &mut |_| None,
+ autodiscover_flag_name,
+ )
+}
+
+fn clean_targets_with_legacy_path(
+ target_kind_human: &str,
+ target_kind: &str,
+ toml_targets: Option<&Vec<TomlTarget>>,
+ inferred: &[(String, PathBuf)],
+ package_root: &Path,
+ edition: Edition,
+ autodiscover: Option<bool>,
+ warnings: &mut Vec<String>,
+ errors: &mut Vec<String>,
+ legacy_path: &mut dyn FnMut(&TomlTarget) -> Option<PathBuf>,
+ autodiscover_flag_name: &str,
+) -> CargoResult<Vec<(PathBuf, TomlTarget)>> {
+ let toml_targets = toml_targets_and_inferred(
+ toml_targets,
+ inferred,
+ package_root,
+ autodiscover,
+ edition,
+ warnings,
+ target_kind_human,
+ target_kind,
+ autodiscover_flag_name,
+ );
+
+ for target in &toml_targets {
+ validate_target_name(target, target_kind_human, target_kind, warnings)?;
+ }
+
+ validate_unique_names(&toml_targets, target_kind)?;
+ let mut result = Vec::new();
+ for target in toml_targets {
+ let path = target_path(
+ &target,
+ inferred,
+ target_kind,
+ package_root,
+ edition,
+ legacy_path,
+ );
+ let path = match path {
+ Ok(path) => path,
+ Err(e) => {
+ errors.push(e);
+ continue;
+ }
+ };
+ result.push((path, target));
+ }
+ Ok(result)
+}
+
+fn inferred_lib(package_root: &Path) -> Option<PathBuf> {
+ let lib = package_root.join("src").join("lib.rs");
+ if lib.exists() {
+ Some(lib)
+ } else {
+ None
+ }
+}
+
+fn inferred_bins(package_root: &Path, package_name: &str) -> Vec<(String, PathBuf)> {
+ let main = package_root.join("src").join("main.rs");
+ let mut result = Vec::new();
+ if main.exists() {
+ result.push((package_name.to_string(), main));
+ }
+ result.extend(infer_from_directory(
+ &package_root.join("src").join(DEFAULT_BIN_DIR_NAME),
+ ));
+
+ result
+}
+
+fn infer_from_directory(directory: &Path) -> Vec<(String, PathBuf)> {
+ let entries = match fs::read_dir(directory) {
+ Err(_) => return Vec::new(),
+ Ok(dir) => dir,
+ };
+
+ entries
+ .filter_map(|e| e.ok())
+ .filter(is_not_dotfile)
+ .filter_map(|d| infer_any(&d))
+ .collect()
+}
+
+fn infer_any(entry: &DirEntry) -> Option<(String, PathBuf)> {
+ if entry.file_type().map_or(false, |t| t.is_dir()) {
+ infer_subdirectory(entry)
+ } else if entry.path().extension().and_then(|p| p.to_str()) == Some("rs") {
+ infer_file(entry)
+ } else {
+ None
+ }
+}
+
+fn infer_file(entry: &DirEntry) -> Option<(String, PathBuf)> {
+ let path = entry.path();
+ path.file_stem()
+ .and_then(|p| p.to_str())
+ .map(|p| (p.to_owned(), path.clone()))
+}
+
+fn infer_subdirectory(entry: &DirEntry) -> Option<(String, PathBuf)> {
+ let path = entry.path();
+ let main = path.join("main.rs");
+ let name = path.file_name().and_then(|n| n.to_str());
+ match (name, main.exists()) {
+ (Some(name), true) => Some((name.to_owned(), main)),
+ _ => None,
+ }
+}
+
+fn is_not_dotfile(entry: &DirEntry) -> bool {
+ entry.file_name().to_str().map(|s| s.starts_with('.')) == Some(false)
+}
+
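+/// Combine the targets declared in `Cargo.toml` with the inferred ones,
+/// applying the `auto*` discovery rules (including the 2015-edition
+/// compatibility warning when discovery would change behavior).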
+fn toml_targets_and_inferred(
+ toml_targets: Option<&Vec<TomlTarget>>,
+ inferred: &[(String, PathBuf)],
+ package_root: &Path,
+ autodiscover: Option<bool>,
+ edition: Edition,
+ warnings: &mut Vec<String>,
+ target_kind_human: &str,
+ target_kind: &str,
+ autodiscover_flag_name: &str,
+) -> Vec<TomlTarget> {
+ let inferred_targets = inferred_to_toml_targets(inferred);
+ match toml_targets {
+ None => {
+ if let Some(false) = autodiscover {
+ vec![]
+ } else {
+ inferred_targets
+ }
+ }
+ Some(targets) => {
+ let mut targets = targets.clone();
+
+ let target_path =
+ |target: &TomlTarget| target.path.clone().map(|p| package_root.join(p.0));
+
+ let mut seen_names = HashSet::new();
+ let mut seen_paths = HashSet::new();
+ for target in targets.iter() {
+ seen_names.insert(target.name.clone());
+ seen_paths.insert(target_path(target));
+ }
+
+ let mut rem_targets = vec![];
+ for target in inferred_targets {
+ if !seen_names.contains(&target.name) && !seen_paths.contains(&target_path(&target))
+ {
+ rem_targets.push(target);
+ }
+ }
+
+ let autodiscover = match autodiscover {
+ Some(autodiscover) => autodiscover,
+ None => {
+ if edition == Edition::Edition2015 {
+ if !rem_targets.is_empty() {
+ let mut rem_targets_str = String::new();
+ for t in rem_targets.iter() {
+ if let Some(p) = t.path.clone() {
+ rem_targets_str.push_str(&format!("* {}\n", p.0.display()))
+ }
+ }
+ warnings.push(format!(
+ "\
+An explicit [[{section}]] section is specified in Cargo.toml which currently
+prevents Cargo from automatically inferring other {target_kind_human} targets.
+This inference behavior will change in the Rust 2018 edition and the following
+files will be included as a {target_kind_human} target:
+
+{rem_targets_str}
+This is likely to break cargo build or cargo test as these files may not be
+ready to be compiled as a {target_kind_human} target today. You can future-proof yourself
+and disable this warning by adding `{autodiscover_flag_name} = false` to your [package]
+section. You may also move the files to a location where Cargo would not
+automatically infer them to be a target, such as in subfolders.
+
+For more information on this warning you can consult
+https://github.com/rust-lang/cargo/issues/5330",
+ section = target_kind,
+ target_kind_human = target_kind_human,
+ rem_targets_str = rem_targets_str,
+ autodiscover_flag_name = autodiscover_flag_name,
+ ));
+ };
+ false
+ } else {
+ true
+ }
+ }
+ };
+
+ if autodiscover {
+ targets.append(&mut rem_targets);
+ }
+
+ targets
+ }
+ }
+}
+
+fn inferred_to_toml_targets(inferred: &[(String, PathBuf)]) -> Vec<TomlTarget> {
+ inferred
+ .iter()
+ .map(|&(ref name, ref path)| TomlTarget {
+ name: Some(name.clone()),
+ path: Some(PathValue(path.clone())),
+ ..TomlTarget::new()
+ })
+ .collect()
+}
+
+fn validate_target_name(
+ target: &TomlTarget,
+ target_kind_human: &str,
+ target_kind: &str,
+ warnings: &mut Vec<String>,
+) -> CargoResult<()> {
+ match target.name {
+ Some(ref name) => {
+ if name.trim().is_empty() {
+ anyhow::bail!("{} target names cannot be empty", target_kind_human)
+ }
+ if cfg!(windows) && restricted_names::is_windows_reserved(name) {
+ warnings.push(format!(
+ "{} target `{}` is a reserved Windows filename, \
+ this target will not work on Windows platforms",
+ target_kind_human, name
+ ));
+ }
+ }
+ None => anyhow::bail!(
+ "{} target {}.name is required",
+ target_kind_human,
+ target_kind
+ ),
+ }
+
+ Ok(())
+}
+
+/// Check a list of TOML targets, making sure the target names are unique within the list.
+fn validate_unique_names(targets: &[TomlTarget], target_kind: &str) -> CargoResult<()> {
+ let mut seen = HashSet::new();
+ for name in targets.iter().map(|e| e.name()) {
+ if !seen.insert(name.clone()) {
+ anyhow::bail!(
+ "found duplicate {target_kind} name {name}, \
+ but all {target_kind} targets must have a unique name",
+ target_kind = target_kind,
+ name = name
+ );
+ }
+ }
+ Ok(())
+}
+
+fn configure(toml: &TomlTarget, target: &mut Target) -> CargoResult<()> {
+ let t2 = target.clone();
+ target
+ .set_tested(toml.test.unwrap_or_else(|| t2.tested()))
+ .set_doc(toml.doc.unwrap_or_else(|| t2.documented()))
+ .set_doctest(toml.doctest.unwrap_or_else(|| t2.doctested()))
+ .set_benched(toml.bench.unwrap_or_else(|| t2.benched()))
+ .set_harness(toml.harness.unwrap_or_else(|| t2.harness()))
+ .set_proc_macro(toml.proc_macro().unwrap_or_else(|| t2.proc_macro()))
+ .set_doc_scrape_examples(match toml.doc_scrape_examples {
+ None => RustdocScrapeExamples::Unset,
+ Some(false) => RustdocScrapeExamples::Disabled,
+ Some(true) => RustdocScrapeExamples::Enabled,
+ })
+ .set_for_host(match (toml.plugin, toml.proc_macro()) {
+ (None, None) => t2.for_host(),
+ (Some(true), _) | (_, Some(true)) => true,
+ (Some(false), _) | (_, Some(false)) => false,
+ });
+ if let Some(edition) = toml.edition.clone() {
+ target.set_edition(
+ edition
+ .parse()
+ .with_context(|| "failed to parse the `edition` key")?,
+ );
+ }
+ Ok(())
+}
+
+/// Build an error message for a target path that cannot be determined either
+/// by auto-discovery or from an explicit `path` setting.
+///
+/// This function tries to detect commonly wrong paths for targets:
+///
+/// test -> tests/*.rs, tests/*/main.rs
+/// bench -> benches/*.rs, benches/*/main.rs
+/// example -> examples/*.rs, examples/*/main.rs
+/// bin -> src/bin/*.rs, src/bin/*/main.rs
+///
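+/// For example, a `[[bin]]` target named `foo` is expected at `src/bin/foo.rs`
+/// or `src/bin/foo/main.rs`; if neither exists but a file sits at the commonly
+/// mistaken `src/bins/foo.rs`, the message suggests renaming it to the expected
+/// location.
+///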
+/// Note that the logic needs to stay in sync with [`infer_from_directory`] if that changes.
+fn target_path_not_found_error_message(
+ package_root: &Path,
+ target: &TomlTarget,
+ target_kind: &str,
+) -> String {
+ fn possible_target_paths(name: &str, kind: &str, commonly_wrong: bool) -> [PathBuf; 2] {
+ let mut target_path = PathBuf::new();
+ match (kind, commonly_wrong) {
+ // commonly wrong paths
+ ("test" | "bench" | "example", true) => target_path.push(kind),
+ ("bin", true) => {
+ target_path.push("src");
+ target_path.push("bins");
+ }
+ // default inferred paths
+ ("test", false) => target_path.push(DEFAULT_TEST_DIR_NAME),
+ ("bench", false) => target_path.push(DEFAULT_BENCH_DIR_NAME),
+ ("example", false) => target_path.push(DEFAULT_EXAMPLE_DIR_NAME),
+ ("bin", false) => {
+ target_path.push("src");
+ target_path.push(DEFAULT_BIN_DIR_NAME);
+ }
+ _ => unreachable!("invalid target kind: {}", kind),
+ }
+ target_path.push(name);
+
+ let target_path_file = {
+ let mut path = target_path.clone();
+ path.set_extension("rs");
+ path
+ };
+ let target_path_subdir = {
+ target_path.push("main.rs");
+ target_path
+ };
+ return [target_path_file, target_path_subdir];
+ }
+
+ let target_name = target.name();
+ let commonly_wrong_paths = possible_target_paths(&target_name, target_kind, true);
+ let possible_paths = possible_target_paths(&target_name, target_kind, false);
+ let existing_wrong_path_index = match (
+ package_root.join(&commonly_wrong_paths[0]).exists(),
+ package_root.join(&commonly_wrong_paths[1]).exists(),
+ ) {
+ (true, _) => Some(0),
+ (_, true) => Some(1),
+ _ => None,
+ };
+
+ if let Some(i) = existing_wrong_path_index {
+ return format!(
+ "\
+can't find `{name}` {kind} at default paths, but found a file at `{wrong_path}`.
+Perhaps rename the file to `{possible_path}` for target auto-discovery, \
+or specify {kind}.path if you want to use a non-default path.",
+ name = target_name,
+ kind = target_kind,
+ wrong_path = commonly_wrong_paths[i].display(),
+ possible_path = possible_paths[i].display(),
+ );
+ }
+
+ format!(
+ "can't find `{name}` {kind} at `{path_file}` or `{path_dir}`. \
+ Please specify {kind}.path if you want to use a non-default path.",
+ name = target_name,
+ kind = target_kind,
+ path_file = possible_paths[0].display(),
+ path_dir = possible_paths[1].display(),
+ )
+}
+
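+/// Determine the path of a target, either from an explicit `path` key in the
+/// manifest, from the inferred file layout, or (on the 2015 edition) from a
+/// legacy location supplied by `legacy_path`.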
+fn target_path(
+ target: &TomlTarget,
+ inferred: &[(String, PathBuf)],
+ target_kind: &str,
+ package_root: &Path,
+ edition: Edition,
+ legacy_path: &mut dyn FnMut(&TomlTarget) -> Option<PathBuf>,
+) -> Result<PathBuf, String> {
+ if let Some(ref path) = target.path {
+ // Should we verify that this path exists here?
+ return Ok(package_root.join(&path.0));
+ }
+ let name = target.name();
+
+ let mut matching = inferred
+ .iter()
+ .filter(|&&(ref n, _)| n == &name)
+ .map(|&(_, ref p)| p.clone());
+
+ let first = matching.next();
+ let second = matching.next();
+ match (first, second) {
+ (Some(path), None) => Ok(path),
+ (None, None) => {
+ if edition == Edition::Edition2015 {
+ if let Some(path) = legacy_path(target) {
+ return Ok(path);
+ }
+ }
+ Err(target_path_not_found_error_message(
+ package_root,
+ target,
+ target_kind,
+ ))
+ }
+ (Some(p0), Some(p1)) => {
+ if edition == Edition::Edition2015 {
+ if let Some(path) = legacy_path(target) {
+ return Ok(path);
+ }
+ }
+ Err(format!(
+ "\
+cannot infer path for `{}` {}
+Cargo doesn't know which to use because multiple target files found at `{}` and `{}`.",
+ target.name(),
+ target_kind,
+ p0.strip_prefix(package_root).unwrap_or(&p0).display(),
+ p1.strip_prefix(package_root).unwrap_or(&p1).display(),
+ ))
+ }
+ (None, Some(_)) => unreachable!(),
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/toml_mut/dependency.rs b/src/tools/cargo/src/cargo/util/toml_mut/dependency.rs
new file mode 100644
index 000000000..d8a2f2750
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/toml_mut/dependency.rs
@@ -0,0 +1,1132 @@
+//! Information about dependencies in a manifest.
+
+use std::fmt::{Display, Formatter};
+use std::path::{Path, PathBuf};
+
+use indexmap::IndexSet;
+use toml_edit::KeyMut;
+
+use super::manifest::str_or_1_len_table;
+use crate::core::GitReference;
+use crate::core::SourceId;
+use crate::core::Summary;
+use crate::CargoResult;
+use crate::Config;
+
+/// A dependency handled by Cargo.
+///
+/// `None` means the field will be blank in TOML.
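+///
+/// A minimal construction sketch (the same builder calls are exercised by the
+/// unit tests in this module):
+///
+/// ```ignore
+/// let dep = Dependency::new("dep")
+///     .set_source(RegistrySource::new("1.0"))
+///     .set_optional(true);
+/// assert_eq!(dep.toml_key(), "dep");
+/// assert_eq!(dep.version(), Some("1.0"));
+/// ```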
+#[derive(Debug, PartialEq, Eq, Clone)]
+#[non_exhaustive]
+pub struct Dependency {
+ /// The name of the dependency (as it is set in its `Cargo.toml` and known
+ /// to crates.io).
+ pub name: String,
+ /// Whether the dependency is opted-in with a feature flag.
+ pub optional: Option<bool>,
+
+ /// List of features to add (or None to keep features unchanged).
+ pub features: Option<IndexSet<String>>,
+ /// Whether default features are enabled.
+ pub default_features: Option<bool>,
+ /// List of features inherited from a workspace dependency.
+ pub inherited_features: Option<IndexSet<String>>,
+
+ /// Where the dependency comes from.
+ pub source: Option<Source>,
+ /// Non-default registry.
+ pub registry: Option<String>,
+
+ /// If the dependency is renamed, this is the new name for the dependency
+ /// as a string. None if it is not renamed.
+ pub rename: Option<String>,
+}
+
+impl Dependency {
+ /// Create a new dependency with a name.
+ pub fn new(name: &str) -> Self {
+ Self {
+ name: name.into(),
+ optional: None,
+ features: None,
+ default_features: None,
+ inherited_features: None,
+ source: None,
+ registry: None,
+ rename: None,
+ }
+ }
+
+    /// Set the source of the dependency.
+ pub fn set_source(mut self, source: impl Into<Source>) -> Self {
+ self.source = Some(source.into());
+ self
+ }
+
+ /// Remove the existing version requirement.
+ pub fn clear_version(mut self) -> Self {
+ match &mut self.source {
+ Some(Source::Registry(_)) => {
+ self.source = None;
+ }
+ Some(Source::Path(path)) => {
+ path.version = None;
+ }
+ Some(Source::Git(git)) => {
+ git.version = None;
+ }
+ Some(Source::Workspace(_workspace)) => {}
+ None => {}
+ }
+ self
+ }
+
+ /// Set whether the dependency is optional.
+ #[allow(dead_code)]
+ pub fn set_optional(mut self, opt: bool) -> Self {
+ self.optional = Some(opt);
+ self
+ }
+
+    /// Set the list of features to enable.
+ #[allow(dead_code)]
+ pub fn set_features(mut self, features: IndexSet<String>) -> Self {
+ self.features = Some(features);
+ self
+ }
+
+    /// Add to the list of features to enable.
+ pub fn extend_features(mut self, features: impl IntoIterator<Item = String>) -> Self {
+ self.features
+ .get_or_insert_with(Default::default)
+ .extend(features);
+ self
+ }
+
+ /// Set the value of default-features for the dependency.
+ #[allow(dead_code)]
+ pub fn set_default_features(mut self, default_features: bool) -> Self {
+ self.default_features = Some(default_features);
+ self
+ }
+
+ /// Set the alias for the dependency.
+ pub fn set_rename(mut self, rename: &str) -> Self {
+ self.rename = Some(rename.into());
+ self
+ }
+
+ /// Set the value of registry for the dependency.
+ pub fn set_registry(mut self, registry: impl Into<String>) -> Self {
+ self.registry = Some(registry.into());
+ self
+ }
+
+    /// Set the features inherited from the workspace dependency.
+ pub fn set_inherited_features(mut self, features: IndexSet<String>) -> Self {
+ self.inherited_features = Some(features);
+ self
+ }
+
+ /// Get the dependency source.
+ pub fn source(&self) -> Option<&Source> {
+ self.source.as_ref()
+ }
+
+ /// Get version of dependency.
+ pub fn version(&self) -> Option<&str> {
+ match self.source()? {
+ Source::Registry(src) => Some(src.version.as_str()),
+ Source::Path(src) => src.version.as_deref(),
+ Source::Git(src) => src.version.as_deref(),
+ Source::Workspace(_) => None,
+ }
+ }
+
+ /// Get registry of the dependency.
+ pub fn registry(&self) -> Option<&str> {
+ self.registry.as_deref()
+ }
+
+ /// Get the alias for the dependency (if any).
+ pub fn rename(&self) -> Option<&str> {
+ self.rename.as_deref()
+ }
+
+ /// Whether default features are activated.
+ pub fn default_features(&self) -> Option<bool> {
+ self.default_features
+ }
+
+ /// Get whether the dep is optional.
+ pub fn optional(&self) -> Option<bool> {
+ self.optional
+ }
+
+ /// Get the SourceID for this dependency.
+ pub fn source_id(&self, config: &Config) -> CargoResult<MaybeWorkspace<SourceId>> {
+ match &self.source.as_ref() {
+ Some(Source::Registry(_)) | None => {
+ if let Some(r) = self.registry() {
+ let source_id = SourceId::alt_registry(config, r)?;
+ Ok(MaybeWorkspace::Other(source_id))
+ } else {
+ let source_id = SourceId::crates_io(config)?;
+ Ok(MaybeWorkspace::Other(source_id))
+ }
+ }
+ Some(Source::Path(source)) => Ok(MaybeWorkspace::Other(source.source_id()?)),
+ Some(Source::Git(source)) => Ok(MaybeWorkspace::Other(source.source_id()?)),
+ Some(Source::Workspace(workspace)) => Ok(MaybeWorkspace::Workspace(workspace.clone())),
+ }
+ }
+
+ /// Query to find this dependency.
+ pub fn query(
+ &self,
+ config: &Config,
+ ) -> CargoResult<MaybeWorkspace<crate::core::dependency::Dependency>> {
+ let source_id = self.source_id(config)?;
+ match source_id {
+ MaybeWorkspace::Workspace(workspace) => Ok(MaybeWorkspace::Workspace(workspace)),
+ MaybeWorkspace::Other(source_id) => Ok(MaybeWorkspace::Other(
+ crate::core::dependency::Dependency::parse(
+ self.name.as_str(),
+ self.version(),
+ source_id,
+ )?,
+ )),
+ }
+ }
+}
+
+/// Either a workspace or another type.
+pub enum MaybeWorkspace<T> {
+ Workspace(WorkspaceSource),
+ Other(T),
+}
+
+impl Dependency {
+ /// Create a dependency from a TOML table entry.
+ pub fn from_toml(crate_root: &Path, key: &str, item: &toml_edit::Item) -> CargoResult<Self> {
+ if let Some(version) = item.as_str() {
+ let dep = Self::new(key).set_source(RegistrySource::new(version));
+ Ok(dep)
+ } else if let Some(table) = item.as_table_like() {
+ let (name, rename) = if let Some(value) = table.get("package") {
+ (
+ value
+ .as_str()
+ .ok_or_else(|| invalid_type(key, "package", value.type_name(), "string"))?
+ .to_owned(),
+ Some(key.to_owned()),
+ )
+ } else {
+ (key.to_owned(), None)
+ };
+
+ let source: Source =
+ if let Some(git) = table.get("git") {
+ let mut src = GitSource::new(
+ git.as_str()
+ .ok_or_else(|| invalid_type(key, "git", git.type_name(), "string"))?,
+ );
+ if let Some(value) = table.get("branch") {
+ src = src.set_branch(value.as_str().ok_or_else(|| {
+ invalid_type(key, "branch", value.type_name(), "string")
+ })?);
+ }
+ if let Some(value) = table.get("tag") {
+ src = src.set_tag(value.as_str().ok_or_else(|| {
+ invalid_type(key, "tag", value.type_name(), "string")
+ })?);
+ }
+ if let Some(value) = table.get("rev") {
+ src = src.set_rev(value.as_str().ok_or_else(|| {
+ invalid_type(key, "rev", value.type_name(), "string")
+ })?);
+ }
+ if let Some(value) = table.get("version") {
+ src = src.set_version(value.as_str().ok_or_else(|| {
+ invalid_type(key, "version", value.type_name(), "string")
+ })?);
+ }
+ src.into()
+ } else if let Some(path) = table.get("path") {
+ let path = crate_root
+ .join(path.as_str().ok_or_else(|| {
+ invalid_type(key, "path", path.type_name(), "string")
+ })?);
+ let mut src = PathSource::new(path);
+ if let Some(value) = table.get("version") {
+ src = src.set_version(value.as_str().ok_or_else(|| {
+ invalid_type(key, "version", value.type_name(), "string")
+ })?);
+ }
+ src.into()
+ } else if let Some(version) = table.get("version") {
+ let src = RegistrySource::new(version.as_str().ok_or_else(|| {
+ invalid_type(key, "version", version.type_name(), "string")
+ })?);
+ src.into()
+ } else if let Some(workspace) = table.get("workspace") {
+ let workspace_bool = workspace.as_bool().ok_or_else(|| {
+ invalid_type(key, "workspace", workspace.type_name(), "bool")
+ })?;
+ if !workspace_bool {
+ anyhow::bail!("`{key}.workspace = false` is unsupported")
+ }
+ let src = WorkspaceSource::new();
+ src.into()
+ } else {
+ anyhow::bail!("Unrecognized dependency source for `{key}`");
+ };
+ let registry = if let Some(value) = table.get("registry") {
+ Some(
+ value
+ .as_str()
+ .ok_or_else(|| invalid_type(key, "registry", value.type_name(), "string"))?
+ .to_owned(),
+ )
+ } else {
+ None
+ };
+
+ let default_features = table.get("default-features").and_then(|v| v.as_bool());
+ if table.contains_key("default_features") {
+ anyhow::bail!("Use of `default_features` in `{key}` is unsupported, please switch to `default-features`");
+ }
+
+ let features = if let Some(value) = table.get("features") {
+ Some(
+ value
+ .as_array()
+ .ok_or_else(|| invalid_type(key, "features", value.type_name(), "array"))?
+ .iter()
+ .map(|v| {
+ v.as_str().map(|s| s.to_owned()).ok_or_else(|| {
+ invalid_type(key, "features", v.type_name(), "string")
+ })
+ })
+ .collect::<CargoResult<IndexSet<String>>>()?,
+ )
+ } else {
+ None
+ };
+
+ let optional = table.get("optional").and_then(|v| v.as_bool());
+
+ let dep = Self {
+ name,
+ rename,
+ source: Some(source),
+ registry,
+ default_features,
+ features,
+ optional,
+ inherited_features: None,
+ };
+ Ok(dep)
+ } else {
+            anyhow::bail!("Unrecognized dependency entry format for `{key}`");
+ }
+ }
+
+ /// Get the dependency name as defined in the manifest,
+ /// that is, either the alias (rename field if Some),
+ /// or the official package name (name field).
+ pub fn toml_key(&self) -> &str {
+ self.rename().unwrap_or(&self.name)
+ }
+
+ /// Convert dependency to TOML.
+ ///
+    /// Returns a `toml_edit::Item` holding either the version requirement as a
+    /// plain string or an `InlineTable` describing the registry/path/git/workspace
+    /// source. (If the dependency is `optional`, sets `default-features = false`,
+    /// or carries other fields such as `features` or a rename, an `InlineTable`
+    /// is returned in any case.)
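+    ///
+    /// For example (as exercised by the unit tests in this module), a plain
+    /// registry dependency renders as `dep = "1.0"`, while an optional one
+    /// renders as `dep = { version = "1.0", optional = true }`.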
+ ///
+    /// # Panics
+    ///
+    /// Panics if `crate_root` is not an absolute path.
+ pub fn to_toml(&self, crate_root: &Path) -> toml_edit::Item {
+ assert!(
+ crate_root.is_absolute(),
+ "Absolute path needed, got: {}",
+ crate_root.display()
+ );
+ let table: toml_edit::Item = match (
+ self.optional.unwrap_or(false),
+ self.features.as_ref(),
+ self.default_features.unwrap_or(true),
+ self.source.as_ref(),
+ self.registry.as_ref(),
+ self.rename.as_ref(),
+ ) {
+ // Extra short when version flag only
+ (
+ false,
+ None,
+ true,
+ Some(Source::Registry(RegistrySource { version: v })),
+ None,
+ None,
+ ) => toml_edit::value(v),
+ (false, None, true, Some(Source::Workspace(WorkspaceSource {})), None, None) => {
+ let mut table = toml_edit::InlineTable::default();
+ table.set_dotted(true);
+ table.insert("workspace", true.into());
+ toml_edit::value(toml_edit::Value::InlineTable(table))
+ }
+ // Other cases are represented as an inline table
+ (_, _, _, _, _, _) => {
+ let mut table = toml_edit::InlineTable::default();
+
+ match &self.source {
+ Some(Source::Registry(src)) => {
+ table.insert("version", src.version.as_str().into());
+ }
+ Some(Source::Path(src)) => {
+ let relpath = path_field(crate_root, &src.path);
+ if let Some(r) = src.version.as_deref() {
+ table.insert("version", r.into());
+ }
+ table.insert("path", relpath.into());
+ }
+ Some(Source::Git(src)) => {
+ table.insert("git", src.git.as_str().into());
+ if let Some(branch) = src.branch.as_deref() {
+ table.insert("branch", branch.into());
+ }
+ if let Some(tag) = src.tag.as_deref() {
+ table.insert("tag", tag.into());
+ }
+ if let Some(rev) = src.rev.as_deref() {
+ table.insert("rev", rev.into());
+ }
+ if let Some(r) = src.version.as_deref() {
+ table.insert("version", r.into());
+ }
+ }
+ Some(Source::Workspace(_)) => {
+ table.insert("workspace", true.into());
+ }
+ None => {}
+ }
+ if table.contains_key("version") {
+ if let Some(r) = self.registry.as_deref() {
+ table.insert("registry", r.into());
+ }
+ }
+
+ if self.rename.is_some() {
+ table.insert("package", self.name.as_str().into());
+ }
+ if let Some(v) = self.default_features {
+ table.insert("default-features", v.into());
+ }
+ if let Some(features) = self.features.as_ref() {
+ let features: toml_edit::Value = features.iter().cloned().collect();
+ table.insert("features", features);
+ }
+ if let Some(v) = self.optional {
+ table.insert("optional", v.into());
+ }
+
+ toml_edit::value(toml_edit::Value::InlineTable(table))
+ }
+ };
+
+ table
+ }
+
+ /// Modify existing entry to match this dependency.
+ pub fn update_toml<'k>(
+ &self,
+ crate_root: &Path,
+ key: &mut KeyMut<'k>,
+ item: &mut toml_edit::Item,
+ ) {
+ if str_or_1_len_table(item) {
+ // Nothing to preserve
+ *item = self.to_toml(crate_root);
+ key.fmt();
+ } else if let Some(table) = item.as_table_like_mut() {
+ match &self.source {
+ Some(Source::Registry(src)) => {
+ table.insert("version", toml_edit::value(src.version.as_str()));
+
+ for key in ["path", "git", "branch", "tag", "rev", "workspace"] {
+ table.remove(key);
+ }
+ }
+ Some(Source::Path(src)) => {
+ let relpath = path_field(crate_root, &src.path);
+ table.insert("path", toml_edit::value(relpath));
+ if let Some(r) = src.version.as_deref() {
+ table.insert("version", toml_edit::value(r));
+ } else {
+ table.remove("version");
+ }
+
+ for key in ["git", "branch", "tag", "rev", "workspace"] {
+ table.remove(key);
+ }
+ }
+ Some(Source::Git(src)) => {
+ table.insert("git", toml_edit::value(src.git.as_str()));
+ if let Some(branch) = src.branch.as_deref() {
+ table.insert("branch", toml_edit::value(branch));
+ } else {
+ table.remove("branch");
+ }
+ if let Some(tag) = src.tag.as_deref() {
+ table.insert("tag", toml_edit::value(tag));
+ } else {
+ table.remove("tag");
+ }
+ if let Some(rev) = src.rev.as_deref() {
+ table.insert("rev", toml_edit::value(rev));
+ } else {
+ table.remove("rev");
+ }
+ if let Some(r) = src.version.as_deref() {
+ table.insert("version", toml_edit::value(r));
+ } else {
+ table.remove("version");
+ }
+
+ for key in ["path", "workspace"] {
+ table.remove(key);
+ }
+ }
+ Some(Source::Workspace(_)) => {
+ table.insert("workspace", toml_edit::value(true));
+ table.set_dotted(true);
+ key.fmt();
+ for key in [
+ "version",
+ "registry",
+ "registry-index",
+ "path",
+ "git",
+ "branch",
+ "tag",
+ "rev",
+ "package",
+ "default-features",
+ ] {
+ table.remove(key);
+ }
+ }
+ None => {}
+ }
+ if table.contains_key("version") {
+ if let Some(r) = self.registry.as_deref() {
+ table.insert("registry", toml_edit::value(r));
+ } else {
+ table.remove("registry");
+ }
+ } else {
+ table.remove("registry");
+ }
+
+ if self.rename.is_some() {
+ table.insert("package", toml_edit::value(self.name.as_str()));
+ }
+ match self.default_features {
+ Some(v) => {
+ table.insert("default-features", toml_edit::value(v));
+ }
+ None => {
+ table.remove("default-features");
+ }
+ }
+ if let Some(new_features) = self.features.as_ref() {
+ let mut features = table
+ .get("features")
+ .and_then(|i| i.as_value())
+ .and_then(|v| v.as_array())
+ .and_then(|a| {
+ a.iter()
+ .map(|v| v.as_str())
+ .collect::<Option<IndexSet<_>>>()
+ })
+ .unwrap_or_default();
+ features.extend(new_features.iter().map(|s| s.as_str()));
+ let features = toml_edit::value(features.into_iter().collect::<toml_edit::Value>());
+ table.set_dotted(false);
+ table.insert("features", features);
+ } else {
+ table.remove("features");
+ }
+ match self.optional {
+ Some(v) => {
+ table.set_dotted(false);
+ table.insert("optional", toml_edit::value(v));
+ }
+ None => {
+ table.remove("optional");
+ }
+ }
+
+ table.fmt();
+ } else {
+ unreachable!("Invalid dependency type: {}", item.type_name());
+ }
+ }
+}
+
+fn invalid_type(dep: &str, key: &str, actual: &str, expected: &str) -> anyhow::Error {
+ anyhow::format_err!("Found {actual} for {key} when {expected} was expected for {dep}")
+}
+
+impl std::fmt::Display for Dependency {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ if let Some(source) = self.source() {
+ write!(f, "{}@{}", self.name, source)
+ } else {
+ self.toml_key().fmt(f)
+ }
+ }
+}
+
+impl<'s> From<&'s Summary> for Dependency {
+ fn from(other: &'s Summary) -> Self {
+ let source: Source = if let Some(path) = other.source_id().local_path() {
+ PathSource::new(path)
+ .set_version(other.version().to_string())
+ .into()
+ } else if let Some(git_ref) = other.source_id().git_reference() {
+ let mut src = GitSource::new(other.source_id().url().to_string())
+ .set_version(other.version().to_string());
+ match git_ref {
+ GitReference::Branch(branch) => src = src.set_branch(branch),
+ GitReference::Tag(tag) => src = src.set_tag(tag),
+ GitReference::Rev(rev) => src = src.set_rev(rev),
+ GitReference::DefaultBranch => {}
+ }
+ src.into()
+ } else {
+ RegistrySource::new(other.version().to_string()).into()
+ };
+ Dependency::new(other.name().as_str()).set_source(source)
+ }
+}
+
+impl From<Summary> for Dependency {
+ fn from(other: Summary) -> Self {
+ (&other).into()
+ }
+}
+
+fn path_field(crate_root: &Path, abs_path: &Path) -> String {
+ let relpath = pathdiff::diff_paths(abs_path, crate_root).expect("both paths are absolute");
+ let relpath = relpath.to_str().unwrap().replace('\\', "/");
+ relpath
+}
+
+/// Primary location of a dependency.
+#[derive(Debug, Hash, PartialEq, Eq, Clone)]
+pub enum Source {
+ /// Dependency from a registry.
+ Registry(RegistrySource),
+ /// Dependency from a local path.
+ Path(PathSource),
+ /// Dependency from a git repo.
+ Git(GitSource),
+ /// Dependency from a workspace.
+ Workspace(WorkspaceSource),
+}
+
+impl Source {
+ /// Access the registry source, if present.
+ pub fn as_registry(&self) -> Option<&RegistrySource> {
+ match self {
+ Self::Registry(src) => Some(src),
+ _ => None,
+ }
+ }
+
+ /// Access the path source, if present.
+ #[allow(dead_code)]
+ pub fn as_path(&self) -> Option<&PathSource> {
+ match self {
+ Self::Path(src) => Some(src),
+ _ => None,
+ }
+ }
+
+ /// Access the git source, if present.
+ #[allow(dead_code)]
+ pub fn as_git(&self) -> Option<&GitSource> {
+ match self {
+ Self::Git(src) => Some(src),
+ _ => None,
+ }
+ }
+
+ /// Access the workspace source, if present.
+ #[allow(dead_code)]
+ pub fn as_workspace(&self) -> Option<&WorkspaceSource> {
+ match self {
+ Self::Workspace(src) => Some(src),
+ _ => None,
+ }
+ }
+}
+
+impl std::fmt::Display for Source {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Self::Registry(src) => src.fmt(f),
+ Self::Path(src) => src.fmt(f),
+ Self::Git(src) => src.fmt(f),
+ Self::Workspace(src) => src.fmt(f),
+ }
+ }
+}
+
+impl<'s> From<&'s Source> for Source {
+ fn from(inner: &'s Source) -> Self {
+ inner.clone()
+ }
+}
+
+impl From<RegistrySource> for Source {
+ fn from(inner: RegistrySource) -> Self {
+ Self::Registry(inner)
+ }
+}
+
+impl From<PathSource> for Source {
+ fn from(inner: PathSource) -> Self {
+ Self::Path(inner)
+ }
+}
+
+impl From<GitSource> for Source {
+ fn from(inner: GitSource) -> Self {
+ Self::Git(inner)
+ }
+}
+
+impl From<WorkspaceSource> for Source {
+ fn from(inner: WorkspaceSource) -> Self {
+ Self::Workspace(inner)
+ }
+}
+
+/// Dependency from a registry.
+#[derive(Debug, Hash, PartialEq, Eq, Clone)]
+#[non_exhaustive]
+pub struct RegistrySource {
+ /// Version requirement.
+ pub version: String,
+}
+
+impl RegistrySource {
+ /// Specify dependency by version requirement.
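+    ///
+    /// Any semver build metadata (a `+...` suffix) is stripped from the
+    /// requirement, since keeping it in `Cargo.toml` would only produce an
+    /// "ignored metadata" warning.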
+ pub fn new(version: impl AsRef<str>) -> Self {
+ // versions might have semver metadata appended which we do not want to
+ // store in the cargo toml files. This would cause a warning upon compilation
+ // ("version requirement […] includes semver metadata which will be ignored")
+ let version = version.as_ref().split('+').next().unwrap();
+ Self {
+ version: version.to_owned(),
+ }
+ }
+}
+
+impl std::fmt::Display for RegistrySource {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.version.fmt(f)
+ }
+}
+
+/// Dependency from a local path.
+#[derive(Debug, Hash, PartialEq, Eq, Clone)]
+#[non_exhaustive]
+pub struct PathSource {
+ /// Local, absolute path.
+ pub path: PathBuf,
+ /// Version requirement for when published.
+ pub version: Option<String>,
+}
+
+impl PathSource {
+ /// Specify dependency from a path.
+ pub fn new(path: impl Into<PathBuf>) -> Self {
+ Self {
+ path: path.into(),
+ version: None,
+ }
+ }
+
+ /// Set an optional version requirement.
+ pub fn set_version(mut self, version: impl AsRef<str>) -> Self {
+ // versions might have semver metadata appended which we do not want to
+ // store in the cargo toml files. This would cause a warning upon compilation
+ // ("version requirement […] includes semver metadata which will be ignored")
+ let version = version.as_ref().split('+').next().unwrap();
+ self.version = Some(version.to_owned());
+ self
+ }
+
+ /// Get the SourceID for this dependency.
+ pub fn source_id(&self) -> CargoResult<SourceId> {
+ SourceId::for_path(&self.path)
+ }
+}
+
+impl std::fmt::Display for PathSource {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.path.display().fmt(f)
+ }
+}
+
+/// Dependency from a git repo.
+#[derive(Debug, Hash, PartialEq, Eq, Clone)]
+#[non_exhaustive]
+pub struct GitSource {
+ /// Repository URL.
+ pub git: String,
+ /// Select specific branch.
+ pub branch: Option<String>,
+ /// Select specific tag.
+ pub tag: Option<String>,
+ /// Select specific rev.
+ pub rev: Option<String>,
+ /// Version requirement for when published.
+ pub version: Option<String>,
+}
+
+impl GitSource {
+ /// Specify dependency from a git repo.
+ pub fn new(git: impl Into<String>) -> Self {
+ Self {
+ git: git.into(),
+ branch: None,
+ tag: None,
+ rev: None,
+ version: None,
+ }
+ }
+
+ /// Specify an optional branch.
+ pub fn set_branch(mut self, branch: impl Into<String>) -> Self {
+ self.branch = Some(branch.into());
+ self.tag = None;
+ self.rev = None;
+ self
+ }
+
+ /// Specify an optional tag.
+ pub fn set_tag(mut self, tag: impl Into<String>) -> Self {
+ self.branch = None;
+ self.tag = Some(tag.into());
+ self.rev = None;
+ self
+ }
+
+ /// Specify an optional rev.
+ pub fn set_rev(mut self, rev: impl Into<String>) -> Self {
+ self.branch = None;
+ self.tag = None;
+ self.rev = Some(rev.into());
+ self
+ }
+
+ /// Get the SourceID for this dependency.
+ pub fn source_id(&self) -> CargoResult<SourceId> {
+ let git_url = self.git.parse::<url::Url>()?;
+ let git_ref = self.git_ref();
+ SourceId::for_git(&git_url, git_ref)
+ }
+
+ fn git_ref(&self) -> GitReference {
+ match (
+ self.branch.as_deref(),
+ self.tag.as_deref(),
+ self.rev.as_deref(),
+ ) {
+ (Some(branch), _, _) => GitReference::Branch(branch.to_owned()),
+ (_, Some(tag), _) => GitReference::Tag(tag.to_owned()),
+ (_, _, Some(rev)) => GitReference::Rev(rev.to_owned()),
+ _ => GitReference::DefaultBranch,
+ }
+ }
+
+ /// Set an optional version requirement.
+ pub fn set_version(mut self, version: impl AsRef<str>) -> Self {
+ // versions might have semver metadata appended which we do not want to
+ // store in the cargo toml files. This would cause a warning upon compilation
+ // ("version requirement […] includes semver metadata which will be ignored")
+ let version = version.as_ref().split('+').next().unwrap();
+ self.version = Some(version.to_owned());
+ self
+ }
+}
+
+impl std::fmt::Display for GitSource {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let git_ref = self.git_ref();
+ if let Some(pretty_ref) = git_ref.pretty_ref() {
+ write!(f, "{}?{}", self.git, pretty_ref)
+ } else {
+ write!(f, "{}", self.git)
+ }
+ }
+}
+
+/// Dependency from a workspace.
+#[derive(Debug, Hash, PartialEq, Eq, Clone)]
+#[non_exhaustive]
+pub struct WorkspaceSource;
+
+impl WorkspaceSource {
+ pub fn new() -> Self {
+ Self
+ }
+}
+
+impl Display for WorkspaceSource {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ "workspace".fmt(f)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use std::path::Path;
+
+ use crate::util::toml_mut::manifest::LocalManifest;
+ use cargo_util::paths;
+
+ use super::*;
+
+ #[test]
+ fn to_toml_simple_dep() {
+ let crate_root =
+ paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+ let dep = Dependency::new("dep").set_source(RegistrySource::new("1.0"));
+ let key = dep.toml_key();
+ let item = dep.to_toml(&crate_root);
+
+ assert_eq!(key, "dep".to_owned());
+
+ verify_roundtrip(&crate_root, key, &item);
+ }
+
+ #[test]
+ fn to_toml_simple_dep_with_version() {
+ let crate_root =
+ paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+ let dep = Dependency::new("dep").set_source(RegistrySource::new("1.0"));
+ let key = dep.toml_key();
+ let item = dep.to_toml(&crate_root);
+
+ assert_eq!(key, "dep".to_owned());
+ assert_eq!(item.as_str(), Some("1.0"));
+
+ verify_roundtrip(&crate_root, key, &item);
+ }
+
+ #[test]
+ fn to_toml_optional_dep() {
+ let crate_root =
+ paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+ let dep = Dependency::new("dep")
+ .set_source(RegistrySource::new("1.0"))
+ .set_optional(true);
+ let key = dep.toml_key();
+ let item = dep.to_toml(&crate_root);
+
+ assert_eq!(key, "dep".to_owned());
+ assert!(item.is_inline_table());
+
+ let dep = item.as_inline_table().unwrap();
+ assert_eq!(dep.get("optional").unwrap().as_bool(), Some(true));
+
+ verify_roundtrip(&crate_root, key, &item);
+ }
+
+ #[test]
+ fn to_toml_dep_without_default_features() {
+ let crate_root =
+ paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+ let dep = Dependency::new("dep")
+ .set_source(RegistrySource::new("1.0"))
+ .set_default_features(false);
+ let key = dep.toml_key();
+ let item = dep.to_toml(&crate_root);
+
+ assert_eq!(key, "dep".to_owned());
+ assert!(item.is_inline_table());
+
+ let dep = item.as_inline_table().unwrap();
+ assert_eq!(dep.get("default-features").unwrap().as_bool(), Some(false));
+
+ verify_roundtrip(&crate_root, key, &item);
+ }
+
+ #[test]
+ fn to_toml_dep_with_path_source() {
+ let root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+ let crate_root = root.join("foo");
+ let dep = Dependency::new("dep").set_source(PathSource::new(root.join("bar")));
+ let key = dep.toml_key();
+ let item = dep.to_toml(&crate_root);
+
+ assert_eq!(key, "dep".to_owned());
+ assert!(item.is_inline_table());
+
+ let dep = item.as_inline_table().unwrap();
+ assert_eq!(dep.get("path").unwrap().as_str(), Some("../bar"));
+
+ verify_roundtrip(&crate_root, key, &item);
+ }
+
+ #[test]
+ fn to_toml_dep_with_git_source() {
+ let crate_root =
+ paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+ let dep = Dependency::new("dep").set_source(GitSource::new("https://foor/bar.git"));
+ let key = dep.toml_key();
+ let item = dep.to_toml(&crate_root);
+
+ assert_eq!(key, "dep".to_owned());
+ assert!(item.is_inline_table());
+
+ let dep = item.as_inline_table().unwrap();
+ assert_eq!(
+ dep.get("git").unwrap().as_str(),
+ Some("https://foor/bar.git")
+ );
+
+ verify_roundtrip(&crate_root, key, &item);
+ }
+
+ #[test]
+ fn to_toml_renamed_dep() {
+ let crate_root =
+ paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+ let dep = Dependency::new("dep")
+ .set_source(RegistrySource::new("1.0"))
+ .set_rename("d");
+ let key = dep.toml_key();
+ let item = dep.to_toml(&crate_root);
+
+ assert_eq!(key, "d".to_owned());
+ assert!(item.is_inline_table());
+
+ let dep = item.as_inline_table().unwrap();
+ assert_eq!(dep.get("package").unwrap().as_str(), Some("dep"));
+
+ verify_roundtrip(&crate_root, key, &item);
+ }
+
+ #[test]
+ fn to_toml_dep_from_alt_registry() {
+ let crate_root =
+ paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+ let dep = Dependency::new("dep")
+ .set_source(RegistrySource::new("1.0"))
+ .set_registry("alternative");
+ let key = dep.toml_key();
+ let item = dep.to_toml(&crate_root);
+
+ assert_eq!(key, "dep".to_owned());
+ assert!(item.is_inline_table());
+
+ let dep = item.as_inline_table().unwrap();
+ assert_eq!(dep.get("registry").unwrap().as_str(), Some("alternative"));
+
+ verify_roundtrip(&crate_root, key, &item);
+ }
+
+ #[test]
+ fn to_toml_complex_dep() {
+ let crate_root =
+ paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+ let dep = Dependency::new("dep")
+ .set_source(RegistrySource::new("1.0"))
+ .set_default_features(false)
+ .set_rename("d");
+ let key = dep.toml_key();
+ let item = dep.to_toml(&crate_root);
+
+ assert_eq!(key, "d".to_owned());
+ assert!(item.is_inline_table());
+
+ let dep = item.as_inline_table().unwrap();
+ assert_eq!(dep.get("package").unwrap().as_str(), Some("dep"));
+ assert_eq!(dep.get("version").unwrap().as_str(), Some("1.0"));
+ assert_eq!(dep.get("default-features").unwrap().as_bool(), Some(false));
+
+ verify_roundtrip(&crate_root, key, &item);
+ }
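+
+    #[test]
+    fn to_toml_dep_with_features() {
+        // A sketch following the pattern of the surrounding tests: a registry
+        // dependency with an explicit feature list serializes to an inline
+        // table and round-trips through `from_toml`/`to_toml`.
+        let crate_root =
+            paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+        let dep = Dependency::new("dep")
+            .set_source(RegistrySource::new("1.0"))
+            .extend_features(["one".to_owned(), "two".to_owned()]);
+        let key = dep.toml_key();
+        let item = dep.to_toml(&crate_root);
+
+        assert_eq!(key, "dep".to_owned());
+        assert!(item.is_inline_table());
+
+        let dep = item.as_inline_table().unwrap();
+        assert_eq!(dep.get("version").unwrap().as_str(), Some("1.0"));
+        assert_eq!(dep.get("features").unwrap().as_array().unwrap().len(), 2);
+
+        verify_roundtrip(&crate_root, key, &item);
+    }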
+
+ #[test]
+ fn paths_with_forward_slashes_are_left_as_is() {
+ let crate_root =
+ paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+ let path = crate_root.join("sibling/crate");
+ let relpath = "sibling/crate";
+ let dep = Dependency::new("dep").set_source(PathSource::new(path));
+ let key = dep.toml_key();
+ let item = dep.to_toml(&crate_root);
+
+ let table = item.as_inline_table().unwrap();
+ let got = table.get("path").unwrap().as_str().unwrap();
+ assert_eq!(got, relpath);
+
+ verify_roundtrip(&crate_root, key, &item);
+ }
+
+ #[test]
+ fn overwrite_with_workspace_source_fmt_key() {
+ let crate_root =
+ paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("./")));
+ let toml = "dep = \"1.0\"\n";
+ let manifest = toml.parse().unwrap();
+ let mut local = LocalManifest {
+ path: crate_root.clone(),
+ manifest,
+ };
+ assert_eq!(local.manifest.to_string(), toml);
+ for (key, item) in local.data.clone().iter() {
+ let dep = Dependency::from_toml(&crate_root, key, item).unwrap();
+ let dep = dep.set_source(WorkspaceSource::new());
+ local.insert_into_table(&vec![], &dep).unwrap();
+ assert_eq!(local.data.to_string(), "dep.workspace = true\n");
+ }
+ }
+
+ #[test]
+ #[cfg(windows)]
+ fn normalise_windows_style_paths() {
+ let crate_root =
+ paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/")));
+ let original = crate_root.join(r"sibling\crate");
+ let should_be = "sibling/crate";
+ let dep = Dependency::new("dep").set_source(PathSource::new(original));
+ let key = dep.toml_key();
+ let item = dep.to_toml(&crate_root);
+
+ let table = item.as_inline_table().unwrap();
+ let got = table.get("path").unwrap().as_str().unwrap();
+ assert_eq!(got, should_be);
+
+ verify_roundtrip(&crate_root, key, &item);
+ }
+
+ #[track_caller]
+ fn verify_roundtrip(crate_root: &Path, key: &str, item: &toml_edit::Item) {
+ let roundtrip = Dependency::from_toml(crate_root, key, item).unwrap();
+ let round_key = roundtrip.toml_key();
+ let round_item = roundtrip.to_toml(crate_root);
+ assert_eq!(key, round_key);
+ assert_eq!(item.to_string(), round_item.to_string());
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/toml_mut/manifest.rs b/src/tools/cargo/src/cargo/util/toml_mut/manifest.rs
new file mode 100644
index 000000000..f3fc150e1
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/toml_mut/manifest.rs
@@ -0,0 +1,540 @@
+//! Parsing and editing of manifest files.
+
+use std::ops::{Deref, DerefMut};
+use std::path::{Path, PathBuf};
+use std::str;
+
+use anyhow::Context as _;
+
+use super::dependency::Dependency;
+use crate::core::dependency::DepKind;
+use crate::core::FeatureValue;
+use crate::util::interning::InternedString;
+use crate::CargoResult;
+
+/// Dependency table to add deps to.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct DepTable {
+ kind: DepKind,
+ target: Option<String>,
+}
+
+impl DepTable {
+ const KINDS: &'static [Self] = &[
+ Self::new().set_kind(DepKind::Normal),
+ Self::new().set_kind(DepKind::Development),
+ Self::new().set_kind(DepKind::Build),
+ ];
+
+    /// Create a new `DepTable` for normal dependencies with no target.
+ pub const fn new() -> Self {
+ Self {
+ kind: DepKind::Normal,
+ target: None,
+ }
+ }
+
+ /// Choose the type of dependency.
+ pub const fn set_kind(mut self, kind: DepKind) -> Self {
+ self.kind = kind;
+ self
+ }
+
+ /// Choose the platform for the dependency.
+ pub fn set_target(mut self, target: impl Into<String>) -> Self {
+ self.target = Some(target.into());
+ self
+ }
+
+ /// Type of dependency.
+ pub fn kind(&self) -> DepKind {
+ self.kind
+ }
+
+ /// Platform for the dependency.
+ pub fn target(&self) -> Option<&str> {
+ self.target.as_deref()
+ }
+
+ /// Keys to the table.
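+    ///
+    /// For a plain kind this is the single manifest key for that kind (e.g.
+    /// `dependencies`); with a target set it becomes
+    /// `["target", "<target>", "<kind table>"]`.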
+ pub fn to_table(&self) -> Vec<&str> {
+ if let Some(target) = &self.target {
+ vec!["target", target, self.kind.kind_table()]
+ } else {
+ vec![self.kind.kind_table()]
+ }
+ }
+}
+
+impl Default for DepTable {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl From<DepKind> for DepTable {
+ fn from(other: DepKind) -> Self {
+ Self::new().set_kind(other)
+ }
+}
+
+/// An editable Cargo manifest.
+#[derive(Debug, Clone)]
+pub struct Manifest {
+ /// Manifest contents as TOML data.
+ pub data: toml_edit::Document,
+}
+
+impl Manifest {
+ /// Get the manifest's package name.
+ pub fn package_name(&self) -> CargoResult<&str> {
+ self.data
+ .as_table()
+ .get("package")
+ .and_then(|m| m.get("name"))
+ .and_then(|m| m.as_str())
+ .ok_or_else(parse_manifest_err)
+ }
+
+ /// Get the specified table from the manifest.
+ pub fn get_table<'a>(&'a self, table_path: &[String]) -> CargoResult<&'a toml_edit::Item> {
+ /// Descend into a manifest until the required table is found.
+ fn descend<'a>(
+ input: &'a toml_edit::Item,
+ path: &[String],
+ ) -> CargoResult<&'a toml_edit::Item> {
+ if let Some(segment) = path.get(0) {
+ let value = input
+ .get(&segment)
+ .ok_or_else(|| non_existent_table_err(segment))?;
+
+ if value.is_table_like() {
+ descend(value, &path[1..])
+ } else {
+ Err(non_existent_table_err(segment))
+ }
+ } else {
+ Ok(input)
+ }
+ }
+
+ descend(self.data.as_item(), table_path)
+ }
+
+ /// Get the specified table from the manifest.
+ pub fn get_table_mut<'a>(
+ &'a mut self,
+ table_path: &[String],
+ ) -> CargoResult<&'a mut toml_edit::Item> {
+ /// Descend into a manifest until the required table is found.
+ fn descend<'a>(
+ input: &'a mut toml_edit::Item,
+ path: &[String],
+ ) -> CargoResult<&'a mut toml_edit::Item> {
+ if let Some(segment) = path.get(0) {
+ let mut default_table = toml_edit::Table::new();
+ default_table.set_implicit(true);
+ let value = input[&segment].or_insert(toml_edit::Item::Table(default_table));
+
+ if value.is_table_like() {
+ descend(value, &path[1..])
+ } else {
+ Err(non_existent_table_err(segment))
+ }
+ } else {
+ Ok(input)
+ }
+ }
+
+ descend(self.data.as_item_mut(), table_path)
+ }
+
+ /// Get all sections in the manifest that exist and might contain
+ /// dependencies. The returned items are always `Table` or
+ /// `InlineTable`.
+ pub fn get_sections(&self) -> Vec<(DepTable, toml_edit::Item)> {
+ let mut sections = Vec::new();
+
+ for table in DepTable::KINDS {
+ let dependency_type = table.kind.kind_table();
+ // Dependencies can be in the three standard sections...
+ if self
+ .data
+ .get(dependency_type)
+ .map(|t| t.is_table_like())
+ .unwrap_or(false)
+ {
+ sections.push((table.clone(), self.data[dependency_type].clone()))
+ }
+
+ // ... and in `target.<target>.(build-/dev-)dependencies`.
+ let target_sections = self
+ .data
+ .as_table()
+ .get("target")
+ .and_then(toml_edit::Item::as_table_like)
+ .into_iter()
+ .flat_map(toml_edit::TableLike::iter)
+ .filter_map(|(target_name, target_table)| {
+ let dependency_table = target_table.get(dependency_type)?;
+ dependency_table.as_table_like().map(|_| {
+ (
+ table.clone().set_target(target_name),
+ dependency_table.clone(),
+ )
+ })
+ });
+
+ sections.extend(target_sections);
+ }
+
+ sections
+ }
+
+ pub fn get_legacy_sections(&self) -> Vec<String> {
+ let mut result = Vec::new();
+
+ for dependency_type in ["dev_dependencies", "build_dependencies"] {
+ if self.data.contains_key(dependency_type) {
+ result.push(dependency_type.to_owned());
+ }
+
+ // ... and in `target.<target>.(build-/dev-)dependencies`.
+ result.extend(
+ self.data
+ .as_table()
+ .get("target")
+ .and_then(toml_edit::Item::as_table_like)
+ .into_iter()
+ .flat_map(toml_edit::TableLike::iter)
+ .filter_map(|(target_name, target_table)| {
+ if target_table.as_table_like()?.contains_key(dependency_type) {
+ Some(format!("target.{target_name}.{dependency_type}"))
+ } else {
+ None
+ }
+ }),
+ );
+ }
+ result
+ }
+}
+
+impl str::FromStr for Manifest {
+ type Err = anyhow::Error;
+
+    /// Read manifest data from a string.
+ fn from_str(input: &str) -> ::std::result::Result<Self, Self::Err> {
+ let d: toml_edit::Document = input.parse().context("Manifest not valid TOML")?;
+
+ Ok(Manifest { data: d })
+ }
+}
+
+impl std::fmt::Display for Manifest {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.data.fmt(f)
+ }
+}
+
+/// An editable Cargo manifest that is available locally.
+#[derive(Debug, Clone)]
+pub struct LocalManifest {
+ /// Path to the manifest.
+ pub path: PathBuf,
+ /// Manifest contents.
+ pub manifest: Manifest,
+}
+
+impl Deref for LocalManifest {
+ type Target = Manifest;
+
+ fn deref(&self) -> &Manifest {
+ &self.manifest
+ }
+}
+
+impl DerefMut for LocalManifest {
+ fn deref_mut(&mut self) -> &mut Manifest {
+ &mut self.manifest
+ }
+}
+
+impl LocalManifest {
+    /// Construct the `LocalManifest` corresponding to the `Path` provided.
+ pub fn try_new(path: &Path) -> CargoResult<Self> {
+ if !path.is_absolute() {
+ anyhow::bail!("can only edit absolute paths, got {}", path.display());
+ }
+ let data = cargo_util::paths::read(&path)?;
+ let manifest = data.parse().context("Unable to parse Cargo.toml")?;
+ Ok(LocalManifest {
+ manifest,
+ path: path.to_owned(),
+ })
+ }
+
+ /// Write changes back to the file.
+ pub fn write(&self) -> CargoResult<()> {
+ if !self.manifest.data.contains_key("package")
+ && !self.manifest.data.contains_key("project")
+ {
+ if self.manifest.data.contains_key("workspace") {
+ anyhow::bail!(
+ "found virtual manifest at {}, but this command requires running against an \
+ actual package in this workspace.",
+ self.path.display()
+ );
+ } else {
+ anyhow::bail!(
+ "missing expected `package` or `project` fields in {}",
+ self.path.display()
+ );
+ }
+ }
+
+ let s = self.manifest.data.to_string();
+ let new_contents_bytes = s.as_bytes();
+
+ cargo_util::paths::write(&self.path, new_contents_bytes)
+ }
+
+ /// Lookup a dependency.
+ pub fn get_dependency_versions<'s>(
+ &'s self,
+ dep_key: &'s str,
+ ) -> impl Iterator<Item = (DepTable, CargoResult<Dependency>)> + 's {
+ let crate_root = self.path.parent().expect("manifest path is absolute");
+ self.get_sections()
+ .into_iter()
+ .filter_map(move |(table_path, table)| {
+ let table = table.into_table().ok()?;
+ Some(
+ table
+ .into_iter()
+ .filter_map(|(key, item)| {
+ if key.as_str() == dep_key {
+ Some((table_path.clone(), key, item))
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>(),
+ )
+ })
+ .flatten()
+ .map(move |(table_path, dep_key, dep_item)| {
+ let dep = Dependency::from_toml(crate_root, &dep_key, &dep_item);
+ (table_path, dep)
+ })
+ }
+
+ /// Add entry to a Cargo.toml.
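+    ///
+    /// `table_path` is the chain of table keys leading to the dependency table
+    /// (for example `["dependencies"]`); if an entry with the dependency's key
+    /// already exists it is updated in place, otherwise a new entry is added.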
+ pub fn insert_into_table(
+ &mut self,
+ table_path: &[String],
+ dep: &Dependency,
+ ) -> CargoResult<()> {
+ let crate_root = self
+ .path
+ .parent()
+ .expect("manifest path is absolute")
+ .to_owned();
+ let dep_key = dep.toml_key();
+
+ let table = self.get_table_mut(table_path)?;
+ if let Some((mut dep_key, dep_item)) = table
+ .as_table_like_mut()
+ .unwrap()
+ .get_key_value_mut(dep_key)
+ {
+ dep.update_toml(&crate_root, &mut dep_key, dep_item);
+ } else {
+ let new_dependency = dep.to_toml(&crate_root);
+ table[dep_key] = new_dependency;
+ }
+ if let Some(t) = table.as_inline_table_mut() {
+ t.fmt()
+ }
+
+ Ok(())
+ }
+
+ /// Remove entry from a Cargo.toml.
+ pub fn remove_from_table(&mut self, table_path: &[String], name: &str) -> CargoResult<()> {
+ let parent_table = self.get_table_mut(table_path)?;
+
+ let dep = parent_table
+ .get_mut(name)
+ .filter(|t| !t.is_none())
+ .ok_or_else(|| non_existent_dependency_err(name, table_path.join(".")))?;
+
+ // remove the dependency
+ *dep = toml_edit::Item::None;
+
+ // remove table if empty
+ if parent_table.as_table_like().unwrap().is_empty() {
+ *parent_table = toml_edit::Item::None;
+ }
+
+ Ok(())
+ }
+
+    /// Remove references to `dep_key` if it is no longer present.
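+    ///
+    /// This cleans up `[features]` values that activate the dependency once it
+    /// has been removed or is no longer optional.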
+ pub fn gc_dep(&mut self, dep_key: &str) {
+ let explicit_dep_activation = self.is_explicit_dep_activation(dep_key);
+ let status = self.dep_status(dep_key);
+
+ if let Some(toml_edit::Item::Table(feature_table)) =
+ self.data.as_table_mut().get_mut("features")
+ {
+ for (_feature, mut feature_values) in feature_table.iter_mut() {
+ if let toml_edit::Item::Value(toml_edit::Value::Array(feature_values)) =
+ &mut feature_values
+ {
+ fix_feature_activations(
+ feature_values,
+ dep_key,
+ status,
+ explicit_dep_activation,
+ );
+ }
+ }
+ }
+ }
+
+ fn is_explicit_dep_activation(&self, dep_key: &str) -> bool {
+ if let Some(toml_edit::Item::Table(feature_table)) = self.data.as_table().get("features") {
+ for values in feature_table
+ .iter()
+ .map(|(_, a)| a)
+ .filter_map(|i| i.as_value())
+ .filter_map(|v| v.as_array())
+ {
+ for value in values.iter().filter_map(|v| v.as_str()) {
+ let value = FeatureValue::new(InternedString::new(value));
+ if let FeatureValue::Dep { dep_name } = &value {
+ if dep_name.as_str() == dep_key {
+ return true;
+ }
+ }
+ }
+ }
+ }
+
+ false
+ }
+
+ fn dep_status(&self, dep_key: &str) -> DependencyStatus {
+ let mut status = DependencyStatus::None;
+ for (_, tbl) in self.get_sections() {
+ if let toml_edit::Item::Table(tbl) = tbl {
+ if let Some(dep_item) = tbl.get(dep_key) {
+ let optional = dep_item
+ .get("optional")
+ .and_then(|i| i.as_value())
+ .and_then(|i| i.as_bool())
+ .unwrap_or(false);
+ if optional {
+ return DependencyStatus::Optional;
+ } else {
+ status = DependencyStatus::Required;
+ }
+ }
+ }
+ }
+ status
+ }
+}
+
+impl std::fmt::Display for LocalManifest {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.manifest.fmt(f)
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
+enum DependencyStatus {
+ None,
+ Optional,
+ Required,
+}
+
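+/// Rewrite a `[features]` value array after the status of `dep_key` changed,
+/// dropping entries that no longer apply (for example, references to a removed
+/// dependency) and strengthening weak `dep?/feature` entries to `dep/feature`
+/// once the dependency is no longer optional.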
+fn fix_feature_activations(
+ feature_values: &mut toml_edit::Array,
+ dep_key: &str,
+ status: DependencyStatus,
+ explicit_dep_activation: bool,
+) {
+ let remove_list: Vec<usize> = feature_values
+ .iter()
+ .enumerate()
+ .filter_map(|(idx, value)| value.as_str().map(|s| (idx, s)))
+ .filter_map(|(idx, value)| {
+ let parsed_value = FeatureValue::new(InternedString::new(value));
+ match status {
+ DependencyStatus::None => match (parsed_value, explicit_dep_activation) {
+ (FeatureValue::Feature(dep_name), false)
+ | (FeatureValue::Dep { dep_name }, _)
+ | (FeatureValue::DepFeature { dep_name, .. }, _) => dep_name == dep_key,
+ _ => false,
+ },
+ DependencyStatus::Optional => false,
+ DependencyStatus::Required => match (parsed_value, explicit_dep_activation) {
+ (FeatureValue::Feature(dep_name), false)
+ | (FeatureValue::Dep { dep_name }, _) => dep_name == dep_key,
+ (FeatureValue::Feature(_), true) | (FeatureValue::DepFeature { .. }, _) => {
+ false
+ }
+ },
+ }
+ .then(|| idx)
+ })
+ .collect();
+
+ // Remove the found indices in reverse order so we don't invalidate them.
+ for idx in remove_list.iter().rev() {
+ feature_values.remove(*idx);
+ }
+
+ if status == DependencyStatus::Required {
+ for value in feature_values.iter_mut() {
+ let parsed_value = if let Some(value) = value.as_str() {
+ FeatureValue::new(InternedString::new(value))
+ } else {
+ continue;
+ };
+ if let FeatureValue::DepFeature {
+ dep_name,
+ dep_feature,
+ weak,
+ } = parsed_value
+ {
+ if dep_name == dep_key && weak {
+ *value = format!("{dep_name}/{dep_feature}").into();
+ }
+ }
+ }
+ }
+
+ feature_values.fmt();
+}
+
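+/// Returns `true` if the item is a plain string (e.g. `foo = "1.0"`) or a
+/// table-like item with exactly one entry.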
+pub fn str_or_1_len_table(item: &toml_edit::Item) -> bool {
+ item.is_str() || item.as_table_like().map(|t| t.len() == 1).unwrap_or(false)
+}
+
+fn parse_manifest_err() -> anyhow::Error {
+ anyhow::format_err!("unable to parse external Cargo.toml")
+}
+
+fn non_existent_table_err(table: impl std::fmt::Display) -> anyhow::Error {
+ anyhow::format_err!("the table `{table}` could not be found.")
+}
+
+fn non_existent_dependency_err(
+ name: impl std::fmt::Display,
+ table: impl std::fmt::Display,
+) -> anyhow::Error {
+ anyhow::format_err!("the dependency `{name}` could not be found in `{table}`.")
+}
diff --git a/src/tools/cargo/src/cargo/util/toml_mut/mod.rs b/src/tools/cargo/src/cargo/util/toml_mut/mod.rs
new file mode 100644
index 000000000..bdd70e8e6
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/toml_mut/mod.rs
@@ -0,0 +1,13 @@
+//! Utilities for in-place editing of Cargo.toml manifests.
+//!
+//! These utilities operate only on the level of a TOML document, and generally
+//! do not perform any processing of information beyond what is required for
+//! editing. For more comprehensive usage of manifests, see
+//! [`Manifest`](crate::core::manifest::Manifest).
+//!
+//! In most cases, the entrypoint for editing is
+//! [`LocalManifest`](crate::util::toml_mut::manifest::LocalManifest),
+//! which contains editing functionality for a given manifest's dependencies.
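+//!
+//! A rough usage sketch (illustrative only; treat the exact constructor name
+//! and signatures as assumptions and consult `manifest.rs` for the real API):
+//!
+//! ```ignore
+//! use std::path::Path;
+//!
+//! use cargo::util::toml_mut::manifest::LocalManifest;
+//!
+//! fn touch_manifest(path: &Path) -> cargo::util::CargoResult<()> {
+//!     let mut manifest = LocalManifest::try_new(path)?;
+//!     // ... edit dependencies via `insert_into_table` / `remove_from_table` ...
+//!     manifest.write()
+//! }
+//! ```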
+
+pub mod dependency;
+pub mod manifest;
diff --git a/src/tools/cargo/src/cargo/util/vcs.rs b/src/tools/cargo/src/cargo/util/vcs.rs
new file mode 100644
index 000000000..095164e23
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/vcs.rs
@@ -0,0 +1,104 @@
+use crate::util::CargoResult;
+use cargo_util::paths;
+use cargo_util::ProcessBuilder;
+use std::path::Path;
+
+// Check if we are in an existing repo. We define that to be true if either:
+//
+// 1. We are in a git repo and the path to the new package is not an ignored
+// path in that repo.
+// 2. We are in an HG repo.
+pub fn existing_vcs_repo(path: &Path, cwd: &Path) -> bool {
+ fn in_git_repo(path: &Path, cwd: &Path) -> bool {
+ if let Ok(repo) = GitRepo::discover(path, cwd) {
+ // Don't check if the working directory itself is ignored.
+ if repo.workdir().map_or(false, |workdir| workdir == path) {
+ true
+ } else {
+ !repo.is_path_ignored(path).unwrap_or(false)
+ }
+ } else {
+ false
+ }
+ }
+
+ in_git_repo(path, cwd) || HgRepo::discover(path, cwd).is_ok()
+}
+
+pub struct HgRepo;
+pub struct GitRepo;
+pub struct PijulRepo;
+pub struct FossilRepo;
+
+impl GitRepo {
+ pub fn init(path: &Path, _: &Path) -> CargoResult<GitRepo> {
+ git2::Repository::init(path)?;
+ Ok(GitRepo)
+ }
+ pub fn discover(path: &Path, _: &Path) -> Result<git2::Repository, git2::Error> {
+ git2::Repository::discover(path)
+ }
+}
+
+impl HgRepo {
+ pub fn init(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
+ ProcessBuilder::new("hg")
+ .cwd(cwd)
+ .arg("init")
+ .arg("--")
+ .arg(path)
+ .exec()?;
+ Ok(HgRepo)
+ }
+ pub fn discover(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
+ ProcessBuilder::new("hg")
+ .cwd(cwd)
+ .arg("--cwd")
+ .arg(path)
+ .arg("root")
+ .exec_with_output()?;
+ Ok(HgRepo)
+ }
+}
+
+impl PijulRepo {
+ pub fn init(path: &Path, cwd: &Path) -> CargoResult<PijulRepo> {
+ ProcessBuilder::new("pijul")
+ .cwd(cwd)
+ .arg("init")
+ .arg("--")
+ .arg(path)
+ .exec()?;
+ Ok(PijulRepo)
+ }
+}
+
+impl FossilRepo {
+ pub fn init(path: &Path, cwd: &Path) -> CargoResult<FossilRepo> {
+ // fossil doesn't create the directory so we'll do that first
+ paths::create_dir_all(path)?;
+
+ // set up the paths we'll use
+ let db_fname = ".fossil";
+ let mut db_path = path.to_owned();
+ db_path.push(db_fname);
+
+ // then create the fossil DB in that location
+ ProcessBuilder::new("fossil")
+ .cwd(cwd)
+ .arg("init")
+ .arg("--")
+ .arg(&db_path)
+ .exec()?;
+
+ // open it in that new directory
+ ProcessBuilder::new("fossil")
+ .cwd(&path)
+ .arg("open")
+ .arg("--")
+ .arg(db_fname)
+ .exec()?;
+
+ Ok(FossilRepo)
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/workspace.rs b/src/tools/cargo/src/cargo/util/workspace.rs
new file mode 100644
index 000000000..e8317f101
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/workspace.rs
@@ -0,0 +1,130 @@
+use crate::core::compiler::Unit;
+use crate::core::manifest::TargetSourcePath;
+use crate::core::{Target, Workspace};
+use crate::ops::CompileOptions;
+use crate::util::CargoResult;
+use anyhow::bail;
+use cargo_util::ProcessBuilder;
+use std::fmt::Write;
+use std::path::PathBuf;
+
+fn get_available_targets<'a>(
+ filter_fn: fn(&Target) -> bool,
+ ws: &'a Workspace<'_>,
+ options: &'a CompileOptions,
+) -> CargoResult<Vec<&'a str>> {
+ let packages = options.spec.get_packages(ws)?;
+
+ let mut targets: Vec<_> = packages
+ .into_iter()
+ .flat_map(|pkg| {
+ pkg.manifest()
+ .targets()
+ .iter()
+ .filter(|target| filter_fn(target))
+ })
+ .map(Target::name)
+ .collect();
+
+ targets.sort();
+
+ Ok(targets)
+}
+
+fn print_available_targets(
+ filter_fn: fn(&Target) -> bool,
+ ws: &Workspace<'_>,
+ options: &CompileOptions,
+ option_name: &str,
+ plural_name: &str,
+) -> CargoResult<()> {
+ let targets = get_available_targets(filter_fn, ws, options)?;
+
+ let mut output = String::new();
+ writeln!(output, "\"{}\" takes one argument.", option_name)?;
+
+ if targets.is_empty() {
+ writeln!(output, "No {} available.", plural_name)?;
+ } else {
+ writeln!(output, "Available {}:", plural_name)?;
+ for target in targets {
+ writeln!(output, " {}", target)?;
+ }
+ }
+ bail!("{}", output)
+}
+
+pub fn print_available_packages(ws: &Workspace<'_>) -> CargoResult<()> {
+ let packages = ws
+ .members()
+ .map(|pkg| pkg.name().as_str())
+ .collect::<Vec<_>>();
+
+ let mut output = "\"--package <SPEC>\" requires a SPEC format value, \
+ which can be any package ID specifier in the dependency graph.\n\
+ Run `cargo help pkgid` for more information about SPEC format.\n\n"
+ .to_string();
+
+ if packages.is_empty() {
+ // This should never happen.
+ // Just in case something regresses, we cover it here.
+ writeln!(output, "No packages available.")?;
+ } else {
+ writeln!(output, "Possible packages/workspace members:")?;
+ for package in packages {
+ writeln!(output, " {}", package)?;
+ }
+ }
+ bail!("{}", output)
+}
+
+pub fn print_available_examples(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
+ print_available_targets(Target::is_example, ws, options, "--example", "examples")
+}
+
+pub fn print_available_binaries(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
+ print_available_targets(Target::is_bin, ws, options, "--bin", "binaries")
+}
+
+pub fn print_available_benches(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
+ print_available_targets(Target::is_bench, ws, options, "--bench", "benches")
+}
+
+pub fn print_available_tests(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
+ print_available_targets(Target::is_test, ws, options, "--test", "tests")
+}
+
+/// The path that we pass to rustc is actually fairly important because it will
+/// show up in error messages (important for readability), debug information
+/// (important for caching), etc. As a result we need to be pretty careful how we
+/// actually invoke rustc.
+///
+/// In general users don't expect `cargo build` to cause rebuilds if you change
+/// directories. That could be because you changed directories within the package,
+/// or because you moved the whole package to a new directory. As a
+/// result we mostly don't factor in `cwd` to this calculation. Instead we try to
+/// track the workspace as much as possible and we update the current directory
+/// of rustc/rustdoc where appropriate.
+///
+/// The first returned value here is the argument to pass to rustc, and the
+/// second is the cwd that rustc should operate in.
+pub fn path_args(ws: &Workspace<'_>, unit: &Unit) -> (PathBuf, PathBuf) {
+ let ws_root = ws.root();
+ let src = match unit.target.src_path() {
+ TargetSourcePath::Path(path) => path.to_path_buf(),
+ TargetSourcePath::Metabuild => unit.pkg.manifest().metabuild_path(ws.target_dir()),
+ };
+ assert!(src.is_absolute());
+ if unit.pkg.package_id().source_id().is_path() {
+ if let Ok(path) = src.strip_prefix(ws_root) {
+ return (path.to_path_buf(), ws_root.to_path_buf());
+ }
+ }
+ (src, unit.pkg.root().to_path_buf())
+}
+
+pub fn add_path_args(ws: &Workspace<'_>, unit: &Unit, cmd: &mut ProcessBuilder) {
+ let (arg, cwd) = path_args(ws, unit);
+ cmd.arg(arg);
+ cmd.cwd(cwd);
+}
diff --git a/src/tools/cargo/src/cargo/version.rs b/src/tools/cargo/src/cargo/version.rs
new file mode 100644
index 000000000..9829fd72e
--- /dev/null
+++ b/src/tools/cargo/src/cargo/version.rs
@@ -0,0 +1,80 @@
+//! Code for representing cargo's release version number.
+
+use std::fmt;
+
+/// Information about the git repository where cargo was built from.
+pub struct CommitInfo {
+ pub short_commit_hash: String,
+ pub commit_hash: String,
+ pub commit_date: String,
+}
+
+/// Cargo's version.
+pub struct VersionInfo {
+ /// Cargo's version, such as "1.57.0", "1.58.0-beta.1", "1.59.0-nightly", etc.
+ pub version: String,
+ /// The release channel we were built for (stable/beta/nightly/dev).
+ ///
+ /// `None` if not built via rustbuild.
+ pub release_channel: Option<String>,
+ /// Information about the Git repository we may have been built from.
+ ///
+ /// `None` if not built from a git repo.
+ pub commit_info: Option<CommitInfo>,
+}
+
+impl fmt::Display for VersionInfo {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.version)?;
+
+ if let Some(ref ci) = self.commit_info {
+ write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
+ };
+ Ok(())
+ }
+}
+
+/// Returns information about cargo's version.
+pub fn version() -> VersionInfo {
+ macro_rules! option_env_str {
+ ($name:expr) => {
+ option_env!($name).map(|s| s.to_string())
+ };
+ }
+
+ // This is the version set in rustbuild, which we use to match rustc.
+ let version = option_env_str!("CFG_RELEASE").unwrap_or_else(|| {
+ // If cargo is not being built by rustbuild, then we just use the
+ // version from cargo's own `Cargo.toml`.
+ //
+ // There are two versions at play here:
+ // - version of cargo-the-binary, which you see when you type `cargo --version`
+ // - version of cargo-the-library, which you download from crates.io for use
+ // in your packages.
+ //
+ // The library is permanently unstable, so it always has a 0 major
+ // version. However, the CLI now reports a stable 1.x version
+ // (starting in 1.26) which stays in sync with rustc's version.
+ //
+ // Coincidentally, the minor version for cargo-the-library is always
+ // +1 of rustc's minor version (that is, `rustc 1.11.0` corresponds to
+ // `cargo 0.12.0`). The versions always get bumped in lockstep, so
+ // this should continue to hold.
+ let minor = env!("CARGO_PKG_VERSION_MINOR").parse::<u8>().unwrap() - 1;
+ let patch = env!("CARGO_PKG_VERSION_PATCH").parse::<u8>().unwrap();
+ format!("1.{}.{}", minor, patch)
+ });
+
+ let release_channel = option_env_str!("CFG_RELEASE_CHANNEL");
+ let commit_info = option_env_str!("CARGO_COMMIT_HASH").map(|commit_hash| CommitInfo {
+ short_commit_hash: option_env_str!("CARGO_COMMIT_SHORT_HASH").unwrap(),
+ commit_hash,
+ commit_date: option_env_str!("CARGO_COMMIT_DATE").unwrap(),
+ });
+
+ VersionInfo {
+ version,
+ release_channel,
+ commit_info,
+ }
+}
diff --git a/src/tools/cargo/src/doc/README.md b/src/tools/cargo/src/doc/README.md
new file mode 100644
index 000000000..79181b7f6
--- /dev/null
+++ b/src/tools/cargo/src/doc/README.md
@@ -0,0 +1,71 @@
+# Cargo documentation
+
+This directory contains Cargo's documentation. There are two parts: [The Cargo
+Book], which is built with [mdBook], and the man pages, which are built with
+[mdman].
+
+[The Cargo Book]: https://doc.rust-lang.org/cargo/
+[mdBook]: https://github.com/rust-lang/mdBook
+[mdman]: https://github.com/rust-lang/cargo/tree/master/crates/mdman/
+
+### Building the book
+
+Building the book requires [mdBook]. To get it:
+
+```console
+$ cargo install mdbook
+```
+
+To build the book:
+
+```console
+$ mdbook build
+```
+
+`mdbook` provides a variety of different commands and options to help you work
+on the book:
+
+* `mdbook build --open`: Build the book and open it in a web browser.
+* `mdbook serve`: Launches a web server on localhost. It also automatically
+ rebuilds the book whenever any file changes and automatically reloads your
+ web browser.
+
+The book contents are driven by the [`SUMMARY.md`](src/SUMMARY.md) file, and
+every file must be linked there.
+
+### Building the man pages
+
+The man pages use a tool called [mdman] to convert markdown to a man page
+format. Check out the documentation at
+[`mdman/doc/`](../../crates/mdman/doc/)
+for more details.
+
+The man pages are converted from a templated markdown (located in the
+[`src/doc/man/`](man)
+directory) to three different formats:
+
+1. Troff-style man pages, saved in [`src/etc/man/`](../etc/man).
+2. Markdown (with some HTML) for the Cargo Book, saved in
+ [`src/doc/src/commands/`](src/commands).
+3. Plain text (needed for embedded man pages on platforms without man such as
+ Windows), saved in [`src/doc/man/generated_txt/`](man/generated_txt).
+
+To rebuild the man pages, run the script `build-man.sh` in the `src/doc` directory.
+
+```console
+$ ./build-man.sh
+```
+
+### SemVer chapter tests
+
+There is a script to verify that the examples in the SemVer chapter work as
+intended. To run the tests, go into the `semver-check` directory and run
+`cargo run`.
+
+## Contributing
+
+We'd love your help with improving the documentation! Please feel free to
+[open issues](https://github.com/rust-lang/cargo/issues) about anything, and
+send in PRs for things you'd like to fix or change. If your change is large,
+please open an issue first, so we can make sure that it's something we'd
+accept before you go through the work of getting a PR together.
diff --git a/src/tools/cargo/src/doc/book.toml b/src/tools/cargo/src/doc/book.toml
new file mode 100644
index 000000000..1dd1f8e0f
--- /dev/null
+++ b/src/tools/cargo/src/doc/book.toml
@@ -0,0 +1,8 @@
+[book]
+title = "The Cargo Book"
+author = "Alex Crichton, Steve Klabnik and Carol Nichols, with contributions from the Rust community"
+
+[output.html]
+curly-quotes = true # Enable smart-punctuation feature for more than quotes.
+git-repository-url = "https://github.com/rust-lang/cargo/tree/master/src/doc/src"
+edit-url-template = "https://github.com/rust-lang/cargo/edit/master/src/doc/{path}"
diff --git a/src/tools/cargo/src/doc/build-man.sh b/src/tools/cargo/src/doc/build-man.sh
new file mode 100755
index 000000000..7b1330b58
--- /dev/null
+++ b/src/tools/cargo/src/doc/build-man.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+# This script builds the Cargo man pages.
+#
+# The source for the man pages are located in src/doc/man/ in markdown format.
+# These also are handlebars templates, see crates/mdman/README.md for details.
+#
+# The generated man pages are placed in the src/etc/man/ directory. The pages
+# are also expanded into markdown (after being expanded by handlebars) and
+# saved in the src/doc/src/commands/ directory. These are included in the
+# Cargo book, which is converted to HTML by mdbook.
+
+set -e
+
+cd "$(dirname "${BASH_SOURCE[0]}")"
+
+OPTIONS="--url https://doc.rust-lang.org/cargo/commands/ \
+ --man rustc:1=https://doc.rust-lang.org/rustc/index.html \
+ --man rustdoc:1=https://doc.rust-lang.org/rustdoc/index.html"
+
+cargo run --manifest-path=../../crates/mdman/Cargo.toml -- \
+ -t md -o src/commands man/cargo*.md \
+ $OPTIONS
+
+cargo run --manifest-path=../../crates/mdman/Cargo.toml -- \
+ -t txt -o man/generated_txt man/cargo*.md \
+ $OPTIONS
+
+cargo run --manifest-path=../../crates/mdman/Cargo.toml -- \
+ -t man -o ../etc/man man/cargo*.md \
+ $OPTIONS
diff --git a/src/tools/cargo/src/doc/contrib/README.md b/src/tools/cargo/src/doc/contrib/README.md
new file mode 100644
index 000000000..57756211b
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/README.md
@@ -0,0 +1,12 @@
+# Cargo Contributor Guide
+
+This is the source of the Cargo Contributor Guide, published at
+<https://rust-lang.github.io/cargo/contrib/>. It is written in Markdown, using
+the [mdbook] tool to convert to HTML. If you are editing these pages, the best
+option to view the results is to run `mdbook serve`, which will start a web
+server on localhost that you can visit to view the book, and it will
+automatically reload each time you edit a page.
+
+This is published via GitHub Actions to GitHub Pages.
+
+[mdbook]: https://rust-lang.github.io/mdBook/
diff --git a/src/tools/cargo/src/doc/contrib/book.toml b/src/tools/cargo/src/doc/contrib/book.toml
new file mode 100644
index 000000000..628179c0d
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/book.toml
@@ -0,0 +1,17 @@
+[book]
+title = "Cargo Contributor Guide"
+authors = ["Eric Huss"]
+
+[output.html]
+curly-quotes = true # Enable smart-punctuation feature for more than quotes.
+git-repository-url = "https://github.com/rust-lang/cargo/tree/master/src/doc/contrib/src"
+
+[output.html.redirect]
+"/apidoc/cargo/index.html" = "https://doc.rust-lang.org/nightly/nightly-rustc/cargo/"
+"/architecture/index.html" = "../implementation/architecture.html"
+"/architecture/console.html" = "../implementation/console.html"
+"/architecture/subcommands.html" = "../implementation/subcommands.html"
+"/architecture/codebase.html" = "https://doc.rust-lang.org/nightly/nightly-rustc/cargo"
+"/architecture/compilation.html" = "https://doc.rust-lang.org/nightly/nightly-rustc/cargo"
+"/architecture/files.html" = "https://doc.rust-lang.org/nightly/nightly-rustc/cargo"
+"/architecture/packages.html" = "https://doc.rust-lang.org/nightly/nightly-rustc/cargo"
diff --git a/src/tools/cargo/src/doc/contrib/src/SUMMARY.md b/src/tools/cargo/src/doc/contrib/src/SUMMARY.md
new file mode 100644
index 000000000..643cc5ac7
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/SUMMARY.md
@@ -0,0 +1,20 @@
+# Summary
+
+- [Introduction](./index.md)
+- [Issue Tracker](./issues.md)
+- [Process](./process/index.md)
+ - [Working on Cargo](./process/working-on-cargo.md)
+ - [Release process](./process/release.md)
+ - [Unstable features](./process/unstable.md)
+- [Design Principles](./design.md)
+- [Implementing a Change](./implementation/index.md)
+ - [Architecture](./implementation/architecture.md)
+ - [New subcommands](./implementation/subcommands.md)
+ - [Console Output](./implementation/console.md)
+ - [Filesystem](./implementation/filesystem.md)
+ - [Debugging](./implementation/debugging.md)
+- [Tests](./tests/index.md)
+ - [Running Tests](./tests/running.md)
+ - [Writing Tests](./tests/writing.md)
+ - [Benchmarking and Profiling](./tests/profiling.md)
+ - [Crater](./tests/crater.md)
diff --git a/src/tools/cargo/src/doc/contrib/src/design.md b/src/tools/cargo/src/doc/contrib/src/design.md
new file mode 100644
index 000000000..d51d3eb20
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/design.md
@@ -0,0 +1,101 @@
+# Design Principles
+
+The purpose of Cargo is to formalize a canonical Rust workflow, by automating
+the standard tasks associated with distributing software. Cargo simplifies
+structuring a new project, adding dependencies, writing and running unit
+tests, and more.
+
+Cargo is not intended to be a general-purpose build tool. Ideally, it should
+be easy to integrate it within another build tool, though admittedly that is
+not as seamless as desired.
+
+## Stability and compatibility
+
+### Backwards compatibility
+
+Cargo strives to remain backwards compatible with projects created in previous
+versions. The CLI interface also strives to remain backwards compatible, such
+that the commands and options behave the same. That being said, changes in
+behavior, and even outright breakage are sometimes done in limited situations.
+The following outlines some situations where backwards-incompatible changes are
+made:
+
+* Anything that addresses a security concern.
+* Dropping support for older platforms and tooling. Cargo follows the Rust
+ [tiered platform support].
+* Changes to resolve possibly unsafe or unreliable behavior.
+
+None of these changes should be taken lightly; they should be avoided if
+possible, or made with a transition period that alerts the user to the
+potential change.
+
+Behavior is sometimes changed in ways that we are highly confident will not
+break existing workflows. Almost every change carries this risk, so it is
+often a judgment call balancing the benefit of the change against the
+perceived possibility of its negative consequences.
+
+At times, some changes fall in the gray area, where the current behavior is
+undocumented, or not working as intended. These are more difficult judgment
+calls. The general preference is to balance towards avoiding breaking existing
+workflows.
+
+Support for older registry APIs and index formats may be dropped, if there is
+high confidence that there aren't any active registries that may be affected.
+This has never (to my knowledge) happened so far, and is unlikely to happen in
+the future, but remains a possibility.
+
+In all of the above, a transition period may be employed if a change is known
+to cause breakage. A warning can be issued to alert the user that something
+will change, and provide them with an alternative to resolve the issue
+(preferably in a way that is compatible across versions if possible).
+
+Cargo is only expected to work with the version of the related Rust tools
+(`rustc`, `rustdoc`, etc.) that it is released with. As a matter of choice,
+the latest nightly works with the most recent stable release, but that is
+mostly to accommodate development of Cargo itself, and should not be expected
+by users.
+
+### Forwards compatibility
+
+Additionally, Cargo strives for a limited degree of *forwards compatibility*.
+Changes should not egregiously prevent older versions from working. This is
+mostly relevant for persistent data, such as on-disk files and the registry
+interface and index. It also applies to a lesser degree to the registry API.
+
+Changes to `Cargo.lock` require a transition time, where the new format is not
+automatically written when the lock file is updated. The transition time
+should not be less than 6 months, though preferably longer. New projects may
+use the new format in a shorter time frame.
+
+Changes to `Cargo.toml` can be made in any release. This is because the user
+must manually modify the file, and opt-in to any new changes. Additionally,
+Cargo will usually only issue a warning about new fields it doesn't
+understand, but otherwise continue to function.
+
+Changes to cache files (such as artifacts in the `target` directory, or cached
+data in Cargo's home directory) should not *prevent* older versions from
+running, but they may cause older versions to recreate the cache, which may
+result in a performance impact.
+
+Changes to the registry index should not prevent older versions from working.
+Generally, older versions ignore new fields, so the format should be easily
+extensible. Changes to the format or interpretation of existing fields should
+be done very carefully to avoid preventing older versions of Cargo from
+working. In some cases, this may mean that older versions of Cargo will not be
+able to *select* a newly published crate, but it shouldn't prevent them from
+working at all. This level of compatibility may not last forever, but the
+exact time frame for such a change has not yet been decided.
+
+The registry API may be changed in such a way to prevent older versions of
+Cargo from working. Generally, compatibility should be retained for as long as
+possible, but the exact length of time is not specified.
+
+## Simplicity and layers
+
+Standard workflows should be easy and consistent. Each knob that is added has
+a high cost, regardless of whether it is intended for a small audience. Layering and
+defaults can help avoid the surface area that the user needs to be concerned
+with. Try to avoid small functionalities that may have complex interactions
+with one another.
+
+[tiered platform support]: https://doc.rust-lang.org/nightly/rustc/platform-support.html
diff --git a/src/tools/cargo/src/doc/contrib/src/implementation/architecture.md b/src/tools/cargo/src/doc/contrib/src/implementation/architecture.md
new file mode 100644
index 000000000..b712c4fe5
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/implementation/architecture.md
@@ -0,0 +1,5 @@
+# Architecture Overview
+
+See the
+[nightly docs](https://doc.rust-lang.org/nightly/nightly-rustc/cargo/index.html)
+for an overview of `cargo`'s architecture and links out to further details.
diff --git a/src/tools/cargo/src/doc/contrib/src/implementation/console.md b/src/tools/cargo/src/doc/contrib/src/implementation/console.md
new file mode 100644
index 000000000..a73d232e0
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/implementation/console.md
@@ -0,0 +1,58 @@
+# Console Output
+
+All of Cargo's output should go through the [`Shell`] struct. You can normally
+obtain the `Shell` instance from the [`Config`] struct. Do **not** use the std
+`println!` macros.
+
+Most of Cargo's output goes to stderr. When running in JSON mode, the output
+goes to stdout.
+
+It is important to properly handle errors when writing to the console.
+Informational commands, like `cargo list`, should ignore any errors writing
+the output. There are some [`drop_print`] macros that are intended to make
+this easier.
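+
+As a rough illustration, the typical pattern looks something like the sketch
+below (the method and macro names are assumptions based on the current
+`Shell`, `Config`, and `drop_println!` APIs; check the linked sources for the
+real signatures):
+
+```rust
+use cargo::util::{CargoResult, Config};
+
+fn report(config: &Config) -> CargoResult<()> {
+    // Status messages go to stderr through the shared `Shell`.
+    config.shell().status("Fetching", "example-crate v0.1.0")?;
+
+    // Informational listings should not abort on write errors (such as a
+    // broken pipe); the `drop_println!` macro swallows them.
+    cargo::drop_println!(config, "example-crate v0.1.0");
+    Ok(())
+}
+```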
+
+Messages written during compilation should handle errors, and abort the build
+if they are unable to be displayed. This is generally automatically handled in
+the [`JobQueue`] as it processes each message.
+
+[`Shell`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/shell.rs
+[`Config`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/util/config/mod.rs
+[`drop_print`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/util/config/mod.rs#L1820-L1848
+[`JobQueue`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/job_queue.rs
+
+## Errors
+
+Cargo uses [`anyhow`] for managing errors. This makes it convenient to "chain"
+errors together, so that Cargo can report how an error originated, and what it
+was trying to do at the time.
+
+Error helpers are implemented in the [`errors`] module. Use the
+`InternalError` error type for errors that are not expected to happen. This
+will print a message to the user to file a bug report.
+
+The binary side of Cargo uses the `CliError` struct to wrap the process exit
+code. Usually Cargo exits with 101 for an error, but some commands like `cargo
+test` will exit with different codes.
+
+[`errors`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/util/errors.rs
+
+## Style
+
+Some guidelines for Cargo's output:
+
+* Keep the normal output brief. Cargo is already fairly noisy, so try to keep
+ the output as brief and clean as possible.
+* Good error messages are very important! Try to keep them brief and to the
+ point, but good enough that a beginner can understand what is wrong and can
+ figure out how to fix. It is a difficult balance to hit! Err on the side of
+ providing extra information.
+* When using any low-level routines, such as `std::fs`, *always* add error
+ context about what it is doing. For example, reading from a file should
+ include context about which file is being read if there is an error.
+* Cargo's error style is usually a phrase, starting with a lowercase letter.
+ If there is a longer error message that needs multiple sentences, go ahead
+ and use multiple sentences. This should probably be improved sometime in the
+ future to be more structured.
+
+[`anyhow`]: https://docs.rs/anyhow
diff --git a/src/tools/cargo/src/doc/contrib/src/implementation/debugging.md b/src/tools/cargo/src/doc/contrib/src/implementation/debugging.md
new file mode 100644
index 000000000..e148d72c3
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/implementation/debugging.md
@@ -0,0 +1,26 @@
+# Debugging
+
+## Logging
+
+Cargo uses the [`env_logger`] crate to display debug log messages. The
+`CARGO_LOG` environment variable can be set to enable debug logging, with a
+value such as `trace`, `debug`, or `warn`. It also supports filtering for
+specific modules. Feel free to use the standard [`log`] macros to help with
+diagnosing problems.
+
+```sh
+# Outputs all logs with levels debug and higher
+CARGO_LOG=debug cargo generate-lockfile
+
+# Don't forget that you can filter by module as well
+CARGO_LOG=cargo::core::resolver=trace cargo generate-lockfile
+
+# This will print lots of info about the download process. `trace` prints even more.
+CARGO_HTTP_DEBUG=true CARGO_LOG=cargo::ops::registry=debug cargo fetch
+
+# This is an important command for diagnosing fingerprint issues.
+CARGO_LOG=cargo::core::compiler::fingerprint=trace cargo build
+```
+
+[`env_logger`]: https://docs.rs/env_logger
+[`log`]: https://docs.rs/log
diff --git a/src/tools/cargo/src/doc/contrib/src/implementation/filesystem.md b/src/tools/cargo/src/doc/contrib/src/implementation/filesystem.md
new file mode 100644
index 000000000..0f70c5833
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/implementation/filesystem.md
@@ -0,0 +1,21 @@
+# Filesystem
+
+Cargo tends to get run on a very wide array of file systems. Different file
+systems can have a wide range of capabilities, and Cargo should strive to do
+its best to handle them. Some examples of issues to deal with:
+
+* Not all file systems support locking. Cargo tries to detect if locking is
+ supported, and if not, will ignore lock errors. This isn't ideal, but it is
+ difficult to deal with.
+* The [`fs::canonicalize`] function doesn't work on all file systems
+ (particularly some Windows file systems). If that function is used, there
+ should be a fallback if it fails (see the sketch after this list). This
+ function will also return `\\?\` style paths on Windows, which can have some
+ issues (such as some tools not supporting them, or having issues with
+ relative paths).
+* Timestamps can be unreliable. The [`fingerprint`] module has a deeper
+ discussion of this. One example is that Docker cache layers will erase the
+ fractional part of the time stamp.
+* Symlinks are not always supported, particularly on Windows.
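+
+For the [`fs::canonicalize`] point above, a minimal fallback sketch (purely
+illustrative; real call sites may want to report or chain the error rather
+than silently falling back):
+
+```rust
+use std::fs;
+use std::path::{Path, PathBuf};
+
+/// Best-effort canonicalization: fall back to the original path on file
+/// systems where `fs::canonicalize` is unsupported or fails.
+fn canonicalize_or_original(path: &Path) -> PathBuf {
+    fs::canonicalize(path).unwrap_or_else(|_| path.to_path_buf())
+}
+```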
+
+[`fingerprint`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/fingerprint.rs
+[`fs::canonicalize`]: https://doc.rust-lang.org/std/fs/fn.canonicalize.html
diff --git a/src/tools/cargo/src/doc/contrib/src/implementation/index.md b/src/tools/cargo/src/doc/contrib/src/implementation/index.md
new file mode 100644
index 000000000..ad7c80d5e
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/implementation/index.md
@@ -0,0 +1,6 @@
+# Implementing a Change
+
+This chapter gives an overview of what you need to know in making a change to cargo.
+
+If you feel something is missing that would help you, feel free to ask on
+[Zulip](https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo).
diff --git a/src/tools/cargo/src/doc/contrib/src/implementation/subcommands.md b/src/tools/cargo/src/doc/contrib/src/implementation/subcommands.md
new file mode 100644
index 000000000..9f5da3555
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/implementation/subcommands.md
@@ -0,0 +1,25 @@
+# New Subcommands
+
+Cargo is a single binary composed of a set of [`clap`] subcommands. All
+subcommands live in [`src/bin/cargo/commands`] directory.
+[`src/bin/cargo/main.rs`] is the entry point.
+
+Each subcommand, such as [`src/bin/cargo/commands/build.rs`], usually performs
+the following:
+
+1. Parse the CLI flags. See the [`command_prelude`] module for some helpers to make this easier.
+2. Load the config files.
+3. Discover and load the workspace.
+4. Call the actual implementation of the subcommand, which resides in [`src/cargo/ops`].
+
+If the subcommand is not found in the built-in list, then Cargo will
+automatically search for a subcommand named `cargo-{NAME}` in the user's `PATH`
+to execute the subcommand.
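+
+A skeletal built-in subcommand tends to look roughly like the sketch below.
+The helper names come from [`command_prelude`] and the existing commands, but
+treat the exact signatures as assumptions that may drift between Cargo
+versions; `frob` and the `ops::frob` call are made up for illustration:
+
+```rust
+use cargo::util::command_prelude::*;
+
+pub fn cli() -> Command {
+    // 1. Declare the CLI flags; clap parses them before `exec` is called.
+    subcommand("frob").about("Hypothetical example subcommand")
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    // 2./3. The config is already loaded; discover and load the workspace.
+    let ws = args.workspace(config)?;
+    // 4. Hand off to the real implementation in `src/cargo/ops`,
+    //    e.g. `ops::frob(&ws, &opts)?` (hypothetical).
+    let _ = ws;
+    Ok(())
+}
+```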
+
+
+[`clap`]: https://clap.rs/
+[`src/bin/cargo/commands/build.rs`]: https://github.com/rust-lang/cargo/tree/master/src/bin/cargo/commands/build.rs
+[`src/cargo/ops`]: https://github.com/rust-lang/cargo/tree/master/src/cargo/ops
+[`src/bin/cargo/commands`]: https://github.com/rust-lang/cargo/tree/master/src/bin/cargo/commands
+[`src/bin/cargo/main.rs`]: https://github.com/rust-lang/cargo/blob/master/src/bin/cargo/main.rs
+[`command_prelude`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/util/command_prelude.rs
diff --git a/src/tools/cargo/src/doc/contrib/src/index.md b/src/tools/cargo/src/doc/contrib/src/index.md
new file mode 100644
index 000000000..5ab169ea7
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/index.md
@@ -0,0 +1,29 @@
+# Introduction
+
+Thank you for your interest in contributing to [Cargo]! This guide provides an
+overview of how to contribute to Cargo, how to dive into the code, and how the
+testing infrastructure works.
+
+There are many ways to contribute, such as [helping other users], [filing
+issues], [improving the documentation], [fixing bugs], and working on [small]
+and [large features].
+
+If you have a general question about Cargo or its internals, feel free to ask
+on [Zulip].
+
+This guide assumes you have some familiarity with Rust, and how to use Cargo,
+[rustup], and general development tools like [git].
+
+Please also read the [Rust Code of Conduct].
+
+[Cargo]: https://doc.rust-lang.org/cargo/
+[Zulip]: https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo
+[Rust Code of Conduct]: https://www.rust-lang.org/policies/code-of-conduct
+[helping other users]: https://users.rust-lang.org/
+[filing issues]: issues.md
+[rustup]: https://rust-lang.github.io/rustup/
+[git]: https://git-scm.com/
+[improving the documentation]: https://github.com/rust-lang/cargo/tree/master/src/doc
+[fixing bugs]: process/index.md#working-on-small-bugs
+[small]: process/index.md#working-on-small-features
+[large features]: process/index.md#working-on-large-features
diff --git a/src/tools/cargo/src/doc/contrib/src/issues.md b/src/tools/cargo/src/doc/contrib/src/issues.md
new file mode 100644
index 000000000..8fc69544c
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/issues.md
@@ -0,0 +1,109 @@
+# Issue Tracker
+
+Cargo's issue tracker is located at
+<https://github.com/rust-lang/cargo/issues/>. This is the primary spot where
+we track bugs and small feature requests. See [Process] for more about our
+process for proposing changes.
+
+## Filing issues
+
+We can't fix what we don't know about, so please report problems liberally.
+This includes problems with understanding the documentation, unhelpful error
+messages, and unexpected behavior.
+
+**If you think that you have identified an issue with Cargo that might
+compromise its users' security, please do not open a public issue on GitHub.
+Instead, we ask you to refer to Rust's [security policy].**
+
+Opening an issue is as easy as following [this link][new-issues]. There are
+several templates for different issue kinds, but if none of them fit your
+issue, don't hesitate to modify one of the templates, or click the [Open a
+blank issue] link.
+
+The Rust tools are spread across multiple repositories in the Rust
+organization. It may not always be clear where to file an issue. No worries!
+If you file in the wrong tracker, someone will either transfer it to the
+correct one or ask you to move it. Some other repositories that may be
+relevant are:
+
+* [`rust-lang/rust`] --- Home for the [`rustc`] compiler and [`rustdoc`].
+* [`rust-lang/rustup`] --- Home for the [`rustup`] toolchain installer.
+* [`rust-lang/rustfmt`] --- Home for the `rustfmt` tool, which also includes `cargo fmt`.
+* [`rust-lang/rust-clippy`] --- Home for the `clippy` tool, which also includes `cargo clippy`.
+* [`rust-lang/crates.io`] --- Home for the [crates.io] website.
+
+It can be tricky to know where issues with [`cargo fix`] should be filed,
+since the fixes are driven by `rustc`, processed by [`rustfix`], and the
+front-end interface is implemented in Cargo. Feel free to file in the Cargo issue
+tracker, and it will get moved to one of the other issue trackers if
+necessary.
+
+[Process]: process/index.md
+[security policy]: https://www.rust-lang.org/security.html
+[new-issues]: https://github.com/rust-lang/cargo/issues/new/choose
+[Open a blank issue]: https://github.com/rust-lang/cargo/issues/new
+[`rust-lang/rust`]: https://github.com/rust-lang/rust
+[`rust-lang/rustup`]: https://github.com/rust-lang/rustup
+[`rust-lang/rustfmt`]: https://github.com/rust-lang/rustfmt
+[`rust-lang/rust-clippy`]: https://github.com/rust-lang/rust-clippy
+[`rustc`]: https://doc.rust-lang.org/rustc/
+[`rustdoc`]: https://doc.rust-lang.org/rustdoc/
+[`rustup`]: https://rust-lang.github.io/rustup/
+[`rust-lang/crates.io`]: https://github.com/rust-lang/crates.io
+[crates.io]: https://crates.io/
+[`rustfix`]: https://github.com/rust-lang/rustfix/
+[`cargo fix`]: https://doc.rust-lang.org/cargo/commands/cargo-fix.html
+
+## Issue labels
+
+[Issue labels] are very helpful to identify the types of issues and which
+category they are related to. The Cargo team typically manages assigning
+labels. The labels use a naming convention with short prefixes and colors to
+indicate the kind of label:
+
+* Yellow, **A**-prefixed labels state which **area** of the project an issue
+ relates to.
+
+* Light purple, **C**-prefixed labels represent the **category** of an issue.
+ In particular, **[C-feature-request]** marks *proposals* for new features. If
+ an issue is **C-feature-request**, but is not **[Feature accepted]** or
+ **[I-nominated]**, then it was not thoroughly discussed, and might need some
+ additional design or perhaps should be implemented as an external subcommand
+ first. Ping @rust-lang/cargo if you want to send a PR for such an issue.
+
+* Dark purple, **Command**-prefixed labels mean the issue has to do with a
+ specific cargo command.
+
+* Green, **E**-prefixed labels indicate the level of **experience** or
+ **effort** necessary to fix the issue. **[E-mentor]** issues also
+ have some instructions on how to get started. Generally, all of the
+ **E**-prefixed labels are issues that are ready for someone to contribute
+ to!
+
+* Red, **I**-prefixed labels indicate the **importance** of the issue. The
+ **[I-nominated]** label indicates that an issue has been nominated for
+ prioritizing at the next triage meeting.
+
+* Purple gray, **O**-prefixed labels are the **operating system** or platform
+ that this issue is specific to.
+
+* Orange, **P**-prefixed labels indicate a bug's **priority**.
+
+* **S**-prefixed labels are "status" labels, typically used for PRs, but can
+ also indicate an issue is **[S-blocked]**.
+
+* The light orange **[relnotes]** label marks issues that should be highlighted
+ in the [Rust release notes] of the next release.
+
+* Dark blue, **Z**-prefixed labels are for unstable, [nightly features].
+
+[Issue labels]: https://github.com/rust-lang/cargo/labels
+[E-easy]: https://github.com/rust-lang/cargo/labels/E-easy
+[E-mentor]: https://github.com/rust-lang/cargo/labels/E-mentor
+[I-nominated]: https://github.com/rust-lang/cargo/labels/I-nominated
+[C-feature-request]: https://github.com/rust-lang/cargo/labels/C-feature-request
+[Feature accepted]: https://github.com/rust-lang/cargo/labels/Feature%20accepted
+[S-blocked]: https://github.com/rust-lang/cargo/labels/S-blocked
+[Rust release notes]: https://github.com/rust-lang/rust/blob/master/RELEASES.md
+[nightly features]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html
+[relnotes]: https://github.com/rust-lang/cargo/issues?q=label%3Arelnotes
diff --git a/src/tools/cargo/src/doc/contrib/src/process/index.md b/src/tools/cargo/src/doc/contrib/src/process/index.md
new file mode 100644
index 000000000..348c49ba9
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/process/index.md
@@ -0,0 +1,132 @@
+# Process
+
+This chapter gives an overview of how Cargo comes together, and how you can be
+a part of that process.
+
+See the [Working on Cargo] chapter for an overview of the contribution
+process.
+
+Please read the guidelines below before working on an issue or new feature.
+
+**Due to limited review capacity, the Cargo team is not accepting new features
+or major changes at this time. Please consult with the team before opening a
+new PR. Only issues that have been explicitly marked as accepted will be
+reviewed.**
+
+[Working on Cargo]: working-on-cargo.md
+
+## Cargo team
+
+Cargo is managed by a [team] of volunteers. The Cargo Team reviews all
+changes, and sets the direction for the project.
+
+The team meets on a weekly basis on a video chat. If you are interested in
+participating, feel free to contact us on [Zulip].
+
+If you would like more direct mentorship, you can join our
+[office hours](https://github.com/rust-lang/cargo/wiki/Office-Hours).
+
+## Roadmap
+
+The [Roadmap Project Board] is used for tracking major initiatives. This gives
+an overview of the things the team is interested in and thinking about.
+
+The [RFC Project Board] is used for tracking [RFCs].
+
+[the 2020 roadmap]: https://blog.rust-lang.org/inside-rust/2020/01/10/cargo-in-2020.html
+[Roadmap Project Board]: https://github.com/rust-lang/cargo/projects/1
+[RFC Project Board]: https://github.com/rust-lang/cargo/projects/2
+[RFCs]: https://github.com/rust-lang/rfcs/
+
+## Working on small bugs
+
+Issues labeled with the [E-help-wanted], [E-easy], or [E-mentor] [labels] are
+typically issues that the Cargo team wants to see addressed, and are
+relatively easy to get started with. If you are interested in one of those,
+and it has not already been assigned to someone, leave a comment. See [Issue
+assignment](#issue-assignment) below for assigning yourself.
+
+If there is a specific issue that you are interested in, but it doesn't have
+one of the `E-` labels, leave a comment on the issue. If a Cargo team member
+has the time to help out, they will respond to help with the next steps.
+
+[E-help-wanted]: https://github.com/rust-lang/cargo/labels/E-help-wanted
+[E-easy]: https://github.com/rust-lang/cargo/labels/E-easy
+[E-mentor]: https://github.com/rust-lang/cargo/labels/E-mentor
+[labels]: ../issues.md#issue-labels
+
+## Working on large bugs
+
+Some issues may be difficult to fix. They may require significant code
+changes, or major design decisions. The [E-medium] and [E-hard] [labels] can
+be used to tag such issues. These will typically involve some discussion with
+the Cargo team on how to tackle it.
+
+[E-medium]: https://github.com/rust-lang/cargo/labels/E-medium
+[E-hard]: https://github.com/rust-lang/cargo/labels/E-hard
+
+## Working on small features
+
+Small feature requests are typically managed on the [issue
+tracker][issue-feature-request]. Features that the Cargo team have approved
+will have the [Feature accepted] label or the [E-mentor] label. If there is a
+feature request that you are interested in, feel free to leave a comment
+expressing your interest. If a Cargo team member has the time to help out,
+they will respond to help with the next steps. Keep in mind that the Cargo
+team has limited time, and may not be able to help with every feature request.
+Most of them require some design work, which can be difficult. Check out the
+[design principles chapter] for some guidance.
+
+## Working on large features
+
+Cargo follows the Rust model of evolution. Major features usually go through
+an [RFC process]. Therefore, before opening a feature request issue, create a
+Pre-RFC thread on the [internals][irlo] forum to get preliminary feedback.
+
+Implementing a feature as a [custom subcommand][subcommands] is encouraged,
+since it helps demonstrate demand for the functionality and is a great way to
+deliver a working solution faster, as it can iterate outside of Cargo's
+release cadence.
+
+See the [unstable chapter] for how new major features are typically
+implemented.
+
+[unstable chapter]: unstable.md
+
+## Bots and infrastructure
+
+The Cargo project uses several bots:
+
+* [GitHub Actions] are used to automatically run all tests for each PR.
+* [triagebot] automatically assigns reviewers for PRs, see [Assignment] for
+ how to configure.
+* [bors] is used to merge PRs. See [The merging process].
+* [triagebot] is used for assigning issues to non-members, see [Issue
+ assignment](#issue-assignment).
+* [rfcbot] is used for making asynchronous decisions by team members.
+
+[bors]: https://buildbot2.rust-lang.org/homu/
+[The merging process]: working-on-cargo.md#the-merging-process
+[GitHub Actions]: https://github.com/features/actions
+[triagebot]: https://github.com/rust-lang/triagebot/wiki
+[rfcbot]: https://github.com/rust-lang/rfcbot-rs
+[Assignment]: https://github.com/rust-lang/triagebot/wiki/Assignment
+
+## Issue assignment
+
+Normally, if you plan to work on an issue that has been marked with one of the
+`E-` tags or [Feature accepted], it is sufficient just to leave a comment that
+you are working on it. We also have a bot that allows you to formally "claim"
+an issue by entering the text `@rustbot claim` in a comment. See the
+[Assignment] docs on how this works.
+
+
+[Assignment]: https://github.com/rust-lang/triagebot/wiki/Assignment
+[team]: https://www.rust-lang.org/governance/teams/dev-tools#cargo
+[Zulip]: https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo
+[issue-feature-request]: https://github.com/rust-lang/cargo/labels/C-feature-request
+[Feature accepted]: https://github.com/rust-lang/cargo/labels/Feature%20accepted
+[design principles chapter]: ../design.md
+[RFC process]: https://github.com/rust-lang/rfcs/
+[irlo]: https://internals.rust-lang.org/
+[subcommands]: https://doc.rust-lang.org/cargo/reference/external-tools.html#custom-subcommands
diff --git a/src/tools/cargo/src/doc/contrib/src/process/release.md b/src/tools/cargo/src/doc/contrib/src/process/release.md
new file mode 100644
index 000000000..f0de267c8
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/process/release.md
@@ -0,0 +1,164 @@
+# Release process
+
+Cargo is released with `rustc` using a ["train model"][choochoo]. After a
+change lands in Cargo's master branch, it will be synced with the
+[rust-lang/rust] repository by a Cargo team member, which happens about once a
+week. If there are complications, it can take longer. After it is synced and
+merged, the changes will appear in the next nightly release, which is usually
+published around 00:30 UTC.
+
+After changes are in the nightly release, they will make their way to the
+stable release anywhere from 6 to 12 weeks later, depending on when during the
+cycle it landed.
+
+The current release schedule is posted on the [Rust Forge]. See the [release
+process] for more details on how Rust's releases are created. Rust releases
+are managed by the [Release team].
+
+[Rust Forge]: https://forge.rust-lang.org/
+
+## Build process
+
+The build process for Cargo is handled as part of building Rust. Every PR on
+the [rust-lang/rust] repository creates a full collection of release artifacts
+for every platform. The code for this is in the [`dist` bootstrap module].
+Every night at 00:00 UTC, the artifacts from the most recently merged PR are
+promoted to the nightly release channel. A similar process happens for beta
+and stable releases.
+
+[`dist` bootstrap module]: https://github.com/rust-lang/rust/blob/master/src/bootstrap/dist.rs
+
+## Submodule updates
+
+Cargo is tracked in the [rust-lang/rust] repository using a [git submodule].
+It is updated manually about once a week by a Cargo team member.
+However, anyone is welcome to update it as needed.
+
+[@ehuss] has a tool called [subup](https://github.com/ehuss/subup) to automate the process of updating the submodule, updating the lockfile, running tests, and creating a PR.
+Running the tests ahead-of-time helps avoid long cycle times waiting for bors if there are any errors.
+Subup will also provide a message to include in the PR with a list of all PRs it covers.
+Posting this in the PR message also helps create reference links on each Cargo PR to the submodule update PR to help track when it gets merged.
+
+The following is an example of the command to run in a local clone of rust-lang/rust to run a certain set of tests of things that are likely to get broken by a Cargo update:
+
+```bash
+subup --up-branch update-cargo \
+ --commit-message "Update cargo" \
+ --test="src/tools/linkchecker tidy \
+ src/tools/cargo \
+ src/tools/rustfmt" \
+ src/tools/cargo
+```
+
+If doing a [beta backport](#beta-backports), the command is similar, but needs to point to the correct branches:
+
+```bash
+subup --up-branch update-beta-cargo \
+ --rust-branch beta \
+ --set-config rust.channel=beta \
+ --commit-message "[beta] Update cargo" \
+ --test="src/tools/linkchecker tidy \
+ src/tools/cargo \
+ src/tools/rustfmt" \
+ rust-1.66.0:src/tools/cargo
+```
+
+[@ehuss]: https://github.com/ehuss/
+[git submodule]: https://git-scm.com/book/en/v2/Git-Tools-Submodules
+
+## Version updates
+
+Shortly after each major release, a Cargo team member will post a PR to update
+Cargo's version in `Cargo.toml`. Cargo's library is permanently unstable, so
+its version number starts with a `0`. The minor version is always 1 greater
+than the Rust release it is a part of, so cargo 0.49.0 is part of the 1.48
+Rust release. The [CHANGELOG] is also usually updated at this time.
+
+Also, any version-specific checks that are no longer needed can be removed.
+For example, some tests are disabled on stable if they require some nightly
+behavior. Once that behavior is available on the new stable release, the
+checks are no longer necessary. (I usually search for the word "nightly" in
+the testsuite directory, and read the comments to see if any of those nightly
+checks can be removed.)
+
+Sometimes Cargo will have a runtime check to probe `rustc` if it supports a
+specific feature. This is usually stored in the [`TargetInfo`] struct. If this
+behavior is now stable, those checks should be removed.
+
+Cargo has several other packages in the [`crates/` directory]. If any of these
+packages have changed, the version should be bumped **before the beta
+release**. It is rare that these get updated. Bumping these as-needed helps
+avoid churning incompatible version numbers. This process should be improved
+in the future!
+
+[@ehuss] has a tool called [cargo-new-release] to automate the process of doing a version bump.
+It runs through several steps:
+1. Creates a branch
+2. Updates the version numbers
+3. Creates a changelog for anything on the master branch that is not part of beta
+4. Creates a changelog for anything on the beta branch
+
+It opens a browser tab for every PR in order to review each change.
+It places each PR in the changelog with its title, but usually every PR should be rewritten to explain the change from the user's perspective.
+Each PR should also be categorized as an Addition, Change, Fix, or Nightly-only change.
+Most PRs are deleted, since they are not relevant to users of Cargo.
+For example, remove all PRs related to Cargo internals, infrastructure, documentation, error changes, refactorings, etc.
+Usually about half of the PRs get removed.
+This process usually takes @ehuss about an hour to finish.
+
+[@ehuss]: https://github.com/ehuss/
+[cargo-new-release]: https://github.com/ehuss/cargo-new-release
+[`crates/` directory]: https://github.com/rust-lang/cargo/tree/master/crates
+
+## Docs publishing
+
+Docs are automatically published during the Rust release process. The nightly
+channel's docs appear at <https://doc.rust-lang.org/nightly/cargo/>. Once
+nightly is promoted to beta, those docs will appear at
+<https://doc.rust-lang.org/beta/cargo/>. Once the stable release is made, it
+will appear on <https://doc.rust-lang.org/cargo/> (which is the "current"
+stable) and the release-specific URL such as
+<https://doc.rust-lang.org/1.46.0/cargo/>.
+
+The code that builds the documentation is located in the [`doc` bootstrap
+module].
+
+[`doc` bootstrap module]: https://github.com/rust-lang/rust/blob/master/src/bootstrap/doc.rs
+
+## crates.io publishing
+
+Cargo's library is published to [crates.io] as part of the stable release
+process. This is handled by the [Release team] as part of their process. There
+is a [`publish.py` script] that in theory should help with this process. The
+test and build tool crates aren't published.
+
+[`publish.py` script]: https://github.com/rust-lang/cargo/blob/master/publish.py
+
+## Beta backports
+
+If there is a regression or major problem detected during the beta phase, it
+may be necessary to backport a fix to beta. The process is documented in the
+[Beta Backporting] page.
+
+[Beta Backporting]: https://forge.rust-lang.org/release/beta-backporting.html
+
+## Stable backports
+
+In (hopefully!) very rare cases, a major regression or problem may be reported
+after the stable release. Decisions about this are usually coordinated between
+the [Release team] and the Cargo team. There is usually a high bar for making
+a stable patch release, and the decision may be influenced by whether or not
+there are other changes that need a new stable release.
+
+The process here is similar to the beta-backporting process. The
+[rust-lang/cargo] branch is the same as beta (`rust-1.XX.0`). The
+[rust-lang/rust] branch is called `stable`.
+
+[choochoo]: https://doc.rust-lang.org/book/appendix-07-nightly-rust.html
+[rust-lang/rust]: https://github.com/rust-lang/rust/
+[rust-lang/cargo]: https://github.com/rust-lang/cargo/
+[CHANGELOG]: https://github.com/rust-lang/cargo/blob/master/CHANGELOG.md
+[release process]: https://forge.rust-lang.org/release/process.html
+[`TargetInfo`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/build_context/target_info.rs
+[crates.io]: https://crates.io/
+[release team]: https://www.rust-lang.org/governance/teams/operations#release
diff --git a/src/tools/cargo/src/doc/contrib/src/process/unstable.md b/src/tools/cargo/src/doc/contrib/src/process/unstable.md
new file mode 100644
index 000000000..d59b9aa34
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/process/unstable.md
@@ -0,0 +1,105 @@
+# Unstable features
+
+Most new features should go through the unstable process. This means that the
+feature will only be usable on the nightly channel, and requires a specific
+opt-in by the user. Small changes can skip this process, but please consult
+with the Cargo team first.
+
+## Unstable feature opt-in
+
+Features that require behavior changes or new syntax in `Cargo.toml` need a
+`cargo-features` value placed at the top of `Cargo.toml` to enable them. The
+process for adding a new feature is described in the [`features` module].
+Code that implements the feature will need to manually check that the feature
+is enabled for the current manifest.
+
+Features that add new command-line flags, config options, or environment
+variables need `-Z` flags to enable them. The [`features` module] also
+describes how to add these. New flags should use the `fail_if_stable_opt`
+method to check if the `-Z unstable-options` flag has been passed.
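+
+As a rough illustration (the feature name `my-feature` and the `--my-new-flag`
+option below are hypothetical placeholders, not real Cargo flags), a user on
+nightly would opt in like this:
+
+```sh
+# Hypothetical names; substitute the real feature and flag.
+cargo +nightly build -Z my-feature
+cargo +nightly build -Z unstable-options --my-new-flag
+```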
+
+## Unstable documentation
+
+Every unstable feature should have a section added to the [unstable chapter]
+describing how to use the feature.
+
+[unstable chapter]: https://github.com/rust-lang/cargo/blob/master/src/doc/src/reference/unstable.md
+
+## Tracking issues
+
+Each unstable feature should get a [tracking issue]. These issues are
+typically created when a PR is close to being merged, or soon after it is
+merged. Use the [tracking issue template] when creating a tracking issue.
+
+Larger features should also get a new label in the issue tracker so that when
+issues are filed, they can be easily tied together.
+
+[tracking issue]: https://github.com/rust-lang/cargo/labels/C-tracking-issue
+[tracking issue template]: https://github.com/rust-lang/cargo/issues/new?labels=C-tracking-issue&template=tracking_issue.md
+
+## Pre-Stabilization
+
+Once an unstable feature is "complete", the search for users to test
+and give feedback begins. Testing notes should be written up to give users an
+idea of how to test the new feature. An example is the
+[workspace inheritance testing notes] for workspace inheritance. Once testing
+notes have been written up, you should make posts in various Rust communities
+([rust subreddit], [users], [internals], etc.). Example posts made for workspace
+inheritance: [reddit post], [users post], [internals post]. The unstable feature
+should also be added to [This Week in Rust]. This should be done by adding the
+label `call-for-testing` to the RFC for the feature and making a comment with a
+link to the testing notes and the tracking issue (as needed). If there is not an
+RFC, a pull request should be made to the [TWiR repo] adding the feature to the
+`Call for Testing` section ([example]).
+
+[workspace inheritance testing notes]: https://github.com/rust-lang/cargo/blob/6d6dd9d9be9c91390da620adf43581619c2fa90e/src/doc/src/reference/unstable.md#testing-notes
+[rust subreddit]: https://www.reddit.com/r/rust/
+[users]: https://users.rust-lang.org/
+[internals]: https://internals.rust-lang.org/
+[reddit post]: https://www.reddit.com/r/rust/comments/uo8zeh/help_test_workspace_inheritance_in_preparation/
+[users post]: https://users.rust-lang.org/t/help-test-workspace-inheritance-in-preparation-for-stablization/75582
+[internals post]: https://internals.rust-lang.org/t/help-test-workspace-inheritance-in-preparation-for-stablization/16618
+[This Week in Rust]: https://this-week-in-rust.org/
+[TWiR repo]: https://github.com/rust-lang/this-week-in-rust
+[example]: https://github.com/rust-lang/this-week-in-rust/pull/3256
+
+## Stabilization
+
+After some period of time, typically measured in months, the feature can be
+considered to be stabilized. The feature should not have any significant known
+bugs or issues, and any design concerns should be resolved.
+
+The stabilization process depends on the kind of feature. For smaller
+features, you can leave a comment on the tracking issue expressing interest in
+stabilizing it. It can usually help to indicate that the feature has received
+some real-world testing, and that there is demand for broad use.
+
+For larger features that have not gone through the [RFC process], an RFC to
+call for stabilization might be warranted. This gives the community a final
+chance to provide feedback about the proposed design.
+
+For a small feature, or one that has already gone through the RFC process, a
+Cargo Team member may decide to call for a "final comment period" using
+[rfcbot]. This is a public signal that a major change is being made, and gives
+the Cargo Team members an opportunity to confirm or block the change. This
+process can take a few days or weeks, or longer if a concern is raised.
+
+Once the stabilization has been approved, the person who called for
+stabilization should prepare a PR to stabilize the feature. This PR should:
+
+* Flip the feature to stable in the [`features` module].
+* Remove any unstable checks that aren't automatically handled by the feature
+ system.
+* Move the documentation from the [unstable chapter] into the appropriate
+ places in the Cargo book and man pages.
+* Remove the `-Z` flags and help message if applicable.
+* Update all tests to remove nightly checks.
+* Tag the PR with [relnotes] label if it seems important enough to highlight
+ in the [Rust release notes].
+
+[`features` module]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/features.rs
+[RFC process]: https://github.com/rust-lang/rfcs/
+[rfcbot]: https://github.com/rust-lang/rfcbot-rs
+[Rust release notes]: https://github.com/rust-lang/rust/blob/master/RELEASES.md
+[relnotes]: https://github.com/rust-lang/cargo/issues?q=label%3Arelnotes
diff --git a/src/tools/cargo/src/doc/contrib/src/process/working-on-cargo.md b/src/tools/cargo/src/doc/contrib/src/process/working-on-cargo.md
new file mode 100644
index 000000000..e90bb8588
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/process/working-on-cargo.md
@@ -0,0 +1,172 @@
+# Working on Cargo
+
+This chapter gives an overview of how to build Cargo, make a change, and
+submit a Pull Request.
+
+0. [Before hacking on Cargo.](#before-hacking-on-cargo)
+1. [Check out the Cargo source.](#checking-out-the-source)
+2. [Building Cargo.](#building-cargo)
+3. [Making a change.](#making-a-change)
+4. [Writing and running tests.](../tests/index.md)
+5. [Submitting a Pull Request.](#submitting-a-pull-request)
+6. [The merging process.](#the-merging-process)
+
+## Before hacking on Cargo
+
+We encourage people to discuss their design before hacking on code. This gives
+the Cargo team a chance to learn more about your idea. Sometimes after a
+discussion, we even find a way to solve the problem without coding! Typically,
+you [file an issue] or start a thread on the [internals forum] before
+submitting a pull request. Please read [the process] for how features and bugs
+are managed in Cargo.
+
+## Checking out the source
+
+We use the "fork and pull" model [described here][development-models], where
+contributors push changes to their personal fork and [create pull requests] to
+bring those changes into the source repository. Cargo uses [git] and [GitHub]
+for all development.
+
+1. Fork the [`rust-lang/cargo`] repository on GitHub to your personal account
+ (see [GitHub docs][how-to-fork]).
+2. Clone your fork to your local machine using `git clone` (see [GitHub
+   docs][how-to-clone]).
+3. It is recommended to start a new branch for the change you want to make.
+ All Pull Requests are made against the master branch.
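+
+Putting those steps together, a minimal sketch (`<your-username>` and the
+branch name are placeholders):
+
+```sh
+# Clone your fork and start a branch for your change.
+git clone https://github.com/<your-username>/cargo.git
+cd cargo
+git checkout -b my-change
+```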
+
+## Building Cargo
+
+Cargo is built by...running `cargo`! There are a few prerequisites that you
+need to have installed:
+
+* `rustc` and `cargo` need to be installed. Cargo is expected to build and
+ test with the current stable, beta, and nightly releases. It is your choice
+ which to use. Nightly is recommended, since some nightly-specific tests are
+ disabled when using the stable release. But using stable is fine if you
+ aren't working on those.
+* A C compiler (typically gcc, clang, or MSVC).
+* [git]
+* Unix:
+ * pkg-config
+ * OpenSSL (`libssl-dev` on Ubuntu, `openssl-devel` on Fedora)
+* macOS:
+ * OpenSSL ([homebrew] is recommended to install the `openssl` package)
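+
+For example, on Ubuntu the non-Rust prerequisites can be installed roughly
+like this (the package names are an assumption for Debian-based
+distributions; other platforms differ):
+
+```sh
+# Debian/Ubuntu example (assumed package names); install Rust itself via rustup.
+sudo apt-get install build-essential pkg-config libssl-dev git
+rustup toolchain install nightly   # or stable/beta
+cargo build
+```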
+
+If you can successfully run `cargo build`, you should be good to go!
+
+[homebrew]: https://brew.sh/
+
+## Running Cargo
+
+You can use `cargo run` to run cargo itself, or you can use the path directly
+to the cargo binary, such as `target/debug/cargo`.
+
+If you are using [`rustup`], beware that running the binary directly can cause
+issues with rustup overrides. Usually, when `cargo` is executed as part of
+rustup, the toolchain becomes sticky (via an environment variable), and all
+calls to `rustc` will use the same toolchain. But when `cargo` is not run via
+rustup, the toolchain may change based on the directory. Since Cargo changes
+the directory for each compilation, this can cause different calls to `rustc`
+to use different versions. There are a few workarounds:
+
+* Don't use rustup overrides.
+* Use `rustup run target/debug/cargo` to execute `cargo`.
+* Set the `RUSTC` environment variable to a specific `rustc` executable (not
+ the rustup wrapper).
+* Create a [custom toolchain]. This is a bit of a hack, but you can create a
+  directory in the rustup `toolchains` directory, and create symlinks for all
+  the files and directories in there to your toolchain of choice (such as
+  nightly), except for the `cargo` binary, which you can symlink to your
+  `target/debug/cargo` binary in your project directory (a sketch of this is
+  shown below).
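+
+The sketch below illustrates that last workaround; the toolchain name
+`cargo-dev`, the paths, and the use of `rustup toolchain link` are
+assumptions to adapt to your setup:
+
+```sh
+# Build a "cargo-dev" toolchain that is nightly everywhere except `cargo`.
+NIGHTLY="$(rustc +nightly --print sysroot)"
+DEST="$HOME/cargo-dev-toolchain"
+mkdir -p "$DEST/bin"
+# Link everything from nightly except the bin directory...
+for entry in "$NIGHTLY"/*; do
+    [ "$(basename "$entry")" = bin ] || ln -s "$entry" "$DEST/"
+done
+# ...and everything in bin except the cargo binary.
+for tool in "$NIGHTLY"/bin/*; do
+    [ "$(basename "$tool")" = cargo ] || ln -s "$tool" "$DEST/bin/"
+done
+ln -s /path/to/cargo/target/debug/cargo "$DEST/bin/cargo"
+rustup toolchain link cargo-dev "$DEST"
+cargo +cargo-dev build   # now uses your development cargo
+```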
+
+*Normally*, all development is done by running Cargo's test suite, so running
+it directly usually isn't required. But it can be useful for testing Cargo on
+more complex projects.
+
+[`rustup`]: https://rust-lang.github.io/rustup/
+[custom toolchain]: https://rust-lang.github.io/rustup/concepts/toolchains.html#custom-toolchains
+
+## Making a change
+
+Some guidelines on working on a change:
+
+* All code changes are expected to comply with the formatting suggested by
+ `rustfmt`. You can use `rustup component add rustfmt` to install `rustfmt`
+ and use `cargo fmt` to automatically format your code.
+* Include tests that cover all non-trivial code. See the [Testing chapter] for
+ more about writing and running tests.
+* All code should be warning-free. This is checked during tests.
+
+## Submitting a Pull Request
+
+After you have committed your work and pushed it to GitHub, you can
+open a Pull Request:
+
+* Push your commits to GitHub and create a pull request against Cargo's
+ `master` branch.
+* Include a clear description of what the change is and why it is being made.
+* Use [GitHub's keywords] in the description to automatically link to an issue
+ if the PR resolves the issue. For example `Closes #1234` will link issue
+ #1234 to the PR. When the PR is merged, GitHub will automatically close the
+ issue.
+
+[`@rustbot`] will automatically assign a reviewer for the PR. It
+may take at least a few days for someone to respond. If you don't get a
+response in over a week, feel free to ping the assigned reviewer.
+
+When your PR is submitted, GitHub automatically runs all tests. The GitHub
+interface will show a green checkmark if it passes, or a red X if it fails.
+There are links to the logs on the PR page to diagnose any issues. The tests
+typically finish in under 30 minutes.
+
+The reviewer might point out changes deemed necessary. Large or tricky changes
+may require several passes of review and changes.
+
+### Status labeling
+
+PRs will get marked with [labels] like [`S-waiting-on-review`] or [`S-waiting-on-author`] to indicate their status.
+The [`@rustbot`] bot can be used by anyone to adjust the labels.
+If a PR gets marked as `S-waiting-on-author`, and you have pushed new changes that you would like to be reviewed, you can write a comment on the PR with the text `@rustbot ready`.
+The bot will switch the labels on the PR.
+
+More information about these commands can be found at the [shortcuts documentation].
+
+[labels]: https://github.com/rust-lang/cargo/labels
+[`S-waiting-on-review`]: https://github.com/rust-lang/cargo/labels/S-waiting-on-review
+[`S-waiting-on-author`]: https://github.com/rust-lang/cargo/labels/S-waiting-on-author
+[`@rustbot`]: https://github.com/rustbot
+[shortcuts documentation]: https://github.com/rust-lang/triagebot/wiki/Shortcuts
+
+## The merging process
+
+After a reviewer has approved your PR, they will issue a command to the [bors]
+bot (also known as "Homu", the software that powers [`@bors`]). Bors will
+create a temporary branch with your PR, and run all tests. Only if all tests
+pass will it merge the PR to master. If it fails, the bot will leave a comment
+on the PR. This system ensures that the master branch is always in a good
+state, and that merges are processed one at a time. The [Homu queue
+dashboard][homu-cargo] shows the current merge queue. Cargo's queue is rarely
+busy, but a busy project like the [rust repo][homu-rust] is constantly full.
+
+Assuming everything works, congratulations! It may take at least a week for
+the changes to arrive on the nightly channel. See the [release chapter] for
+more information on how Cargo releases are made.
+
+
+[development-models]: https://help.github.com/articles/about-collaborative-development-models/
+[create pull requests]: https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request
+[how-to-fork]: https://docs.github.com/en/github/getting-started-with-github/fork-a-repo
+[`rust-lang/cargo`]: https://github.com/rust-lang/cargo/
+[git]: https://git-scm.com/
+[GitHub]: https://github.com/
+[how-to-clone]: https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/cloning-a-repository
+[Testing chapter]: ../tests/index.md
+[GitHub's keywords]: https://docs.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue
+[bors]: https://buildbot2.rust-lang.org/homu/
+[`@bors`]: https://github.com/bors
+[homu-cargo]: https://buildbot2.rust-lang.org/homu/queue/cargo
+[homu-rust]: https://buildbot2.rust-lang.org/homu/queue/rust
+[release chapter]: release.md
+[internals forum]: https://internals.rust-lang.org/c/tools-and-infrastructure/cargo
+[file an issue]: https://github.com/rust-lang/cargo/issues
+[the process]: index.md
diff --git a/src/tools/cargo/src/doc/contrib/src/tests/crater.md b/src/tools/cargo/src/doc/contrib/src/tests/crater.md
new file mode 100644
index 000000000..2220cb0c8
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/tests/crater.md
@@ -0,0 +1,122 @@
+# Crater
+
+[Crater](https://github.com/rust-lang/crater) is a tool for compiling and running tests for _every_ crate on [crates.io](https://crates.io) (and a few on GitHub).
+It is mainly used for checking the extent of breakage when implementing potentially breaking changes and ensuring lack of breakage by running beta vs stable compiler versions.
+
+Essentially it runs some `cargo` command on every crate twice: once against the "start" toolchain and again against the "end" toolchain.
+For example, "start" could be the stable release, and "end" could be beta.
+If it passes in "start" but fails with "end", then that is reported as a regression.
+
+There is a bot called [craterbot] which is used to run crater on hardware managed by the rust-lang organization.
+
+Crater is run by the release team during the beta cycle.
+If there are any regressions that look like they are caused by Cargo, they should contact the Cargo team to decide how to handle it.
+
+## Running crater
+
+If you have a change that you want to test before the beta release, or you want to test behavior that is not normally exercised by crater, you can do a manual run of crater.
+Roughly the steps are:
+
+1. Create a branch with your changes.
+
+ In your clone of cargo, make the changes to incorporate whatever new thing you want to test and push it to a branch on your fork on GitHub.
+
+2. Get a clone of <https://github.com/rust-lang/rust>
+
+3. Create a branch in your rust-lang/rust clone to add your changes.
+
+4. Change the `src/tools/cargo` submodule to point to your new branch.
+
+ Modify `.gitmodules` to point to your clone and branch of cargo with the changes you want to test.
+ For example:
+
+ ```bash
+ git submodule set-url src/tools/cargo https://github.com/ehuss/cargo.git
+ git submodule set-branch --branch my-awesome-feature src/tools/cargo
+ git submodule update --remote src/tools/cargo
+ git add .gitmodules src/tools/cargo
+ git commit
+ ```
+
+5. Create a PR on rust-lang/rust.
+
+ Push your submodule changes to GitHub and make a PR.
+ Start the PR title with `[EXPERIMENT]` to make it clear what the PR is for and assign yourself or @ghost.
+
+6. Make a "try" build.
+
+ A "try" build creates a full release of x86_64-unknown-linux-gnu and stores it on rust-lang servers.
+ This can be done with a comment `@bors try` on the PR (all Cargo team members should have permission to do this).
+
+7. Run crater.
+
+ Look at the [craterbot] docs to determine the command that you want to run.
+ There are different modes like `check-only`, `build-and-test`, `rustdoc`, etc.
+
+ You can also choose how many crates to run against.
+   If you are uncertain whether your cargo changes will work correctly, it might be a good idea to run against `top-100` first to check its behavior.
+ This will run much faster.
+ You can do a full run afterwards.
+
+ After the try build finishes (which should take a couple hours), ask someone to make a crater run.
+ The Cargo team does not have that permission, so just ask someone on Zulip.
+ They will need to write a comment to `@craterbot` with the command that you have specified.
+
+8. Wait.
+
+   Crater can take anywhere from a few hours to a few weeks to run, depending on how long the [craterbot queue](https://crater.rust-lang.org/) is, which mode you picked, and the priority of your job.
+ When the crater run finishes, craterbot will post a comment to the PR with a link to a report of the results.
+
+9. Investigate the report.
+
+ Look through the report which contains links to build logs for any regressions or errors.
+
+10. Close the PR.
+
+ Whenever you are done doing crater runs, close your PR.
+
+[craterbot]: https://github.com/rust-lang/crater/blob/master/docs/bot-usage.md
+
+
+## Advanced crater modes
+
+Crater only has a few built-in modes, such as running `cargo check` or `cargo test`.
+You can pass extra flags with `+cargoflags`.
+
+More complex tests can be accomplished by customizing Cargo to perform whatever actions you want.
+Since crater essentially runs `cargo check`, you can modify the `check` command to perform whichever actions you want.
+For example, to test `cargo fix --edition`, [this commit](https://github.com/ehuss/cargo/commit/6901690a6f8d519efb4fabf48c1c2b94af0c3bd8) intercepted `cargo check` and modified it to instead:
+
+1. Only run on crates with the 2018 edition.
+2. Run `cargo fix --edition`.
+3. Modify the manifest to switch to the 2021 edition.
+4. Run `cargo check` to verify.
+
+If you need to compare the before and after of a command that is not part of crater's built-in modes, that can be more difficult.
+Two possible options:
+
+* Work with the infra team to add a new mode.
+* Build two custom try builds.
+ Each one should modify the `cargo check` command as described above.
+ The "start" build should perform whichever action you want with an otherwise unmodified cargo.
+ The "end" build should perform whichever action you want with your modified cargo.
+ Then, in the `@craterbot` command, specify the start and end hashes of the two try builds.
+
+## Limitations
+
+There are some limitations of crater to consider when running Cargo:
+
+* A crater run without regressions is not a green light to move forward.
+ * A large portion of Rust code is not tested, such as closed-source projects or things otherwise not collected by crater.
+ * Many crates can't build in crater's environment or are otherwise broken.
+ * Some crates have flaky tests.
+* Crater runs in an isolated environment.
+ * It only runs on Linux x86-64.
+ * It does not have network access.
+ * The crate source is in a read-only mount.
+* Crater does several steps before running the test (using its own copy of the stable toolchain):
+ * It generates a lockfile using `generate-lockfile` and includes `-Zno-index-update` to prevent index updates (which makes it run much faster).
+ * All dependencies are downloaded ahead-of-time with `cargo fetch`.
+* The built-in modes pass several flags to cargo such as `--frozen` or `--message-format=json`.
+ It will sometimes use `--all-targets` and sometimes not.
+ Check the [crater source](https://github.com/rust-lang/crater/blob/master/src/runner/test.rs) for more details on how it works.
diff --git a/src/tools/cargo/src/doc/contrib/src/tests/index.md b/src/tools/cargo/src/doc/contrib/src/tests/index.md
new file mode 100644
index 000000000..dac047684
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/tests/index.md
@@ -0,0 +1,20 @@
+# Tests
+
+Cargo has an extensive test suite. Most of it is implemented as integration
+tests in the [`testsuite`] directory. There are several other tests:
+
+* Unit tests are scattered throughout.
+* The dependency resolver has its own set of tests in the [`resolver-tests`]
+ directory.
+* All of the packages in the [`crates`] directory have their own set of tests.
+* The [`build-std`] test is for the [build-std feature]. It is separate since
+ it has some special requirements.
+* Documentation has a variety of tests, such as link validation, and the
+ [SemVer chapter validity checks].
+
+[`testsuite`]: https://github.com/rust-lang/cargo/tree/master/tests/testsuite/
+[`resolver-tests`]: https://github.com/rust-lang/cargo/tree/master/crates/resolver-tests
+[`crates`]: https://github.com/rust-lang/cargo/tree/master/crates
+[`build-std`]: https://github.com/rust-lang/cargo/blob/master/tests/build-std/main.rs
+[build-std feature]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std
+[SemVer chapter validity checks]: https://github.com/rust-lang/cargo/tree/master/src/doc/semver-check
diff --git a/src/tools/cargo/src/doc/contrib/src/tests/profiling.md b/src/tools/cargo/src/doc/contrib/src/tests/profiling.md
new file mode 100644
index 000000000..1cc980ca3
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/tests/profiling.md
@@ -0,0 +1,40 @@
+# Benchmarking and Profiling
+
+## Internal profiler
+
+Cargo has a basic, hierarchical profiler built-in. The environment variable
+`CARGO_PROFILE` can be set to an integer which specifies how deep in the
+profile stack to print results for.
+
+```sh
+# Output first three levels of profiling info
+CARGO_PROFILE=3 cargo generate-lockfile
+```
+
+## Benchmarking
+
+### Benchsuite
+
+Head over to the [`benches`
+directory](https://github.com/rust-lang/cargo/tree/master/benches) for more
+information about the benchmarking suite.
+
+### Informal benchmarking
+
+The overhead for starting a build should be kept as low as possible
+(preferably, well under 0.5 seconds on most projects and systems). Currently,
+the primary parts that affect this are:
+
+* Running the resolver.
+* Querying the index.
+* Checking git dependencies.
+* Scanning the local project.
+* Building the unit dependency graph.
+
+One way to test this is to use [hyperfine]. This is a tool that can be used to
+measure the difference between different commands and settings. Usually this
+is done by measuring the time it takes for `cargo build` to finish in a large
+project where the build is fresh (no actual compilation is performed). Just
+run `cargo build` once before using hyperfine.
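+
+For example, a rough comparison of the installed `cargo` against a locally
+built one might look like this (the path to the development binary is a
+placeholder):
+
+```sh
+# Warm the build once so only the startup overhead is measured.
+cargo build
+# Compare a fresh (no-op) build with both binaries.
+hyperfine --warmup 2 \
+    'cargo build' \
+    '/path/to/my/cargo/target/debug/cargo build'
+```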
+
+[hyperfine]: https://github.com/sharkdp/hyperfine
diff --git a/src/tools/cargo/src/doc/contrib/src/tests/running.md b/src/tools/cargo/src/doc/contrib/src/tests/running.md
new file mode 100644
index 000000000..e91702f96
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/tests/running.md
@@ -0,0 +1,78 @@
+# Running Tests
+
+Using `cargo test` is usually sufficient for running the full test suite. This
+can take a few minutes, so you may want to use more targeted flags to pick the
+specific test you want to run, such as `cargo test --test testsuite
+-- check::check_success`.
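+
+For example (the module and test names below come from the existing suite;
+any libtest filter works the same way):
+
+```sh
+# Run the full suite.
+cargo test
+# Run every test in the `check` module of the integration tests.
+cargo test --test testsuite -- check::
+# Run a single test by name.
+cargo test --test testsuite -- check::check_success
+```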
+
+## Running nightly tests
+
+Some tests only run on the nightly toolchain, and will be ignored on other
+channels. It is recommended that you run tests with both nightly and stable to
+ensure everything is working as expected.
+
+Some of the nightly tests require the `rustc-dev` and `llvm-tools-preview`
+rustup components installed. These components include the compiler as a
+library. This may already be installed with your nightly toolchain, but if it
+isn't, run `rustup component add rustc-dev llvm-tools-preview
+--toolchain=nightly`.
+
+## Running cross tests
+
+Some tests exercise cross compiling to a different target. This will require
+you to install the appropriate target. This typically is the 32-bit target of
+your host platform. For example, if your host is a 64-bit
+`x86_64-unknown-linux-gnu`, then you should install the 32-bit target with
+`rustup target add i686-unknown-linux-gnu`. If you don't have the alternate
+target installed, there should be an error message telling you what to do. You
+may also need to install additional tools for the target. For example, on Ubuntu
+you should install the `gcc-multilib` package.
+
+If you can't install an alternate target, you can set the
+`CFG_DISABLE_CROSS_TESTS=1` environment variable to disable these tests. The
+Windows cross tests only support the MSVC toolchain.
+
+## Running build-std tests
+
+The `build-std` tests are disabled by default, but you can run them by setting
+the `CARGO_RUN_BUILD_STD_TESTS=1` environment variable and running `cargo test
+--test build-std`. This requires the nightly channel, and also requires the
+`rust-src` component installed with `rustup component add rust-src
+--toolchain=nightly`.
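+
+Putting that together, one possible invocation (assuming the nightly
+toolchain is already installed) is:
+
+```sh
+rustup component add rust-src --toolchain=nightly
+CARGO_RUN_BUILD_STD_TESTS=1 cargo +nightly test --test build-std
+```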
+
+## Running with `gitoxide` as default git backend in tests
+
+By default, the `git2` backend is used for most git operations. As tests need to explicitly
+opt in to use nightly features and feature flags, adjusting all tests to run with nightly
+and `-Zgitoxide` is infeasible.
+
+This is why the private environment variable named `__CARGO_USE_GITOXIDE_INSTEAD_OF_GIT2` can be
+set while running tests to enable the `-Zgitoxide` flag implicitly, allowing `gitoxide` to be
+tested across the entire cargo test suite.
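+
+For example, a sketch of a full-suite run with the `gitoxide` backend
+(assuming that setting the variable to `1` is enough to turn it on):
+
+```sh
+__CARGO_USE_GITOXIDE_INSTEAD_OF_GIT2=1 cargo test
+```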
+
+## Running public network tests
+
+Some (very rare) tests involve connecting to the public internet.
+These tests are disabled by default,
+but you can run them by setting the `CARGO_PUBLIC_NETWORK_TESTS=1` environment variable.
+Additionally our CI suite has a smoke test for fetching dependencies.
+For most contributors, you will never need to bother with this.
+
+## Running container tests
+
+Tests marked with `container_test` involve running Docker to test more complex configurations.
+These tests are disabled by default,
+but you can run them by setting the `CARGO_CONTAINER_TESTS=1` environment variable.
+You will need to have Docker installed and running to use these.
+
+> Note: Container tests mostly do not work on Windows.
+> * The SSH tests require ssh-agent, but the two versions of ssh-agent
+> on Windows are not suitable for testing.
+> * The Microsoft version of ssh-agent runs as a global service, and can't be isolated per test.
+> * The mingw/cygwin one can't be accessed from a Windows executable like cargo.
+> * Pageant similarly does not seem to have a way to isolate it (and I'm not certain it can be driven completely from the command-line).
+>
+> The tests also can't run on Windows CI because the Docker that is preinstalled doesn't support Linux containers, and setting up Windows containers is a pain.
+>
+> macOS should work with Docker installed and running,
+> but unfortunately the tests are not run on CI because Docker is not available.
diff --git a/src/tools/cargo/src/doc/contrib/src/tests/writing.md b/src/tools/cargo/src/doc/contrib/src/tests/writing.md
new file mode 100644
index 000000000..9c56b783d
--- /dev/null
+++ b/src/tools/cargo/src/doc/contrib/src/tests/writing.md
@@ -0,0 +1,341 @@
+# Writing Tests
+
+The following focuses on writing an integration test. However, writing unit
+tests is also encouraged!
+
+## Testsuite
+
+Cargo has a wide variety of integration tests that execute the `cargo` binary
+and verify its behavior, located in the [`testsuite`] directory. The
+[`support`] crate and [`snapbox`] contain many helpers to make this process easy.
+
+There are two styles of tests that can roughly be categorized as
+- functional tests
+ - The fixture is programmatically defined
+ - The assertions are regular string comparisons
+ - Easier to share in an issue as a code block is completely self-contained
+  - More resilient to insignificant changes, though ui tests are easy to update when a change does occur
+- ui tests
+ - The fixture is file-based
+ - The assertions use file-backed snapshots that can be updated with an env variable
+ - Easier to review the expected behavior of the command as more details are included
+ - Easier to get up and running from an existing project
+ - Easier to reason about as everything is just files in the repo
+
+These tests typically work by creating a temporary "project" with a
+`Cargo.toml` file, executing the `cargo` binary process, and checking the
+stdout and stderr output against the expected output.
+
+### Functional Tests
+
+Generally, a functional test will be placed in `tests/testsuite/<command>.rs` and will look roughly like:
+```rust,ignore
+#[cargo_test]
+fn <description>() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("hi!"); }"#)
+ .build();
+
+ p.cargo("run --bin foo")
+ .with_stderr(
+            "\
+[COMPILING] foo [..]
+[FINISHED] [..]
+[RUNNING] `target/debug/foo`
+",
+ )
+ .with_stdout("hi!")
+ .run();
+}
+```
+
+The [`#[cargo_test]` attribute](#cargo_test-attribute) is used in place of `#[test]` to inject some setup code.
+
+[`ProjectBuilder`] via `project()`:
+- Each project is in a separate directory in the sandbox
+- If you do not specify a `Cargo.toml` manifest using `file()`, one is
+ automatically created with a project name of `foo` using `basic_manifest()`.
+
+[`Execs`] via `p.cargo(...)`:
+- This executes the command and evaluates different assertions
+ - See [`support::compare`] for an explanation of the string pattern matching.
+ Patterns are used to make it easier to match against the expected output.
+
+#### `#[cargo_test]` attribute
+
+The `#[cargo_test]` attribute injects code which does some setup before starting the test.
+It will create a filesystem "sandbox" under the "cargo integration test" directory for each test, such as `/path/to/cargo/target/tmp/cit/t123/`.
+The sandbox will contain a `home` directory that will be used instead of your normal home directory.
+
+The `#[cargo_test]` attribute takes several options that will affect how the test is generated.
+They are listed in parentheses separated with commas, such as:
+
+```rust,ignore
+#[cargo_test(nightly, reason = "-Zfoo is unstable")]
+```
+
+The options it supports are:
+
+* `nightly` --- This will cause the test to be ignored if not running on the nightly toolchain.
+ This is useful for tests that use unstable options in `rustc` or `rustdoc`.
+ These tests are run in Cargo's CI, but are disabled in rust-lang/rust's CI due to the difficulty of updating both repos simultaneously.
+ A `reason` field is required to explain why it is nightly-only.
+* `build_std_real` --- This is a "real" `-Zbuild-std` test (in the `build_std` integration test).
+  This only runs on nightly, and only if the environment variable `CARGO_RUN_BUILD_STD_TESTS` is set (these tests only run on Linux).
+* `build_std_mock` --- This is a "mock" `-Zbuild-std` test (which uses a mock standard library).
+ This only runs on nightly, and is disabled for windows-gnu.
+* `requires_` --- This indicates a command that must be installed for the test to run.
+  For example, `requires_rustfmt` means the test will only run if the executable `rustfmt` is installed.
+  These tests are *always* run on CI.
+  This is mainly used to avoid requiring contributors to have every dependency installed.
+* `>=1.64` --- This indicates that the test will only run with the given version of `rustc` or newer.
+ This can be used when a new `rustc` feature has been stabilized that the test depends on.
+ If this is specified, a `reason` is required to explain why it is being checked.
+* `public_network_test` --- This test contacts the public internet.
+ These tests are disabled unless the `CARGO_PUBLIC_NETWORK_TESTS` environment variable is set.
+ Use of this should be *extremely rare*, please avoid using it if possible.
+  You should have relatively high confidence that the hosts it contacts are reliable and stable (such as github.com), especially in CI.
+ The tests should be carefully considered for developer security and privacy as well.
+* `container_test` --- This indicates that it is a test that uses Docker.
+ These tests are disabled unless the `CARGO_CONTAINER_TESTS` environment variable is set.
+ This requires that you have Docker installed.
+ The SSH tests also assume that you have OpenSSH installed.
+ These should work on Linux, macOS, and Windows where possible.
+ Unfortunately these tests are not run in CI for macOS or Windows (no Docker on macOS, and Windows does not support Linux images).
+ See [`crates/cargo-test-support/src/containers.rs`](https://github.com/rust-lang/cargo/blob/master/crates/cargo-test-support/src/containers.rs) for more on writing these tests.
+* `ignore_windows="reason"` --- Indicates that the test should be ignored on Windows for the given reason.
+
+#### Testing Nightly Features
+
+If you are testing a Cargo feature that only works on "nightly" Cargo, then
+you need to call `masquerade_as_nightly_cargo` on the process builder and pass
+the name of the feature as the reason, like this:
+
+```rust,ignore
+p.cargo("build").masquerade_as_nightly_cargo(&["print-im-a-teapot"])
+```
+
+If you are testing a feature that only works on *nightly rustc* (such as
+benchmarks), then you should use the `nightly` option of the `cargo_test`
+attribute, like this:
+
+```rust,ignore
+#[cargo_test(nightly, reason = "-Zfoo is unstable")]
+```
+
+This will cause the test to be ignored if not running on the nightly toolchain.
+
+#### Specifying Dependencies
+
+You should not write any tests that use the network such as contacting
+crates.io. Typically, simple path dependencies are the easiest way to add a
+dependency. Example:
+
+```rust,ignore
+let p = project()
+ .file("Cargo.toml", r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [dependencies]
+ bar = {path = "bar"}
+ "#)
+ .file("src/lib.rs", "extern crate bar;")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+```
+
+If you need to test with registry dependencies, see
+[`support::registry::Package`] for creating packages you can depend on.
+
+If you need to test git dependencies, see [`support::git`] to create a git
+dependency.
+
+#### Cross compilation
+
+There are some utilities to help support tests that need to work against a
+target other than the host. See [Running cross
+tests](running.md#running-cross-tests) for an introduction to cross
+compilation tests.
+
+Tests that need to do cross-compilation should include this at the top of the
+test to disable it in scenarios where cross compilation isn't available:
+
+```rust,ignore
+if cargo_test_support::cross_compile::disabled() {
+ return;
+}
+```
+
+The name of the target can be fetched with the [`cross_compile::alternate()`]
+function. The name of the host target can be fetched with
+[`cargo_test_support::rustc_host()`].
+
+The cross-tests need to distinguish between targets which can *build* versus
+those which can actually *run* the resulting executable. Unfortunately, macOS is
+currently unable to run an alternate target (Apple removed 32-bit support a
+long time ago). For building, `x86_64-apple-darwin` will target
+`x86_64-apple-ios` as its alternate. However, the iOS target can only execute
+binaries if the iOS simulator is installed and configured. The simulator is
+not available in CI, so all tests that need to run cross-compiled binaries are
+disabled on CI. If you are running on macOS locally, and have the simulator
+installed, then it should be able to run them.
+
+If the test needs to run the cross-compiled binary, then it should have
+something like this to exit the test before doing so:
+
+```rust,ignore
+if !cargo_test_support::cross_compile::can_run_on_host() {
+ return;
+}
+```
+
+[`cross_compile::alternate()`]: https://github.com/rust-lang/cargo/blob/d58902e22e148426193cf3b8c4449fd3c05c0afd/crates/cargo-test-support/src/cross_compile.rs#L208-L225
+[`cargo_test_support::rustc_host()`]: https://github.com/rust-lang/cargo/blob/d58902e22e148426193cf3b8c4449fd3c05c0afd/crates/cargo-test-support/src/lib.rs#L1137-L1140
+
+### UI Tests
+
+UI Tests are a bit more spread out and generally look like:
+
+`tests/testsuite/<command>/mod.rs`:
+```rust,ignore
+mod <case>;
+```
+
+`tests/testsuite/<command>/<case>/mod.rs`:
+```rust,ignore
+use cargo_test_support::prelude::*;
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::Project;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("run")
+ .arg_line("--bin foo")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
+```
+
+Then populate
+- `tests/testsuite/<command>/<case>/in` with the project's directory structure
+- `tests/testsuite/<command>/<case>/out` with the files you want verified
+- `tests/testsuite/<command>/<case>/stdout.log` with nothing
+- `tests/testsuite/<command>/<case>/stderr.log` with nothing
+
+`#[cargo_test]`:
+- This is used in place of `#[test]`
+- This attribute injects code which does some setup before starting the
+ test, creating a filesystem "sandbox" under the "cargo integration test"
+ directory for each test such as
+  `/path/to/cargo/target/tmp/cit/t123/`
+- The sandbox will contain a `home` directory that will be used instead of your normal home directory
+
+`Project`:
+- The project is copied from a directory in the repo
+- Each project is in a separate directory in the sandbox
+
+[`Command`] via `Command::cargo_ui()`:
+- Set up and run a command.
+
+[`OutputAssert`] via `Command::assert()`:
+- Perform assertions on the result of the [`Command`]
+
+[`Assert`] via `assert_ui()`:
+- Verify the command modified the file system as expected
+
+#### Updating Snapshots
+
+The project, stdout, and stderr snapshots can be updated by running with the
+`SNAPSHOTS=overwrite` environment variable, like:
+```console
+$ SNAPSHOTS=overwrite cargo test
+```
+
+Be sure to check the snapshots to make sure they make sense.
+
+#### Testing Nightly Features
+
+If you are testing a Cargo feature that only works on "nightly" Cargo, then
+you need to call `masquerade_as_nightly_cargo` on the process builder and pass
+the name of the feature as the reason, like this:
+
+```rust,ignore
+ snapbox::cmd::Command::cargo()
+ .masquerade_as_nightly_cargo(&["print-im-a-teapot"])
+```
+
+If you are testing a feature that only works on *nightly rustc* (such as
+benchmarks), then you should use the `nightly` option of the `cargo_test`
+attribute, like this:
+
+```rust,ignore
+#[cargo_test(nightly, reason = "-Zfoo is unstable")]
+```
+
+This will cause the test to be ignored if not running on the nightly toolchain.
+
+### Platform-specific Notes
+
+When checking output, use `/` for paths even on Windows: the actual output
+of `\` on Windows will be replaced with `/`.
+
+Be careful when executing binaries on Windows. You should not rename, delete,
+or overwrite a binary immediately after running it. Under some conditions
+Windows will fail with errors like "directory not empty" or "failed to remove"
+or "access is denied".
+
+## Debugging tests
+
+In some cases, you may need to dig into a test that is not working as you
+expect, or you just generally want to experiment within the sandbox
+environment. The general process is:
+
+1. Build the sandbox for the test you want to investigate. For example:
+
+ `cargo test --test testsuite -- features2::inactivate_targets`.
+2. In another terminal, head into the sandbox directory to inspect the files and run `cargo` directly.
+ 1. The sandbox directories start with `t0` for the first test.
+
+ `cd target/tmp/cit/t0`
+ 2. Set up the environment so that the sandbox configuration takes effect:
+
+ `export CARGO_HOME=$(pwd)/home/.cargo`
+ 3. Most tests create a `foo` project, so head into that:
+
+ `cd foo`
+3. Run whatever cargo command you want. See [Running Cargo] for more details
+ on running the correct `cargo` process. Some examples:
+
+ * `/path/to/my/cargo/target/debug/cargo check`
+ * Using a debugger like `lldb` or `gdb`:
+ 1. `lldb /path/to/my/cargo/target/debug/cargo`
+ 2. Set a breakpoint, for example: `b generate_root_units`
+ 3. Run with arguments: `r check`
+
+[`testsuite`]: https://github.com/rust-lang/cargo/tree/master/tests/testsuite/
+[`ProjectBuilder`]: https://github.com/rust-lang/cargo/blob/d847468768446168b596f721844193afaaf9d3f2/crates/cargo-test-support/src/lib.rs#L196-L202
+[`Execs`]: https://github.com/rust-lang/cargo/blob/d847468768446168b596f721844193afaaf9d3f2/crates/cargo-test-support/src/lib.rs#L531-L550
+[`support`]: https://github.com/rust-lang/cargo/blob/master/crates/cargo-test-support/src/lib.rs
+[`support::compare`]: https://github.com/rust-lang/cargo/blob/master/crates/cargo-test-support/src/compare.rs
+[`support::registry::Package`]: https://github.com/rust-lang/cargo/blob/d847468768446168b596f721844193afaaf9d3f2/crates/cargo-test-support/src/registry.rs#L311-L389
+[`support::git`]: https://github.com/rust-lang/cargo/blob/master/crates/cargo-test-support/src/git.rs
+[Running Cargo]: ../process/working-on-cargo.md#running-cargo
+[`snapbox`]: https://docs.rs/snapbox/latest/snapbox/
+[`Command`]: https://docs.rs/snapbox/latest/snapbox/cmd/struct.Command.html
+[`OutputAssert`]: https://docs.rs/snapbox/latest/snapbox/cmd/struct.OutputAssert.html
+[`Assert`]: https://docs.rs/snapbox/latest/snapbox/struct.Assert.html
diff --git a/src/tools/cargo/src/doc/man/cargo-add.md b/src/tools/cargo/src/doc/man/cargo-add.md
new file mode 100644
index 000000000..c441a82b2
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-add.md
@@ -0,0 +1,176 @@
+# cargo-add(1)
+
+{{*set actionverb="Add"}}
+{{*set nouns="adds"}}
+
+## NAME
+
+cargo-add --- Add dependencies to a Cargo.toml manifest file
+
+## SYNOPSIS
+
+`cargo add` [_options_] _crate_...\
+`cargo add` [_options_] `--path` _path_\
+`cargo add` [_options_] `--git` _url_ [_crate_...]\
+
+
+## DESCRIPTION
+
+This command can add or modify dependencies.
+
+The source for the dependency can be specified with:
+
+* _crate_`@`_version_: Fetch from a registry with a version constraint of "_version_"
+* `--path` _path_: Fetch from the specified _path_
+* `--git` _url_: Pull from a git repo at _url_
+
+If no source is specified, then a best effort will be made to select one, including:
+
+* Existing dependencies in other tables (like `dev-dependencies`)
+* Workspace members
+* Latest release in the registry
+
+When you add a package that is already present, the existing entry will be updated with the flags specified.
+
+Upon successful invocation, the enabled (`+`) and disabled (`-`) [features] of the specified
+dependency will be listed in the command's output.
+
+[features]: ../reference/features.md
+
+## OPTIONS
+
+### Source options
+
+{{#options}}
+
+{{#option "`--git` _url_" }}
+[Git URL to add the specified crate from](../reference/specifying-dependencies.html#specifying-dependencies-from-git-repositories).
+{{/option}}
+
+{{#option "`--branch` _branch_" }}
+Branch to use when adding from git.
+{{/option}}
+
+{{#option "`--tag` _tag_" }}
+Tag to use when adding from git.
+{{/option}}
+
+{{#option "`--rev` _sha_" }}
+Specific commit to use when adding from git.
+{{/option}}
+
+{{#option "`--path` _path_" }}
+[Filesystem path](../reference/specifying-dependencies.html#specifying-path-dependencies) to local crate to add.
+{{/option}}
+
+{{> options-registry }}
+
+{{/options}}
+
+### Section options
+
+{{#options}}
+
+{{#option "`--dev`" }}
+Add as a [development dependency](../reference/specifying-dependencies.html#development-dependencies).
+{{/option}}
+
+{{#option "`--build`" }}
+Add as a [build dependency](../reference/specifying-dependencies.html#build-dependencies).
+{{/option}}
+
+{{#option "`--target` _target_" }}
+Add as a dependency to the [given target platform](../reference/specifying-dependencies.html#platform-specific-dependencies).
+
+To avoid unexpected shell expansions, you may use quotes around each target, e.g., `--target 'cfg(unix)'`.
+{{/option}}
+
+{{/options}}
+
+### Dependency options
+
+{{#options}}
+
+{{#option "`--dry-run`" }}
+Don't actually write the manifest.
+{{/option}}
+
+{{#option "`--rename` _name_" }}
+[Rename](../reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml) the dependency.
+{{/option}}
+
+{{#option "`--optional`" }}
+Mark the dependency as [optional](../reference/features.html#optional-dependencies).
+{{/option}}
+
+{{#option "`--no-optional`" }}
+Mark the dependency as [required](../reference/features.html#optional-dependencies).
+{{/option}}
+
+{{#option "`--no-default-features`" }}
+Disable the [default features](../reference/features.html#dependency-features).
+{{/option}}
+
+{{#option "`--default-features`" }}
+Re-enable the [default features](../reference/features.html#dependency-features).
+{{/option}}
+
+{{#option "`-F` _features_" "`--features` _features_" }}
+Space or comma separated list of [features to
+activate](../reference/features.html#dependency-features). When adding multiple
+crates, the features for a specific crate may be enabled with
+`package-name/feature-name` syntax. This flag may be specified multiple times,
+which enables all specified features.
+{{/option}}
+
+{{/options}}
+
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{#option "`-p` _spec_" "`--package` _spec_" }}
+Add dependencies to only the specified package.
+{{/option}}
+
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Add `regex` as a dependency
+
+ cargo add regex
+
+2. Add `trybuild` as a dev-dependency
+
+ cargo add --dev trybuild
+
+3. Add an older version of `nom` as a dependency
+
+ cargo add nom@5
+
+4. Add support for serializing data structures to json with `derive`s
+
+ cargo add serde serde_json -F serde/derive
+
+5. Add `windows` as a platform specific dependency on `cfg(windows)`
+
+ cargo add windows --target 'cfg(windows)'
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-remove" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-bench.md b/src/tools/cargo/src/doc/man/cargo-bench.md
new file mode 100644
index 000000000..80785891b
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-bench.md
@@ -0,0 +1,172 @@
+# cargo-bench(1)
+{{*set actionverb="Benchmark"}}
+{{*set nouns="benchmarks"}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-bench --- Execute benchmarks of a package
+
+## SYNOPSIS
+
+`cargo bench` [_options_] [_benchname_] [`--` _bench-options_]
+
+## DESCRIPTION
+
+Compile and execute benchmarks.
+
+The benchmark filtering argument _benchname_ and all the arguments following
+the two dashes (`--`) are passed to the benchmark binaries and thus to
+_libtest_ (rustc's built-in unit-test and micro-benchmarking framework). If
+you are passing arguments to both Cargo and the binary, the ones after `--` go
+to the binary, the ones before go to Cargo. For details about libtest's
+arguments see the output of `cargo bench -- --help` and check out the rustc
+book's chapter on how tests work at
+<https://doc.rust-lang.org/rustc/tests/index.html>.
+
+As an example, this will run only the benchmark named `foo` (and skip other
+similarly named benchmarks like `foobar`):
+
+ cargo bench -- foo --exact
+
+Benchmarks are built with the `--test` option to `rustc` which creates a
+special executable by linking your code with libtest. The executable
+automatically runs all functions annotated with the `#[bench]` attribute.
+Cargo passes the `--bench` flag to the test harness to tell it to run
+only benchmarks.
+
+The libtest harness may be disabled by setting `harness = false` in the target
+manifest settings, in which case your code will need to provide its own `main`
+function to handle running benchmarks.
+
+> **Note**: The
+> [`#[bench]` attribute](https://doc.rust-lang.org/nightly/unstable-book/library-features/test.html)
+> is currently unstable and only available on the
+> [nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html).
+> There are some packages available on
+> [crates.io](https://crates.io/keywords/benchmark) that may help with
+> running benchmarks on the stable channel, such as
+> [Criterion](https://crates.io/crates/criterion).
+
+By default, `cargo bench` uses the [`bench` profile], which enables
+optimizations and disables debugging information. If you need to debug a
+benchmark, you can use the `--profile=dev` command-line option to switch to
+the dev profile. You can then run the debug-enabled benchmark within a
+debugger.
+
+[`bench` profile]: ../reference/profiles.html#bench
+
+### Working directory of benchmarks
+
+The working directory of every benchmark is set to the root directory of the
+package the benchmark belongs to.
+Setting the working directory of benchmarks to the package's root directory
+makes it possible for benchmarks to reliably access the package's files using
+relative paths, regardless of where `cargo bench` was executed from.
+
+## OPTIONS
+
+### Benchmark Options
+
+{{> options-test }}
+
+{{> section-package-selection }}
+
+### Target Selection
+
+When no target selection options are given, `cargo bench` will build the
+following targets of the selected packages:
+
+- lib --- used to link with binaries and benchmarks
+- bins (only if benchmark targets are built and required features are
+ available)
+- lib as a benchmark
+- bins as benchmarks
+- benchmark targets
+
+The default behavior can be changed by setting the `bench` flag for the target
+in the manifest settings. Setting examples to `bench = true` will build and
+run the example as a benchmark. Setting targets to `bench = false` will stop
+them from being benchmarked by default. Target selection options that take a
+target by name ignore the `bench` flag and will always benchmark the given
+target.
+
+{{> options-targets-bin-auto-built }}
+
+{{> options-targets }}
+
+{{> section-features }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-profile }}
+
+{{> options-ignore-rust-version }}
+
+{{> options-timings }}
+
+{{/options}}
+
+### Output Options
+
+{{#options}}
+{{> options-target-dir }}
+{{/options}}
+
+### Display Options
+
+By default the Rust test harness hides output from benchmark execution to keep
+results readable. Benchmark output can be recovered (e.g., for debugging) by
+passing `--nocapture` to the benchmark binaries:
+
+ cargo bench -- --nocapture
+
+{{#options}}
+
+{{> options-display }}
+
+{{> options-message-format }}
+
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+### Miscellaneous Options
+
+The `--jobs` argument affects the building of the benchmark executable but
+does not affect how many threads are used when running the benchmarks. The
+Rust test harness runs benchmarks serially in a single thread.
+
+{{#options}}
+{{> options-jobs }}
+{{> options-keep-going }}
+{{/options}}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Build and execute all the benchmarks of the current package:
+
+ cargo bench
+
+2. Run only a specific benchmark within a specific benchmark target:
+
+ cargo bench --bench bench_name -- modname::some_benchmark
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-test" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-build.md b/src/tools/cargo/src/doc/man/cargo-build.md
new file mode 100644
index 000000000..986144443
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-build.md
@@ -0,0 +1,116 @@
+# cargo-build(1)
+{{*set actionverb="Build"}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-build --- Compile the current package
+
+## SYNOPSIS
+
+`cargo build` [_options_]
+
+## DESCRIPTION
+
+Compile local packages and all of their dependencies.
+
+## OPTIONS
+
+{{> section-package-selection }}
+
+### Target Selection
+
+When no target selection options are given, `cargo build` will build all
+binary and library targets of the selected packages. Binaries are skipped if
+they have `required-features` that are missing.
+
+{{> options-targets-bin-auto-built }}
+
+{{> options-targets }}
+
+{{> section-features }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-release }}
+
+{{> options-profile }}
+
+{{> options-ignore-rust-version }}
+
+{{> options-timings }}
+
+{{/options}}
+
+### Output Options
+
+{{#options}}
+{{> options-target-dir }}
+
+{{#option "`--out-dir` _directory_" }}
+Copy final artifacts to this directory.
+
+This option is unstable and available only on the
+[nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html)
+and requires the `-Z unstable-options` flag to enable.
+See <https://github.com/rust-lang/cargo/issues/6790> for more information.
+{{/option}}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+
+{{> options-message-format }}
+
+{{#option "`--build-plan`" }}
+Outputs a series of JSON messages to stdout that indicate the commands to run
+the build.
+
+This option is unstable and available only on the
+[nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html)
+and requires the `-Z unstable-options` flag to enable.
+See <https://github.com/rust-lang/cargo/issues/5579> for more information.
+{{/option}}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+### Miscellaneous Options
+
+{{#options}}
+{{> options-jobs }}
+{{> options-keep-going }}
+{{> options-future-incompat }}
+{{/options}}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Build the local package and all of its dependencies:
+
+ cargo build
+
+2. Build with optimizations:
+
+ cargo build --release
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-rustc" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-check.md b/src/tools/cargo/src/doc/man/cargo-check.md
new file mode 100644
index 000000000..055adff0d
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-check.md
@@ -0,0 +1,99 @@
+# cargo-check(1)
+{{*set actionverb="Check"}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-check --- Check the current package
+
+## SYNOPSIS
+
+`cargo check` [_options_]
+
+## DESCRIPTION
+
+Check a local package and all of its dependencies for errors. This will
+essentially compile the packages without performing the final step of code
+generation, which is faster than running `cargo build`. The compiler will save
+metadata files to disk so that future runs will reuse them if the source has
+not been modified. Some diagnostics and errors are only emitted during code
+generation, so they inherently won't be reported with `cargo check`.
+
+## OPTIONS
+
+{{> section-package-selection }}
+
+### Target Selection
+
+When no target selection options are given, `cargo check` will check all
+binary and library targets of the selected packages. Binaries are skipped if
+they have `required-features` that are missing.
+
+{{> options-targets }}
+
+{{> section-features }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-release }}
+
+{{> options-profile-legacy-check }}
+
+{{> options-ignore-rust-version }}
+
+{{> options-timings }}
+
+{{/options}}
+
+### Output Options
+
+{{#options}}
+{{> options-target-dir }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+
+{{> options-message-format }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+### Miscellaneous Options
+
+{{#options}}
+{{> options-jobs }}
+{{> options-keep-going }}
+{{> options-future-incompat }}
+{{/options}}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Check the local package for errors:
+
+ cargo check
+
+2. Check all targets, including unit tests:
+
+ cargo check --all-targets --profile=test
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-build" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-clean.md b/src/tools/cargo/src/doc/man/cargo-clean.md
new file mode 100644
index 000000000..3222f7bb0
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-clean.md
@@ -0,0 +1,88 @@
+# cargo-clean(1)
+{{*set actionverb="Clean"}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-clean --- Remove generated artifacts
+
+## SYNOPSIS
+
+`cargo clean` [_options_]
+
+## DESCRIPTION
+
+Remove artifacts from the target directory that Cargo has generated in the
+past.
+
+With no options, `cargo clean` will delete the entire target directory.
+
+## OPTIONS
+
+### Package Selection
+
+When no packages are selected, all packages and all dependencies in the
+workspace are cleaned.
+
+{{#options}}
+{{#option "`-p` _spec_..." "`--package` _spec_..." }}
+Clean only the specified packages. This flag may be specified
+multiple times. See {{man "cargo-pkgid" 1}} for the SPEC format.
+{{/option}}
+{{/options}}
+
+### Clean Options
+
+{{#options}}
+
+{{#option "`--doc`" }}
+This option will cause `cargo clean` to remove only the `doc` directory in
+the target directory.
+{{/option}}
+
+{{#option "`--release`" }}
+Remove all artifacts in the `release` directory.
+{{/option}}
+
+{{#option "`--profile` _name_" }}
+Remove all artifacts in the directory with the given profile name.
+{{/option}}
+
+{{> options-target-dir }}
+
+{{> options-target-triple }}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Remove the entire target directory:
+
+ cargo clean
+
+2. Remove only the release artifacts:
+
+ cargo clean --release
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-build" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-doc.md b/src/tools/cargo/src/doc/man/cargo-doc.md
new file mode 100644
index 000000000..9d5b77648
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-doc.md
@@ -0,0 +1,129 @@
+# cargo-doc(1)
+{{*set actionverb="Document"}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-doc --- Build a package's documentation
+
+## SYNOPSIS
+
+`cargo doc` [_options_]
+
+## DESCRIPTION
+
+Build the documentation for the local package and all dependencies. The output
+is placed in `target/doc` in rustdoc's usual format.
+
+## OPTIONS
+
+### Documentation Options
+
+{{#options}}
+
+{{#option "`--open`" }}
+Open the docs in a browser after building them. This will use your default
+browser unless you define another one in the `BROWSER` environment variable
+or use the [`doc.browser`](../reference/config.html#docbrowser) configuration
+option.
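+
+For example, to open the docs in a specific browser for a single invocation
+(the browser name is illustrative):
+
+    BROWSER=firefox cargo doc --open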
+{{/option}}
+
+{{#option "`--no-deps`" }}
+Do not build documentation for dependencies.
+{{/option}}
+
+{{#option "`--document-private-items`" }}
+Include non-public items in the documentation. This will be enabled by default if documenting a binary target.
+{{/option}}
+
+{{/options}}
+
+{{> section-package-selection }}
+
+### Target Selection
+
+When no target selection options are given, `cargo doc` will document all
+binary and library targets of the selected package. The binary will be skipped
+if its name is the same as the lib target. Binaries are skipped if they have
+`required-features` that are missing.
+
+The default behavior can be changed by setting `doc = false` for the target in
+the manifest settings. Using target selection options will ignore the `doc`
+flag and will always document the given target.
+
+{{#options}}
+{{> options-targets-lib-bin }}
+
+{{#option "`--example` _name_..." }}
+{{actionverb}} the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.
+{{/option}}
+
+{{#option "`--examples`" }}
+{{actionverb}} all example targets.
+{{/option}}
+
+{{/options}}
+
+{{> section-features }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-release }}
+
+{{> options-profile }}
+
+{{> options-ignore-rust-version }}
+
+{{> options-timings }}
+
+{{/options}}
+
+### Output Options
+
+{{#options}}
+{{> options-target-dir }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+
+{{> options-message-format }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+### Miscellaneous Options
+
+{{#options}}
+{{> options-jobs }}
+{{> options-keep-going }}
+{{/options}}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Build the local package documentation and its dependencies and output to
+ `target/doc`.
+
+ cargo doc
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-rustdoc" 1}}, {{man "rustdoc" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-fetch.md b/src/tools/cargo/src/doc/man/cargo-fetch.md
new file mode 100644
index 000000000..c31166a9b
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-fetch.md
@@ -0,0 +1,65 @@
+# cargo-fetch(1)
+{{*set actionverb="Fetch"}}
+{{*set target-default-to-all-arch=true}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-fetch --- Fetch dependencies of a package from the network
+
+## SYNOPSIS
+
+`cargo fetch` [_options_]
+
+## DESCRIPTION
+
+If a `Cargo.lock` file is available, this command will ensure that all of the
+git dependencies and/or registry dependencies are downloaded and locally
+available. Subsequent Cargo commands will be able to run offline after a `cargo
+fetch` unless the lock file changes.
+
+If the lock file is not available, then this command will generate the lock
+file before fetching the dependencies.
+
+If `--target` is not specified, then all target dependencies are fetched.
+
+See also the [cargo-prefetch](https://crates.io/crates/cargo-prefetch)
+plugin, which adds a command to download popular crates. This may be useful
+if you plan to use Cargo with the `--offline` flag when no network is
+available.
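+
+As a sketch, dependencies can be fetched ahead of time and later commands run
+offline (the target triple is illustrative):
+
+    cargo fetch --target x86_64-unknown-linux-gnu
+    cargo build --offline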
+
+## OPTIONS
+
+### Fetch options
+
+{{#options}}
+{{> options-target-triple }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Fetch all dependencies:
+
+ cargo fetch
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-update" 1}}, {{man "cargo-generate-lockfile" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-fix.md b/src/tools/cargo/src/doc/man/cargo-fix.md
new file mode 100644
index 000000000..64fe29944
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-fix.md
@@ -0,0 +1,182 @@
+# cargo-fix(1)
+{{*set actionverb="Fix"}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-fix --- Automatically fix lint warnings reported by rustc
+
+## SYNOPSIS
+
+`cargo fix` [_options_]
+
+## DESCRIPTION
+
+This Cargo subcommand will automatically take rustc's suggestions from
+diagnostics like warnings and apply them to your source code. This is intended
+to help automate tasks that rustc itself already knows how to tell you to fix!
+
+Executing `cargo fix` will, under the hood, execute {{man "cargo-check" 1}}. Any warnings
+applicable to your crate will be automatically fixed (if possible) and all
+remaining warnings will be displayed when the check process is finished. For
+example, if you'd like to apply all fixes to the current package, you can run:
+
+ cargo fix
+
+which behaves the same as `cargo check --all-targets`.
+
+`cargo fix` is only capable of fixing code that is normally compiled with
+`cargo check`. If code is conditionally enabled with optional features, you
+will need to enable those features for that code to be analyzed:
+
+ cargo fix --features foo
+
+Similarly, other `cfg` expressions like platform-specific code will need to
+pass `--target` to fix code for the given target.
+
+ cargo fix --target x86_64-pc-windows-gnu
+
+If you encounter any problems with `cargo fix` or otherwise have any questions
+or feature requests please don't hesitate to file an issue at
+<https://github.com/rust-lang/cargo>.
+
+### Edition migration
+
+The `cargo fix` subcommand can also be used to migrate a package from one
+[edition] to the next. The general procedure is:
+
+1. Run `cargo fix --edition`. Consider also using the `--all-features` flag if
+ your project has multiple features. You may also want to run `cargo fix
+ --edition` multiple times with different `--target` flags if your project
+ has platform-specific code gated by `cfg` attributes.
+2. Modify `Cargo.toml` to set the [edition field] to the new edition.
+3. Run your project tests to verify that everything still works. If new
+ warnings are issued, you may want to consider running `cargo fix` again
+ (without the `--edition` flag) to apply any suggestions given by the
+ compiler.
+
+And hopefully that's it! Just keep in mind the caveats mentioned above that
+`cargo fix` cannot update code for inactive features or `cfg` expressions.
+Also, in some rare cases the compiler is unable to automatically migrate all
+code to the new edition, and this may require manual changes after building
+with the new edition.
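+
+As a rough sketch (assuming a project migrating to the 2021 edition), the
+procedure could look like:
+
+    cargo fix --edition --all-features
+    # edit Cargo.toml and set: edition = "2021"
+    cargo test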
+
+[edition]: https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html
+[edition field]: ../reference/manifest.html#the-edition-field
+
+## OPTIONS
+
+### Fix options
+
+{{#options}}
+
+{{#option "`--broken-code`" }}
+Fix code even if it already has compiler errors. This is useful if `cargo fix`
+fails to apply the changes. It will apply the changes and leave the broken
+code in the working directory for you to inspect and manually fix.
+{{/option}}
+
+{{#option "`--edition`" }}
+Apply changes that will update the code to the next edition. This will not
+update the edition in the `Cargo.toml` manifest, which must be updated
+manually after `cargo fix --edition` has finished.
+{{/option}}
+
+{{#option "`--edition-idioms`" }}
+Apply suggestions that will update code to the preferred style for the current
+edition.
+{{/option}}
+
+{{#option "`--allow-no-vcs`" }}
+Fix code even if a VCS was not detected.
+{{/option}}
+
+{{#option "`--allow-dirty`" }}
+Fix code even if the working directory has changes.
+{{/option}}
+
+{{#option "`--allow-staged`" }}
+Fix code even if the working directory has staged changes.
+{{/option}}
+
+{{/options}}
+
+{{> section-package-selection }}
+
+### Target Selection
+
+When no target selection options are given, `cargo fix` will fix all targets
+(`--all-targets` implied). Binaries are skipped if they have
+`required-features` that are missing.
+
+{{> options-targets }}
+
+{{> section-features }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-release }}
+
+{{> options-profile-legacy-check }}
+
+{{> options-ignore-rust-version }}
+
+{{> options-timings }}
+
+{{/options}}
+
+### Output Options
+
+{{#options}}
+{{> options-target-dir }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+
+{{> options-message-format }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+### Miscellaneous Options
+
+{{#options}}
+{{> options-jobs }}
+{{> options-keep-going }}
+{{/options}}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Apply compiler suggestions to the local package:
+
+ cargo fix
+
+2. Update a package to prepare it for the next edition:
+
+ cargo fix --edition
+
+3. Apply suggested idioms for the current edition:
+
+ cargo fix --edition-idioms
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-check" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-generate-lockfile.md b/src/tools/cargo/src/doc/man/cargo-generate-lockfile.md
new file mode 100644
index 000000000..3a2f52b39
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-generate-lockfile.md
@@ -0,0 +1,49 @@
+# cargo-generate-lockfile(1)
+
+## NAME
+
+cargo-generate-lockfile --- Generate the lockfile for a package
+
+## SYNOPSIS
+
+`cargo generate-lockfile` [_options_]
+
+## DESCRIPTION
+
+This command will create the `Cargo.lock` lockfile for the current package or
+workspace. If the lockfile already exists, it will be rebuilt with the latest
+available version of every package.
+
+See also {{man "cargo-update" 1}} which is also capable of creating a `Cargo.lock`
+lockfile and has more options for controlling update behavior.
+
+## OPTIONS
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Create or update the lockfile for the current package or workspace:
+
+ cargo generate-lockfile
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-update" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-help.md b/src/tools/cargo/src/doc/man/cargo-help.md
new file mode 100644
index 000000000..4a5a8f515
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-help.md
@@ -0,0 +1,26 @@
+# cargo-help(1)
+
+## NAME
+
+cargo-help --- Get help for a Cargo command
+
+## SYNOPSIS
+
+`cargo help` [_subcommand_]
+
+## DESCRIPTION
+
+Prints a help message for the given command.
+
+## EXAMPLES
+
+1. Get help for a command:
+
+ cargo help build
+
+2. Help is also available with the `--help` flag:
+
+ cargo build --help
+
+## SEE ALSO
+{{man "cargo" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-init.md b/src/tools/cargo/src/doc/man/cargo-init.md
new file mode 100644
index 000000000..cd8e623ca
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-init.md
@@ -0,0 +1,51 @@
+# cargo-init(1)
+
+## NAME
+
+cargo-init --- Create a new Cargo package in an existing directory
+
+## SYNOPSIS
+
+`cargo init` [_options_] [_path_]
+
+## DESCRIPTION
+
+This command will create a new Cargo manifest in the current directory. Give a
+path as an argument to create it in the given directory.
+
+If there are typically-named Rust source files already in the directory, those
+will be used. If not, then a sample `src/main.rs` file will be created, or
+`src/lib.rs` if `--lib` is passed.
+
+If the directory is not already in a VCS repository, then a new repository
+is created (see `--vcs` below).
+
+See {{man "cargo-new" 1}} for a similar command which will create a new package in
+a new directory.
+
+## OPTIONS
+
+### Init Options
+
+{{> options-new }}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Create a binary Cargo package in the current directory:
+
+ cargo init
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-new" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-install.md b/src/tools/cargo/src/doc/man/cargo-install.md
new file mode 100644
index 000000000..31c3d6048
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-install.md
@@ -0,0 +1,231 @@
+# cargo-install(1)
+{{*set actionverb="Install"}}
+{{*set temp-target-dir=true}}
+
+## NAME
+
+cargo-install --- Build and install a Rust binary
+
+## SYNOPSIS
+
+`cargo install` [_options_] _crate_[@_version_]...\
+`cargo install` [_options_] `--path` _path_\
+`cargo install` [_options_] `--git` _url_ [_crate_...]\
+`cargo install` [_options_] `--list`
+
+## DESCRIPTION
+
+This command manages Cargo's local set of installed binary crates. Only
+packages which have executable `[[bin]]` or `[[example]]` targets can be
+installed, and all executables are installed into the installation root's
+`bin` folder.
+
+{{> description-install-root }}
+
+There are multiple sources from which a crate can be installed. The default
+location is crates.io, but the `--git`, `--path`, and `--registry` flags can
+change this source. If the source contains more than one package (such as
+crates.io or a git repository with multiple crates) the _crate_ argument is
+required to indicate which crate should be installed.
+
+Crates from crates.io can optionally specify the version they wish to install
+via the `--version` flag, and similarly packages from git repositories can
+optionally specify the branch, tag, or revision that should be installed. If a
+crate has multiple binaries, the `--bin` argument can selectively install only
+one of them, and if you'd rather install examples the `--example` argument can
+be used as well.
+
+If the package is already installed, Cargo will reinstall it if the installed
+version does not appear to be up-to-date. If any of the following values
+change, then Cargo will reinstall the package:
+
+- The package version and source.
+- The set of binary names installed.
+- The chosen features.
+- The profile (`--profile`).
+- The target (`--target`).
+
+Installing with `--path` will always build and install, unless there are
+conflicting binaries from another package. The `--force` flag may be used to
+force Cargo to always reinstall the package.
+
+If the source is crates.io or `--git`, then by default the crate will be built
+in a temporary target directory. To avoid this, the target directory can be
+specified by setting the `CARGO_TARGET_DIR` environment variable to a relative
+path. In particular, this can be useful for caching build artifacts on
+continuous integration systems.
+
+### Dealing with the Lockfile
+
+By default, the `Cargo.lock` file that is included with the package will be
+ignored. This means that Cargo will recompute which versions of dependencies
+to use, possibly using newer versions that have been released since the
+package was published. The `--locked` flag can be used to force Cargo to use
+the packaged `Cargo.lock` file if it is available. This may be useful for
+ensuring reproducible builds, to use the exact same set of dependencies that
+were available when the package was published. It may also be useful if a
+newer version of a dependency is published that no longer builds on your
+system, or has other problems. The downside to using `--locked` is that you
+will not receive any fixes or updates to any dependency. Note that Cargo did
+not start publishing `Cargo.lock` files until version 1.37, which means
+packages published with prior versions will not have a `Cargo.lock` file
+available.
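+
+For example, to build with the dependency versions recorded in the published
+`Cargo.lock` (the crate name is illustrative):
+
+    cargo install --locked ripgrep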
+
+### Configuration Discovery
+
+This command operates at the system or user level, not the project level.
+This means that the local [configuration discovery] is ignored.
+Instead, the configuration discovery begins at `$CARGO_HOME/config.toml`.
+If the package is installed with `--path $PATH`, the local configuration
+will be used, beginning discovery at `$PATH/.cargo/config.toml`.
+
+[configuration discovery]: ../reference/config.html#hierarchical-structure
+
+## OPTIONS
+
+### Install Options
+
+{{#options}}
+
+{{#option "`--vers` _version_" "`--version` _version_" }}
+Specify a version to install. This may be a [version
+requirement](../reference/specifying-dependencies.md), like `~1.2`, to have Cargo
+select the newest version from the given requirement. If the version does not
+have a requirement operator (such as `^` or `~`), then it must be in the form
+_MAJOR.MINOR.PATCH_, and will install exactly that version; it is *not*
+treated as a caret requirement like Cargo dependencies are.
+{{/option}}
+
+{{#option "`--git` _url_" }}
+Git URL to install the specified crate from.
+{{/option}}
+
+{{#option "`--branch` _branch_" }}
+Branch to use when installing from git.
+{{/option}}
+
+{{#option "`--tag` _tag_" }}
+Tag to use when installing from git.
+{{/option}}
+
+{{#option "`--rev` _sha_" }}
+Specific commit to use when installing from git.
+{{/option}}
+
+{{#option "`--path` _path_" }}
+Filesystem path to local crate to install.
+{{/option}}
+
+{{#option "`--list`" }}
+List all installed packages and their versions.
+{{/option}}
+
+{{#option "`-f`" "`--force`" }}
+Force overwriting existing crates or binaries. This can be used if a package
+has installed a binary with the same name as another package. This is also
+useful if something has changed on the system that you want to rebuild with,
+such as a newer version of `rustc`.
+{{/option}}
+
+{{#option "`--no-track`" }}
+By default, Cargo keeps track of the installed packages with a metadata file
+stored in the installation root directory. This flag tells Cargo not to use or
+create that file. With this flag, Cargo will refuse to overwrite any existing
+files unless the `--force` flag is used. This also disables Cargo's ability to
+protect against multiple concurrent invocations of Cargo installing at the
+same time.
+{{/option}}
+
+{{#option "`--bin` _name_..." }}
+Install only the specified binary.
+{{/option}}
+
+{{#option "`--bins`" }}
+Install all binaries.
+{{/option}}
+
+{{#option "`--example` _name_..." }}
+Install only the specified example.
+{{/option}}
+
+{{#option "`--examples`" }}
+Install all examples.
+{{/option}}
+
+{{#option "`--root` _dir_" }}
+Directory to install packages into.
+{{/option}}
+
+{{> options-registry }}
+
+{{> options-index }}
+
+{{/options}}
+
+{{> section-features }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-target-dir }}
+
+{{#option "`--debug`" }}
+Build with the `dev` profile instead of the `release` profile.
+See also the `--profile` option for choosing a specific profile by name.
+{{/option}}
+
+{{> options-profile }}
+
+{{> options-ignore-rust-version }}
+
+{{> options-timings }}
+
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-locked }}
+{{/options}}
+
+### Miscellaneous Options
+
+{{#options}}
+{{> options-jobs }}
+{{> options-keep-going }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+
+{{> options-message-format }}
+
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Install or upgrade a package from crates.io:
+
+ cargo install ripgrep
+
+2. Install or reinstall the package in the current directory:
+
+ cargo install --path .
+
+3. View the list of installed packages:
+
+ cargo install --list
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-uninstall" 1}}, {{man "cargo-search" 1}}, {{man "cargo-publish" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-locate-project.md b/src/tools/cargo/src/doc/man/cargo-locate-project.md
new file mode 100644
index 000000000..4ebf36d7d
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-locate-project.md
@@ -0,0 +1,66 @@
+# cargo-locate-project(1)
+
+## NAME
+
+cargo-locate-project --- Print a JSON representation of a Cargo.toml file's location
+
+## SYNOPSIS
+
+`cargo locate-project` [_options_]
+
+## DESCRIPTION
+
+This command will print a JSON object to stdout with the full path to the manifest. The
+manifest is found by searching upward for a file named `Cargo.toml` starting from the current
+working directory.
+
+If the project happens to be a part of a workspace, the manifest of the project, rather than
+the workspace root, is output. This can be overridden by the `--workspace` flag. The
+workspace root is found by traversing further upward or by using the field `package.workspace` after
+locating the manifest of a workspace member.
+
+## OPTIONS
+
+{{#options}}
+
+{{#option "`--workspace`" }}
+Locate the `Cargo.toml` at the root of the workspace, as opposed to the current
+workspace member.
+{{/option}}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+
+{{#option "`--message-format` _fmt_" }}
+The representation in which to print the project location. Valid values:
+
+- `json` (default): JSON object with the path under the key "root".
+- `plain`: Just the path.
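+
+For example, to print just the path:
+
+    cargo locate-project --message-format plain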
+{{/option}}
+
+{{> options-display }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Display the path to the manifest based on the current directory:
+
+ cargo locate-project
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-metadata" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-login.md b/src/tools/cargo/src/doc/man/cargo-login.md
new file mode 100644
index 000000000..54c823d2d
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-login.md
@@ -0,0 +1,51 @@
+# cargo-login(1)
+
+## NAME
+
+cargo-login --- Save an API token from the registry locally
+
+## SYNOPSIS
+
+`cargo login` [_options_] [_token_]
+
+## DESCRIPTION
+
+This command will save the API token to disk so that commands that require
+authentication, such as {{man "cargo-publish" 1}}, will be automatically
+authenticated. The token is saved in `$CARGO_HOME/credentials.toml`. `CARGO_HOME`
+defaults to `.cargo` in your home directory.
+
+If the _token_ argument is not specified, it will be read from stdin.
+
+The API token for crates.io may be retrieved from <https://crates.io/me>.
+
+Take care to keep the token secret; it should not be shared with anyone else.
+
+## OPTIONS
+
+### Login Options
+
+{{#options}}
+{{> options-registry }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Save the API token to disk:
+
+ cargo login
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-logout" 1}}, {{man "cargo-publish" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-logout.md b/src/tools/cargo/src/doc/man/cargo-logout.md
new file mode 100644
index 000000000..f9c0db58c
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-logout.md
@@ -0,0 +1,57 @@
+# cargo-logout(1)
+
+## NAME
+
+cargo-logout --- Remove an API token from the registry locally
+
+## SYNOPSIS
+
+`cargo logout` [_options_]
+
+## DESCRIPTION
+
+This command will remove the API token from the local credential storage.
+Credentials are stored in `$CARGO_HOME/credentials.toml` where `$CARGO_HOME`
+defaults to `.cargo` in your home directory.
+
+If `--registry` is not specified, then the credentials for the default
+registry will be removed (configured by
+[`registry.default`](../reference/config.html#registrydefault), which defaults
+to <https://crates.io/>).
+
+This will not revoke the token on the server. If you need to revoke the token,
+visit the registry website and follow its instructions (see
+<https://crates.io/me> to revoke the token for <https://crates.io/>).
+
+## OPTIONS
+
+### Logout Options
+
+{{#options}}
+{{> options-registry }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Remove the default registry token:
+
+ cargo logout
+
+2. Remove the token for a specific registry:
+
+ cargo logout --registry my-registry
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-login" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-metadata.md b/src/tools/cargo/src/doc/man/cargo-metadata.md
new file mode 100644
index 000000000..4f9032d56
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-metadata.md
@@ -0,0 +1,353 @@
+# cargo-metadata(1)
+
+## NAME
+
+cargo-metadata --- Machine-readable metadata about the current package
+
+## SYNOPSIS
+
+`cargo metadata` [_options_]
+
+## DESCRIPTION
+
+Output JSON to stdout containing information about the workspace members and
+resolved dependencies of the current package.
+
+It is recommended to include the `--format-version` flag to future-proof
+your code and ensure the output is in the format you are expecting.
+
+See the [cargo_metadata crate](https://crates.io/crates/cargo_metadata)
+for a Rust API for reading the metadata.
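+
+As a sketch, the output can also be consumed from the shell; for example, to
+list the names of the workspace members (assuming `jq` is installed):
+
+    cargo metadata --format-version=1 --no-deps | jq -r '.packages[].name'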
+
+## OUTPUT FORMAT
+
+The output has the following format:
+
+```javascript
+{
+ /* Array of all packages in the workspace.
+ It also includes all feature-enabled dependencies unless --no-deps is used.
+ */
+ "packages": [
+ {
+ /* The name of the package. */
+ "name": "my-package",
+ /* The version of the package. */
+ "version": "0.1.0",
+ /* The Package ID, a unique identifier for referring to the package. */
+ "id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+ /* The license value from the manifest, or null. */
+ "license": "MIT/Apache-2.0",
+ /* The license-file value from the manifest, or null. */
+ "license_file": "LICENSE",
+ /* The description value from the manifest, or null. */
+ "description": "Package description.",
+ /* The source ID of the package. This represents where
+ a package is retrieved from.
+ This is null for path dependencies and workspace members.
+ For other dependencies, it is a string with the format:
+ - "registry+URL" for registry-based dependencies.
+ Example: "registry+https://github.com/rust-lang/crates.io-index"
+ - "git+URL" for git-based dependencies.
+ Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c"
+ */
+ "source": null,
+ /* Array of dependencies declared in the package's manifest. */
+ "dependencies": [
+ {
+ /* The name of the dependency. */
+ "name": "bitflags",
+ /* The source ID of the dependency. May be null, see
+ description for the package source.
+ */
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ /* The version requirement for the dependency.
+ Dependencies without a version requirement have a value of "*".
+ */
+ "req": "^1.0",
+ /* The dependency kind.
+ "dev", "build", or null for a normal dependency.
+ */
+ "kind": null,
+ /* If the dependency is renamed, this is the new name for
+ the dependency as a string. null if it is not renamed.
+ */
+ "rename": null,
+ /* Boolean of whether or not this is an optional dependency. */
+ "optional": false,
+ /* Boolean of whether or not default features are enabled. */
+ "uses_default_features": true,
+ /* Array of features enabled. */
+ "features": [],
+ /* The target platform for the dependency.
+ null if not a target dependency.
+ */
+ "target": "cfg(windows)",
+ /* The file system path for a local path dependency.
+ not present if not a path dependency.
+ */
+ "path": "/path/to/dep",
+ /* A string of the URL of the registry this dependency is from.
+ If not specified or null, the dependency is from the default
+ registry (crates.io).
+ */
+ "registry": null
+ }
+ ],
+ /* Array of Cargo targets. */
+ "targets": [
+ {
+ /* Array of target kinds.
+ - lib targets list the `crate-type` values from the
+ manifest such as "lib", "rlib", "dylib",
+ "proc-macro", etc. (default ["lib"])
+ - binary is ["bin"]
+ - example is ["example"]
+ - integration test is ["test"]
+ - benchmark is ["bench"]
+ - build script is ["custom-build"]
+ */
+ "kind": [
+ "bin"
+ ],
+ /* Array of crate types.
+ - lib and example libraries list the `crate-type` values
+ from the manifest such as "lib", "rlib", "dylib",
+ "proc-macro", etc. (default ["lib"])
+ - all other target kinds are ["bin"]
+ */
+ "crate_types": [
+ "bin"
+ ],
+ /* The name of the target. */
+ "name": "my-package",
+ /* Absolute path to the root source file of the target. */
+ "src_path": "/path/to/my-package/src/main.rs",
+ /* The Rust edition of the target.
+ Defaults to the package edition.
+ */
+ "edition": "2018",
+ /* Array of required features.
+ This property is not included if no required features are set.
+ */
+ "required-features": ["feat1"],
+ /* Whether the target should be documented by `cargo doc`. */
+ "doc": true,
+ /* Whether or not this target has doc tests enabled, and
+ the target is compatible with doc testing.
+ */
+ "doctest": false,
+ /* Whether or not this target should be built and run with `--test`
+ */
+ "test": true
+ }
+ ],
+ /* Set of features defined for the package.
+ Each feature maps to an array of features or dependencies it
+ enables.
+ */
+ "features": {
+ "default": [
+ "feat1"
+ ],
+ "feat1": [],
+ "feat2": []
+ },
+ /* Absolute path to this package's manifest. */
+ "manifest_path": "/path/to/my-package/Cargo.toml",
+ /* Package metadata.
+ This is null if no metadata is specified.
+ */
+ "metadata": {
+ "docs": {
+ "rs": {
+ "all-features": true
+ }
+ }
+ },
+ /* List of registries to which this package may be published.
+ Publishing is unrestricted if null, and forbidden if an empty array. */
+ "publish": [
+ "crates-io"
+ ],
+ /* Array of authors from the manifest.
+ Empty array if no authors specified.
+ */
+ "authors": [
+ "Jane Doe <user@example.com>"
+ ],
+ /* Array of categories from the manifest. */
+ "categories": [
+ "command-line-utilities"
+ ],
+ /* Optional string that is the default binary picked by cargo run. */
+ "default_run": null,
+ /* Optional string that is the minimum supported rust version */
+ "rust_version": "1.56",
+ /* Array of keywords from the manifest. */
+ "keywords": [
+ "cli"
+ ],
+ /* The readme value from the manifest or null if not specified. */
+ "readme": "README.md",
+ /* The repository value from the manifest or null if not specified. */
+ "repository": "https://github.com/rust-lang/cargo",
+ /* The homepage value from the manifest or null if not specified. */
+ "homepage": "https://rust-lang.org",
+ /* The documentation value from the manifest or null if not specified. */
+ "documentation": "https://doc.rust-lang.org/stable/std",
+ /* The default edition of the package.
+ Note that individual targets may have different editions.
+ */
+ "edition": "2018",
+ /* Optional string that is the name of a native library the package
+ is linking to.
+ */
+ "links": null,
+ }
+ ],
+ /* Array of members of the workspace.
+ Each entry is the Package ID for the package.
+ */
+ "workspace_members": [
+ "my-package 0.1.0 (path+file:///path/to/my-package)",
+ ],
+ // The resolved dependency graph for the entire workspace. The enabled
+ // features are based on the enabled features for the "current" package.
+ // Inactivated optional dependencies are not listed.
+ //
+ // This is null if --no-deps is specified.
+ //
+ // By default, this includes all dependencies for all target platforms.
+ // The `--filter-platform` flag may be used to narrow to a specific
+ // target triple.
+ "resolve": {
+ /* Array of nodes within the dependency graph.
+ Each node is a package.
+ */
+ "nodes": [
+ {
+ /* The Package ID of this node. */
+ "id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+ /* The dependencies of this package, an array of Package IDs. */
+ "dependencies": [
+ "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ /* The dependencies of this package. This is an alternative to
+ "dependencies" which contains additional information. In
+ particular, this handles renamed dependencies.
+ */
+ "deps": [
+ {
+ /* The name of the dependency's library target.
+ If this is a renamed dependency, this is the new
+ name.
+ */
+ "name": "bitflags",
+ /* The Package ID of the dependency. */
+ "pkg": "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ /* Array of dependency kinds. Added in Cargo 1.40. */
+ "dep_kinds": [
+ {
+ /* The dependency kind.
+ "dev", "build", or null for a normal dependency.
+ */
+ "kind": null,
+ /* The target platform for the dependency.
+ null if not a target dependency.
+ */
+ "target": "cfg(windows)"
+ }
+ ]
+ }
+ ],
+ /* Array of features enabled on this package. */
+ "features": [
+ "default"
+ ]
+ }
+ ],
+ /* The root package of the workspace.
+ This is null if this is a virtual workspace. Otherwise it is
+ the Package ID of the root package.
+ */
+ "root": "my-package 0.1.0 (path+file:///path/to/my-package)"
+ },
+ /* The absolute path to the build directory where Cargo places its output. */
+ "target_directory": "/path/to/my-package/target",
+ /* The version of the schema for this metadata structure.
+ This will be changed if incompatible changes are ever made.
+ */
+ "version": 1,
+ /* The absolute path to the root of the workspace. */
+ "workspace_root": "/path/to/my-package"
+ /* Workspace metadata.
+ This is null if no metadata is specified. */
+ "metadata": {
+ "docs": {
+ "rs": {
+ "all-features": true
+ }
+ }
+ }
+}
+```
+
+## OPTIONS
+
+### Output Options
+
+{{#options}}
+
+{{#option "`--no-deps`" }}
+Output information only about the workspace members and don't fetch
+dependencies.
+{{/option}}
+
+{{#option "`--format-version` _version_" }}
+Specify the version of the output format to use. Currently `1` is the only
+possible value.
+{{/option}}
+
+{{#option "`--filter-platform` _triple_" }}
+This filters the `resolve` output to only include dependencies for the
+given [target triple](../appendix/glossary.html#target).
+Without this flag, the resolve includes all targets.
+
+Note that the dependencies listed in the "packages" array still include all
+dependencies. Each package definition is intended to be an unaltered
+reproduction of the information within `Cargo.toml`.
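+
+For example, to resolve dependencies for a single platform (the target triple
+is illustrative):
+
+    cargo metadata --format-version=1 --filter-platform x86_64-unknown-linux-gnu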
+{{/option}}
+
+{{/options}}
+
+{{> section-features }}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Output JSON about the current package:
+
+ cargo metadata --format-version=1
+
+## SEE ALSO
+{{man "cargo" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-new.md b/src/tools/cargo/src/doc/man/cargo-new.md
new file mode 100644
index 000000000..ea6182ac3
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-new.md
@@ -0,0 +1,46 @@
+# cargo-new(1)
+
+## NAME
+
+cargo-new --- Create a new Cargo package
+
+## SYNOPSIS
+
+`cargo new` [_options_] _path_
+
+## DESCRIPTION
+
+This command will create a new Cargo package in the given directory. This
+includes a simple template with a `Cargo.toml` manifest, sample source file,
+and a VCS ignore file. If the directory is not already in a VCS repository,
+then a new repository is created (see `--vcs` below).
+
+See {{man "cargo-init" 1}} for a similar command which will create a new manifest
+in an existing directory.
+
+## OPTIONS
+
+### New Options
+
+{{> options-new }}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Create a binary Cargo package in the given directory:
+
+ cargo new foo
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-init" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-owner.md b/src/tools/cargo/src/doc/man/cargo-owner.md
new file mode 100644
index 000000000..327916968
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-owner.md
@@ -0,0 +1,81 @@
+# cargo-owner(1)
+
+## NAME
+
+cargo-owner --- Manage the owners of a crate on the registry
+
+## SYNOPSIS
+
+`cargo owner` [_options_] `--add` _login_ [_crate_]\
+`cargo owner` [_options_] `--remove` _login_ [_crate_]\
+`cargo owner` [_options_] `--list` [_crate_]
+
+## DESCRIPTION
+
+This command will modify the owners for a crate on the registry. Owners of a
+crate can upload new versions and yank old versions. Non-team owners can also
+modify the set of owners, so take care!
+
+This command requires you to be authenticated with either the `--token` option
+or using {{man "cargo-login" 1}}.
+
+If the crate name is not specified, it will use the package name from the
+current directory.
+
+See [the reference](../reference/publishing.html#cargo-owner) for more
+information about owners and publishing.
+
+## OPTIONS
+
+### Owner Options
+
+{{#options}}
+
+{{#option "`-a`" "`--add` _login_..." }}
+Invite the given user or team as an owner.
+{{/option}}
+
+{{#option "`-r`" "`--remove` _login_..." }}
+Remove the given user or team as an owner.
+{{/option}}
+
+{{#option "`-l`" "`--list`" }}
+List owners of a crate.
+{{/option}}
+
+{{> options-token }}
+
+{{> options-index }}
+
+{{> options-registry }}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. List owners of a package:
+
+ cargo owner --list foo
+
+2. Invite an owner to a package:
+
+ cargo owner --add username foo
+
+3. Remove an owner from a package:
+
+ cargo owner --remove username foo
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-login" 1}}, {{man "cargo-publish" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-package.md b/src/tools/cargo/src/doc/man/cargo-package.md
new file mode 100644
index 000000000..2000353cc
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-package.md
@@ -0,0 +1,138 @@
+# cargo-package(1)
+{{*set actionverb="Package"}}
+{{*set noall=true}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-package --- Assemble the local package into a distributable tarball
+
+## SYNOPSIS
+
+`cargo package` [_options_]
+
+## DESCRIPTION
+
+This command will create a distributable, compressed `.crate` file with the
+source code of the package in the current directory. The resulting file will
+be stored in the `target/package` directory. This performs the following
+steps:
+
+1. Load and check the current workspace, performing some basic checks.
+ - Path dependencies are not allowed unless they have a version key. Cargo
+ will ignore the path key for dependencies in published packages.
+ `dev-dependencies` do not have this restriction.
+2. Create the compressed `.crate` file.
+ - The original `Cargo.toml` file is rewritten and normalized.
+ - `[patch]`, `[replace]`, and `[workspace]` sections are removed from the
+ manifest.
+ - `Cargo.lock` is automatically included if the package contains an
+ executable binary or example target. {{man "cargo-install" 1}} will use the
+ packaged lock file if the `--locked` flag is used.
+ - A `.cargo_vcs_info.json` file is included that contains information
+ about the current VCS checkout hash if available (not included with
+ `--allow-dirty`).
+3. Extract the `.crate` file and build it to verify it can build.
+ - This will rebuild your package from scratch to ensure that it can be
+ built from a pristine state. The `--no-verify` flag can be used to skip
+ this step.
+4. Check that build scripts did not modify any source files.
+
+The list of files included can be controlled with the `include` and `exclude`
+fields in the manifest.
+
+See [the reference](../reference/publishing.html) for more details about
+packaging and publishing.
+
+### .cargo_vcs_info.json format
+
+This will generate a `.cargo_vcs_info.json` file in the following format:
+
+```javascript
+{
+ "git": {
+ "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302"
+ },
+ "path_in_vcs": ""
+}
+```
+
+`path_in_vcs` will be set to a repo-relative path for packages
+in subdirectories of the version control repository.
+
+## OPTIONS
+
+### Package Options
+
+{{#options}}
+
+{{#option "`-l`" "`--list`" }}
+Print files included in a package without making one.
+{{/option}}
+
+{{#option "`--no-verify`" }}
+Don't verify the contents by building them.
+{{/option}}
+
+{{#option "`--no-metadata`" }}
+Ignore warnings about a lack of human-usable metadata (such as the description
+or the license).
+{{/option}}
+
+{{#option "`--allow-dirty`" }}
+Allow working directories with uncommitted VCS changes to be packaged.
+{{/option}}
+
+{{/options}}
+
+{{> section-package-selection }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-target-dir }}
+
+{{/options}}
+
+{{> section-features }}
+
+### Manifest Options
+
+{{#options}}
+
+{{> options-manifest-path }}
+
+{{> options-locked }}
+
+{{/options}}
+
+### Miscellaneous Options
+
+{{#options}}
+{{> options-jobs }}
+{{> options-keep-going }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Create a compressed `.crate` file of the current package:
+
+ cargo package
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-publish" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-pkgid.md b/src/tools/cargo/src/doc/man/cargo-pkgid.md
new file mode 100644
index 000000000..3c1689b5a
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-pkgid.md
@@ -0,0 +1,89 @@
+# cargo-pkgid(1)
+
+## NAME
+
+cargo-pkgid --- Print a fully qualified package specification
+
+## SYNOPSIS
+
+`cargo pkgid` [_options_] [_spec_]
+
+## DESCRIPTION
+
+Given a _spec_ argument, print out the fully qualified package ID specifier
+for a package or dependency in the current workspace. This command will
+generate an error if _spec_ is ambiguous as to which package it refers to in
+the dependency graph. If no _spec_ is given, then the specifier for the local
+package is printed.
+
+This command requires that a lockfile is available and dependencies have been
+fetched.
+
+A package specifier consists of a name, version, and source URL. You are
+allowed to use partial specifiers to succinctly match a specific package as
+long as it matches only one package. The format of a _spec_ can be one of the
+following:
+
+SPEC Structure | Example SPEC
+---------------------------|--------------
+_name_ | `bitflags`
+_name_`@`_version_ | `bitflags@1.0.4`
+_url_ | `https://github.com/rust-lang/cargo`
+_url_`#`_version_ | `https://github.com/rust-lang/cargo#0.33.0`
+_url_`#`_name_ | `https://github.com/rust-lang/crates.io-index#bitflags`
+_url_`#`_name_`@`_version_ | `https://github.com/rust-lang/cargo#crates-io@0.21.0`
+
+## OPTIONS
+
+### Package Selection
+
+{{#options}}
+
+{{#option "`-p` _spec_" "`--package` _spec_" }}
+Get the package ID for the given package instead of the current package.
+{{/option}}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+
+{{> options-manifest-path }}
+
+{{> options-locked }}
+
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Retrieve package specification for `foo` package:
+
+ cargo pkgid foo
+
+2. Retrieve package specification for version 1.0.0 of `foo`:
+
+ cargo pkgid foo@1.0.0
+
+3. Retrieve package specification for `foo` from crates.io:
+
+ cargo pkgid https://github.com/rust-lang/crates.io-index#foo
+
+4. Retrieve package specification for `foo` from a local package:
+
+ cargo pkgid file:///path/to/local/package#foo
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-generate-lockfile" 1}}, {{man "cargo-metadata" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-publish.md b/src/tools/cargo/src/doc/man/cargo-publish.md
new file mode 100644
index 000000000..4ccb5b529
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-publish.md
@@ -0,0 +1,117 @@
+# cargo-publish(1)
+{{*set actionverb="Publish"}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-publish --- Upload a package to the registry
+
+## SYNOPSIS
+
+`cargo publish` [_options_]
+
+## DESCRIPTION
+
+This command will create a distributable, compressed `.crate` file with the
+source code of the package in the current directory and upload it to a
+registry. The default registry is <https://crates.io>. This performs the
+following steps:
+
+1. Performs a few checks, including:
+ - Checks the `package.publish` key in the manifest for restrictions on
+ which registries you are allowed to publish to.
+2. Create a `.crate` file by following the steps in {{man "cargo-package" 1}}.
+3. Upload the crate to the registry. Note that the server will perform
+ additional checks on the crate.
+
+This command requires you to be authenticated with either the `--token` option
+or using {{man "cargo-login" 1}}.
+
+See [the reference](../reference/publishing.html) for more details about
+packaging and publishing.
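+
+As a sketch, a publish can be rehearsed without uploading (the registry name
+is illustrative):
+
+    cargo publish --dry-run --registry my-registry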
+
+## OPTIONS
+
+### Publish Options
+
+{{#options}}
+
+{{#option "`--dry-run`" }}
+Perform all checks without uploading.
+{{/option}}
+
+{{> options-token }}
+
+{{#option "`--no-verify`" }}
+Don't verify the contents by building them.
+{{/option}}
+
+{{#option "`--allow-dirty`" }}
+Allow working directories with uncommitted VCS changes to be packaged.
+{{/option}}
+
+{{> options-index }}
+
+{{#option "`--registry` _registry_"}}
+Name of the registry to publish to. Registry names are defined in [Cargo
+config files](../reference/config.html). If not specified, and there is a
+[`package.publish`](../reference/manifest.html#the-publish-field) field in
+`Cargo.toml` with a single registry, then it will publish to that registry.
+Otherwise it will use the default registry, which is defined by the
+[`registry.default`](../reference/config.html#registrydefault) config key
+which defaults to `crates-io`.
+{{/option}}
+
+{{/options}}
+
+{{> section-options-package }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-target-dir }}
+
+{{/options}}
+
+{{> section-features }}
+
+### Manifest Options
+
+{{#options}}
+
+{{> options-manifest-path }}
+
+{{> options-locked }}
+
+{{/options}}
+
+### Miscellaneous Options
+
+{{#options}}
+{{> options-jobs }}
+{{> options-keep-going }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Publish the current package:
+
+ cargo publish
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-package" 1}}, {{man "cargo-login" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-remove.md b/src/tools/cargo/src/doc/man/cargo-remove.md
new file mode 100644
index 000000000..e38589ccb
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-remove.md
@@ -0,0 +1,94 @@
+# cargo-remove(1)
+{{*set actionverb="Remove"}}
+{{*set nouns="removes"}}
+
+## NAME
+
+cargo-remove --- Remove dependencies from a Cargo.toml manifest file
+
+## SYNOPSIS
+
+`cargo remove` [_options_] _dependency_...
+
+## DESCRIPTION
+
+Remove one or more dependencies from a `Cargo.toml` manifest.
+
+## OPTIONS
+
+### Section options
+
+{{#options}}
+
+{{#option "`--dev`" }}
+Remove as a [development dependency](../reference/specifying-dependencies.html#development-dependencies).
+{{/option}}
+
+{{#option "`--build`" }}
+Remove as a [build dependency](../reference/specifying-dependencies.html#build-dependencies).
+{{/option}}
+
+{{#option "`--target` _target_" }}
+Remove as a dependency from the [given target platform](../reference/specifying-dependencies.html#platform-specific-dependencies).
+
+To avoid unexpected shell expansions, you may use quotes around each target, e.g., `--target 'cfg(unix)'`.
+{{/option}}
+
+{{/options}}
+
+### Miscellaneous Options
+
+{{#options}}
+
+{{#option "`--dry-run`" }}
+Don't actually write to the manifest.
+{{/option}}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{> options-locked }}
+{{/options}}
+
+### Package Selection
+
+{{#options}}
+
+{{#option "`-p` _spec_..." "`--package` _spec_..." }}
+Package to remove from.
+{{/option}}
+
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Remove `regex` as a dependency:
+
+ cargo remove regex
+
+2. Remove `trybuild` as a dev-dependency:
+
+ cargo remove --dev trybuild
+
+3. Remove `nom` from the `x86_64-pc-windows-gnu` dependencies table:
+
+ cargo remove --target x86_64-pc-windows-gnu nom
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-add" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-report.md b/src/tools/cargo/src/doc/man/cargo-report.md
new file mode 100644
index 000000000..ba33617db
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-report.md
@@ -0,0 +1,42 @@
+# cargo-report(1)
+
+## NAME
+
+cargo-report --- Generate and display various kinds of reports
+
+## SYNOPSIS
+
+`cargo report` _type_ [_options_]
+
+## DESCRIPTION
+
+Displays a report of the given _type_ --- currently, only `future-incompat` is supported.
+
+## OPTIONS
+
+{{#options}}
+
+{{#option "`--id` _id_" }}
+Show the report with the specified Cargo-generated id.
+{{/option}}
+
+{{#option "`-p` _spec_..." "`--package` _spec_..." }}
+Only display a report for the specified package.
+{{/option}}
+
+{{/options}}
+
+## EXAMPLES
+
+1. Display the latest future-incompat report:
+
+ cargo report future-incompat
+
+2. Display the latest future-incompat report for a specific package:
+
+ cargo report future-incompat --package my-dep:0.0.1
+
+## SEE ALSO
+[Future incompat report](../reference/future-incompat-report.html)
+
+{{man "cargo" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-run.md b/src/tools/cargo/src/doc/man/cargo-run.md
new file mode 100644
index 000000000..034a35f23
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-run.md
@@ -0,0 +1,115 @@
+# cargo-run(1)
+{{*set actionverb="Run"}}
+
+## NAME
+
+cargo-run --- Run the current package
+
+## SYNOPSIS
+
+`cargo run` [_options_] [`--` _args_]
+
+## DESCRIPTION
+
+Run a binary or example of the local package.
+
+All the arguments following the two dashes (`--`) are passed to the binary to
+run. If you're passing arguments to both Cargo and the binary, the ones after
+`--` go to the binary, the ones before go to Cargo.
+
+Unlike {{man "cargo-test" 1}} and {{man "cargo-bench" 1}}, `cargo run` sets the
+working directory of the binary executed to the current working directory, same
+as if it was executed in the shell directly.
+
+## OPTIONS
+
+{{> section-options-package }}
+
+### Target Selection
+
+When no target selection options are given, `cargo run` will run the binary
+target. If there are multiple binary targets, you must pass a target flag to
+choose one. Or, the `default-run` field may be specified in the `[package]`
+section of `Cargo.toml` to choose the name of the binary to run by default.
+
+{{#options}}
+
+{{#option "`--bin` _name_" }}
+Run the specified binary.
+{{/option}}
+
+{{#option "`--example` _name_" }}
+Run the specified example.
+{{/option}}
+
+{{/options}}
+
+{{> section-features }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-release }}
+
+{{> options-profile }}
+
+{{> options-ignore-rust-version }}
+
+{{> options-timings }}
+
+{{/options}}
+
+### Output Options
+
+{{#options}}
+{{> options-target-dir }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+
+{{> options-display }}
+
+{{> options-message-format }}
+
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+
+{{> options-manifest-path }}
+
+{{> options-locked }}
+
+{{/options}}
+
+{{> section-options-common }}
+
+### Miscellaneous Options
+
+{{#options}}
+{{> options-jobs }}
+{{> options-keep-going }}
+{{/options}}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Build the local package and run its main target (assuming only one binary):
+
+ cargo run
+
+2. Run an example with extra arguments:
+
+ cargo run --example exname -- --exoption exarg1 exarg2
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-build" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-rustc.md b/src/tools/cargo/src/doc/man/cargo-rustc.md
new file mode 100644
index 000000000..18c0856f2
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-rustc.md
@@ -0,0 +1,145 @@
+# cargo-rustc(1)
+{{*set actionverb="Build"}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-rustc --- Compile the current package, and pass extra options to the compiler
+
+## SYNOPSIS
+
+`cargo rustc` [_options_] [`--` _args_]
+
+## DESCRIPTION
+
+The specified target for the current package (or package specified by `-p` if
+provided) will be compiled along with all of its dependencies. The specified
+_args_ will all be passed to the final compiler invocation, not any of the
+dependencies. Note that the compiler will still unconditionally receive
+arguments such as `-L`, `--extern`, and `--crate-type`, and the specified
+_args_ will simply be added to the compiler invocation.
+
+See <https://doc.rust-lang.org/rustc/index.html> for documentation on rustc
+flags.
+
+{{> description-one-target }}
+To pass flags to all compiler processes spawned by Cargo, use the `RUSTFLAGS`
+[environment variable](../reference/environment-variables.html) or the
+`build.rustflags` [config value](../reference/config.html).
+
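+For illustration only, a `.cargo/config.toml` sketch that applies an extra
+flag to every compiler invocation (the lint flag shown is just an example):
+
+```
+[build]
+rustflags = ["-D", "warnings"]
+```
+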
+## OPTIONS
+
+{{> section-options-package }}
+
+### Target Selection
+
+When no target selection options are given, `cargo rustc` will build all
+binary and library targets of the selected package.
+
+{{> options-targets-bin-auto-built }}
+
+{{> options-targets }}
+
+{{> section-features }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-release }}
+
+{{#option "`--profile` _name_" }}
+Build with the given profile.
+
+The `rustc` subcommand treats the following named profiles specially:
+
+* `check` --- Builds in the same way as the {{man "cargo-check" 1}} command with
+ the `dev` profile.
+* `test` --- Builds in the same way as the {{man "cargo-test" 1}} command,
+ enabling building in test mode which will enable tests and enable the `test`
+ cfg option. See [rustc
+ tests](https://doc.rust-lang.org/rustc/tests/index.html) for more detail.
+* `bench` --- Builds in the same way as the {{man "cargo-bench" 1}} command,
+ similar to the `test` profile.
+
+See [the reference](../reference/profiles.html) for more details on profiles.
+{{/option}}
+
+{{> options-ignore-rust-version }}
+
+{{> options-timings }}
+
+{{#option "`--crate-type` _crate-type_"}}
+Build for the given crate type. This flag accepts a comma-separated list of
+one or more crate types, whose allowed values are the same as those of the
+`crate-type` field in the manifest for configuring a Cargo target. See the
+[`crate-type` field](../reference/cargo-targets.html#the-crate-type-field)
+for possible values.
+
+If the manifest contains a list, and `--crate-type` is provided,
+the command-line argument value will override what is in the manifest.
+
+This flag only works when building a `lib` or `example` library target.
+{{/option}}
+
+{{/options}}
+
+### Output Options
+
+{{#options}}
+{{> options-target-dir }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+
+{{> options-display }}
+
+{{> options-message-format }}
+
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+
+{{> options-manifest-path }}
+
+{{> options-locked }}
+
+{{/options}}
+
+{{> section-options-common }}
+
+### Miscellaneous Options
+
+{{#options}}
+{{> options-jobs }}
+{{> options-keep-going }}
+{{> options-future-incompat }}
+{{/options}}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Check if your package (not including dependencies) uses unsafe code:
+
+ cargo rustc --lib -- -D unsafe-code
+
+2. Try an experimental flag on the nightly compiler, such as this which prints
+ the size of every type:
+
+ cargo rustc --lib -- -Z print-type-sizes
+
+3. Override `crate-type` field in Cargo.toml with command-line option:
+
+ cargo rustc --lib --crate-type lib,cdylib
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-build" 1}}, {{man "rustc" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-rustdoc.md b/src/tools/cargo/src/doc/man/cargo-rustdoc.md
new file mode 100644
index 000000000..23be579e9
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-rustdoc.md
@@ -0,0 +1,116 @@
+# cargo-rustdoc(1)
+{{*set actionverb="Document"}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-rustdoc --- Build a package's documentation, using specified custom flags
+
+## SYNOPSIS
+
+`cargo rustdoc` [_options_] [`--` _args_]
+
+## DESCRIPTION
+
+The specified target for the current package (or package specified by `-p` if
+provided) will be documented with the specified _args_ being passed to the
+final rustdoc invocation. Dependencies will not be documented as part of this
+command. Note that rustdoc will still unconditionally receive arguments such
+as `-L`, `--extern`, and `--crate-type`, and the specified _args_ will simply
+be added to the rustdoc invocation.
+
+See <https://doc.rust-lang.org/rustdoc/index.html> for documentation on rustdoc
+flags.
+
+{{> description-one-target }}
+To pass flags to all rustdoc processes spawned by Cargo, use the
+`RUSTDOCFLAGS` [environment variable](../reference/environment-variables.html)
+or the `build.rustdocflags` [config value](../reference/config.html).
+
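+For illustration only, a `.cargo/config.toml` sketch that applies an extra
+flag to every rustdoc invocation (the flag shown is just an example):
+
+```
+[build]
+rustdocflags = ["--document-private-items"]
+```
+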
+## OPTIONS
+
+### Documentation Options
+
+{{#options}}
+
+{{#option "`--open`" }}
+Open the docs in a browser after building them. This will use your default
+browser unless you define another one in the `BROWSER` environment variable
+or use the [`doc.browser`](../reference/config.html#docbrowser) configuration
+option.
+{{/option}}
+
+{{/options}}
+
+{{> section-options-package }}
+
+### Target Selection
+
+When no target selection options are given, `cargo rustdoc` will document all
+binary and library targets of the selected package. The binary will be skipped
+if its name is the same as the lib target. Binaries are skipped if they have
+`required-features` that are missing.
+
+{{> options-targets }}
+
+{{> section-features }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-release }}
+
+{{> options-profile }}
+
+{{> options-ignore-rust-version }}
+
+{{> options-timings }}
+
+{{/options}}
+
+### Output Options
+
+{{#options}}
+{{> options-target-dir }}
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+
+{{> options-message-format }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-manifest-path }}
+
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+### Miscellaneous Options
+
+{{#options}}
+{{> options-jobs }}
+{{> options-keep-going }}
+{{/options}}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Build documentation with custom CSS included from a given file:
+
+ cargo rustdoc --lib -- --extend-css extra.css
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-doc" 1}}, {{man "rustdoc" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-search.md b/src/tools/cargo/src/doc/man/cargo-search.md
new file mode 100644
index 000000000..f3d87cb12
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-search.md
@@ -0,0 +1,52 @@
+# cargo-search(1)
+
+## NAME
+
+cargo-search --- Search packages in crates.io
+
+## SYNOPSIS
+
+`cargo search` [_options_] [_query_...]
+
+## DESCRIPTION
+
+This performs a textual search for crates on <https://crates.io>. The matching
+crates will be displayed along with their description in TOML format suitable
+for copying into a `Cargo.toml` manifest.
+
+## OPTIONS
+
+### Search Options
+
+{{#options}}
+
+{{#option "`--limit` _limit_" }}
+Limit the number of results (default: 10, max: 100).
+{{/option}}
+
+{{> options-index }}
+
+{{> options-registry }}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Search for a package from crates.io:
+
+ cargo search serde
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-install" 1}}, {{man "cargo-publish" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-test.md b/src/tools/cargo/src/doc/man/cargo-test.md
new file mode 100644
index 000000000..3dce146e6
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-test.md
@@ -0,0 +1,199 @@
+# cargo-test(1)
+{{*set actionverb="Test"}}
+{{*set nouns="tests"}}
+{{*set multitarget=true}}
+
+## NAME
+
+cargo-test --- Execute unit and integration tests of a package
+
+## SYNOPSIS
+
+`cargo test` [_options_] [_testname_] [`--` _test-options_]
+
+## DESCRIPTION
+
+Compile and execute unit, integration, and documentation tests.
+
+The test filtering argument `TESTNAME` and all the arguments following the two
+dashes (`--`) are passed to the test binaries and thus to _libtest_ (rustc's
+built in unit-test and micro-benchmarking framework). If you're passing
+arguments to both Cargo and the binary, the ones after `--` go to the binary,
+the ones before go to Cargo. For details about libtest's arguments see the
+output of `cargo test -- --help` and check out the rustc book's chapter on
+how tests work at <https://doc.rust-lang.org/rustc/tests/index.html>.
+
+As an example, this will filter for tests with `foo` in their name and run them
+on 3 threads in parallel:
+
+ cargo test foo -- --test-threads 3
+
+Tests are built with the `--test` option to `rustc` which creates a special
+executable by linking your code with libtest. The executable automatically
+runs all functions annotated with the `#[test]` attribute in multiple threads.
+`#[bench]` annotated functions will also be run with one iteration to verify
+that they are functional.
+
+If the package contains multiple test targets, each target compiles to a
+special executable as described above, and the executables are then run serially.
+
+The libtest harness may be disabled by setting `harness = false` in the target
+manifest settings, in which case your code will need to provide its own `main`
+function to handle running tests.
+
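+A minimal manifest sketch for such a target (the test name is purely
+illustrative):
+
+```
+[[test]]
+name = "custom_harness"
+harness = false
+```
+
+With this setting, `tests/custom_harness.rs` would need to define its own
+`main` function.
+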
+### Documentation tests
+
+Documentation tests are also run by default, which is handled by `rustdoc`. It
+extracts code samples from documentation comments of the library target, and
+then executes them.
+
+Unlike normal test targets, each code block is compiled on the fly into its
+own doctest executable with `rustc`. These executables run in parallel in
+separate processes. The compilation of a code block is in fact part of the
+test function controlled by libtest, so some options such as `--jobs` might
+not take effect. Note that this execution model of doctests is not guaranteed
+and may change in the future; beware of depending on it.
+
+See the [rustdoc book](https://doc.rust-lang.org/rustdoc/) for more information
+on writing doc tests.
+
+### Working directory of tests
+
+The working directory of every test is set to the root directory of the package
+the test belongs to.
+Setting the working directory of tests to the package's root directory makes it
+possible for tests to reliably access the package's files using relative paths,
+regardless of where `cargo test` was executed.
+
+## OPTIONS
+
+### Test Options
+
+{{> options-test }}
+
+{{> section-package-selection }}
+
+### Target Selection
+
+When no target selection options are given, `cargo test` will build the
+following targets of the selected packages:
+
+- lib --- used to link with binaries, examples, integration tests, and doc tests
+- bins (only if integration tests are built and required features are
+ available)
+- examples --- to ensure they compile
+- lib as a unit test
+- bins as unit tests
+- integration tests
+- doc tests for the lib target
+
+The default behavior can be changed by setting the `test` flag for the target
+in the manifest settings. Setting examples to `test = true` will build and run
+the example as a test. Setting targets to `test = false` will stop them from
+being tested by default. Target selection options that take a target by name
+ignore the `test` flag and will always test the given target.
+
+Doc tests for libraries may be disabled by setting `doctest = false` for the
+library in the manifest.
+
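+For illustration, a manifest sketch that opts an example into testing and
+disables doc tests for the library (the example name is illustrative):
+
+```
+[lib]
+doctest = false
+
+[[example]]
+name = "demo"
+test = true
+```
+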
+{{> options-targets-bin-auto-built }}
+
+{{> options-targets }}
+
+{{#options}}
+
+{{#option "`--doc`" }}
+Test only the library's documentation. This cannot be mixed with other
+target options.
+{{/option}}
+
+{{/options}}
+
+{{> section-features }}
+
+### Compilation Options
+
+{{#options}}
+
+{{> options-target-triple }}
+
+{{> options-release }}
+
+{{> options-profile }}
+
+{{> options-ignore-rust-version }}
+
+{{> options-timings }}
+
+{{/options}}
+
+### Output Options
+
+{{#options}}
+{{> options-target-dir }}
+{{/options}}
+
+### Display Options
+
+By default the Rust test harness hides output from test execution to keep
+results readable. Test output can be recovered (e.g., for debugging) by passing
+`--nocapture` to the test binaries:
+
+ cargo test -- --nocapture
+
+{{#options}}
+
+{{> options-display }}
+
+{{> options-message-format }}
+
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+
+{{> options-manifest-path }}
+
+{{> options-locked }}
+
+{{/options}}
+
+{{> section-options-common }}
+
+### Miscellaneous Options
+
+The `--jobs` argument affects the building of the test executable but does not
+affect how many threads are used when running the tests. The Rust test harness
+includes an option to control the number of threads used:
+
+ cargo test -j 2 -- --test-threads=2
+
+{{#options}}
+
+{{> options-jobs }}
+{{> options-keep-going }}
+{{> options-future-incompat }}
+
+{{/options}}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Execute all the unit and integration tests of the current package:
+
+ cargo test
+
+2. Run only tests whose names match against a filter string:
+
+ cargo test name_filter
+
+3. Run only a specific test within a specific integration test:
+
+ cargo test --test int_test_name -- modname::test_name
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-bench" 1}}, [types of tests](../reference/cargo-targets.html#tests), [how to write tests](https://doc.rust-lang.org/rustc/tests/index.html)
diff --git a/src/tools/cargo/src/doc/man/cargo-tree.md b/src/tools/cargo/src/doc/man/cargo-tree.md
new file mode 100644
index 000000000..3e1da20df
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-tree.md
@@ -0,0 +1,268 @@
+# cargo-tree(1)
+{{*set actionverb="Display"}}
+{{*set noall=true}}
+
+## NAME
+
+cargo-tree --- Display a tree visualization of a dependency graph
+
+## SYNOPSIS
+
+`cargo tree` [_options_]
+
+## DESCRIPTION
+
+This command will display a tree of dependencies to the terminal. An example
+of a simple project that depends on the "rand" package:
+
+```
+myproject v0.1.0 (/myproject)
+└── rand v0.7.3
+ ├── getrandom v0.1.14
+ │ ├── cfg-if v0.1.10
+ │ └── libc v0.2.68
+ ├── libc v0.2.68 (*)
+ ├── rand_chacha v0.2.2
+ │ ├── ppv-lite86 v0.2.6
+ │ └── rand_core v0.5.1
+ │ └── getrandom v0.1.14 (*)
+ └── rand_core v0.5.1 (*)
+[build-dependencies]
+└── cc v1.0.50
+```
+
+Packages marked with `(*)` have been "de-duplicated". The dependencies for the
+package have already been shown elsewhere in the graph, and so are not
+repeated. Use the `--no-dedupe` option to repeat the duplicates.
+
+The `-e` flag can be used to select the dependency kinds to display. The
+"features" kind changes the output to display the features enabled by
+each dependency. For example, `cargo tree -e features`:
+
+```
+myproject v0.1.0 (/myproject)
+└── log feature "serde"
+ └── log v0.4.8
+ ├── serde v1.0.106
+ └── cfg-if feature "default"
+ └── cfg-if v0.1.10
+```
+
+In this tree, `myproject` depends on `log` with the `serde` feature. `log` in
+turn depends on `cfg-if` with "default" features. When using `-e features` it
+can be helpful to use the `-i` flag to show how the features flow into a
+package. See the examples below for more detail.
+
+### Feature Unification
+
+This command shows a graph much closer to the feature-unified graph Cargo will
+build, rather than what you list in `Cargo.toml`. For instance, if you specify
+the same dependency in both `[dependencies]` and `[dev-dependencies]` but with
+different features enabled, this command may merge all features and show a
+`(*)` on one of the dependencies to indicate the duplicate.
+
+As a result, for a mostly equivalent overview of what `cargo build` does,
+`cargo tree -e normal,build` is pretty close; for a mostly equivalent overview
+of what `cargo test` does, `cargo tree` is pretty close. However, it doesn't
+guarantee exact equivalence to what Cargo is going to build, since
+compilation is complex and depends on many different factors.
+
+To learn more about feature unification, check out this
+[dedicated section](../reference/features.html#feature-unification).
+
+## OPTIONS
+
+### Tree Options
+
+{{#options}}
+
+{{#option "`-i` _spec_" "`--invert` _spec_" }}
+Show the reverse dependencies for the given package. This flag will invert
+the tree and display the packages that depend on the given package.
+
+Note that in a workspace, by default it will only display the package's
+reverse dependencies inside the tree of the workspace member in the current
+directory. The `--workspace` flag can be used to extend it so that it will
+show the package's reverse dependencies across the entire workspace. The `-p`
+flag can be used to display the package's reverse dependencies only within the
+subtree of the package given to `-p`.
+{{/option}}
+
+{{#option "`--prune` _spec_" }}
+Prune the given package from the display of the dependency tree.
+{{/option}}
+
+{{#option "`--depth` _depth_" }}
+Maximum display depth of the dependency tree. A depth of 1 displays the direct
+dependencies, for example.
+{{/option}}
+
+{{#option "`--no-dedupe`" }}
+Do not de-duplicate repeated dependencies. Usually, when a package has already
+displayed its dependencies, further occurrences will not re-display its
+dependencies, and will include a `(*)` to indicate it has already been shown.
+This flag will cause those duplicates to be repeated.
+{{/option}}
+
+{{#option "`-d`" "`--duplicates`" }}
+Show only dependencies which come in multiple versions (implies `--invert`).
+When used with the `-p` flag, only shows duplicates within the subtree of the
+given package.
+
+It can be beneficial for build times and executable sizes to avoid building
+the same package multiple times. This flag can help identify the offending
+packages. You can then investigate if the package that depends on the
+duplicate with the older version can be updated to the newer version so that
+only one instance is built.
+{{/option}}
+
+{{#option "`-e` _kinds_" "`--edges` _kinds_" }}
+The dependency kinds to display. Takes a comma separated list of values:
+
+- `all` --- Show all edge kinds.
+- `normal` --- Show normal dependencies.
+- `build` --- Show build dependencies.
+- `dev` --- Show development dependencies.
+- `features` --- Show features enabled by each dependency. If this is the only
+ kind given, then it will automatically include the other dependency kinds.
+- `no-normal` --- Do not include normal dependencies.
+- `no-build` --- Do not include build dependencies.
+- `no-dev` --- Do not include development dependencies.
+- `no-proc-macro` --- Do not include procedural macro dependencies.
+
+The `normal`, `build`, `dev`, and `all` dependency kinds cannot be mixed with
+`no-normal`, `no-build`, or `no-dev` dependency kinds.
+
+The default is `normal,build,dev`.
+{{/option}}
+
+{{#option "`--target` _triple_" }}
+Filter dependencies matching the given [target triple](../appendix/glossary.html#target).
+The default is the host platform. Use the value `all` to include *all* targets.
+{{/option}}
+
+{{/options}}
+
+### Tree Formatting Options
+
+{{#options}}
+
+{{#option "`--charset` _charset_" }}
+Chooses the character set to use for the tree. Valid values are "utf8" or
+"ascii". Default is "utf8".
+{{/option}}
+
+{{#option "`-f` _format_" "`--format` _format_" }}
+Set the format string for each package. The default is "{p}".
+
+This is an arbitrary string which will be used to display each package. The following
+strings will be replaced with the corresponding value:
+
+- `{p}` --- The package name.
+- `{l}` --- The package license.
+- `{r}` --- The package repository URL.
+- `{f}` --- Comma-separated list of package features that are enabled.
+- `{lib}` --- The name, as used in a `use` statement, of the package's library.
+{{/option}}
+
+{{#option "`--prefix` _prefix_" }}
+Sets how each line is displayed. The _prefix_ value can be one of:
+
+- `indent` (default) --- Shows each line indented as a tree.
+- `depth` --- Show as a list, with the numeric depth printed before each entry.
+- `none` --- Show as a flat list.
+{{/option}}
+
+{{/options}}
+
+{{> section-package-selection }}
+
+### Manifest Options
+
+{{#options}}
+
+{{> options-manifest-path }}
+
+{{> options-locked }}
+
+{{/options}}
+
+{{> section-features }}
+
+### Display Options
+
+{{#options}}
+
+{{> options-display }}
+
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Display the tree for the package in the current directory:
+
+ cargo tree
+
+2. Display all the packages that depend on the `syn` package:
+
+ cargo tree -i syn
+
+3. Show the features enabled on each package:
+
+ cargo tree --format "{p} {f}"
+
+4. Show all packages that are built multiple times. This can happen if multiple
+ semver-incompatible versions appear in the tree (like 1.0.0 and 2.0.0).
+
+ cargo tree -d
+
+5. Explain why features are enabled for the `syn` package:
+
+ cargo tree -e features -i syn
+
+ The `-e features` flag is used to show features. The `-i` flag is used to
+ invert the graph so that it displays the packages that depend on `syn`. An
+ example of what this would display:
+
+ ```
+ syn v1.0.17
+ ├── syn feature "clone-impls"
+ │ └── syn feature "default"
+ │ └── rustversion v1.0.2
+ │ └── rustversion feature "default"
+ │ └── myproject v0.1.0 (/myproject)
+ │ └── myproject feature "default" (command-line)
+ ├── syn feature "default" (*)
+ ├── syn feature "derive"
+ │ └── syn feature "default" (*)
+ ├── syn feature "full"
+ │ └── rustversion v1.0.2 (*)
+ ├── syn feature "parsing"
+ │ └── syn feature "default" (*)
+ ├── syn feature "printing"
+ │ └── syn feature "default" (*)
+ ├── syn feature "proc-macro"
+ │ └── syn feature "default" (*)
+ └── syn feature "quote"
+ ├── syn feature "printing" (*)
+ └── syn feature "proc-macro" (*)
+ ```
+
+ To read this graph, you can follow the chain for each feature from the root
+ to see why it is included. For example, the "full" feature is added by the
+ `rustversion` crate which is included from `myproject` (with the default
+ features), and `myproject` is the package selected on the command-line. All
+ of the other `syn` features are added by the "default" feature ("quote" is
+ added by "printing" and "proc-macro", both of which are default features).
+
+ If you're having difficulty cross-referencing the de-duplicated `(*)`
+ entries, try with the `--no-dedupe` flag to get the full output.
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-metadata" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-uninstall.md b/src/tools/cargo/src/doc/man/cargo-uninstall.md
new file mode 100644
index 000000000..b2ebd097f
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-uninstall.md
@@ -0,0 +1,63 @@
+# cargo-uninstall(1)
+
+## NAME
+
+cargo-uninstall --- Remove a Rust binary
+
+## SYNOPSIS
+
+`cargo uninstall` [_options_] [_spec_...]
+
+## DESCRIPTION
+
+This command removes a package installed with {{man "cargo-install" 1}}. The _spec_
+argument is a package ID specification of the package to remove (see
+{{man "cargo-pkgid" 1}}).
+
+By default all binaries are removed for a crate, but the `--bin` and
+`--example` flags can be used to only remove particular binaries.
+
+{{> description-install-root }}
+
+## OPTIONS
+
+### Install Options
+
+{{#options}}
+
+{{#option "`-p`" "`--package` _spec_..." }}
+Package to uninstall.
+{{/option}}
+
+{{#option "`--bin` _name_..." }}
+Only uninstall the binary _name_.
+{{/option}}
+
+{{#option "`--root` _dir_" }}
+Directory to uninstall packages from.
+{{/option}}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+
+{{> options-display }}
+
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Uninstall a previously installed package.
+
+ cargo uninstall ripgrep
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-install" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-update.md b/src/tools/cargo/src/doc/man/cargo-update.md
new file mode 100644
index 000000000..e91606a6a
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-update.md
@@ -0,0 +1,97 @@
+# cargo-update(1)
+
+## NAME
+
+cargo-update --- Update dependencies as recorded in the local lock file
+
+## SYNOPSIS
+
+`cargo update` [_options_]
+
+## DESCRIPTION
+
+This command will update dependencies in the `Cargo.lock` file to the latest
+version. If the `Cargo.lock` file does not exist, it will be created with the
+latest available versions.
+
+## OPTIONS
+
+### Update Options
+
+{{#options}}
+
+{{#option "`-p` _spec_..." "`--package` _spec_..." }}
+Update only the specified packages. This flag may be specified
+multiple times. See {{man "cargo-pkgid" 1}} for the SPEC format.
+
+If packages are specified with the `-p` flag, then a conservative update of
+the lockfile will be performed. This means that only the dependency specified
+by SPEC will be updated. Its transitive dependencies will be updated only if
+SPEC cannot be updated without updating dependencies. All other dependencies
+will remain locked at their currently recorded versions.
+
+If `-p` is not specified, all dependencies are updated.
+{{/option}}
+
+{{#option "`--aggressive`" }}
+When used with `-p`, dependencies of _spec_ are forced to update as well.
+Cannot be used with `--precise`.
+{{/option}}
+
+{{#option "`--precise` _precise_" }}
+When used with `-p`, allows you to specify a specific version number to set
+the package to. If the package comes from a git repository, this can be a git
+revision (such as a SHA hash or tag).
+{{/option}}
+
+{{#option "`-w`" "`--workspace`" }}
+Attempt to update only packages defined in the workspace. Other packages
+are updated only if they don't already exist in the lockfile. This
+option is useful for updating `Cargo.lock` after you've changed version
+numbers in `Cargo.toml`.
+{{/option}}
+
+{{#option "`--dry-run`" }}
+Displays what would be updated, but doesn't actually write the lockfile.
+{{/option}}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+{{> options-display }}
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+
+{{> options-manifest-path }}
+
+{{> options-locked }}
+
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Update all dependencies in the lockfile:
+
+ cargo update
+
+2. Update only specific dependencies:
+
+ cargo update -p foo -p bar
+
+3. Set a specific dependency to a specific version:
+
+ cargo update -p foo --precise 1.2.3
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-generate-lockfile" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-vendor.md b/src/tools/cargo/src/doc/man/cargo-vendor.md
new file mode 100644
index 000000000..b30d0d8dd
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-vendor.md
@@ -0,0 +1,93 @@
+# cargo-vendor(1)
+
+## NAME
+
+cargo-vendor --- Vendor all dependencies locally
+
+## SYNOPSIS
+
+`cargo vendor` [_options_] [_path_]
+
+## DESCRIPTION
+
+This cargo subcommand will vendor all crates.io and git dependencies for a
+project into the specified directory at `<path>`. After this command completes,
+the vendor directory specified by `<path>` will contain all remote sources from
+the specified dependencies. Additional manifests beyond the default one can be
+specified with the `-s` option.
+
+The `cargo vendor` command will also print out the configuration necessary
+to use the vendored sources, which you will need to add to `.cargo/config.toml`.
+
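+The printed configuration typically resembles the following sketch (the exact
+entries depend on your sources and the chosen _path_):
+
+```
+[source.crates-io]
+replace-with = "vendored-sources"
+
+[source.vendored-sources]
+directory = "vendor"
+```
+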
+## OPTIONS
+
+### Vendor Options
+
+{{#options}}
+
+{{#option "`-s` _manifest_" "`--sync` _manifest_" }}
+Specify an extra `Cargo.toml` manifest for a workspace that should also be
+vendored and synced to the output. May be specified multiple times.
+{{/option}}
+
+{{#option "`--no-delete`" }}
+Don't delete the "vendor" directory when vendoring, but rather keep all
+existing contents of the vendor directory.
+{{/option}}
+
+{{#option "`--respect-source-config`" }}
+Instead of ignoring `[source]` configuration by default in `.cargo/config.toml`,
+read it and use it when downloading crates from crates.io, for example.
+{{/option}}
+
+{{#option "`--versioned-dirs`" }}
+Normally versions are only added to disambiguate multiple versions of the
+same package. This option causes all directories in the "vendor" directory
+to be versioned, which makes it easier to track the history of vendored
+packages over time, and can help with the performance of re-vendoring when
+only a subset of the packages have changed.
+{{/option}}
+
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+
+{{> options-manifest-path }}
+
+{{> options-locked }}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+
+{{> options-display }}
+
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Vendor all dependencies into a local "vendor" folder
+
+ cargo vendor
+
+2. Vendor all dependencies into a local "third-party/vendor" folder
+
+ cargo vendor third-party/vendor
+
+3. Vendor the current workspace as well as another to "vendor"
+
+ cargo vendor -s ../path/to/Cargo.toml
+
+## SEE ALSO
+{{man "cargo" 1}}
+
diff --git a/src/tools/cargo/src/doc/man/cargo-verify-project.md b/src/tools/cargo/src/doc/man/cargo-verify-project.md
new file mode 100644
index 000000000..8b334fb14
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-verify-project.md
@@ -0,0 +1,58 @@
+# cargo-verify-project(1)
+
+## NAME
+
+cargo-verify-project --- Check correctness of crate manifest
+
+## SYNOPSIS
+
+`cargo verify-project` [_options_]
+
+## DESCRIPTION
+
+This command will parse the local manifest and check its validity. It emits a
+JSON object with the result. A successful validation will display:
+
+ {"success":"true"}
+
+An invalid workspace will display:
+
+ {"invalid":"human-readable error message"}
+
+## OPTIONS
+
+### Display Options
+
+{{#options}}
+
+{{> options-display }}
+
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+
+{{> options-manifest-path }}
+
+{{> options-locked }}
+
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+## EXIT STATUS
+
+* `0`: The workspace is OK.
+* `1`: The workspace is invalid.
+
+## EXAMPLES
+
+1. Check the current workspace for errors:
+
+ cargo verify-project
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-package" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-version.md b/src/tools/cargo/src/doc/man/cargo-version.md
new file mode 100644
index 000000000..9bbadc9ea
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-version.md
@@ -0,0 +1,41 @@
+# cargo-version(1)
+
+## NAME
+
+cargo-version --- Show version information
+
+## SYNOPSIS
+
+`cargo version` [_options_]
+
+## DESCRIPTION
+
+Displays the version of Cargo.
+
+## OPTIONS
+
+{{#options}}
+
+{{#option "`-v`" "`--verbose`" }}
+Display additional version information.
+{{/option}}
+
+{{/options}}
+
+## EXAMPLES
+
+1. Display the version:
+
+ cargo version
+
+2. The version is also available via flags:
+
+ cargo --version
+ cargo -V
+
+3. Display extra version information:
+
+ cargo -Vv
+
+## SEE ALSO
+{{man "cargo" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo-yank.md b/src/tools/cargo/src/doc/man/cargo-yank.md
new file mode 100644
index 000000000..8ad28ef24
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo-yank.md
@@ -0,0 +1,71 @@
+# cargo-yank(1)
+
+## NAME
+
+cargo-yank --- Remove a pushed crate from the index
+
+## SYNOPSIS
+
+`cargo yank` [_options_] _crate_@_version_\
+`cargo yank` [_options_] `--version` _version_ [_crate_]
+
+## DESCRIPTION
+
+The yank command removes a previously published crate's version from the
+server's index. This command does not delete any data, and the crate will
+still be available for download via the registry's download link.
+
+Note that existing crates locked to a yanked version will still be able to
+download the yanked version to use it. Cargo will, however, not allow any new
+crates to be locked to any yanked version.
+
+This command requires you to be authenticated with either the `--token` option
+or using {{man "cargo-login" 1}}.
+
+If the crate name is not specified, it will use the package name from the
+current directory.
+
+## OPTIONS
+
+### Yank Options
+
+{{#options}}
+
+{{#option "`--vers` _version_" "`--version` _version_" }}
+The version to yank or un-yank.
+{{/option}}
+
+{{#option "`--undo`" }}
+Undo a yank, putting a version back into the index.
+{{/option}}
+
+{{> options-token }}
+
+{{> options-index }}
+
+{{> options-registry }}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+
+{{> options-display }}
+
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## EXAMPLES
+
+1. Yank a crate from the index:
+
+ cargo yank foo@1.0.7
+
+## SEE ALSO
+{{man "cargo" 1}}, {{man "cargo-login" 1}}, {{man "cargo-publish" 1}}
diff --git a/src/tools/cargo/src/doc/man/cargo.md b/src/tools/cargo/src/doc/man/cargo.md
new file mode 100644
index 000000000..3b1c62e32
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/cargo.md
@@ -0,0 +1,238 @@
+# cargo(1)
+
+## NAME
+
+cargo --- The Rust package manager
+
+## SYNOPSIS
+
+`cargo` [_options_] _command_ [_args_]\
+`cargo` [_options_] `--version`\
+`cargo` [_options_] `--list`\
+`cargo` [_options_] `--help`\
+`cargo` [_options_] `--explain` _code_
+
+## DESCRIPTION
+
+This program is a package manager and build tool for the Rust language,
+available at <https://rust-lang.org>.
+
+## COMMANDS
+
+### Build Commands
+
+{{man "cargo-bench" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Execute benchmarks of a package.
+
+{{man "cargo-build" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Compile a package.
+
+{{man "cargo-check" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Check a local package and all of its dependencies for errors.
+
+{{man "cargo-clean" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Remove artifacts that Cargo has generated in the past.
+
+{{man "cargo-doc" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Build a package's documentation.
+
+{{man "cargo-fetch" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Fetch dependencies of a package from the network.
+
+{{man "cargo-fix" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Automatically fix lint warnings reported by rustc.
+
+{{man "cargo-run" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Run a binary or example of the local package.
+
+{{man "cargo-rustc" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Compile a package, and pass extra options to the compiler.
+
+{{man "cargo-rustdoc" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Build a package's documentation, using specified custom flags.
+
+{{man "cargo-test" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Execute unit and integration tests of a package.
+
+### Manifest Commands
+
+{{man "cargo-generate-lockfile" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Generate `Cargo.lock` for a project.
+
+{{man "cargo-locate-project" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Print a JSON representation of a `Cargo.toml` file's location.
+
+{{man "cargo-metadata" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Output the resolved dependencies of a package in machine-readable format.
+
+{{man "cargo-pkgid" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Print a fully qualified package specification.
+
+{{man "cargo-tree" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Display a tree visualization of a dependency graph.
+
+{{man "cargo-update" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Update dependencies as recorded in the local lock file.
+
+{{man "cargo-vendor" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Vendor all dependencies locally.
+
+{{man "cargo-verify-project" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Check correctness of crate manifest.
+
+### Package Commands
+
+{{man "cargo-init" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Create a new Cargo package in an existing directory.
+
+{{man "cargo-install" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Build and install a Rust binary.
+
+{{man "cargo-new" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Create a new Cargo package.
+
+{{man "cargo-search" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Search packages in crates.io.
+
+{{man "cargo-uninstall" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Remove a Rust binary.
+
+### Publishing Commands
+
+{{man "cargo-login" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Save an API token from the registry locally.
+
+{{man "cargo-logout" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Remove an API token from the registry locally.
+
+{{man "cargo-owner" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Manage the owners of a crate on the registry.
+
+{{man "cargo-package" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Assemble the local package into a distributable tarball.
+
+{{man "cargo-publish" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Upload a package to the registry.
+
+{{man "cargo-yank" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Remove a pushed crate from the index.
+
+### General Commands
+
+{{man "cargo-help" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Display help information about Cargo.
+
+{{man "cargo-version" 1}}\
+&nbsp;&nbsp;&nbsp;&nbsp;Show version information.
+
+## OPTIONS
+
+### Special Options
+
+{{#options}}
+
+{{#option "`-V`" "`--version`" }}
+Print version info and exit. If used with `--verbose`, prints extra
+information.
+{{/option}}
+
+{{#option "`--list`" }}
+List all installed Cargo subcommands. If used with `--verbose`, prints extra
+information.
+{{/option}}
+
+{{#option "`--explain` _code_" }}
+Run `rustc --explain CODE` which will print out a detailed explanation of an
+error message (for example, `E0004`).
+{{/option}}
+
+{{/options}}
+
+### Display Options
+
+{{#options}}
+
+{{> options-display }}
+
+{{/options}}
+
+### Manifest Options
+
+{{#options}}
+{{> options-locked }}
+{{/options}}
+
+{{> section-options-common }}
+
+{{> section-environment }}
+
+{{> section-exit-status }}
+
+## FILES
+
+`~/.cargo/`\
+&nbsp;&nbsp;&nbsp;&nbsp;Default location for Cargo's "home" directory where it
+stores various files. The location can be changed with the `CARGO_HOME`
+environment variable.
+
+`$CARGO_HOME/bin/`\
+&nbsp;&nbsp;&nbsp;&nbsp;Binaries installed by {{man "cargo-install" 1}} will be located here. If using
+[rustup], executables distributed with Rust are also located here.
+
+`$CARGO_HOME/config.toml`\
+&nbsp;&nbsp;&nbsp;&nbsp;The global configuration file. See [the reference](../reference/config.html)
+for more information about configuration files.
+
+`.cargo/config.toml`\
+&nbsp;&nbsp;&nbsp;&nbsp;Cargo automatically searches for a file named `.cargo/config.toml` in the
+current directory, and all parent directories. These configuration files
+will be merged with the global configuration file.
+
+`$CARGO_HOME/credentials.toml`\
+&nbsp;&nbsp;&nbsp;&nbsp;Private authentication information for logging in to a registry.
+
+`$CARGO_HOME/registry/`\
+&nbsp;&nbsp;&nbsp;&nbsp;This directory contains cached downloads of the registry index and any
+downloaded dependencies.
+
+`$CARGO_HOME/git/`\
+&nbsp;&nbsp;&nbsp;&nbsp;This directory contains cached downloads of git dependencies.
+
+Please note that the internal structure of the `$CARGO_HOME` directory is not
+stable yet and may be subject to change.
+
+[rustup]: https://rust-lang.github.io/rustup/
+
+## EXAMPLES
+
+1. Build a local package and all of its dependencies:
+
+ cargo build
+
+2. Build a package with optimizations:
+
+ cargo build --release
+
+3. Run tests for a cross-compiled target:
+
+ cargo test --target i686-unknown-linux-gnu
+
+4. Create a new package that builds an executable:
+
+ cargo new foobar
+
+5. Create a package in the current directory:
+
+ mkdir foo && cd foo
+ cargo init .
+
+6. Learn about a command's options and usage:
+
+ cargo help clean
+
+## BUGS
+
+See <https://github.com/rust-lang/cargo/issues> for issues.
+
+## SEE ALSO
+{{man "rustc" 1}}, {{man "rustdoc" 1}}
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-add.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-add.txt
new file mode 100644
index 000000000..ac332a44e
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-add.txt
@@ -0,0 +1,243 @@
+CARGO-ADD(1)
+
+NAME
+ cargo-add — Add dependencies to a Cargo.toml manifest file
+
+SYNOPSIS
+ cargo add [options] crate…
+ cargo add [options] --path path
+ cargo add [options] --git url [crate…]
+
+DESCRIPTION
+ This command can add or modify dependencies.
+
+ The source for the dependency can be specified with:
+
+ o crate@version: Fetch from a registry with a version constraint of
+ “version”
+
+ o --path path: Fetch from the specified path
+
+ o --git url: Pull from a git repo at url
+
+ If no source is specified, then a best effort will be made to select
+ one, including:
+
+ o Existing dependencies in other tables (like dev-dependencies)
+
+ o Workspace members
+
+ o Latest release in the registry
+
+ When you add a package that is already present, the existing entry will
+ be updated with the flags specified.
+
+ Upon successful invocation, the enabled (+) and disabled (-) features
+ <https://doc.rust-lang.org/cargo/reference/features.md> of the specified
+ dependency will be listed in the command’s output.
+
+OPTIONS
+ Source options
+ --git url
+ Git URL to add the specified crate from
+ <https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#specifying-dependencies-from-git-repositories>.
+
+ --branch branch
+ Branch to use when adding from git.
+
+ --tag tag
+ Tag to use when adding from git.
+
+ --rev sha
+ Specific commit to use when adding from git.
+
+ --path path
+ Filesystem path
+ <https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#specifying-path-dependencies>
+ to local crate to add.
+
+ --registry registry
+ Name of the registry to use. Registry names are defined in Cargo
+ config files
+ <https://doc.rust-lang.org/cargo/reference/config.html>. If not
+ specified, the default registry is used, which is defined by the
+ registry.default config key which defaults to crates-io.
+
+ Section options
+ --dev
+ Add as a development dependency
+ <https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#development-dependencies>.
+
+ --build
+ Add as a build dependency
+ <https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#build-dependencies>.
+
+ --target target
+ Add as a dependency to the given target platform
+ <https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#platform-specific-dependencies>.
+
+ To avoid unexpected shell expansions, you may use quotes around each
+ target, e.g., --target 'cfg(unix)'.
+
+ Dependency options
+ --dry-run
+ Don’t actually write the manifest
+
+ --rename name
+ Rename
+ <https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml>
+ the dependency.
+
+ --optional
+ Mark the dependency as optional
+ <https://doc.rust-lang.org/cargo/reference/features.html#optional-dependencies>.
+
+ --no-optional
+ Mark the dependency as required
+ <https://doc.rust-lang.org/cargo/reference/features.html#optional-dependencies>.
+
+ --no-default-features
+ Disable the default features
+ <https://doc.rust-lang.org/cargo/reference/features.html#dependency-features>.
+
+ --default-features
+ Re-enable the default features
+ <https://doc.rust-lang.org/cargo/reference/features.html#dependency-features>.
+
+ -F features, --features features
+ Space or comma separated list of features to activate
+ <https://doc.rust-lang.org/cargo/reference/features.html#dependency-features>.
+ When adding multiple crates, the features for a specific crate may
+ be enabled with package-name/feature-name syntax. This flag may be
+ specified multiple times, which enables all specified features.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ -p spec, --package spec
+ Add dependencies to only the specified package.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Add regex as a dependency
+
+ cargo add regex
+
+ 2. Add trybuild as a dev-dependency
+
+ cargo add --dev trybuild
+
+ 3. Add an older version of nom as a dependency
+
+ cargo add nom@5
+
+ 4. Add support for serializing data structures to json with derives
+
+ cargo add serde serde_json -F serde/derive
+
+ 5. Add windows as a platform specific dependency on cfg(windows)
+
+ cargo add windows --target 'cfg(windows)'
+
+SEE ALSO
+ cargo(1), cargo-remove(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-bench.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-bench.txt
new file mode 100644
index 000000000..1ca72d577
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-bench.txt
@@ -0,0 +1,434 @@
+CARGO-BENCH(1)
+
+NAME
+ cargo-bench — Execute benchmarks of a package
+
+SYNOPSIS
+ cargo bench [options] [benchname] [-- bench-options]
+
+DESCRIPTION
+ Compile and execute benchmarks.
+
+ The benchmark filtering argument benchname and all the arguments
+ following the two dashes (--) are passed to the benchmark binaries and
+ thus to libtest (rustc’s built in unit-test and micro-benchmarking
+ framework). If you are passing arguments to both Cargo and the binary,
+ the ones after -- go to the binary, the ones before go to Cargo. For
+ details about libtest’s arguments see the output of cargo bench --
+ --help and check out the rustc book’s chapter on how tests work at
+ <https://doc.rust-lang.org/rustc/tests/index.html>.
+
+ As an example, this will run only the benchmark named foo (and skip
+ other similarly named benchmarks like foobar):
+
+ cargo bench -- foo --exact
+
+ Benchmarks are built with the --test option to rustc which creates a
+ special executable by linking your code with libtest. The executable
+ automatically runs all functions annotated with the #[bench] attribute.
+ Cargo passes the --bench flag to the test harness to tell it to run only
+ benchmarks.
+
+ The libtest harness may be disabled by setting harness = false in the
+ target manifest settings, in which case your code will need to provide
+ its own main function to handle running benchmarks.
+
+ Note: The #[bench] attribute
+ <https://doc.rust-lang.org/nightly/unstable-book/library-features/test.html>
+ is currently unstable and only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html>. There
+ are some packages available on crates.io
+ <https://crates.io/keywords/benchmark> that may help with running
+ benchmarks on the stable channel, such as Criterion
+ <https://crates.io/crates/criterion>.
+
+ By default, cargo bench uses the bench profile
+ <https://doc.rust-lang.org/cargo/reference/profiles.html#bench>, which
+ enables optimizations and disables debugging information. If you need to
+ debug a benchmark, you can use the --profile=dev command-line option to
+ switch to the dev profile. You can then run the debug-enabled benchmark
+ within a debugger.
+
+ Working directory of benchmarks
+ The working directory of every benchmark is set to the root directory of
+ the package the benchmark belongs to. Setting the working directory of
+ benchmarks to the package’s root directory makes it possible for
+ benchmarks to reliably access the package’s files using relative
+ paths, regardless of where cargo bench was executed.
+
+OPTIONS
+ Benchmark Options
+ --no-run
+ Compile, but don’t run benchmarks.
+
+ --no-fail-fast
+ Run all benchmarks regardless of failure. Without this flag, Cargo
+ will exit after the first executable fails. The Rust test harness
+ will run all benchmarks within the executable to completion; this
+ flag only applies to the executable as a whole.
+
+ Package Selection
+ By default, when no package selection options are given, the packages
+ selected depend on the selected manifest file (based on the current
+ working directory if --manifest-path is not given). If the manifest is
+ the root of a workspace then the workspace’s default members are
+ selected, otherwise only the package defined by the manifest will be
+ selected.
+
+ The default members of a workspace can be set explicitly with the
+ workspace.default-members key in the root manifest. If this is not set,
+ a virtual workspace will include all workspace members (equivalent to
+ passing --workspace), and a non-virtual workspace will include only the
+ root crate itself.
+
+ -p spec…, --package spec…
+ Benchmark only the specified packages. See cargo-pkgid(1) for the
+ SPEC format. This flag may be specified multiple times and supports
+ common Unix glob patterns like *, ? and []. However, to avoid your
+ shell accidentally expanding glob patterns before Cargo handles
+ them, you must use single quotes or double quotes around each
+ pattern.
+
+ --workspace
+ Benchmark all members in the workspace.
+
+ --all
+ Deprecated alias for --workspace.
+
+ --exclude SPEC…
+ Exclude the specified packages. Must be used in conjunction with the
+ --workspace flag. This flag may be specified multiple times and
+ supports common Unix glob patterns like *, ? and []. However, to
+ avoid your shell accidentally expanding glob patterns before Cargo
+ handles them, you must use single quotes or double quotes around
+ each pattern.
+
+ Target Selection
+ When no target selection options are given, cargo bench will build the
+ following targets of the selected packages:
+
+ o lib — used to link with binaries and benchmarks
+
+ o bins (only if benchmark targets are built and required features are
+ available)
+
+ o lib as a benchmark
+
+ o bins as benchmarks
+
+ o benchmark targets
+
+ The default behavior can be changed by setting the bench flag for the
+ target in the manifest settings. Setting examples to bench = true will
+ build and run the example as a benchmark. Setting targets to bench =
+ false will stop them from being benchmarked by default. Target selection
+ options that take a target by name ignore the bench flag and will always
+ benchmark the given target.
+
+ Binary targets are automatically built if there is an integration test
+ or benchmark being selected to benchmark. This allows an integration
+ test to execute the binary to exercise and test its behavior. The
+ CARGO_BIN_EXE_<name> environment variable
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
+ is set when the integration test is built so that it can use the env
+ macro <https://doc.rust-lang.org/std/macro.env.html> to locate the
+ executable.
+
+ Passing target selection flags will benchmark only the specified
+ targets.
+
+ Note that --bin, --example, --test and --bench flags also support common
+ Unix glob patterns like *, ? and []. However, to avoid your shell
+ accidentally expanding glob patterns before Cargo handles them, you must
+ use single quotes or double quotes around each glob pattern.
+
+ --lib
+ Benchmark the package’s library.
+
+ --bin name…
+ Benchmark the specified binary. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --bins
+ Benchmark all binary targets.
+
+ --example name…
+ Benchmark the specified example. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --examples
+ Benchmark all example targets.
+
+ --test name…
+ Benchmark the specified integration test. This flag may be specified
+ multiple times and supports common Unix glob patterns.
+
+ --tests
+ Benchmark all targets in test mode that have the test = true
+ manifest flag set. By default this includes the library and binaries
+ built as unittests, and integration tests. Be aware that this will
+ also build any required dependencies, so the lib target may be built
+ twice (once as a unittest, and once as a dependency for binaries,
+ integration tests, etc.). Targets may be enabled or disabled by
+ setting the test flag in the manifest settings for the target.
+
+ --bench name…
+ Benchmark the specified benchmark. This flag may be specified
+ multiple times and supports common Unix glob patterns.
+
+ --benches
+ Benchmark all targets in benchmark mode that have the bench = true
+ manifest flag set. By default this includes the library and binaries
+ built as benchmarks, and bench targets. Be aware that this will also
+ build any required dependencies, so the lib target may be built
+ twice (once as a benchmark, and once as a dependency for binaries,
+ benchmarks, etc.). Targets may be enabled or disabled by setting the
+ bench flag in the manifest settings for the target.
+
+ --all-targets
+ Benchmark all targets. This is equivalent to specifying --lib --bins
+ --tests --benches --examples.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
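+    As an illustration (the package and feature names here are
+    hypothetical), default features can be disabled and a feature of a
+    workspace member enabled in a single invocation:
+
+        cargo bench --no-default-features --features 'foo/simd'
+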
+ Compilation Options
+ --target triple
+ Benchmark for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ --profile name
+        Benchmark with the given profile. See the reference
+ <https://doc.rust-lang.org/cargo/reference/profiles.html> for more
+ details on profiles.
+
+ --ignore-rust-version
+ Benchmark the target even if the selected Rust compiler is older
+ than the required Rust version as configured in the project’s
+ rust-version field.
+
+ --timings=fmts
+        Output information about how long each compilation takes, and track
+ concurrency information over time. Accepts an optional
+ comma-separated list of output formats; --timings without an
+ argument will default to --timings=html. Specifying an output format
+ (rather than the default) is unstable and requires
+ -Zunstable-options. Valid output formats:
+
+ o html (unstable, requires -Zunstable-options): Write a
+ human-readable file cargo-timing.html to the target/cargo-timings
+ directory with a report of the compilation. Also write a report
+ to the same directory with a timestamp in the filename if you
+ want to look at older runs. HTML output is suitable for human
+ consumption only, and does not provide machine-readable timing
+ data.
+
+ o json (unstable, requires -Zunstable-options): Emit
+          machine-readable JSON timing information.
+
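+        For example, an HTML timing report for a benchmark build could be
+        produced with the default format (the report is written to the
+        target/cargo-timings directory):
+
+            cargo bench --timings
+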
+ Output Options
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ Display Options
+ By default the Rust test harness hides output from benchmark execution
+ to keep results readable. Benchmark output can be recovered (e.g., for
+ debugging) by passing --nocapture to the benchmark binaries:
+
+ cargo bench -- --nocapture
+
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --message-format fmt
+ The output format for diagnostic messages. Can be specified multiple
+ times and consists of comma-separated values. Valid values:
+
+ o human (default): Display in a human-readable text format.
+ Conflicts with short and json.
+
+ o short: Emit shorter, human-readable text messages. Conflicts with
+ human and json.
+
+ o json: Emit JSON messages to stdout. See the reference
+ <https://doc.rust-lang.org/cargo/reference/external-tools.html#json-messages>
+ for more details. Conflicts with human and short.
+
+ o json-diagnostic-short: Ensure the rendered field of JSON messages
+ contains the “short” rendering from rustc. Cannot be used
+ with human or short.
+
+ o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON
+ messages contains embedded ANSI color codes for respecting
+ rustc’s default color scheme. Cannot be used with human or
+ short.
+
+ o json-render-diagnostics: Instruct Cargo to not include rustc
+ diagnostics in JSON messages printed, but instead Cargo itself
+ should render the JSON diagnostics coming from rustc. Cargo’s
+ own JSON diagnostics and others coming from rustc are still
+ emitted. Cannot be used with human or short.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ Miscellaneous Options
+ The --jobs argument affects the building of the benchmark executable but
+ does not affect how many threads are used when running the benchmarks.
+ The Rust test harness runs benchmarks serially in a single thread.
+
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+        of parallel jobs to the number of logical CPUs plus the provided
+        value. It should not be 0.
+
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Build and execute all the benchmarks of the current package:
+
+ cargo bench
+
+ 2. Run only a specific benchmark within a specific benchmark target:
+
+ cargo bench --bench bench_name -- modname::some_benchmark
+
+SEE ALSO
+ cargo(1), cargo-test(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-build.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-build.txt
new file mode 100644
index 000000000..ff8bdb5ba
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-build.txt
@@ -0,0 +1,378 @@
+CARGO-BUILD(1)
+
+NAME
+ cargo-build — Compile the current package
+
+SYNOPSIS
+ cargo build [options]
+
+DESCRIPTION
+ Compile local packages and all of their dependencies.
+
+OPTIONS
+ Package Selection
+ By default, when no package selection options are given, the packages
+ selected depend on the selected manifest file (based on the current
+ working directory if --manifest-path is not given). If the manifest is
+    the root of a workspace, then the workspace's default members are
+ selected, otherwise only the package defined by the manifest will be
+ selected.
+
+ The default members of a workspace can be set explicitly with the
+ workspace.default-members key in the root manifest. If this is not set,
+ a virtual workspace will include all workspace members (equivalent to
+ passing --workspace), and a non-virtual workspace will include only the
+ root crate itself.
+
+ -p spec…, --package spec…
+ Build only the specified packages. See cargo-pkgid(1) for the SPEC
+ format. This flag may be specified multiple times and supports
+ common Unix glob patterns like *, ? and []. However, to avoid your
+ shell accidentally expanding glob patterns before Cargo handles
+ them, you must use single quotes or double quotes around each
+ pattern.
+
+ --workspace
+ Build all members in the workspace.
+
+ --all
+ Deprecated alias for --workspace.
+
+ --exclude SPEC…
+ Exclude the specified packages. Must be used in conjunction with the
+ --workspace flag. This flag may be specified multiple times and
+ supports common Unix glob patterns like *, ? and []. However, to
+ avoid your shell accidentally expanding glob patterns before Cargo
+ handles them, you must use single quotes or double quotes around
+ each pattern.
+
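+        For example, every workspace member except packages matching a
+        hypothetical internal-* naming pattern could be built with:
+
+            cargo build --workspace --exclude 'internal-*'
+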
+ Target Selection
+ When no target selection options are given, cargo build will build all
+ binary and library targets of the selected packages. Binaries are
+ skipped if they have required-features that are missing.
+
+ Binary targets are automatically built if there is an integration test
+ or benchmark being selected to build. This allows an integration test to
+ execute the binary to exercise and test its behavior. The
+ CARGO_BIN_EXE_<name> environment variable
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
+ is set when the integration test is built so that it can use the env
+ macro <https://doc.rust-lang.org/std/macro.env.html> to locate the
+ executable.
+
+ Passing target selection flags will build only the specified targets.
+
+ Note that --bin, --example, --test and --bench flags also support common
+ Unix glob patterns like *, ? and []. However, to avoid your shell
+ accidentally expanding glob patterns before Cargo handles them, you must
+ use single quotes or double quotes around each glob pattern.
+
+ --lib
+ Build the package’s library.
+
+ --bin name…
+ Build the specified binary. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --bins
+ Build all binary targets.
+
+ --example name…
+ Build the specified example. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --examples
+ Build all example targets.
+
+ --test name…
+ Build the specified integration test. This flag may be specified
+ multiple times and supports common Unix glob patterns.
+
+ --tests
+ Build all targets in test mode that have the test = true manifest
+ flag set. By default this includes the library and binaries built as
+ unittests, and integration tests. Be aware that this will also build
+ any required dependencies, so the lib target may be built twice
+ (once as a unittest, and once as a dependency for binaries,
+ integration tests, etc.). Targets may be enabled or disabled by
+ setting the test flag in the manifest settings for the target.
+
+ --bench name…
+ Build the specified benchmark. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --benches
+ Build all targets in benchmark mode that have the bench = true
+ manifest flag set. By default this includes the library and binaries
+ built as benchmarks, and bench targets. Be aware that this will also
+ build any required dependencies, so the lib target may be built
+ twice (once as a benchmark, and once as a dependency for binaries,
+ benchmarks, etc.). Targets may be enabled or disabled by setting the
+ bench flag in the manifest settings for the target.
+
+ --all-targets
+ Build all targets. This is equivalent to specifying --lib --bins
+ --tests --benches --examples.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Compilation Options
+ --target triple
+ Build for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
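+        As a sketch, assuming the target has been installed (for example
+        via rustup target add x86_64-unknown-linux-musl), a cross build
+        could be requested with:
+
+            cargo build --target x86_64-unknown-linux-musl
+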
+ -r, --release
+ Build optimized artifacts with the release profile. See also the
+ --profile option for choosing a specific profile by name.
+
+ --profile name
+        Build with the given profile. See the reference
+ <https://doc.rust-lang.org/cargo/reference/profiles.html> for more
+ details on profiles.
+
+ --ignore-rust-version
+ Build the target even if the selected Rust compiler is older than
+ the required Rust version as configured in the project’s
+ rust-version field.
+
+ --timings=fmts
+        Output information about how long each compilation takes, and track
+ concurrency information over time. Accepts an optional
+ comma-separated list of output formats; --timings without an
+ argument will default to --timings=html. Specifying an output format
+ (rather than the default) is unstable and requires
+ -Zunstable-options. Valid output formats:
+
+ o html (unstable, requires -Zunstable-options): Write a
+ human-readable file cargo-timing.html to the target/cargo-timings
+ directory with a report of the compilation. Also write a report
+ to the same directory with a timestamp in the filename if you
+ want to look at older runs. HTML output is suitable for human
+ consumption only, and does not provide machine-readable timing
+ data.
+
+ o json (unstable, requires -Zunstable-options): Emit
+          machine-readable JSON timing information.
+
+ Output Options
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ --out-dir directory
+ Copy final artifacts to this directory.
+
+ This option is unstable and available only on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable. See
+ <https://github.com/rust-lang/cargo/issues/6790> for more
+ information.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --message-format fmt
+ The output format for diagnostic messages. Can be specified multiple
+ times and consists of comma-separated values. Valid values:
+
+ o human (default): Display in a human-readable text format.
+ Conflicts with short and json.
+
+ o short: Emit shorter, human-readable text messages. Conflicts with
+ human and json.
+
+ o json: Emit JSON messages to stdout. See the reference
+ <https://doc.rust-lang.org/cargo/reference/external-tools.html#json-messages>
+ for more details. Conflicts with human and short.
+
+ o json-diagnostic-short: Ensure the rendered field of JSON messages
+ contains the “short” rendering from rustc. Cannot be used
+ with human or short.
+
+ o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON
+ messages contains embedded ANSI color codes for respecting
+ rustc’s default color scheme. Cannot be used with human or
+ short.
+
+ o json-render-diagnostics: Instruct Cargo to not include rustc
+ diagnostics in JSON messages printed, but instead Cargo itself
+ should render the JSON diagnostics coming from rustc. Cargo’s
+ own JSON diagnostics and others coming from rustc are still
+ emitted. Cannot be used with human or short.
+
+ --build-plan
+ Outputs a series of JSON messages to stdout that indicate the
+ commands to run the build.
+
+ This option is unstable and available only on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable. See
+ <https://github.com/rust-lang/cargo/issues/5579> for more
+ information.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ Miscellaneous Options
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+        of parallel jobs to the number of logical CPUs plus the provided
+        value. It should not be 0.
+
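+        For instance, to cap the build at two parallel jobs on a
+        memory-constrained machine:
+
+            cargo build -j 2
+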
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ --future-incompat-report
+ Displays a future-incompat report for any future-incompatible
+        warnings produced during execution of this command.
+
+        See cargo-report(1).
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Build the local package and all of its dependencies:
+
+ cargo build
+
+ 2. Build with optimizations:
+
+ cargo build --release
+
+SEE ALSO
+ cargo(1), cargo-rustc(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-check.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-check.txt
new file mode 100644
index 000000000..bf8cb48f3
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-check.txt
@@ -0,0 +1,363 @@
+CARGO-CHECK(1)
+
+NAME
+ cargo-check — Check the current package
+
+SYNOPSIS
+ cargo check [options]
+
+DESCRIPTION
+ Check a local package and all of its dependencies for errors. This will
+ essentially compile the packages without performing the final step of
+ code generation, which is faster than running cargo build. The compiler
+ will save metadata files to disk so that future runs will reuse them if
+ the source has not been modified. Some diagnostics and errors are only
+ emitted during code generation, so they inherently won’t be reported
+ with cargo check.
+
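+    A common development workflow is to iterate with cargo check while
+    editing and only run a full build when binaries are actually needed,
+    for example:
+
+        cargo check
+        cargo build --release
+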
+OPTIONS
+ Package Selection
+ By default, when no package selection options are given, the packages
+ selected depend on the selected manifest file (based on the current
+ working directory if --manifest-path is not given). If the manifest is
+    the root of a workspace, then the workspace's default members are
+ selected, otherwise only the package defined by the manifest will be
+ selected.
+
+ The default members of a workspace can be set explicitly with the
+ workspace.default-members key in the root manifest. If this is not set,
+ a virtual workspace will include all workspace members (equivalent to
+ passing --workspace), and a non-virtual workspace will include only the
+ root crate itself.
+
+ -p spec…, --package spec…
+ Check only the specified packages. See cargo-pkgid(1) for the SPEC
+ format. This flag may be specified multiple times and supports
+ common Unix glob patterns like *, ? and []. However, to avoid your
+ shell accidentally expanding glob patterns before Cargo handles
+ them, you must use single quotes or double quotes around each
+ pattern.
+
+ --workspace
+ Check all members in the workspace.
+
+ --all
+ Deprecated alias for --workspace.
+
+ --exclude SPEC…
+ Exclude the specified packages. Must be used in conjunction with the
+ --workspace flag. This flag may be specified multiple times and
+ supports common Unix glob patterns like *, ? and []. However, to
+ avoid your shell accidentally expanding glob patterns before Cargo
+ handles them, you must use single quotes or double quotes around
+ each pattern.
+
+ Target Selection
+ When no target selection options are given, cargo check will check all
+ binary and library targets of the selected packages. Binaries are
+ skipped if they have required-features that are missing.
+
+ Passing target selection flags will check only the specified targets.
+
+ Note that --bin, --example, --test and --bench flags also support common
+ Unix glob patterns like *, ? and []. However, to avoid your shell
+ accidentally expanding glob patterns before Cargo handles them, you must
+ use single quotes or double quotes around each glob pattern.
+
+ --lib
+ Check the package’s library.
+
+ --bin name…
+ Check the specified binary. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --bins
+ Check all binary targets.
+
+ --example name…
+ Check the specified example. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --examples
+ Check all example targets.
+
+ --test name…
+ Check the specified integration test. This flag may be specified
+ multiple times and supports common Unix glob patterns.
+
+ --tests
+ Check all targets in test mode that have the test = true manifest
+ flag set. By default this includes the library and binaries built as
+ unittests, and integration tests. Be aware that this will also build
+ any required dependencies, so the lib target may be built twice
+ (once as a unittest, and once as a dependency for binaries,
+ integration tests, etc.). Targets may be enabled or disabled by
+ setting the test flag in the manifest settings for the target.
+
+ --bench name…
+ Check the specified benchmark. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --benches
+ Check all targets in benchmark mode that have the bench = true
+ manifest flag set. By default this includes the library and binaries
+ built as benchmarks, and bench targets. Be aware that this will also
+ build any required dependencies, so the lib target may be built
+ twice (once as a benchmark, and once as a dependency for binaries,
+ benchmarks, etc.). Targets may be enabled or disabled by setting the
+ bench flag in the manifest settings for the target.
+
+ --all-targets
+ Check all targets. This is equivalent to specifying --lib --bins
+ --tests --benches --examples.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Compilation Options
+ --target triple
+ Check for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ -r, --release
+ Check optimized artifacts with the release profile. See also the
+ --profile option for choosing a specific profile by name.
+
+ --profile name
+ Check with the given profile.
+
+ As a special case, specifying the test profile will also enable
+ checking in test mode which will enable checking tests and enable
+ the test cfg option. See rustc tests
+ <https://doc.rust-lang.org/rustc/tests/index.html> for more detail.
+
+        See the reference
+ <https://doc.rust-lang.org/cargo/reference/profiles.html> for more
+ details on profiles.
+
+ --ignore-rust-version
+ Check the target even if the selected Rust compiler is older than
+ the required Rust version as configured in the project’s
+ rust-version field.
+
+ --timings=fmts
+        Output information about how long each compilation takes, and track
+ concurrency information over time. Accepts an optional
+ comma-separated list of output formats; --timings without an
+ argument will default to --timings=html. Specifying an output format
+ (rather than the default) is unstable and requires
+ -Zunstable-options. Valid output formats:
+
+ o html (unstable, requires -Zunstable-options): Write a
+ human-readable file cargo-timing.html to the target/cargo-timings
+ directory with a report of the compilation. Also write a report
+ to the same directory with a timestamp in the filename if you
+ want to look at older runs. HTML output is suitable for human
+ consumption only, and does not provide machine-readable timing
+ data.
+
+ o json (unstable, requires -Zunstable-options): Emit
+          machine-readable JSON timing information.
+
+ Output Options
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --message-format fmt
+ The output format for diagnostic messages. Can be specified multiple
+ times and consists of comma-separated values. Valid values:
+
+ o human (default): Display in a human-readable text format.
+ Conflicts with short and json.
+
+ o short: Emit shorter, human-readable text messages. Conflicts with
+ human and json.
+
+ o json: Emit JSON messages to stdout. See the reference
+ <https://doc.rust-lang.org/cargo/reference/external-tools.html#json-messages>
+ for more details. Conflicts with human and short.
+
+ o json-diagnostic-short: Ensure the rendered field of JSON messages
+ contains the “short” rendering from rustc. Cannot be used
+ with human or short.
+
+ o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON
+ messages contains embedded ANSI color codes for respecting
+ rustc’s default color scheme. Cannot be used with human or
+ short.
+
+ o json-render-diagnostics: Instruct Cargo to not include rustc
+ diagnostics in JSON messages printed, but instead Cargo itself
+ should render the JSON diagnostics coming from rustc. Cargo’s
+ own JSON diagnostics and others coming from rustc are still
+ emitted. Cannot be used with human or short.
+
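+        For example, shorter one-line diagnostics can be requested with:
+
+            cargo check --message-format=short
+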
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ Miscellaneous Options
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+        of parallel jobs to the number of logical CPUs plus the provided
+        value. It should not be 0.
+
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ --future-incompat-report
+ Displays a future-incompat report for any future-incompatible
+        warnings produced during execution of this command.
+
+        See cargo-report(1).
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Check the local package for errors:
+
+ cargo check
+
+ 2. Check all targets, including unit tests:
+
+ cargo check --all-targets --profile=test
+
+SEE ALSO
+ cargo(1), cargo-build(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-clean.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-clean.txt
new file mode 100644
index 000000000..33cebb719
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-clean.txt
@@ -0,0 +1,172 @@
+CARGO-CLEAN(1)
+
+NAME
+ cargo-clean — Remove generated artifacts
+
+SYNOPSIS
+ cargo clean [options]
+
+DESCRIPTION
+ Remove artifacts from the target directory that Cargo has generated in
+ the past.
+
+ With no options, cargo clean will delete the entire target directory.
+
+OPTIONS
+ Package Selection
+ When no packages are selected, all packages and all dependencies in the
+ workspace are cleaned.
+
+ -p spec…, --package spec…
+ Clean only the specified packages. This flag may be specified
+ multiple times. See cargo-pkgid(1) for the SPEC format.
+
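+        For example, the artifacts of a single hypothetical package named
+        foo could be removed while keeping the rest of the target
+        directory:
+
+            cargo clean -p foo
+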
+ Clean Options
+ --doc
+ This option will cause cargo clean to remove only the doc directory
+ in the target directory.
+
+ --release
+ Remove all artifacts in the release directory.
+
+ --profile name
+ Remove all artifacts in the directory with the given profile name.
+
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ --target triple
+ Clean for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Remove the entire target directory:
+
+ cargo clean
+
+ 2. Remove only the release artifacts:
+
+ cargo clean --release
+
+SEE ALSO
+ cargo(1), cargo-build(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-doc.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-doc.txt
new file mode 100644
index 000000000..825032826
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-doc.txt
@@ -0,0 +1,325 @@
+CARGO-DOC(1)
+
+NAME
+ cargo-doc — Build a package’s documentation
+
+SYNOPSIS
+ cargo doc [options]
+
+DESCRIPTION
+ Build the documentation for the local package and all dependencies. The
+ output is placed in target/doc in rustdoc’s usual format.
+
+OPTIONS
+ Documentation Options
+ --open
+ Open the docs in a browser after building them. This will use your
+ default browser unless you define another one in the BROWSER
+ environment variable or use the doc.browser
+ <https://doc.rust-lang.org/cargo/reference/config.html#docbrowser>
+ configuration option.
+
+ --no-deps
+ Do not build documentation for dependencies.
+
+ --document-private-items
+ Include non-public items in the documentation. This will be enabled
+ by default if documenting a binary target.
+
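+    For example, documentation for just the local package can be built and
+    opened in the default browser with:
+
+        cargo doc --no-deps --open
+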
+ Package Selection
+ By default, when no package selection options are given, the packages
+ selected depend on the selected manifest file (based on the current
+ working directory if --manifest-path is not given). If the manifest is
+    the root of a workspace, then the workspace's default members are
+ selected, otherwise only the package defined by the manifest will be
+ selected.
+
+ The default members of a workspace can be set explicitly with the
+ workspace.default-members key in the root manifest. If this is not set,
+ a virtual workspace will include all workspace members (equivalent to
+ passing --workspace), and a non-virtual workspace will include only the
+ root crate itself.
+
+ -p spec…, --package spec…
+ Document only the specified packages. See cargo-pkgid(1) for the
+ SPEC format. This flag may be specified multiple times and supports
+ common Unix glob patterns like *, ? and []. However, to avoid your
+ shell accidentally expanding glob patterns before Cargo handles
+ them, you must use single quotes or double quotes around each
+ pattern.
+
+ --workspace
+ Document all members in the workspace.
+
+ --all
+ Deprecated alias for --workspace.
+
+ --exclude SPEC…
+ Exclude the specified packages. Must be used in conjunction with the
+ --workspace flag. This flag may be specified multiple times and
+ supports common Unix glob patterns like *, ? and []. However, to
+ avoid your shell accidentally expanding glob patterns before Cargo
+ handles them, you must use single quotes or double quotes around
+ each pattern.
+
+ Target Selection
+ When no target selection options are given, cargo doc will document all
+ binary and library targets of the selected package. The binary will be
+ skipped if its name is the same as the lib target. Binaries are skipped
+ if they have required-features that are missing.
+
+ The default behavior can be changed by setting doc = false for the
+ target in the manifest settings. Using target selection options will
+ ignore the doc flag and will always document the given target.
+
+ --lib
+ Document the package’s library.
+
+ --bin name…
+ Document the specified binary. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --bins
+ Document all binary targets.
+
+ --example name…
+ Document the specified example. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --examples
+ Document all example targets.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Compilation Options
+ --target triple
+ Document for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ -r, --release
+ Document optimized artifacts with the release profile. See also the
+ --profile option for choosing a specific profile by name.
+
+ --profile name
+        Document with the given profile. See the reference
+ <https://doc.rust-lang.org/cargo/reference/profiles.html> for more
+ details on profiles.
+
+ --ignore-rust-version
+ Document the target even if the selected Rust compiler is older than
+ the required Rust version as configured in the project’s
+ rust-version field.
+
+ --timings=fmts
+        Output information about how long each compilation takes, and track
+ concurrency information over time. Accepts an optional
+ comma-separated list of output formats; --timings without an
+ argument will default to --timings=html. Specifying an output format
+ (rather than the default) is unstable and requires
+ -Zunstable-options. Valid output formats:
+
+ o html (unstable, requires -Zunstable-options): Write a
+ human-readable file cargo-timing.html to the target/cargo-timings
+ directory with a report of the compilation. Also write a report
+ to the same directory with a timestamp in the filename if you
+ want to look at older runs. HTML output is suitable for human
+ consumption only, and does not provide machine-readable timing
+ data.
+
+ o json (unstable, requires -Zunstable-options): Emit
+          machine-readable JSON timing information.
+
+ Output Options
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --message-format fmt
+ The output format for diagnostic messages. Can be specified multiple
+ times and consists of comma-separated values. Valid values:
+
+ o human (default): Display in a human-readable text format.
+ Conflicts with short and json.
+
+ o short: Emit shorter, human-readable text messages. Conflicts with
+ human and json.
+
+ o json: Emit JSON messages to stdout. See the reference
+ <https://doc.rust-lang.org/cargo/reference/external-tools.html#json-messages>
+ for more details. Conflicts with human and short.
+
+ o json-diagnostic-short: Ensure the rendered field of JSON messages
+ contains the “short” rendering from rustc. Cannot be used
+ with human or short.
+
+ o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON
+ messages contains embedded ANSI color codes for respecting
+ rustc’s default color scheme. Cannot be used with human or
+ short.
+
+ o json-render-diagnostics: Instruct Cargo to not include rustc
+ diagnostics in JSON messages printed, but instead Cargo itself
+ should render the JSON diagnostics coming from rustc. Cargo’s
+ own JSON diagnostics and others coming from rustc are still
+ emitted. Cannot be used with human or short.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ Miscellaneous Options
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+        of parallel jobs to the number of logical CPUs plus the provided
+        value. It should not be 0.
+
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+    1. Build the local package documentation and its dependencies, placing
+       the output in target/doc:
+
+ cargo doc
+
+SEE ALSO
+ cargo(1), cargo-rustdoc(1), rustdoc(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-fetch.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-fetch.txt
new file mode 100644
index 000000000..cbd3169c3
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-fetch.txt
@@ -0,0 +1,153 @@
+CARGO-FETCH(1)
+
+NAME
+ cargo-fetch — Fetch dependencies of a package from the network
+
+SYNOPSIS
+ cargo fetch [options]
+
+DESCRIPTION
+ If a Cargo.lock file is available, this command will ensure that all of
+ the git dependencies and/or registry dependencies are downloaded and
+ locally available. Subsequent Cargo commands will be able to run offline
+ after a cargo fetch unless the lock file changes.
+
+ If the lock file is not available, then this command will generate the
+ lock file before fetching the dependencies.
+
+ If --target is not specified, then all target dependencies are fetched.
+
+ See also the cargo-prefetch <https://crates.io/crates/cargo-prefetch>
+ plugin which adds a command to download popular crates. This may be
+ useful if you plan to use Cargo without a network with the --offline
+ flag.
+
+OPTIONS
+ Fetch options
+ --target triple
+ Fetch for the given architecture. The default is all architectures.
+ The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
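+        For example, to pre-download dependencies for one specific target (the
+        triple shown is just one of the supported targets):
+
+            cargo fetch --target x86_64-unknown-linux-gnu
+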
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
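+        For example, assuming a nightly toolchain has been installed through
+        rustup, the following runs this command with that toolchain:
+
+            cargo +nightly fetch
+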
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
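+        For example, to override a single configuration value for one
+        invocation (the key shown here is only an illustration):
+
+            cargo --config net.git-fetch-with-cli=true fetch
+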
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Fetch all dependencies:
+
+ cargo fetch
+
+SEE ALSO
+ cargo(1), cargo-update(1), cargo-generate-lockfile(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-fix.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-fix.txt
new file mode 100644
index 000000000..87d72ad38
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-fix.txt
@@ -0,0 +1,434 @@
+CARGO-FIX(1)
+
+NAME
+ cargo-fix — Automatically fix lint warnings reported by rustc
+
+SYNOPSIS
+ cargo fix [options]
+
+DESCRIPTION
+ This Cargo subcommand will automatically take rustc’s suggestions from
+ diagnostics like warnings and apply them to your source code. This is
+ intended to help automate tasks that rustc itself already knows how to
+ tell you to fix!
+
+ Executing cargo fix will under the hood execute cargo-check(1). Any
+ warnings applicable to your crate will be automatically fixed (if
+ possible) and all remaining warnings will be displayed when the check
+ process is finished. For example if you’d like to apply all fixes to
+ the current package, you can run:
+
+ cargo fix
+
+ which behaves the same as cargo check --all-targets.
+
+ cargo fix is only capable of fixing code that is normally compiled with
+ cargo check. If code is conditionally enabled with optional features,
+ you will need to enable those features for that code to be analyzed:
+
+ cargo fix --features foo
+
+ Similarly, other cfg expressions like platform-specific code will need
+ to pass --target to fix code for the given target.
+
+ cargo fix --target x86_64-pc-windows-gnu
+
+ If you encounter any problems with cargo fix or otherwise have any
+ questions or feature requests please don’t hesitate to file an issue
+ at <https://github.com/rust-lang/cargo>.
+
+ Edition migration
+ The cargo fix subcommand can also be used to migrate a package from one
+ edition
+ <https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html>
+ to the next. The general procedure is:
+
+ 1. Run cargo fix --edition. Consider also using the --all-features flag
+ if your project has multiple features. You may also want to run cargo
+ fix --edition multiple times with different --target flags if your
+ project has platform-specific code gated by cfg attributes.
+
+ 2. Modify Cargo.toml to set the edition field
+ <https://doc.rust-lang.org/cargo/reference/manifest.html#the-edition-field>
+ to the new edition.
+
+ 3. Run your project tests to verify that everything still works. If new
+ warnings are issued, you may want to consider running cargo fix again
+ (without the --edition flag) to apply any suggestions given by the
+ compiler.
+
+   And hopefully that’s it! Just keep in mind the caveats mentioned
+   above: cargo fix cannot update code for inactive features or cfg
+ expressions. Also, in some rare cases the compiler is unable to
+ automatically migrate all code to the new edition, and this may require
+ manual changes after building with the new edition.
+
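+   As a rough command-level sketch of those steps, for a hypothetical
+   package moving to the 2021 edition:
+
+       cargo fix --edition
+       # edit Cargo.toml and set: edition = "2021"
+       cargo test
+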
+OPTIONS
+ Fix options
+ --broken-code
+ Fix code even if it already has compiler errors. This is useful if
+ cargo fix fails to apply the changes. It will apply the changes and
+ leave the broken code in the working directory for you to inspect
+ and manually fix.
+
+ --edition
+ Apply changes that will update the code to the next edition. This
+ will not update the edition in the Cargo.toml manifest, which must
+ be updated manually after cargo fix --edition has finished.
+
+ --edition-idioms
+ Apply suggestions that will update code to the preferred style for
+ the current edition.
+
+ --allow-no-vcs
+ Fix code even if a VCS was not detected.
+
+ --allow-dirty
+ Fix code even if the working directory has changes.
+
+ --allow-staged
+ Fix code even if the working directory has staged changes.
+
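+        For example, to apply fixes even though the working directory already
+        contains uncommitted or staged changes:
+
+            cargo fix --allow-dirty --allow-staged
+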
+ Package Selection
+ By default, when no package selection options are given, the packages
+ selected depend on the selected manifest file (based on the current
+ working directory if --manifest-path is not given). If the manifest is
+   the root of a workspace then the workspace’s default members are
+ selected, otherwise only the package defined by the manifest will be
+ selected.
+
+ The default members of a workspace can be set explicitly with the
+ workspace.default-members key in the root manifest. If this is not set,
+ a virtual workspace will include all workspace members (equivalent to
+ passing --workspace), and a non-virtual workspace will include only the
+ root crate itself.
+
+ -p spec…, --package spec…
+ Fix only the specified packages. See cargo-pkgid(1) for the SPEC
+ format. This flag may be specified multiple times and supports
+ common Unix glob patterns like *, ? and []. However, to avoid your
+ shell accidentally expanding glob patterns before Cargo handles
+ them, you must use single quotes or double quotes around each
+ pattern.
+
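+        For example, to fix every workspace member whose name matches a glob
+        (the pattern is hypothetical), quoted so the shell does not expand it
+        first:
+
+            cargo fix -p 'my-prefix-*'
+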
+ --workspace
+ Fix all members in the workspace.
+
+ --all
+ Deprecated alias for --workspace.
+
+ --exclude SPEC…
+ Exclude the specified packages. Must be used in conjunction with the
+ --workspace flag. This flag may be specified multiple times and
+ supports common Unix glob patterns like *, ? and []. However, to
+ avoid your shell accidentally expanding glob patterns before Cargo
+ handles them, you must use single quotes or double quotes around
+ each pattern.
+
+ Target Selection
+ When no target selection options are given, cargo fix will fix all
+ targets (--all-targets implied). Binaries are skipped if they have
+ required-features that are missing.
+
+ Passing target selection flags will fix only the specified targets.
+
+ Note that --bin, --example, --test and --bench flags also support common
+ Unix glob patterns like *, ? and []. However, to avoid your shell
+ accidentally expanding glob patterns before Cargo handles them, you must
+ use single quotes or double quotes around each glob pattern.
+
+ --lib
+ Fix the package’s library.
+
+ --bin name…
+ Fix the specified binary. This flag may be specified multiple times
+ and supports common Unix glob patterns.
+
+ --bins
+ Fix all binary targets.
+
+ --example name…
+ Fix the specified example. This flag may be specified multiple times
+ and supports common Unix glob patterns.
+
+ --examples
+ Fix all example targets.
+
+ --test name…
+ Fix the specified integration test. This flag may be specified
+ multiple times and supports common Unix glob patterns.
+
+ --tests
+ Fix all targets in test mode that have the test = true manifest flag
+ set. By default this includes the library and binaries built as
+ unittests, and integration tests. Be aware that this will also build
+ any required dependencies, so the lib target may be built twice
+ (once as a unittest, and once as a dependency for binaries,
+ integration tests, etc.). Targets may be enabled or disabled by
+ setting the test flag in the manifest settings for the target.
+
+ --bench name…
+ Fix the specified benchmark. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --benches
+ Fix all targets in benchmark mode that have the bench = true
+ manifest flag set. By default this includes the library and binaries
+ built as benchmarks, and bench targets. Be aware that this will also
+ build any required dependencies, so the lib target may be built
+ twice (once as a benchmark, and once as a dependency for binaries,
+ benchmarks, etc.). Targets may be enabled or disabled by setting the
+ bench flag in the manifest settings for the target.
+
+ --all-targets
+ Fix all targets. This is equivalent to specifying --lib --bins
+ --tests --benches --examples.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Compilation Options
+ --target triple
+ Fix for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ -r, --release
+ Fix optimized artifacts with the release profile. See also the
+ --profile option for choosing a specific profile by name.
+
+ --profile name
+ Fix with the given profile.
+
+ As a special case, specifying the test profile will also enable
+ checking in test mode which will enable checking tests and enable
+ the test cfg option. See rustc tests
+ <https://doc.rust-lang.org/rustc/tests/index.html> for more detail.
+
+        See the reference
+ <https://doc.rust-lang.org/cargo/reference/profiles.html> for more
+ details on profiles.
+
+ --ignore-rust-version
+ Fix the target even if the selected Rust compiler is older than the
+ required Rust version as configured in the project’s rust-version
+ field.
+
+ --timings=fmts
+        Output information about how long each compilation takes, and track
+ concurrency information over time. Accepts an optional
+ comma-separated list of output formats; --timings without an
+ argument will default to --timings=html. Specifying an output format
+ (rather than the default) is unstable and requires
+ -Zunstable-options. Valid output formats:
+
+ o html (unstable, requires -Zunstable-options): Write a
+ human-readable file cargo-timing.html to the target/cargo-timings
+ directory with a report of the compilation. Also write a report
+ to the same directory with a timestamp in the filename if you
+ want to look at older runs. HTML output is suitable for human
+ consumption only, and does not provide machine-readable timing
+ data.
+
+ o json (unstable, requires -Zunstable-options): Emit
+ machine-readable JSON information about timing information.
+
+ Output Options
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --message-format fmt
+ The output format for diagnostic messages. Can be specified multiple
+ times and consists of comma-separated values. Valid values:
+
+ o human (default): Display in a human-readable text format.
+ Conflicts with short and json.
+
+ o short: Emit shorter, human-readable text messages. Conflicts with
+ human and json.
+
+ o json: Emit JSON messages to stdout. See the reference
+ <https://doc.rust-lang.org/cargo/reference/external-tools.html#json-messages>
+ for more details. Conflicts with human and short.
+
+ o json-diagnostic-short: Ensure the rendered field of JSON messages
+ contains the “short” rendering from rustc. Cannot be used
+ with human or short.
+
+ o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON
+ messages contains embedded ANSI color codes for respecting
+ rustc’s default color scheme. Cannot be used with human or
+ short.
+
+ o json-render-diagnostics: Instruct Cargo to not include rustc
+ diagnostics in JSON messages printed, but instead Cargo itself
+ should render the JSON diagnostics coming from rustc. Cargo’s
+ own JSON diagnostics and others coming from rustc are still
+ emitted. Cannot be used with human or short.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ Miscellaneous Options
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+        of parallel jobs to the number of logical CPUs plus the provided value.
+ Should not be 0.
+
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Apply compiler suggestions to the local package:
+
+ cargo fix
+
+ 2. Update a package to prepare it for the next edition:
+
+ cargo fix --edition
+
+ 3. Apply suggested idioms for the current edition:
+
+ cargo fix --edition-idioms
+
+SEE ALSO
+ cargo(1), cargo-check(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-generate-lockfile.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-generate-lockfile.txt
new file mode 100644
index 000000000..17f2a37ab
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-generate-lockfile.txt
@@ -0,0 +1,128 @@
+CARGO-GENERATE-LOCKFILE(1)
+
+NAME
+ cargo-generate-lockfile — Generate the lockfile for a package
+
+SYNOPSIS
+ cargo generate-lockfile [options]
+
+DESCRIPTION
+ This command will create the Cargo.lock lockfile for the current package
+ or workspace. If the lockfile already exists, it will be rebuilt with
+ the latest available version of every package.
+
+ See also cargo-update(1) which is also capable of creating a Cargo.lock
+ lockfile and has more options for controlling update behavior.
+
+OPTIONS
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Create or update the lockfile for the current package or workspace:
+
+ cargo generate-lockfile
+
+SEE ALSO
+ cargo(1), cargo-update(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-help.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-help.txt
new file mode 100644
index 000000000..0107ebe2c
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-help.txt
@@ -0,0 +1,23 @@
+CARGO-HELP(1)
+
+NAME
+ cargo-help — Get help for a Cargo command
+
+SYNOPSIS
+ cargo help [subcommand]
+
+DESCRIPTION
+ Prints a help message for the given command.
+
+EXAMPLES
+ 1. Get help for a command:
+
+ cargo help build
+
+ 2. Help is also available with the --help flag:
+
+ cargo build --help
+
+SEE ALSO
+ cargo(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-init.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-init.txt
new file mode 100644
index 000000000..678024881
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-init.txt
@@ -0,0 +1,136 @@
+CARGO-INIT(1)
+
+NAME
+ cargo-init — Create a new Cargo package in an existing directory
+
+SYNOPSIS
+ cargo init [options] [path]
+
+DESCRIPTION
+ This command will create a new Cargo manifest in the current directory.
+ Give a path as an argument to create in the given directory.
+
+ If there are typically-named Rust source files already in the directory,
+ those will be used. If not, then a sample src/main.rs file will be
+ created, or src/lib.rs if --lib is passed.
+
+ If the directory is not already in a VCS repository, then a new
+ repository is created (see --vcs below).
+
+ See cargo-new(1) for a similar command which will create a new package
+ in a new directory.
+
+OPTIONS
+ Init Options
+ --bin
+ Create a package with a binary target (src/main.rs). This is the
+ default behavior.
+
+ --lib
+ Create a package with a library target (src/lib.rs).
+
+ --edition edition
+ Specify the Rust edition to use. Default is 2021. Possible values:
+ 2015, 2018, 2021
+
+ --name name
+ Set the package name. Defaults to the directory name.
+
+ --vcs vcs
+ Initialize a new VCS repository for the given version control system
+ (git, hg, pijul, or fossil) or do not initialize any version control
+ at all (none). If not specified, defaults to git or the
+ configuration value cargo-new.vcs, or none if already inside a VCS
+ repository.
+
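+        For example, to initialize the package without creating any version
+        control repository:
+
+            cargo init --vcs none
+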
+ --registry registry
+ This sets the publish field in Cargo.toml to the given registry name
+ which will restrict publishing only to that registry.
+
+ Registry names are defined in Cargo config files
+ <https://doc.rust-lang.org/cargo/reference/config.html>. If not
+ specified, the default registry defined by the registry.default
+ config key is used. If the default registry is not set and
+ --registry is not used, the publish field will not be set which
+ means that publishing will not be restricted.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Create a binary Cargo package in the current directory:
+
+ cargo init
+
+SEE ALSO
+ cargo(1), cargo-new(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-install.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-install.txt
new file mode 100644
index 000000000..a29cdcd46
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-install.txt
@@ -0,0 +1,399 @@
+CARGO-INSTALL(1)
+
+NAME
+ cargo-install — Build and install a Rust binary
+
+SYNOPSIS
+ cargo install [options] crate[@version]…
+ cargo install [options] --path path
+ cargo install [options] --git url [crate…]
+ cargo install [options] --list
+
+DESCRIPTION
+ This command manages Cargo’s local set of installed binary crates.
+ Only packages which have executable [[bin]] or [[example]] targets can
+ be installed, and all executables are installed into the installation
+ root’s bin folder.
+
+ The installation root is determined, in order of precedence:
+
+ o --root option
+
+ o CARGO_INSTALL_ROOT environment variable
+
+ o install.root Cargo config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>
+
+ o CARGO_HOME environment variable
+
+ o $HOME/.cargo
+
+ There are multiple sources from which a crate can be installed. The
+ default location is crates.io but the --git, --path, and --registry
+ flags can change this source. If the source contains more than one
+ package (such as crates.io or a git repository with multiple crates) the
+ crate argument is required to indicate which crate should be installed.
+
+ Crates from crates.io can optionally specify the version they wish to
+ install via the --version flags, and similarly packages from git
+ repositories can optionally specify the branch, tag, or revision that
+ should be installed. If a crate has multiple binaries, the --bin
+ argument can selectively install only one of them, and if you’d rather
+ install examples the --example argument can be used as well.
+
+ If the package is already installed, Cargo will reinstall it if the
+ installed version does not appear to be up-to-date. If any of the
+ following values change, then Cargo will reinstall the package:
+
+ o The package version and source.
+
+ o The set of binary names installed.
+
+ o The chosen features.
+
+ o The profile (--profile).
+
+ o The target (--target).
+
+ Installing with --path will always build and install, unless there are
+ conflicting binaries from another package. The --force flag may be used
+ to force Cargo to always reinstall the package.
+
+ If the source is crates.io or --git then by default the crate will be
+ built in a temporary target directory. To avoid this, the target
+ directory can be specified by setting the CARGO_TARGET_DIR environment
+ variable to a relative path. In particular, this can be useful for
+ caching build artifacts on continuous integration systems.
+
+ Dealing with the Lockfile
+ By default, the Cargo.lock file that is included with the package will
+ be ignored. This means that Cargo will recompute which versions of
+ dependencies to use, possibly using newer versions that have been
+ released since the package was published. The --locked flag can be used
+ to force Cargo to use the packaged Cargo.lock file if it is available.
+ This may be useful for ensuring reproducible builds, to use the exact
+ same set of dependencies that were available when the package was
+ published. It may also be useful if a newer version of a dependency is
+ published that no longer builds on your system, or has other problems.
+ The downside to using --locked is that you will not receive any fixes or
+ updates to any dependency. Note that Cargo did not start publishing
+ Cargo.lock files until version 1.37, which means packages published with
+ prior versions will not have a Cargo.lock file available.
+
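+   For example, to build and install a crate (name illustrative) with
+   exactly the dependency versions recorded in its packaged Cargo.lock:
+
+       cargo install my-tool --locked
+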
+ Configuration Discovery
+   This command operates at the system or user level, not the project level. This
+ means that the local configuration discovery
+ <https://doc.rust-lang.org/cargo/reference/config.html#hierarchical-structure>
+ is ignored. Instead, the configuration discovery begins at
+ $CARGO_HOME/config.toml. If the package is installed with --path $PATH,
+ the local configuration will be used, beginning discovery at
+ $PATH/.cargo/config.toml.
+
+OPTIONS
+ Install Options
+ --vers version, --version version
+ Specify a version to install. This may be a version requirement
+        <https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html>,
+ like ~1.2, to have Cargo select the newest version from the given
+ requirement. If the version does not have a requirement operator
+ (such as ^ or ~), then it must be in the form MAJOR.MINOR.PATCH, and
+ will install exactly that version; it is not treated as a caret
+ requirement like Cargo dependencies are.
+
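+        For example (the crate name and versions are illustrative), the first
+        command selects the newest release matching ~1.2, while the second
+        installs exactly version 1.2.3:
+
+            cargo install my-tool --version '~1.2'
+            cargo install my-tool@1.2.3
+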
+ --git url
+ Git URL to install the specified crate from.
+
+ --branch branch
+ Branch to use when installing from git.
+
+ --tag tag
+ Tag to use when installing from git.
+
+ --rev sha
+ Specific commit to use when installing from git.
+
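+        As an illustration (the repository URL, tag, and crate name are
+        hypothetical), installing a specific tagged release from a git
+        repository might look like:
+
+            cargo install --git https://github.com/example/my-tool --tag v1.2.0 my-tool
+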
+ --path path
+ Filesystem path to local crate to install.
+
+ --list
+ List all installed packages and their versions.
+
+ -f, --force
+ Force overwriting existing crates or binaries. This can be used if a
+ package has installed a binary with the same name as another
+ package. This is also useful if something has changed on the system
+ that you want to rebuild with, such as a newer version of rustc.
+
+ --no-track
+ By default, Cargo keeps track of the installed packages with a
+ metadata file stored in the installation root directory. This flag
+ tells Cargo not to use or create that file. With this flag, Cargo
+ will refuse to overwrite any existing files unless the --force flag
+ is used. This also disables Cargo’s ability to protect against
+ multiple concurrent invocations of Cargo installing at the same
+ time.
+
+ --bin name…
+ Install only the specified binary.
+
+ --bins
+ Install all binaries.
+
+ --example name…
+ Install only the specified example.
+
+ --examples
+ Install all examples.
+
+ --root dir
+ Directory to install packages into.
+
+ --registry registry
+ Name of the registry to use. Registry names are defined in Cargo
+ config files
+ <https://doc.rust-lang.org/cargo/reference/config.html>. If not
+ specified, the default registry is used, which is defined by the
+ registry.default config key which defaults to crates-io.
+
+ --index index
+ The URL of the registry index to use.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Compilation Options
+ --target triple
+ Install for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ a new temporary folder located in the temporary directory of the
+ platform.
+
+        When using --path, by default it will use the target directory in the
+ workspace of the local crate unless --target-dir is specified.
+
+ --debug
+        Build with the dev profile instead of the release profile. See also the
+ --profile option for choosing a specific profile by name.
+
+ --profile name
+        Install with the given profile. See the reference
+ <https://doc.rust-lang.org/cargo/reference/profiles.html> for more
+ details on profiles.
+
+ --ignore-rust-version
+ Install the target even if the selected Rust compiler is older than
+ the required Rust version as configured in the project’s
+ rust-version field.
+
+ --timings=fmts
+        Output information about how long each compilation takes, and track
+ concurrency information over time. Accepts an optional
+ comma-separated list of output formats; --timings without an
+ argument will default to --timings=html. Specifying an output format
+ (rather than the default) is unstable and requires
+ -Zunstable-options. Valid output formats:
+
+ o html (unstable, requires -Zunstable-options): Write a
+ human-readable file cargo-timing.html to the target/cargo-timings
+ directory with a report of the compilation. Also write a report
+ to the same directory with a timestamp in the filename if you
+ want to look at older runs. HTML output is suitable for human
+ consumption only, and does not provide machine-readable timing
+ data.
+
+ o json (unstable, requires -Zunstable-options): Emit
+ machine-readable JSON information about timing information.
+
+ Manifest Options
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Miscellaneous Options
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+        of parallel jobs to the number of logical CPUs plus the provided value.
+ Should not be 0.
+
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --message-format fmt
+ The output format for diagnostic messages. Can be specified multiple
+ times and consists of comma-separated values. Valid values:
+
+ o human (default): Display in a human-readable text format.
+ Conflicts with short and json.
+
+ o short: Emit shorter, human-readable text messages. Conflicts with
+ human and json.
+
+ o json: Emit JSON messages to stdout. See the reference
+ <https://doc.rust-lang.org/cargo/reference/external-tools.html#json-messages>
+ for more details. Conflicts with human and short.
+
+ o json-diagnostic-short: Ensure the rendered field of JSON messages
+ contains the “short” rendering from rustc. Cannot be used
+ with human or short.
+
+ o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON
+ messages contains embedded ANSI color codes for respecting
+ rustc’s default color scheme. Cannot be used with human or
+ short.
+
+ o json-render-diagnostics: Instruct Cargo to not include rustc
+ diagnostics in JSON messages printed, but instead Cargo itself
+ should render the JSON diagnostics coming from rustc. Cargo’s
+ own JSON diagnostics and others coming from rustc are still
+ emitted. Cannot be used with human or short.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Install or upgrade a package from crates.io:
+
+ cargo install ripgrep
+
+ 2. Install or reinstall the package in the current directory:
+
+ cargo install --path .
+
+ 3. View the list of installed packages:
+
+ cargo install --list
+
+SEE ALSO
+ cargo(1), cargo-uninstall(1), cargo-search(1), cargo-publish(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-locate-project.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-locate-project.txt
new file mode 100644
index 000000000..68a563bfb
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-locate-project.txt
@@ -0,0 +1,119 @@
+CARGO-LOCATE-PROJECT(1)
+
+NAME
+ cargo-locate-project — Print a JSON representation of a Cargo.toml
+ file’s location
+
+SYNOPSIS
+ cargo locate-project [options]
+
+DESCRIPTION
+ This command will print a JSON object to stdout with the full path to
+ the manifest. The manifest is found by searching upward for a file named
+ Cargo.toml starting from the current working directory.
+
+ If the project happens to be a part of a workspace, the manifest of the
+ project, rather than the workspace root, is output. This can be
+   overridden by the --workspace flag. The workspace root is found by
+ traversing further upward or by using the field package.workspace after
+ locating the manifest of a workspace member.
+
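+   For example (the path shown is illustrative), the default invocation
+   prints a JSON object, while --message-format plain prints only the
+   path itself:
+
+       cargo locate-project
+       {"root":"/home/user/my-project/Cargo.toml"}
+
+       cargo locate-project --message-format plain
+       /home/user/my-project/Cargo.toml
+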
+OPTIONS
+ --workspace
+ Locate the Cargo.toml at the root of the workspace, as opposed to
+ the current workspace member.
+
+ Display Options
+ --message-format fmt
+ The representation in which to print the project location. Valid
+ values:
+
+ o json (default): JSON object with the path under the key
+ “root”.
+
+ o plain: Just the path.
+
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Display the path to the manifest based on the current directory:
+
+ cargo locate-project
+
+SEE ALSO
+ cargo(1), cargo-metadata(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-login.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-login.txt
new file mode 100644
index 000000000..cce8efcfb
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-login.txt
@@ -0,0 +1,111 @@
+CARGO-LOGIN(1)
+
+NAME
+ cargo-login — Save an API token from the registry locally
+
+SYNOPSIS
+ cargo login [options] [token]
+
+DESCRIPTION
+ This command will save the API token to disk so that commands that
+ require authentication, such as cargo-publish(1), will be automatically
+ authenticated. The token is saved in $CARGO_HOME/credentials.toml.
+ CARGO_HOME defaults to .cargo in your home directory.
+
+ If the token argument is not specified, it will be read from stdin.
+
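+   For example, in a non-interactive script the token can be piped in on
+   stdin rather than passed as an argument (the environment variable name
+   is illustrative):
+
+       echo "$CRATES_IO_TOKEN" | cargo login
+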
+ The API token for crates.io may be retrieved from
+ <https://crates.io/me>.
+
+   Take care to keep the token secret; it should not be shared with anyone
+ else.
+
+OPTIONS
+ Login Options
+ --registry registry
+ Name of the registry to use. Registry names are defined in Cargo
+ config files
+ <https://doc.rust-lang.org/cargo/reference/config.html>. If not
+ specified, the default registry is used, which is defined by the
+ registry.default config key which defaults to crates-io.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Save the API token to disk:
+
+ cargo login
+
+SEE ALSO
+ cargo(1), cargo-logout(1), cargo-publish(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-logout.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-logout.txt
new file mode 100644
index 000000000..db21a39b4
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-logout.txt
@@ -0,0 +1,115 @@
+CARGO-LOGOUT(1)
+
+NAME
+ cargo-logout — Remove an API token from the registry locally
+
+SYNOPSIS
+ cargo logout [options]
+
+DESCRIPTION
+ This command will remove the API token from the local credential
+ storage. Credentials are stored in $CARGO_HOME/credentials.toml where
+ $CARGO_HOME defaults to .cargo in your home directory.
+
+ If --registry is not specified, then the credentials for the default
+ registry will be removed (configured by registry.default
+ <https://doc.rust-lang.org/cargo/reference/config.html#registrydefault>,
+ which defaults to <https://crates.io/>).
+
+ This will not revoke the token on the server. If you need to revoke the
+ token, visit the registry website and follow its instructions (see
+ <https://crates.io/me> to revoke the token for <https://crates.io/>).
+
+OPTIONS
+ Logout Options
+ --registry registry
+ Name of the registry to use. Registry names are defined in Cargo
+ config files
+ <https://doc.rust-lang.org/cargo/reference/config.html>. If not
+ specified, the default registry is used, which is defined by the
+ registry.default config key which defaults to crates-io.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Remove the default registry token:
+
+ cargo logout
+
+ 2. Remove the token for a specific registry:
+
+ cargo logout --registry my-registry
+
+SEE ALSO
+ cargo(1), cargo-login(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-metadata.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-metadata.txt
new file mode 100644
index 000000000..be8bed7c6
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-metadata.txt
@@ -0,0 +1,441 @@
+CARGO-METADATA(1)
+
+NAME
+ cargo-metadata — Machine-readable metadata about the current package
+
+SYNOPSIS
+ cargo metadata [options]
+
+DESCRIPTION
+ Output JSON to stdout containing information about the workspace members
+ and resolved dependencies of the current package.
+
+   It is recommended to include the --format-version flag to future-proof
+   your code and ensure the output is in the format you are expecting.
+
+ See the cargo_metadata crate <https://crates.io/crates/cargo_metadata>
+ for a Rust API for reading the metadata.
+
+OUTPUT FORMAT
+ The output has the following format:
+
+ {
+ /* Array of all packages in the workspace.
+ It also includes all feature-enabled dependencies unless --no-deps is used.
+ */
+ "packages": [
+ {
+ /* The name of the package. */
+ "name": "my-package",
+ /* The version of the package. */
+ "version": "0.1.0",
+ /* The Package ID, a unique identifier for referring to the package. */
+ "id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+ /* The license value from the manifest, or null. */
+ "license": "MIT/Apache-2.0",
+ /* The license-file value from the manifest, or null. */
+ "license_file": "LICENSE",
+ /* The description value from the manifest, or null. */
+ "description": "Package description.",
+ /* The source ID of the package. This represents where
+ a package is retrieved from.
+ This is null for path dependencies and workspace members.
+ For other dependencies, it is a string with the format:
+ - "registry+URL" for registry-based dependencies.
+ Example: "registry+https://github.com/rust-lang/crates.io-index"
+ - "git+URL" for git-based dependencies.
+ Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c"
+ */
+ "source": null,
+ /* Array of dependencies declared in the package's manifest. */
+ "dependencies": [
+ {
+ /* The name of the dependency. */
+ "name": "bitflags",
+ /* The source ID of the dependency. May be null, see
+ description for the package source.
+ */
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ /* The version requirement for the dependency.
+ Dependencies without a version requirement have a value of "*".
+ */
+ "req": "^1.0",
+ /* The dependency kind.
+ "dev", "build", or null for a normal dependency.
+ */
+ "kind": null,
+ /* If the dependency is renamed, this is the new name for
+ the dependency as a string. null if it is not renamed.
+ */
+ "rename": null,
+ /* Boolean of whether or not this is an optional dependency. */
+ "optional": false,
+ /* Boolean of whether or not default features are enabled. */
+ "uses_default_features": true,
+ /* Array of features enabled. */
+ "features": [],
+ /* The target platform for the dependency.
+ null if not a target dependency.
+ */
+ "target": "cfg(windows)",
+ /* The file system path for a local path dependency.
+                 Not present if not a path dependency.
+ */
+ "path": "/path/to/dep",
+ /* A string of the URL of the registry this dependency is from.
+ If not specified or null, the dependency is from the default
+ registry (crates.io).
+ */
+ "registry": null
+ }
+ ],
+ /* Array of Cargo targets. */
+ "targets": [
+ {
+ /* Array of target kinds.
+ - lib targets list the `crate-type` values from the
+ manifest such as "lib", "rlib", "dylib",
+ "proc-macro", etc. (default ["lib"])
+ - binary is ["bin"]
+ - example is ["example"]
+ - integration test is ["test"]
+ - benchmark is ["bench"]
+ - build script is ["custom-build"]
+ */
+ "kind": [
+ "bin"
+ ],
+ /* Array of crate types.
+ - lib and example libraries list the `crate-type` values
+ from the manifest such as "lib", "rlib", "dylib",
+ "proc-macro", etc. (default ["lib"])
+ - all other target kinds are ["bin"]
+ */
+ "crate_types": [
+ "bin"
+ ],
+ /* The name of the target. */
+ "name": "my-package",
+ /* Absolute path to the root source file of the target. */
+ "src_path": "/path/to/my-package/src/main.rs",
+ /* The Rust edition of the target.
+ Defaults to the package edition.
+ */
+ "edition": "2018",
+ /* Array of required features.
+ This property is not included if no required features are set.
+ */
+ "required-features": ["feat1"],
+ /* Whether the target should be documented by `cargo doc`. */
+ "doc": true,
+ /* Whether or not this target has doc tests enabled, and
+ the target is compatible with doc testing.
+ */
+ "doctest": false,
+ /* Whether or not this target should be built and run with `--test`
+ */
+ "test": true
+ }
+ ],
+ /* Set of features defined for the package.
+ Each feature maps to an array of features or dependencies it
+ enables.
+ */
+ "features": {
+ "default": [
+ "feat1"
+ ],
+ "feat1": [],
+ "feat2": []
+ },
+ /* Absolute path to this package's manifest. */
+ "manifest_path": "/path/to/my-package/Cargo.toml",
+ /* Package metadata.
+ This is null if no metadata is specified.
+ */
+ "metadata": {
+ "docs": {
+ "rs": {
+ "all-features": true
+ }
+ }
+ },
+ /* List of registries to which this package may be published.
+ Publishing is unrestricted if null, and forbidden if an empty array. */
+ "publish": [
+ "crates-io"
+ ],
+ /* Array of authors from the manifest.
+ Empty array if no authors specified.
+ */
+ "authors": [
+ "Jane Doe <user@example.com>"
+ ],
+ /* Array of categories from the manifest. */
+ "categories": [
+ "command-line-utilities"
+ ],
+ /* Optional string that is the default binary picked by cargo run. */
+ "default_run": null,
+             /* Optional string that is the minimum supported Rust version. */
+ "rust_version": "1.56",
+ /* Array of keywords from the manifest. */
+ "keywords": [
+ "cli"
+ ],
+ /* The readme value from the manifest or null if not specified. */
+ "readme": "README.md",
+ /* The repository value from the manifest or null if not specified. */
+ "repository": "https://github.com/rust-lang/cargo",
+ /* The homepage value from the manifest or null if not specified. */
+ "homepage": "https://rust-lang.org",
+ /* The documentation value from the manifest or null if not specified. */
+ "documentation": "https://doc.rust-lang.org/stable/std",
+ /* The default edition of the package.
+ Note that individual targets may have different editions.
+ */
+ "edition": "2018",
+ /* Optional string that is the name of a native library the package
+ is linking to.
+ */
+ "links": null,
+ }
+ ],
+ /* Array of members of the workspace.
+ Each entry is the Package ID for the package.
+ */
+ "workspace_members": [
+ "my-package 0.1.0 (path+file:///path/to/my-package)",
+ ],
+ // The resolved dependency graph for the entire workspace. The enabled
+ // features are based on the enabled features for the "current" package.
+ // Inactivated optional dependencies are not listed.
+ //
+ // This is null if --no-deps is specified.
+ //
+ // By default, this includes all dependencies for all target platforms.
+ // The `--filter-platform` flag may be used to narrow to a specific
+ // target triple.
+ "resolve": {
+ /* Array of nodes within the dependency graph.
+ Each node is a package.
+ */
+ "nodes": [
+ {
+ /* The Package ID of this node. */
+ "id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+ /* The dependencies of this package, an array of Package IDs. */
+ "dependencies": [
+ "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ /* The dependencies of this package. This is an alternative to
+ "dependencies" which contains additional information. In
+ particular, this handles renamed dependencies.
+ */
+ "deps": [
+ {
+ /* The name of the dependency's library target.
+ If this is a renamed dependency, this is the new
+ name.
+ */
+ "name": "bitflags",
+ /* The Package ID of the dependency. */
+ "pkg": "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ /* Array of dependency kinds. Added in Cargo 1.40. */
+ "dep_kinds": [
+ {
+ /* The dependency kind.
+ "dev", "build", or null for a normal dependency.
+ */
+ "kind": null,
+ /* The target platform for the dependency.
+ null if not a target dependency.
+ */
+ "target": "cfg(windows)"
+ }
+ ]
+ }
+ ],
+ /* Array of features enabled on this package. */
+ "features": [
+ "default"
+ ]
+ }
+ ],
+ /* The root package of the workspace.
+ This is null if this is a virtual workspace. Otherwise it is
+ the Package ID of the root package.
+ */
+ "root": "my-package 0.1.0 (path+file:///path/to/my-package)"
+ },
+ /* The absolute path to the build directory where Cargo places its output. */
+ "target_directory": "/path/to/my-package/target",
+ /* The version of the schema for this metadata structure.
+ This will be changed if incompatible changes are ever made.
+ */
+ "version": 1,
+ /* The absolute path to the root of the workspace. */
+ "workspace_root": "/path/to/my-package"
+ /* Workspace metadata.
+ This is null if no metadata is specified. */
+ "metadata": {
+ "docs": {
+ "rs": {
+ "all-features": true
+ }
+ }
+ }
+ }
+
+OPTIONS
+ Output Options
+ --no-deps
+ Output information only about the workspace members and don’t
+ fetch dependencies.
+
+ --format-version version
+ Specify the version of the output format to use. Currently 1 is the
+ only possible value.
+
+ --filter-platform triple
+ This filters the resolve output to only include dependencies for the
+ given target triple
+ <https://doc.rust-lang.org/cargo/appendix/glossary.html#target>.
+ Without this flag, the resolve includes all targets.
+
+ Note that the dependencies listed in the “packages” array still
+       include all dependencies. Each package definition is intended to be
+ an unaltered reproduction of the information within Cargo.toml.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Output JSON about the current package:
+
+ cargo metadata --format-version=1
+
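+   2. Output metadata for the workspace members only, without fetching
+      dependencies:
+
+       cargo metadata --format-version=1 --no-deps
+
+   3. Output metadata with the resolved dependency graph narrowed to a
+      single target triple (x86_64-unknown-linux-gnu is an example):
+
+       cargo metadata --format-version=1 --filter-platform x86_64-unknown-linux-gnu
+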
+SEE ALSO
+ cargo(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-new.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-new.txt
new file mode 100644
index 000000000..5d2c61b48
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-new.txt
@@ -0,0 +1,131 @@
+CARGO-NEW(1)
+
+NAME
+ cargo-new — Create a new Cargo package
+
+SYNOPSIS
+ cargo new [options] path
+
+DESCRIPTION
+ This command will create a new Cargo package in the given directory.
+ This includes a simple template with a Cargo.toml manifest, sample
+ source file, and a VCS ignore file. If the directory is not already in a
+ VCS repository, then a new repository is created (see --vcs below).
+
+ See cargo-init(1) for a similar command which will create a new manifest
+ in an existing directory.
+
+OPTIONS
+ New Options
+ --bin
+ Create a package with a binary target (src/main.rs). This is the
+ default behavior.
+
+ --lib
+ Create a package with a library target (src/lib.rs).
+
+ --edition edition
+ Specify the Rust edition to use. Default is 2021. Possible values:
+ 2015, 2018, 2021
+
+ --name name
+ Set the package name. Defaults to the directory name.
+
+ --vcs vcs
+ Initialize a new VCS repository for the given version control system
+ (git, hg, pijul, or fossil) or do not initialize any version control
+ at all (none). If not specified, defaults to git or the
+ configuration value cargo-new.vcs, or none if already inside a VCS
+ repository.
+
+ --registry registry
+ This sets the publish field in Cargo.toml to the given registry name
+ which will restrict publishing only to that registry.
+
+ Registry names are defined in Cargo config files
+ <https://doc.rust-lang.org/cargo/reference/config.html>. If not
+ specified, the default registry defined by the registry.default
+ config key is used. If the default registry is not set and
+ --registry is not used, the publish field will not be set which
+ means that publishing will not be restricted.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Create a binary Cargo package in the given directory:
+
+ cargo new foo
+
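+   2. Create a library package without initializing a version control
+      repository:
+
+       cargo new --lib --vcs none bar
+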
+SEE ALSO
+ cargo(1), cargo-init(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-owner.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-owner.txt
new file mode 100644
index 000000000..a77975da0
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-owner.txt
@@ -0,0 +1,146 @@
+CARGO-OWNER(1)
+
+NAME
+ cargo-owner — Manage the owners of a crate on the registry
+
+SYNOPSIS
+ cargo owner [options] --add login [crate]
+ cargo owner [options] --remove login [crate]
+ cargo owner [options] --list [crate]
+
+DESCRIPTION
+ This command will modify the owners for a crate on the registry. Owners
+ of a crate can upload new versions and yank old versions. Non-team
+ owners can also modify the set of owners, so take care!
+
+ This command requires you to be authenticated with either the --token
+ option or using cargo-login(1).
+
+ If the crate name is not specified, it will use the package name from
+ the current directory.
+
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/publishing.html#cargo-owner>
+ for more information about owners and publishing.
+
+OPTIONS
+ Owner Options
+ -a, --add login…
+ Invite the given user or team as an owner.
+
+ -r, --remove login…
+ Remove the given user or team as an owner.
+
+ -l, --list
+ List owners of a crate.
+
+ --token token
+ API token to use when authenticating. This overrides the token
+ stored in the credentials file (which is created by cargo-login(1)).
+
+ Cargo config <https://doc.rust-lang.org/cargo/reference/config.html>
+ environment variables can be used to override the tokens stored in
+ the credentials file. The token for crates.io may be specified with
+ the CARGO_REGISTRY_TOKEN environment variable. Tokens for other
+ registries may be specified with environment variables of the form
+ CARGO_REGISTRIES_NAME_TOKEN where NAME is the name of the registry
+ in all capital letters.
+
+ --index index
+ The URL of the registry index to use.
+
+ --registry registry
+ Name of the registry to use. Registry names are defined in Cargo
+ config files
+ <https://doc.rust-lang.org/cargo/reference/config.html>. If not
+ specified, the default registry is used, which is defined by the
+ registry.default config key which defaults to crates-io.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. List owners of a package:
+
+ cargo owner --list foo
+
+ 2. Invite an owner to a package:
+
+ cargo owner --add username foo
+
+ 3. Remove an owner from a package:
+
+ cargo owner --remove username foo
+
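+   4. List owners of a package on an alternate registry, where the
+      registry name is defined in Cargo config files:
+
+       cargo owner --list --registry my-registry foo
+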
+SEE ALSO
+ cargo(1), cargo-login(1), cargo-publish(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-package.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-package.txt
new file mode 100644
index 000000000..960e0248e
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-package.txt
@@ -0,0 +1,280 @@
+CARGO-PACKAGE(1)
+
+NAME
+ cargo-package — Assemble the local package into a distributable
+ tarball
+
+SYNOPSIS
+ cargo package [options]
+
+DESCRIPTION
+ This command will create a distributable, compressed .crate file with
+ the source code of the package in the current directory. The resulting
+ file will be stored in the target/package directory. This performs the
+ following steps:
+
+ 1. Load and check the current workspace, performing some basic checks.
+ o Path dependencies are not allowed unless they have a version key.
+ Cargo will ignore the path key for dependencies in published
+ packages. dev-dependencies do not have this restriction.
+
+ 2. Create the compressed .crate file.
+ o The original Cargo.toml file is rewritten and normalized.
+
+ o [patch], [replace], and [workspace] sections are removed from the
+ manifest.
+
+ o Cargo.lock is automatically included if the package contains an
+ executable binary or example target. cargo-install(1) will use the
+ packaged lock file if the --locked flag is used.
+
+ o A .cargo_vcs_info.json file is included that contains information
+ about the current VCS checkout hash if available (not included
+ with --allow-dirty).
+
+ 3. Extract the .crate file and build it to verify it can build.
+ o This will rebuild your package from scratch to ensure that it can
+ be built from a pristine state. The --no-verify flag can be used
+ to skip this step.
+
+ 4. Check that build scripts did not modify any source files.
+
+ The list of files included can be controlled with the include and
+ exclude fields in the manifest.
+
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/publishing.html> for more
+ details about packaging and publishing.
+
+ .cargo_vcs_info.json format
+ Will generate a .cargo_vcs_info.json in the following format
+
+ {
+ "git": {
+ "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302"
+ },
+ "path_in_vcs": ""
+ }
+
+ path_in_vcs will be set to a repo-relative path for packages in
+ subdirectories of the version control repository.
+
+OPTIONS
+ Package Options
+ -l, --list
+ Print files included in a package without making one.
+
+ --no-verify
+ Don’t verify the contents by building them.
+
+ --no-metadata
+ Ignore warnings about a lack of human-usable metadata (such as the
+ description or the license).
+
+ --allow-dirty
+ Allow working directories with uncommitted VCS changes to be
+ packaged.
+
+ Package Selection
+ By default, when no package selection options are given, the packages
+ selected depend on the selected manifest file (based on the current
+ working directory if --manifest-path is not given). If the manifest is
+   the root of a workspace, then the workspace's default members are
+ selected, otherwise only the package defined by the manifest will be
+ selected.
+
+ The default members of a workspace can be set explicitly with the
+ workspace.default-members key in the root manifest. If this is not set,
+ a virtual workspace will include all workspace members (equivalent to
+ passing --workspace), and a non-virtual workspace will include only the
+ root crate itself.
+
+ -p spec…, --package spec…
+ Package only the specified packages. See cargo-pkgid(1) for the SPEC
+ format. This flag may be specified multiple times and supports
+ common Unix glob patterns like *, ? and []. However, to avoid your
+ shell accidentally expanding glob patterns before Cargo handles
+ them, you must use single quotes or double quotes around each
+ pattern.
+
+ --workspace
+ Package all members in the workspace.
+
+ --exclude SPEC…
+ Exclude the specified packages. Must be used in conjunction with the
+ --workspace flag. This flag may be specified multiple times and
+ supports common Unix glob patterns like *, ? and []. However, to
+ avoid your shell accidentally expanding glob patterns before Cargo
+ handles them, you must use single quotes or double quotes around
+ each pattern.
+
+ Compilation Options
+ --target triple
+ Package for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Miscellaneous Options
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+       of parallel jobs to the number of logical CPUs plus the provided
+       value. Should not be 0.
+
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Create a compressed .crate file of the current package:
+
+ cargo package
+
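+   2. List the files that would be included in the package without
+      creating the .crate file:
+
+       cargo package --list
+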
+SEE ALSO
+ cargo(1), cargo-publish(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-pkgid.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-pkgid.txt
new file mode 100644
index 000000000..e2df3fd98
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-pkgid.txt
@@ -0,0 +1,170 @@
+CARGO-PKGID(1)
+
+NAME
+ cargo-pkgid — Print a fully qualified package specification
+
+SYNOPSIS
+ cargo pkgid [options] [spec]
+
+DESCRIPTION
+ Given a spec argument, print out the fully qualified package ID
+ specifier for a package or dependency in the current workspace. This
+ command will generate an error if spec is ambiguous as to which package
+ it refers to in the dependency graph. If no spec is given, then the
+ specifier for the local package is printed.
+
+ This command requires that a lockfile is available and dependencies have
+ been fetched.
+
+ A package specifier consists of a name, version, and source URL. You are
+ allowed to use partial specifiers to succinctly match a specific package
+ as long as it matches only one package. The format of a spec can be one
+ of the following:
+
+   +------------------+-------------------------------------------------------+
+   | SPEC Structure   | Example SPEC                                          |
+   +------------------+-------------------------------------------------------+
+   | name             | bitflags                                              |
+   +------------------+-------------------------------------------------------+
+   | name@version     | bitflags@1.0.4                                        |
+   +------------------+-------------------------------------------------------+
+   | url              | https://github.com/rust-lang/cargo                   |
+   +------------------+-------------------------------------------------------+
+   | url#version      | https://github.com/rust-lang/cargo#0.33.0             |
+   +------------------+-------------------------------------------------------+
+   | url#name         | https://github.com/rust-lang/crates.io-index#bitflags |
+   +------------------+-------------------------------------------------------+
+   | url#name@version | https://github.com/rust-lang/cargo#crates-io@0.21.0   |
+   +------------------+-------------------------------------------------------+
+
+OPTIONS
+ Package Selection
+ -p spec, --package spec
+ Get the package ID for the given package instead of the current
+ package.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+   1. Retrieve the package specification for the foo package:
+
+ cargo pkgid foo
+
+ 2. Retrieve package specification for version 1.0.0 of foo:
+
+ cargo pkgid foo@1.0.0
+
+ 3. Retrieve package specification for foo from crates.io:
+
+ cargo pkgid https://github.com/rust-lang/crates.io-index#foo
+
+ 4. Retrieve package specification for foo from a local package:
+
+ cargo pkgid file:///path/to/local/package#foo
+
+SEE ALSO
+ cargo(1), cargo-generate-lockfile(1), cargo-metadata(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-publish.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-publish.txt
new file mode 100644
index 000000000..d35172ad7
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-publish.txt
@@ -0,0 +1,246 @@
+CARGO-PUBLISH(1)
+
+NAME
+ cargo-publish — Upload a package to the registry
+
+SYNOPSIS
+ cargo publish [options]
+
+DESCRIPTION
+ This command will create a distributable, compressed .crate file with
+ the source code of the package in the current directory and upload it to
+ a registry. The default registry is <https://crates.io>. This performs
+ the following steps:
+
+   1. Perform a few checks, including:
+ o Checks the package.publish key in the manifest for restrictions on
+ which registries you are allowed to publish to.
+
+ 2. Create a .crate file by following the steps in cargo-package(1).
+
+ 3. Upload the crate to the registry. Note that the server will perform
+ additional checks on the crate.
+
+ This command requires you to be authenticated with either the --token
+ option or using cargo-login(1).
+
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/publishing.html> for more
+ details about packaging and publishing.
+
+OPTIONS
+ Publish Options
+ --dry-run
+ Perform all checks without uploading.
+
+ --token token
+ API token to use when authenticating. This overrides the token
+ stored in the credentials file (which is created by cargo-login(1)).
+
+ Cargo config <https://doc.rust-lang.org/cargo/reference/config.html>
+ environment variables can be used to override the tokens stored in
+ the credentials file. The token for crates.io may be specified with
+ the CARGO_REGISTRY_TOKEN environment variable. Tokens for other
+ registries may be specified with environment variables of the form
+ CARGO_REGISTRIES_NAME_TOKEN where NAME is the name of the registry
+ in all capital letters.
+
+ --no-verify
+ Don’t verify the contents by building them.
+
+ --allow-dirty
+ Allow working directories with uncommitted VCS changes to be
+ packaged.
+
+ --index index
+ The URL of the registry index to use.
+
+ --registry registry
+ Name of the registry to publish to. Registry names are defined in
+ Cargo config files
+ <https://doc.rust-lang.org/cargo/reference/config.html>. If not
+ specified, and there is a package.publish
+ <https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field>
+ field in Cargo.toml with a single registry, then it will publish to
+ that registry. Otherwise it will use the default registry, which is
+ defined by the registry.default
+ <https://doc.rust-lang.org/cargo/reference/config.html#registrydefault>
+ config key which defaults to crates-io.
+
+ Package Selection
+ By default, the package in the current working directory is selected.
+ The -p flag can be used to choose a different package in a workspace.
+
+ -p spec, --package spec
+ The package to publish. See cargo-pkgid(1) for the SPEC format.
+
+ Compilation Options
+ --target triple
+ Publish for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Miscellaneous Options
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+       of parallel jobs to the number of logical CPUs plus the provided
+       value. Should not be 0.
+
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Publish the current package:
+
+ cargo publish
+
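+   2. Perform all checks for the current package without uploading it:
+
+       cargo publish --dry-run
+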
+SEE ALSO
+ cargo(1), cargo-package(1), cargo-login(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-remove.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-remove.txt
new file mode 100644
index 000000000..53451c289
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-remove.txt
@@ -0,0 +1,155 @@
+CARGO-REMOVE(1)
+
+NAME
+ cargo-remove — Remove dependencies from a Cargo.toml manifest file
+
+SYNOPSIS
+ cargo remove [options] dependency…
+
+DESCRIPTION
+ Remove one or more dependencies from a Cargo.toml manifest.
+
+OPTIONS
+ Section options
+ --dev
+ Remove as a development dependency
+ <https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#development-dependencies>.
+
+ --build
+ Remove as a build dependency
+ <https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#build-dependencies>.
+
+ --target target
+ Remove as a dependency to the given target platform
+ <https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#platform-specific-dependencies>.
+
+ To avoid unexpected shell expansions, you may use quotes around each
+ target, e.g., --target 'cfg(unix)'.
+
+ Miscellaneous Options
+ --dry-run
+ Don’t actually write to the manifest.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Package Selection
+ -p spec…, --package spec…
+ Package to remove from.
+
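+ For example, a dependency can be removed from a single workspace
+ member by naming it with -p (the package name my-member below is only
+ illustrative):
+
+ cargo remove -p my-member regex
+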
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
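+ As a minimal illustration, a single value such as the term.quiet key
+ described above can be overridden for one invocation:
+
+ cargo remove --config term.quiet=true regex
+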
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Remove regex as a dependency:
+
+ cargo remove regex
+
+ 2. Remove trybuild as a dev-dependency:
+
+ cargo remove --dev trybuild
+
+ 3. Remove nom from the x86_64-pc-windows-gnu dependencies table:
+
+ cargo remove --target x86_64-pc-windows-gnu nom
+
+SEE ALSO
+ cargo(1), cargo-add(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-report.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-report.txt
new file mode 100644
index 000000000..f75a60c50
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-report.txt
@@ -0,0 +1,34 @@
+CARGO-REPORT(1)
+
+NAME
+ cargo-report — Generate and display various kinds of reports
+
+SYNOPSIS
+ cargo report type [options]
+
+DESCRIPTION
+ Displays a report of the given type — currently, only future-incompat
+ is supported.
+
+OPTIONS
+ --id id
+ Show the report with the specified Cargo-generated id
+
+ -p spec…, --package spec…
+ Only display a report for the specified package
+
+EXAMPLES
+ 1. Display the latest future-incompat report:
+
+ cargo report future-incompat
+
+ 2. Display the latest future-incompat report for a specific package:
+
+ cargo report future-incompat --package my-dep:0.0.1
+
+SEE ALSO
+ Future incompat report
+ <https://doc.rust-lang.org/cargo/reference/future-incompat-report.html>
+
+ cargo(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-run.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-run.txt
new file mode 100644
index 000000000..f6782be11
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-run.txt
@@ -0,0 +1,277 @@
+CARGO-RUN(1)
+
+NAME
+ cargo-run — Run the current package
+
+SYNOPSIS
+ cargo run [options] [-- args]
+
+DESCRIPTION
+ Run a binary or example of the local package.
+
+ All the arguments following the two dashes (--) are passed to the binary
+ to run. If you’re passing arguments to both Cargo and the binary, the
+ ones after -- go to the binary, and the ones before go to Cargo.
+
+ Unlike cargo-test(1) and cargo-bench(1), cargo run sets the working
+ directory of the binary executed to the current working directory, same
+ as if it was executed in the shell directly.
+
+OPTIONS
+ Package Selection
+ By default, the package in the current working directory is selected.
+ The -p flag can be used to choose a different package in a workspace.
+
+ -p spec, --package spec
+ The package to run. See cargo-pkgid(1) for the SPEC format.
+
+ Target Selection
+ When no target selection options are given, cargo run will run the
+ binary target. If there are multiple binary targets, you must pass a
+ target flag to choose one. Or, the default-run field may be specified in
+ the [package] section of Cargo.toml to choose the name of the binary to
+ run by default.
+
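+ A minimal manifest excerpt using this field (the binary name app is
+ illustrative):
+
+ [package]
+ # ...name, version, and other package fields...
+ default-run = "app"
+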
+ --bin name
+ Run the specified binary.
+
+ --example name
+ Run the specified example.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
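+ For example, features may be listed together, including one from a
+ workspace member using the package-name/feature-name syntax (the
+ feature and package names here are illustrative):
+
+ cargo run --features "json,my-dep/extra"
+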
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Compilation Options
+ --target triple
+ Run for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ -r, --release
+ Run optimized artifacts with the release profile. See also the
+ --profile option for choosing a specific profile by name.
+
+ --profile name
+ Run with the given profile. See the reference
+ <https://doc.rust-lang.org/cargo/reference/profiles.html> for more
+ details on profiles.
+
+ --ignore-rust-version
+ Run the target even if the selected Rust compiler is older than the
+ required Rust version as configured in the project’s rust-version
+ field.
+
+ Output information about how long each compilation takes, and track
+ Output information how long each compilation takes, and track
+ concurrency information over time. Accepts an optional
+ comma-separated list of output formats; --timings without an
+ argument will default to --timings=html. Specifying an output format
+ (rather than the default) is unstable and requires
+ -Zunstable-options. Valid output formats:
+
+ o html (unstable, requires -Zunstable-options): Write a
+ human-readable file cargo-timing.html to the target/cargo-timings
+ directory with a report of the compilation. Also write a report
+ to the same directory with a timestamp in the filename if you
+ want to look at older runs. HTML output is suitable for human
+ consumption only, and does not provide machine-readable timing
+ data.
+
+ o json (unstable, requires -Zunstable-options): Emit
+ machine-readable JSON information about timing information.
+
+ Output Options
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --message-format fmt
+ The output format for diagnostic messages. Can be specified multiple
+ times and consists of comma-separated values. Valid values:
+
+ o human (default): Display in a human-readable text format.
+ Conflicts with short and json.
+
+ o short: Emit shorter, human-readable text messages. Conflicts with
+ human and json.
+
+ o json: Emit JSON messages to stdout. See the reference
+ <https://doc.rust-lang.org/cargo/reference/external-tools.html#json-messages>
+ for more details. Conflicts with human and short.
+
+ o json-diagnostic-short: Ensure the rendered field of JSON messages
+ contains the “short” rendering from rustc. Cannot be used
+ with human or short.
+
+ o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON
+ messages contains embedded ANSI color codes for respecting
+ rustc’s default color scheme. Cannot be used with human or
+ short.
+
+ o json-render-diagnostics: Instruct Cargo to not include rustc
+ diagnostics in JSON messages printed, but instead Cargo itself
+ should render the JSON diagnostics coming from rustc. Cargo’s
+ own JSON diagnostics and others coming from rustc are still
+ emitted. Cannot be used with human or short.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
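+ A sketch of such an invocation on the nightly channel (the path is
+ illustrative):
+
+ cargo +nightly -Z unstable-options -C path/to/my-project run
+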
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ Miscellaneous Options
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+ of parallel jobs to the number of logical CPUs plus the provided
+ value (for example, --jobs -2 on a machine with 8 logical CPUs allows
+ at most 6 parallel jobs). The value should not be 0.
+
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Build the local package and run its main target (assuming only one
+ binary):
+
+ cargo run
+
+ 2. Run an example with extra arguments:
+
+ cargo run --example exname -- --exoption exarg1 exarg2
+
+SEE ALSO
+ cargo(1), cargo-build(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-rustc.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-rustc.txt
new file mode 100644
index 000000000..cc4241f93
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-rustc.txt
@@ -0,0 +1,385 @@
+CARGO-RUSTC(1)
+
+NAME
+ cargo-rustc — Compile the current package, and pass extra options to
+ the compiler
+
+SYNOPSIS
+ cargo rustc [options] [-- args]
+
+DESCRIPTION
+ The specified target for the current package (or package specified by -p
+ if provided) will be compiled along with all of its dependencies. The
+ specified args will all be passed to the final compiler invocation, not
+ any of the dependencies. Note that the compiler will still
+ unconditionally receive arguments such as -L, --extern, and
+ --crate-type, and the specified args will simply be added to the
+ compiler invocation.
+
+ See <https://doc.rust-lang.org/rustc/index.html> for documentation on
+ rustc flags.
+
+ This command requires that only one target is being compiled when
+ additional arguments are provided. If more than one target is available
+ for the current package, the filters of --lib, --bin, etc., must be used
+ to select which target is compiled.
+
+ To pass flags to all compiler processes spawned by Cargo, use the
+ RUSTFLAGS environment variable
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ or the build.rustflags config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
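+ For example, a codegen flag can be passed only to the final compiler
+ invocation, or to every compiler invocation via RUSTFLAGS (the flag
+ shown is just an illustration):
+
+ cargo rustc --release -- -C target-cpu=native
+ RUSTFLAGS="-C target-cpu=native" cargo build --release
+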
+OPTIONS
+ Package Selection
+ By default, the package in the current working directory is selected.
+ The -p flag can be used to choose a different package in a workspace.
+
+ -p spec, --package spec
+ The package to build. See cargo-pkgid(1) for the SPEC format.
+
+ Target Selection
+ When no target selection options are given, cargo rustc will build all
+ binary and library targets of the selected package.
+
+ Binary targets are automatically built if there is an integration test
+ or benchmark being selected to build. This allows an integration test to
+ execute the binary to exercise and test its behavior. The
+ CARGO_BIN_EXE_<name> environment variable
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
+ is set when the integration test is built so that it can use the env
+ macro <https://doc.rust-lang.org/std/macro.env.html> to locate the
+ executable.
+
+ Passing target selection flags will build only the specified targets.
+
+ Note that --bin, --example, --test and --bench flags also support common
+ Unix glob patterns like *, ? and []. However, to avoid your shell
+ accidentally expanding glob patterns before Cargo handles them, you must
+ use single quotes or double quotes around each glob pattern.
+
+ --lib
+ Build the package’s library.
+
+ --bin name…
+ Build the specified binary. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --bins
+ Build all binary targets.
+
+ --example name…
+ Build the specified example. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --examples
+ Build all example targets.
+
+ --test name…
+ Build the specified integration test. This flag may be specified
+ multiple times and supports common Unix glob patterns.
+
+ --tests
+ Build all targets in test mode that have the test = true manifest
+ flag set. By default this includes the library and binaries built as
+ unittests, and integration tests. Be aware that this will also build
+ any required dependencies, so the lib target may be built twice
+ (once as a unittest, and once as a dependency for binaries,
+ integration tests, etc.). Targets may be enabled or disabled by
+ setting the test flag in the manifest settings for the target.
+
+ --bench name…
+ Build the specified benchmark. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --benches
+ Build all targets in benchmark mode that have the bench = true
+ manifest flag set. By default this includes the library and binaries
+ built as benchmarks, and bench targets. Be aware that this will also
+ build any required dependencies, so the lib target may be built
+ twice (once as a benchmark, and once as a dependency for binaries,
+ benchmarks, etc.). Targets may be enabled or disabled by setting the
+ bench flag in the manifest settings for the target.
+
+ --all-targets
+ Build all targets. This is equivalent to specifying --lib --bins
+ --tests --benches --examples.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Compilation Options
+ --target triple
+ Build for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ -r, --release
+ Build optimized artifacts with the release profile. See also the
+ --profile option for choosing a specific profile by name.
+
+ --profile name
+ Build with the given profile.
+
+ The rustc subcommand will treat the following named profiles with
+ special behaviors:
+
+ o check — Builds in the same way as the cargo-check(1) command
+ with the dev profile.
+
+ o test — Builds in the same way as the cargo-test(1) command,
+ enabling building in test mode which will enable tests and enable
+ the test cfg option. See rustc tests
+ <https://doc.rust-lang.org/rustc/tests/index.html> for more
+ detail.
+
+ o bench — Builds in the same way as the cargo-bench(1) command,
+ similar to the test profile.
+
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/profiles.html> for more
+ details on profiles.
+
+ --ignore-rust-version
+ Build the target even if the selected Rust compiler is older than
+ the required Rust version as configured in the project’s
+ rust-version field.
+
+ Output information about how long each compilation takes, and track
+ Output information how long each compilation takes, and track
+ concurrency information over time. Accepts an optional
+ comma-separated list of output formats; --timings without an
+ argument will default to --timings=html. Specifying an output format
+ (rather than the default) is unstable and requires
+ -Zunstable-options. Valid output formats:
+
+ o html (unstable, requires -Zunstable-options): Write a
+ human-readable file cargo-timing.html to the target/cargo-timings
+ directory with a report of the compilation. Also write a report
+ to the same directory with a timestamp in the filename if you
+ want to look at older runs. HTML output is suitable for human
+ consumption only, and does not provide machine-readable timing
+ data.
+
+ o json (unstable, requires -Zunstable-options): Emit
+ machine-readable JSON information about timing information.
+
+ --crate-type crate-type
+ Build for the given crate type. This flag accepts a comma-separated
+ list of 1 or more crate types, of which the allowed values are the
+ same as crate-type field in the manifest for configuring a Cargo
+ target. See crate-type field
+ <https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-crate-type-field>
+ for possible values.
+
+ If the manifest contains a list, and --crate-type is provided, the
+ command-line argument value will override what is in the manifest.
+
+ This flag only works when building a lib or example library target.
+
+ Output Options
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --message-format fmt
+ The output format for diagnostic messages. Can be specified multiple
+ times and consists of comma-separated values. Valid values:
+
+ o human (default): Display in a human-readable text format.
+ Conflicts with short and json.
+
+ o short: Emit shorter, human-readable text messages. Conflicts with
+ human and json.
+
+ o json: Emit JSON messages to stdout. See the reference
+ <https://doc.rust-lang.org/cargo/reference/external-tools.html#json-messages>
+ for more details. Conflicts with human and short.
+
+ o json-diagnostic-short: Ensure the rendered field of JSON messages
+ contains the “short” rendering from rustc. Cannot be used
+ with human or short.
+
+ o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON
+ messages contains embedded ANSI color codes for respecting
+ rustc’s default color scheme. Cannot be used with human or
+ short.
+
+ o json-render-diagnostics: Instruct Cargo to not include rustc
+ diagnostics in JSON messages printed, but instead Cargo itself
+ should render the JSON diagnostics coming from rustc. Cargo’s
+ own JSON diagnostics and others coming from rustc are still
+ emitted. Cannot be used with human or short.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ Miscellaneous Options
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+ of parallel jobs to the number of logical CPUs plus the provided
+ value. The value should not be 0.
+
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ --future-incompat-report
+ Displays a future-incompat report for any future-incompatible
+ warnings produced during execution of this command
+
+ See cargo-report(1)
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Check if your package (not including dependencies) uses unsafe code:
+
+ cargo rustc --lib -- -D unsafe-code
+
+ 2. Try an experimental flag on the nightly compiler, such as this which
+ prints the size of every type:
+
+ cargo rustc --lib -- -Z print-type-sizes
+
+ 3. Override crate-type field in Cargo.toml with command-line option:
+
+ cargo rustc --lib --crate-type lib,cdylib
+
+SEE ALSO
+ cargo(1), cargo-build(1), rustc(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-rustdoc.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-rustdoc.txt
new file mode 100644
index 000000000..6a32a6b6e
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-rustdoc.txt
@@ -0,0 +1,340 @@
+CARGO-RUSTDOC(1)
+
+NAME
+ cargo-rustdoc — Build a package’s documentation, using specified
+ custom flags
+
+SYNOPSIS
+ cargo rustdoc [options] [-- args]
+
+DESCRIPTION
+ The specified target for the current package (or package specified by -p
+ if provided) will be documented with the specified args being passed to
+ the final rustdoc invocation. Dependencies will not be documented as
+ part of this command. Note that rustdoc will still unconditionally
+ receive arguments such as -L, --extern, and --crate-type, and the
+ specified args will simply be added to the rustdoc invocation.
+
+ See <https://doc.rust-lang.org/rustdoc/index.html> for documentation on
+ rustdoc flags.
+
+ This command requires that only one target is being compiled when
+ additional arguments are provided. If more than one target is available
+ for the current package, the filters of --lib, --bin, etc., must be used
+ to select which target is compiled.
+
+ To pass flags to all rustdoc processes spawned by Cargo, use the
+ RUSTDOCFLAGS environment variable
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ or the build.rustdocflags config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
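+ For example, an extra flag can be passed only to the final rustdoc
+ invocation, or to every rustdoc invocation via RUSTDOCFLAGS (the
+ --cfg value shown is just an illustration):
+
+ cargo rustdoc -- --cfg docsrs
+ RUSTDOCFLAGS="--cfg docsrs" cargo doc
+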
+OPTIONS
+ Documentation Options
+ --open
+ Open the docs in a browser after building them. This will use your
+ default browser unless you define another one in the BROWSER
+ environment variable or use the doc.browser
+ <https://doc.rust-lang.org/cargo/reference/config.html#docbrowser>
+ configuration option.
+
+ Package Selection
+ By default, the package in the current working directory is selected.
+ The -p flag can be used to choose a different package in a workspace.
+
+ -p spec, --package spec
+ The package to document. See cargo-pkgid(1) for the SPEC format.
+
+ Target Selection
+ When no target selection options are given, cargo rustdoc will document
+ all binary and library targets of the selected package. The binary will
+ be skipped if its name is the same as the lib target. Binaries are
+ skipped if they have required-features that are missing.
+
+ Passing target selection flags will document only the specified targets.
+
+ Note that --bin, --example, --test and --bench flags also support common
+ Unix glob patterns like *, ? and []. However, to avoid your shell
+ accidentally expanding glob patterns before Cargo handles them, you must
+ use single quotes or double quotes around each glob pattern.
+
+ --lib
+ Document the package’s library.
+
+ --bin name…
+ Document the specified binary. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --bins
+ Document all binary targets.
+
+ --example name…
+ Document the specified example. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --examples
+ Document all example targets.
+
+ --test name…
+ Document the specified integration test. This flag may be specified
+ multiple times and supports common Unix glob patterns.
+
+ --tests
+ Document all targets in test mode that have the test = true manifest
+ flag set. By default this includes the library and binaries built as
+ unittests, and integration tests. Be aware that this will also build
+ any required dependencies, so the lib target may be built twice
+ (once as a unittest, and once as a dependency for binaries,
+ integration tests, etc.). Targets may be enabled or disabled by
+ setting the test flag in the manifest settings for the target.
+
+ --bench name…
+ Document the specified benchmark. This flag may be specified
+ multiple times and supports common Unix glob patterns.
+
+ --benches
+ Document all targets in benchmark mode that have the bench = true
+ manifest flag set. By default this includes the library and binaries
+ built as benchmarks, and bench targets. Be aware that this will also
+ build any required dependencies, so the lib target may be built
+ twice (once as a benchmark, and once as a dependency for binaries,
+ benchmarks, etc.). Targets may be enabled or disabled by setting the
+ bench flag in the manifest settings for the target.
+
+ --all-targets
+ Document all targets. This is equivalent to specifying --lib --bins
+ --tests --benches --examples.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Compilation Options
+ --target triple
+ Document for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ -r, --release
+ Document optimized artifacts with the release profile. See also the
+ --profile option for choosing a specific profile by name.
+
+ --profile name
+ Document with the given profile. See the reference
+ <https://doc.rust-lang.org/cargo/reference/profiles.html> for more
+ details on profiles.
+
+ --ignore-rust-version
+ Document the target even if the selected Rust compiler is older than
+ the required Rust version as configured in the project’s
+ rust-version field.
+
+ Output information about how long each compilation takes, and track
+ Output information how long each compilation takes, and track
+ concurrency information over time. Accepts an optional
+ comma-separated list of output formats; --timings without an
+ argument will default to --timings=html. Specifying an output format
+ (rather than the default) is unstable and requires
+ -Zunstable-options. Valid output formats:
+
+ o html (unstable, requires -Zunstable-options): Write a
+ human-readable file cargo-timing.html to the target/cargo-timings
+ directory with a report of the compilation. Also write a report
+ to the same directory with a timestamp in the filename if you
+ want to look at older runs. HTML output is suitable for human
+ consumption only, and does not provide machine-readable timing
+ data.
+
+ o json (unstable, requires -Zunstable-options): Emit
+ machine-readable JSON information about timing information.
+
+ Output Options
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --message-format fmt
+ The output format for diagnostic messages. Can be specified multiple
+ times and consists of comma-separated values. Valid values:
+
+ o human (default): Display in a human-readable text format.
+ Conflicts with short and json.
+
+ o short: Emit shorter, human-readable text messages. Conflicts with
+ human and json.
+
+ o json: Emit JSON messages to stdout. See the reference
+ <https://doc.rust-lang.org/cargo/reference/external-tools.html#json-messages>
+ for more details. Conflicts with human and short.
+
+ o json-diagnostic-short: Ensure the rendered field of JSON messages
+ contains the “short” rendering from rustc. Cannot be used
+ with human or short.
+
+ o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON
+ messages contains embedded ANSI color codes for respecting
+ rustc’s default color scheme. Cannot be used with human or
+ short.
+
+ o json-render-diagnostics: Instruct Cargo to not include rustc
+ diagnostics in JSON messages printed, but instead Cargo itself
+ should render the JSON diagnostics coming from rustc. Cargo’s
+ own JSON diagnostics and others coming from rustc are still
+ emitted. Cannot be used with human or short.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ Miscellaneous Options
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+ of parallel jobs to the number of logical CPUs plus the provided
+ value. The value should not be 0.
+
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Build documentation with custom CSS included from a given file:
+
+ cargo rustdoc --lib -- --extend-css extra.css
+
+SEE ALSO
+ cargo(1), cargo-doc(1), rustdoc(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-search.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-search.txt
new file mode 100644
index 000000000..74bbda9f7
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-search.txt
@@ -0,0 +1,108 @@
+CARGO-SEARCH(1)
+
+NAME
+ cargo-search — Search packages in crates.io
+
+SYNOPSIS
+ cargo search [options] [query…]
+
+DESCRIPTION
+ This performs a textual search for crates on <https://crates.io>. The
+ matching crates will be displayed along with their description in TOML
+ format suitable for copying into a Cargo.toml manifest.
+
+OPTIONS
+ Search Options
+ --limit limit
+ Limit the number of results (default: 10, max: 100).
+
+ --index index
+ The URL of the registry index to use.
+
+ --registry registry
+ Name of the registry to use. Registry names are defined in Cargo
+ config files
+ <https://doc.rust-lang.org/cargo/reference/config.html>. If not
+ specified, the default registry is used, which is defined by the
+ registry.default config key which defaults to crates-io.
+
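+ For example, to show only the first three results for a query:
+
+ cargo search --limit 3 serde
+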
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Search for a package from crates.io:
+
+ cargo search serde
+
+SEE ALSO
+ cargo(1), cargo-install(1), cargo-publish(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-test.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-test.txt
new file mode 100644
index 000000000..3f4ed6072
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-test.txt
@@ -0,0 +1,466 @@
+CARGO-TEST(1)
+
+NAME
+ cargo-test — Execute unit and integration tests of a package
+
+SYNOPSIS
+ cargo test [options] [testname] [-- test-options]
+
+DESCRIPTION
+ Compile and execute unit, integration, and documentation tests.
+
+ The test filtering argument TESTNAME and all the arguments following the
+ two dashes (--) are passed to the test binaries and thus to libtest
+ (rustc’s built-in unit-test and micro-benchmarking framework). If
+ you’re passing arguments to both Cargo and the binary, the ones after
+ -- go to the binary, and the ones before go to Cargo. For details about
+ libtest’s arguments see the output of cargo test -- --help and check
+ out the rustc book’s chapter on how tests work at
+ <https://doc.rust-lang.org/rustc/tests/index.html>.
+
+ As an example, this will filter for tests with foo in their name and run
+ them on 3 threads in parallel:
+
+ cargo test foo -- --test-threads 3
+
+ Tests are built with the --test option to rustc which creates a special
+ executable by linking your code with libtest. The executable
+ automatically runs all functions annotated with the #[test] attribute in
+ multiple threads. #[bench] annotated functions will also be run with one
+ iteration to verify that they are functional.
+
+ If the package contains multiple test targets, each target compiles to
+ a special executable as described above, and the executables are then
+ run serially.
+
+ The libtest harness may be disabled by setting harness = false in the
+ target manifest settings, in which case your code will need to provide
+ its own main function to handle running tests.
+
+ Documentation tests
+ Documentation tests are also run by default, which is handled by
+ rustdoc. It extracts code samples from documentation comments of the
+ library target, and then executes them.
+
+ Different from normal test targets, each code block compiles to a
+ doctest executable on the fly with rustc. These executables run in
+ parallel in separate processes. The compilation of a code block is in
+ fact part of the test function controlled by libtest, so some options
+ as --jobs might not take effect. Note that this execution model of
+ doctests is not guaranteed and may change in the future; beware of
+ depending on it.
+
+ See the rustdoc book <https://doc.rust-lang.org/rustdoc/> for more
+ information on writing doc tests.
+
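+ To run only the documentation tests, the --doc target selection flag
+ described below may be used:
+
+ cargo test --doc
+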
+ Working directory of tests
+ The working directory of every test is set to the root directory of the
+ package the test belongs to. Setting the working directory of tests to
+ the package’s root directory makes it possible for tests to reliably
+ access the package’s files using relative paths, regardless of where
+ cargo test was executed from.
+
+OPTIONS
+ Test Options
+ --no-run
+ Compile, but don’t run tests.
+
+ --no-fail-fast
+ Run all tests regardless of failure. Without this flag, Cargo will
+ exit after the first executable fails. The Rust test harness will
+ run all tests within the executable to completion; this flag only
+ applies to the executable as a whole.
+
+ Package Selection
+ By default, when no package selection options are given, the packages
+ selected depend on the selected manifest file (based on the current
+ working directory if --manifest-path is not given). If the manifest is
+ the root of a workspace, then the workspace’s default members are
+ selected, otherwise only the package defined by the manifest will be
+ selected.
+
+ The default members of a workspace can be set explicitly with the
+ workspace.default-members key in the root manifest. If this is not set,
+ a virtual workspace will include all workspace members (equivalent to
+ passing --workspace), and a non-virtual workspace will include only the
+ root crate itself.
+
+ -p spec…, --package spec…
+ Test only the specified packages. See cargo-pkgid(1) for the SPEC
+ format. This flag may be specified multiple times and supports
+ common Unix glob patterns like *, ? and []. However, to avoid your
+ shell accidentally expanding glob patterns before Cargo handles
+ them, you must use single quotes or double quotes around each
+ pattern.
+
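+ For example, quoting keeps a glob pattern intact for Cargo to expand
+ (the package name pattern below is illustrative):
+
+ cargo test -p 'my-*'
+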
+ --workspace
+ Test all members in the workspace.
+
+ --all
+ Deprecated alias for --workspace.
+
+ --exclude SPEC…
+ Exclude the specified packages. Must be used in conjunction with the
+ --workspace flag. This flag may be specified multiple times and
+ supports common Unix glob patterns like *, ? and []. However, to
+ avoid your shell accidentally expanding glob patterns before Cargo
+ handles them, you must use single quotes or double quotes around
+ each pattern.
+
+ Target Selection
+ When no target selection options are given, cargo test will build the
+ following targets of the selected packages:
+
+ o lib — used to link with binaries, examples, integration tests, and
+ doc tests
+
+ o bins (only if integration tests are built and required features are
+ available)
+
+ o examples — to ensure they compile
+
+ o lib as a unit test
+
+ o bins as unit tests
+
+ o integration tests
+
+ o doc tests for the lib target
+
+ The default behavior can be changed by setting the test flag for the
+ target in the manifest settings. Setting examples to test = true will
+ build and run the example as a test. Setting targets to test = false
+ will stop them from being tested by default. Target selection options
+ that take a target by name ignore the test flag and will always test the
+ given target.
+
+ Doc tests for libraries may be disabled by setting doctest = false for
+ the library in the manifest.
+
+ Binary targets are automatically built if there is an integration test
+ or benchmark being selected to test. This allows an integration test to
+ execute the binary to exercise and test its behavior. The
+ CARGO_BIN_EXE_<name> environment variable
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
+ is set when the integration test is built so that it can use the env
+ macro <https://doc.rust-lang.org/std/macro.env.html> to locate the
+ executable.
+
+ Passing target selection flags will test only the specified targets.
+
+ Note that --bin, --example, --test and --bench flags also support common
+ Unix glob patterns like *, ? and []. However, to avoid your shell
+ accidentally expanding glob patterns before Cargo handles them, you must
+ use single quotes or double quotes around each glob pattern.
+
+ --lib
+ Test the package’s library.
+
+ --bin name…
+ Test the specified binary. This flag may be specified multiple times
+ and supports common Unix glob patterns.
+
+ --bins
+ Test all binary targets.
+
+ --example name…
+ Test the specified example. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --examples
+ Test all example targets.
+
+ --test name…
+ Test the specified integration test. This flag may be specified
+ multiple times and supports common Unix glob patterns.
+
+ --tests
+ Test all targets in test mode that have the test = true manifest
+ flag set. By default this includes the library and binaries built as
+ unittests, and integration tests. Be aware that this will also build
+ any required dependencies, so the lib target may be built twice
+ (once as a unittest, and once as a dependency for binaries,
+ integration tests, etc.). Targets may be enabled or disabled by
+ setting the test flag in the manifest settings for the target.
+
+ --bench name…
+ Test the specified benchmark. This flag may be specified multiple
+ times and supports common Unix glob patterns.
+
+ --benches
+ Test all targets in benchmark mode that have the bench = true
+ manifest flag set. By default this includes the library and binaries
+ built as benchmarks, and bench targets. Be aware that this will also
+ build any required dependencies, so the lib target may be built
+ twice (once as a benchmark, and once as a dependency for binaries,
+ benchmarks, etc.). Targets may be enabled or disabled by setting the
+ bench flag in the manifest settings for the target.
+
+ --all-targets
+ Test all targets. This is equivalent to specifying --lib --bins
+ --tests --benches --examples.
+
+ --doc
+ Test only the library’s documentation. This cannot be mixed with
+ other target options.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Compilation Options
+ --target triple
+ Test for the given architecture. The default is the host
+ architecture. The general format of the triple is
+ <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for
+ a list of supported targets. This flag may be specified multiple
+ times.
+
+ This may also be specified with the build.target config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Note that specifying this flag makes Cargo run in a different mode
+ where the target artifacts are placed in a separate directory. See
+ the build cache
+ <https://doc.rust-lang.org/cargo/guide/build-cache.html>
+ documentation for more details.
+
+ -r, --release
+ Test optimized artifacts with the release profile. See also the
+ --profile option for choosing a specific profile by name.
+
+ --profile name
+ Test with the given profile. See the reference
+ <https://doc.rust-lang.org/cargo/reference/profiles.html> for more
+ details on profiles.
+
+ --ignore-rust-version
+ Test the target even if the selected Rust compiler is older than the
+ required Rust version as configured in the project’s rust-version
+ field.
+
+ Output information about how long each compilation takes, and track
+ Output information how long each compilation takes, and track
+ concurrency information over time. Accepts an optional
+ comma-separated list of output formats; --timings without an
+ argument will default to --timings=html. Specifying an output format
+ (rather than the default) is unstable and requires
+ -Zunstable-options. Valid output formats:
+
+ o html (unstable, requires -Zunstable-options): Write a
+ human-readable file cargo-timing.html to the target/cargo-timings
+ directory with a report of the compilation. Also write a report
+ to the same directory with a timestamp in the filename if you
+ want to look at older runs. HTML output is suitable for human
+ consumption only, and does not provide machine-readable timing
+ data.
+
+ o json (unstable, requires -Zunstable-options): Emit
+ machine-readable JSON information about timing information.
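+
+ For example, to produce the default HTML timing report while running
+ the tests:
+
+ cargo test --timings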
+
+ Output Options
+ --target-dir directory
+ Directory for all generated artifacts and intermediate files. May
+ also be specified with the CARGO_TARGET_DIR environment variable, or
+ the build.target-dir config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ target in the root of the workspace.
+
+ Display Options
+ By default the Rust test harness hides output from test execution to
+ keep results readable. Test output can be recovered (e.g., for
+ debugging) by passing --nocapture to the test binaries:
+
+ cargo test -- --nocapture
+
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --message-format fmt
+ The output format for diagnostic messages. Can be specified multiple
+ times and consists of comma-separated values. Valid values:
+
+ o human (default): Display in a human-readable text format.
+ Conflicts with short and json.
+
+ o short: Emit shorter, human-readable text messages. Conflicts with
+ human and json.
+
+ o json: Emit JSON messages to stdout. See the reference
+ <https://doc.rust-lang.org/cargo/reference/external-tools.html#json-messages>
+ for more details. Conflicts with human and short.
+
+ o json-diagnostic-short: Ensure the rendered field of JSON messages
+ contains the “short” rendering from rustc. Cannot be used
+ with human or short.
+
+ o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON
+ messages contains embedded ANSI color codes for respecting
+ rustc’s default color scheme. Cannot be used with human or
+ short.
+
+ o json-render-diagnostics: Instruct Cargo to not include rustc
+ diagnostics in JSON messages printed, but instead Cargo itself
+ should render the JSON diagnostics coming from rustc. Cargo’s
+ own JSON diagnostics and others coming from rustc are still
+ emitted. Cannot be used with human or short.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ Miscellaneous Options
+ The --jobs argument affects the building of the test executable but does
+ not affect how many threads are used when running the tests. The Rust
+ test harness includes an option to control the number of threads used:
+
+ cargo test -j 2 -- --test-threads=2
+
+ -j N, --jobs N
+ Number of parallel jobs to run. May also be specified with the
+ build.jobs config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>. Defaults to
+ the number of logical CPUs. If negative, it sets the maximum number
+ of parallel jobs to the number of logical CPUs plus provided value.
+ Should not be 0.
+
+ --keep-going
+ Build as many crates in the dependency graph as possible, rather
+ than aborting the build on the first one that fails to build.
+ Unstable, requires -Zunstable-options.
+
+ --future-incompat-report
+ Displays a future-incompat report for any future-incompatible
+ warnings produced during execution of this command.
+
+ See cargo-report(1).
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Execute all the unit and integration tests of the current package:
+
+ cargo test
+
+ 2. Run only tests whose names match against a filter string:
+
+ cargo test name_filter
+
+ 3. Run only a specific test within a specific integration test:
+
+ cargo test --test int_test_name -- modname::test_name
+
+SEE ALSO
+ cargo(1), cargo-bench(1), types of tests
+ <https://doc.rust-lang.org/cargo/reference/cargo-targets.html#tests>,
+ how to write tests <https://doc.rust-lang.org/rustc/tests/index.html>
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-tree.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-tree.txt
new file mode 100644
index 000000000..5b81f0aa1
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-tree.txt
@@ -0,0 +1,394 @@
+CARGO-TREE(1)
+
+NAME
+ cargo-tree — Display a tree visualization of a dependency graph
+
+SYNOPSIS
+ cargo tree [options]
+
+DESCRIPTION
+ This command will display a tree of dependencies to the terminal. An
+ example of a simple project that depends on the “rand” package:
+
+ myproject v0.1.0 (/myproject)
+ └── rand v0.7.3
+ ├── getrandom v0.1.14
+ │ ├── cfg-if v0.1.10
+ │ └── libc v0.2.68
+ ├── libc v0.2.68 (*)
+ ├── rand_chacha v0.2.2
+ │ ├── ppv-lite86 v0.2.6
+ │ └── rand_core v0.5.1
+ │ └── getrandom v0.1.14 (*)
+ └── rand_core v0.5.1 (*)
+ [build-dependencies]
+ └── cc v1.0.50
+
+ Packages marked with (*) have been “de-duplicated”. The dependencies
+ for the package have already been shown elsewhere in the graph, and so
+ are not repeated. Use the --no-dedupe option to repeat the duplicates.
+
+ The -e flag can be used to select the dependency kinds to display. The
+ “features” kind changes the output to display the features enabled
+ by each dependency. For example, cargo tree -e features:
+
+ myproject v0.1.0 (/myproject)
+ └── log feature "serde"
+ └── log v0.4.8
+ ├── serde v1.0.106
+ └── cfg-if feature "default"
+ └── cfg-if v0.1.10
+
+ In this tree, myproject depends on log with the serde feature. log in
+ turn depends on cfg-if with “default” features. When using -e
+ features it can be helpful to use the -i flag to show how the features flow
+ into a package. See the examples below for more detail.
+
+ Feature Unification
+ This command shows a graph much closer to a feature-unified graph Cargo
+ will build, rather than what you list in Cargo.toml. For instance, if
+ you specify the same dependency in both [dependencies] and
+ [dev-dependencies], but with different features enabled, this command
+ may merge all features and show a (*) on one of the dependencies to
+ indicate the duplicate.
+
+ As a result, for a mostly equivalent overview of what cargo build does,
+ cargo tree -e normal,build is pretty close; for a mostly equivalent
+ overview of what cargo test does, cargo tree is pretty close. However,
+ it doesn’t guarantee the exact equivalence to what Cargo is going to
+ build, since a compilation is complex and depends on lots of different
+ factors.
+
+ To learn more about feature unification, check out this dedicated
+ section
+ <https://doc.rust-lang.org/cargo/reference/features.html#feature-unification>.
+
+OPTIONS
+ Tree Options
+ -i spec, --invert spec
+ Show the reverse dependencies for the given package. This flag will
+ invert the tree and display the packages that depend on the given
+ package.
+
+ Note that in a workspace, by default it will only display the
+ package’s reverse dependencies inside the tree of the workspace
+ member in the current directory. The --workspace flag can be used to
+ extend it so that it will show the package’s reverse dependencies
+ across the entire workspace. The -p flag can be used to display the
+ package’s reverse dependencies only within the subtree of the
+ package given to -p.
+
+ --prune spec
+ Prune the given package from the display of the dependency tree.
+
+ --depth depth
+ Maximum display depth of the dependency tree. A depth of 1 displays
+ the direct dependencies, for example.
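+
+ For instance, to list only the direct dependencies of the current
+ package:
+
+ cargo tree --depth 1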
+
+ --no-dedupe
+ Do not de-duplicate repeated dependencies. Usually, when a package
+ has already displayed its dependencies, further occurrences will not
+ re-display its dependencies, and will include a (*) to indicate it
+ has already been shown. This flag will cause those duplicates to be
+ repeated.
+
+ -d, --duplicates
+ Show only dependencies which come in multiple versions (implies
+ --invert). When used with the -p flag, only shows duplicates within
+ the subtree of the given package.
+
+ It can be beneficial for build times and executable sizes to avoid
+ building the same package multiple times. This flag can help
+ identify the offending packages. You can then investigate if the
+ package that depends on the duplicate with the older version can be
+ updated to the newer version so that only one instance is built.
+
+ -e kinds, --edges kinds
+ The dependency kinds to display. Takes a comma separated list of
+ values:
+
+ o all — Show all edge kinds.
+
+ o normal — Show normal dependencies.
+
+ o build — Show build dependencies.
+
+ o dev — Show development dependencies.
+
+ o features — Show features enabled by each dependency. If this is
+ the only kind given, then it will automatically include the other
+ dependency kinds.
+
+ o no-normal — Do not include normal dependencies.
+
+ o no-build — Do not include build dependencies.
+
+ o no-dev — Do not include development dependencies.
+
+ o no-proc-macro — Do not include procedural macro dependencies.
+
+ The normal, build, dev, and all dependency kinds cannot be mixed
+ with no-normal, no-build, or no-dev dependency kinds.
+
+ The default is normal,build,dev.
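+
+ For example, to hide development dependencies while keeping normal
+ and build dependencies in the tree:
+
+ cargo tree -e no-dev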
+
+ --target triple
+ Filter dependencies matching the given target triple
+ <https://doc.rust-lang.org/cargo/appendix/glossary.html#target>. The
+ default is the host platform. Use the value all to include all
+ targets.
+
+ Tree Formatting Options
+ --charset charset
+ Chooses the character set to use for the tree. Valid values are
+ “utf8” or “ascii”. Default is “utf8”.
+
+ -f format, --format format
+ Set the format string for each package. The default is “{p}”.
+
+ This is an arbitrary string which will be used to display each
+ package. The following strings will be replaced with the
+ corresponding value:
+
+ o {p} — The package name.
+
+ o {l} — The package license.
+
+ o {r} — The package repository URL.
+
+ o {f} — Comma-separated list of package features that are
+ enabled.
+
+ o {lib} — The name, as used in a use statement, of the
+ package’s library.
+
+ --prefix prefix
+ Sets how each line is displayed. The prefix value can be one of:
+
+ o indent (default) — Shows each line indented as a tree.
+
+ o depth — Show as a list, with the numeric depth printed before
+ each entry.
+
+ o none — Show as a flat list.
+
+ Package Selection
+ By default, when no package selection options are given, the packages
+ selected depend on the selected manifest file (based on the current
+ working directory if --manifest-path is not given). If the manifest is
+ the root of a workspace then the workspace’s default members are
+ selected, otherwise only the package defined by the manifest will be
+ selected.
+
+ The default members of a workspace can be set explicitly with the
+ workspace.default-members key in the root manifest. If this is not set,
+ a virtual workspace will include all workspace members (equivalent to
+ passing --workspace), and a non-virtual workspace will include only the
+ root crate itself.
+
+ -p spec…, --package spec…
+ Display only the specified packages. See cargo-pkgid(1) for the SPEC
+ format. This flag may be specified multiple times and supports
+ common Unix glob patterns like *, ? and []. However, to avoid your
+ shell accidentally expanding glob patterns before Cargo handles
+ them, you must use single quotes or double quotes around each
+ pattern.
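+
+ For example, to display every selected package whose name starts
+ with the hypothetical prefix serde, quoting the pattern so the shell
+ does not expand it:
+
+ cargo tree -p "serde*"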
+
+ --workspace
+ Display all members in the workspace.
+
+ --exclude SPEC…
+ Exclude the specified packages. Must be used in conjunction with the
+ --workspace flag. This flag may be specified multiple times and
+ supports common Unix glob patterns like *, ? and []. However, to
+ avoid your shell accidentally expanding glob patterns before Cargo
+ handles them, you must use single quotes or double quotes around
+ each pattern.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Feature Selection
+ The feature flags allow you to control which features are enabled. When
+ no feature options are given, the default feature is activated for every
+ selected package.
+
+ See the features documentation
+ <https://doc.rust-lang.org/cargo/reference/features.html#command-line-feature-options>
+ for more details.
+
+ -F features, --features features
+ Space or comma separated list of features to activate. Features of
+ workspace members may be enabled with package-name/feature-name
+ syntax. This flag may be specified multiple times, which enables all
+ specified features.
+
+ --all-features
+ Activate all available features of all selected packages.
+
+ --no-default-features
+ Do not activate the default feature of the selected packages.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Display the tree for the package in the current directory:
+
+ cargo tree
+
+ 2. Display all the packages that depend on the syn package:
+
+ cargo tree -i syn
+
+ 3. Show the features enabled on each package:
+
+ cargo tree --format "{p} {f}"
+
+ 4. Show all packages that are built multiple times. This can happen if
+ multiple semver-incompatible versions appear in the tree (like 1.0.0
+ and 2.0.0).
+
+ cargo tree -d
+
+ 5. Explain why features are enabled for the syn package:
+
+ cargo tree -e features -i syn
+
+ The -e features flag is used to show features. The -i flag is used to
+ invert the graph so that it displays the packages that depend on syn.
+ An example of what this would display:
+
+ syn v1.0.17
+ ├── syn feature "clone-impls"
+ │ └── syn feature "default"
+ │ └── rustversion v1.0.2
+ │ └── rustversion feature "default"
+ │ └── myproject v0.1.0 (/myproject)
+ │ └── myproject feature "default" (command-line)
+ ├── syn feature "default" (*)
+ ├── syn feature "derive"
+ │ └── syn feature "default" (*)
+ ├── syn feature "full"
+ │ └── rustversion v1.0.2 (*)
+ ├── syn feature "parsing"
+ │ └── syn feature "default" (*)
+ ├── syn feature "printing"
+ │ └── syn feature "default" (*)
+ ├── syn feature "proc-macro"
+ │ └── syn feature "default" (*)
+ └── syn feature "quote"
+ ├── syn feature "printing" (*)
+ └── syn feature "proc-macro" (*)
+
+ To read this graph, you can follow the chain for each feature from
+ the root to see why it is included. For example, the “full”
+ feature is added by the rustversion crate which is included from
+ myproject (with the default features), and myproject is the package
+ selected on the command-line. All of the other syn features are added
+ by the “default” feature (“quote” is added by “printing”
+ and “proc-macro”, both of which are default features).
+
+ If you’re having difficulty cross-referencing the de-duplicated (*)
+ entries, try with the --no-dedupe flag to get the full output.
+
+SEE ALSO
+ cargo(1), cargo-metadata(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-uninstall.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-uninstall.txt
new file mode 100644
index 000000000..53b21cde8
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-uninstall.txt
@@ -0,0 +1,120 @@
+CARGO-UNINSTALL(1)
+
+NAME
+ cargo-uninstall — Remove a Rust binary
+
+SYNOPSIS
+ cargo uninstall [options] [spec…]
+
+DESCRIPTION
+ This command removes a package installed with cargo-install(1). The spec
+ argument is a package ID specification of the package to remove (see
+ cargo-pkgid(1)).
+
+ By default all binaries are removed for a crate but the --bin and
+ --example flags can be used to only remove particular binaries.
+
+ The installation root is determined, in order of precedence:
+
+ o --root option
+
+ o CARGO_INSTALL_ROOT environment variable
+
+ o install.root Cargo config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>
+
+ o CARGO_HOME environment variable
+
+ o $HOME/.cargo
+
+OPTIONS
+ Install Options
+ -p, --package spec…
+ Package to uninstall.
+
+ --bin name…
+ Only uninstall the binary name.
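+
+ For example, assuming a hypothetical package foo that also installs
+ a binary named foo-cli, only that binary can be removed with:
+
+ cargo uninstall foo --bin foo-cli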
+
+ --root dir
+ Directory to uninstall packages from.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Uninstall a previously installed package.
+
+ cargo uninstall ripgrep
+
+SEE ALSO
+ cargo(1), cargo-install(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-update.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-update.txt
new file mode 100644
index 000000000..fb662c389
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-update.txt
@@ -0,0 +1,166 @@
+CARGO-UPDATE(1)
+
+NAME
+ cargo-update — Update dependencies as recorded in the local lock file
+
+SYNOPSIS
+ cargo update [options]
+
+DESCRIPTION
+ This command will update dependencies in the Cargo.lock file to the
+ latest version. If the Cargo.lock file does not exist, it will be
+ created with the latest available versions.
+
+OPTIONS
+ Update Options
+ -p spec…, --package spec…
+ Update only the specified packages. This flag may be specified
+ multiple times. See cargo-pkgid(1) for the SPEC format.
+
+ If packages are specified with the -p flag, then a conservative
+ update of the lockfile will be performed. This means that only the
+ dependency specified by SPEC will be updated. Its transitive
+ dependencies will be updated only if SPEC cannot be updated without
+ updating dependencies. All other dependencies will remain locked at
+ their currently recorded versions.
+
+ If -p is not specified, all dependencies are updated.
+
+ --aggressive
+ When used with -p, dependencies of spec are forced to update as
+ well. Cannot be used with --precise.
+
+ --precise precise
+ When used with -p, allows you to specify a specific version number
+ to set the package to. If the package comes from a git repository,
+ this can be a git revision (such as a SHA hash or tag).
+
+ -w, --workspace
+ Attempt to update only packages defined in the workspace. Other
+ packages are updated only if they don’t already exist in the
+ lockfile. This option is useful for updating Cargo.lock after
+ you’ve changed version numbers in Cargo.toml.
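+
+ For example, after bumping version numbers in the workspace
+ members’ Cargo.toml files:
+
+ cargo update --workspace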
+
+ --dry-run
+ Displays what would be updated, but doesn’t actually write the
+ lockfile.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Update all dependencies in the lockfile:
+
+ cargo update
+
+ 2. Update only specific dependencies:
+
+ cargo update -p foo -p bar
+
+ 3. Set a specific dependency to a specific version:
+
+ cargo update -p foo --precise 1.2.3
+
+SEE ALSO
+ cargo(1), cargo-generate-lockfile(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-vendor.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-vendor.txt
new file mode 100644
index 000000000..c325b7534
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-vendor.txt
@@ -0,0 +1,162 @@
+CARGO-VENDOR(1)
+
+NAME
+ cargo-vendor — Vendor all dependencies locally
+
+SYNOPSIS
+ cargo vendor [options] [path]
+
+DESCRIPTION
+ This cargo subcommand will vendor all crates.io and git dependencies for
+ a project into the specified directory at <path>. After this command
+ completes the vendor directory specified by <path> will contain all
+ remote sources from dependencies specified. Additional manifests beyond
+ the default one can be specified with the -s option.
+
+ The cargo vendor command will also print out the configuration necessary
+ to use the vendored sources, which you will need to add to
+ .cargo/config.toml.
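+
+ For a project whose dependencies come from crates.io and which
+ vendors into the default “vendor” directory, the printed
+ configuration typically resembles:
+
+ [source.crates-io]
+ replace-with = "vendored-sources"
+
+ [source.vendored-sources]
+ directory = "vendor"
+
+ The exact entries depend on the sources used by your dependencies.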
+
+OPTIONS
+ Vendor Options
+ -s manifest, --sync manifest
+ Specify an extra Cargo.toml manifest of a workspace which should also
+ be vendored and synced to the output. May be specified multiple
+ times.
+
+ --no-delete
+ Don’t delete the “vendor” directory when vendoring, but rather
+ keep all existing contents of the vendor directory.
+
+ --respect-source-config
+ Instead of ignoring the [source] configuration in .cargo/config.toml,
+ as Cargo does by default, read it and use it when downloading crates
+ from crates.io, for example.
+
+ --versioned-dirs
+ Normally versions are only added to disambiguate multiple versions
+ of the same package. This option causes all directories in the
+ “vendor” directory to be versioned, which makes it easier to
+ track the history of vendored packages over time, and can help with
+ the performance of re-vendoring when only a subset of the packages
+ have changed.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Vendor all dependencies into a local “vendor” folder:
+
+ cargo vendor
+
+ 2. Vendor all dependencies into a local “third-party/vendor” folder:
+
+ cargo vendor third-party/vendor
+
+ 3. Vendor the current workspace as well as another workspace into “vendor”:
+
+ cargo vendor -s ../path/to/Cargo.toml
+
+SEE ALSO
+ cargo(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-verify-project.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-verify-project.txt
new file mode 100644
index 000000000..e0e7a4d2b
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-verify-project.txt
@@ -0,0 +1,131 @@
+CARGO-VERIFY-PROJECT(1)
+
+NAME
+ cargo-verify-project — Check correctness of crate manifest
+
+SYNOPSIS
+ cargo verify-project [options]
+
+DESCRIPTION
+ This command will parse the local manifest and check its validity. It
+ emits a JSON object with the result. A successful validation will
+ display:
+
+ {"success":"true"}
+
+ An invalid workspace will display:
+
+ {"invalid":"human-readable error message"}
+
+OPTIONS
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Manifest Options
+ --manifest-path path
+ Path to the Cargo.toml file. By default, Cargo searches for the
+ Cargo.toml file in the current directory or any parent directory.
+
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: The workspace is OK.
+
+ o 1: The workspace is invalid.
+
+EXAMPLES
+ 1. Check the current workspace for errors:
+
+ cargo verify-project
+
+SEE ALSO
+ cargo(1), cargo-package(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-version.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-version.txt
new file mode 100644
index 000000000..138390a27
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-version.txt
@@ -0,0 +1,32 @@
+CARGO-VERSION(1)
+
+NAME
+ cargo-version — Show version information
+
+SYNOPSIS
+ cargo version [options]
+
+DESCRIPTION
+ Displays the version of Cargo.
+
+OPTIONS
+ -v, --verbose
+ Display additional version information.
+
+EXAMPLES
+ 1. Display the version:
+
+ cargo version
+
+ 2. The version is also available via flags:
+
+ cargo --version
+ cargo -V
+
+ 3. Display extra version information:
+
+ cargo -Vv
+
+SEE ALSO
+ cargo(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-yank.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-yank.txt
new file mode 100644
index 000000000..376d7373c
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-yank.txt
@@ -0,0 +1,135 @@
+CARGO-YANK(1)
+
+NAME
+ cargo-yank — Remove a pushed crate from the index
+
+SYNOPSIS
+ cargo yank [options] crate@version
+ cargo yank [options] --version version [crate]
+
+DESCRIPTION
+ The yank command removes a previously published crate’s version from
+ the server’s index. This command does not delete any data, and the
+ crate will still be available for download via the registry’s download
+ link.
+
+ Note that existing crates locked to a yanked version will still be able
+ to download the yanked version to use it. Cargo will, however, not allow
+ any new crates to be locked to any yanked version.
+
+ This command requires you to be authenticated with either the --token
+ option or using cargo-login(1).
+
+ If the crate name is not specified, it will use the package name from
+ the current directory.
+
+OPTIONS
+ Yank Options
+ --vers version, --version version
+ The version to yank or un-yank.
+
+ --undo
+ Undo a yank, putting a version back into the index.
+
+ --token token
+ API token to use when authenticating. This overrides the token
+ stored in the credentials file (which is created by cargo-login(1)).
+
+ Cargo config <https://doc.rust-lang.org/cargo/reference/config.html>
+ environment variables can be used to override the tokens stored in
+ the credentials file. The token for crates.io may be specified with
+ the CARGO_REGISTRY_TOKEN environment variable. Tokens for other
+ registries may be specified with environment variables of the form
+ CARGO_REGISTRIES_NAME_TOKEN where NAME is the name of the registry
+ in all capital letters.
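+
+ For example, for a registry configured under the hypothetical name
+ myregistry, the token could be supplied through the environment (the
+ token value below is a placeholder):
+
+ CARGO_REGISTRIES_MYREGISTRY_TOKEN=token cargo yank --registry myregistry foo@1.0.7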
+
+ --index index
+ The URL of the registry index to use.
+
+ --registry registry
+ Name of the registry to use. Registry names are defined in Cargo
+ config files
+ <https://doc.rust-lang.org/cargo/reference/config.html>. If not
+ specified, the default registry is used, which is defined by the
+ registry.default config key which defaults to crates-io.
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+EXAMPLES
+ 1. Yank a crate from the index:
+
+ cargo yank foo@1.0.7
+
+SEE ALSO
+ cargo(1), cargo-login(1), cargo-publish(1)
+
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo.txt
new file mode 100644
index 000000000..5c0762e60
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo.txt
@@ -0,0 +1,298 @@
+CARGO(1)
+
+NAME
+ cargo — The Rust package manager
+
+SYNOPSIS
+ cargo [options] command [args]
+ cargo [options] --version
+ cargo [options] --list
+ cargo [options] --help
+ cargo [options] --explain code
+
+DESCRIPTION
+ This program is a package manager and build tool for the Rust language,
+ available at <https://rust-lang.org>.
+
+COMMANDS
+ Build Commands
+ cargo-bench(1)
+     Execute benchmarks of a package.
+
+ cargo-build(1)
+     Compile a package.
+
+ cargo-check(1)
+     Check a local package and all of its dependencies for errors.
+
+ cargo-clean(1)
+     Remove artifacts that Cargo has generated in the past.
+
+ cargo-doc(1)
+     Build a package’s documentation.
+
+ cargo-fetch(1)
+     Fetch dependencies of a package from the network.
+
+ cargo-fix(1)
+     Automatically fix lint warnings reported by rustc.
+
+ cargo-run(1)
+     Run a binary or example of the local package.
+
+ cargo-rustc(1)
+     Compile a package, and pass extra options to the compiler.
+
+ cargo-rustdoc(1)
+     Build a package’s documentation, using specified custom flags.
+
+ cargo-test(1)
+     Execute unit and integration tests of a package.
+
+ Manifest Commands
+ cargo-generate-lockfile(1)
+     Generate Cargo.lock for a project.
+
+ cargo-locate-project(1)
+     Print a JSON representation of a Cargo.toml file’s location.
+
+ cargo-metadata(1)
+     Output the resolved dependencies of a package in
+ machine-readable format.
+
+ cargo-pkgid(1)
+     Print a fully qualified package specification.
+
+ cargo-tree(1)
+     Display a tree visualization of a dependency graph.
+
+ cargo-update(1)
+     Update dependencies as recorded in the local lock file.
+
+ cargo-vendor(1)
+     Vendor all dependencies locally.
+
+ cargo-verify-project(1)
+     Check correctness of crate manifest.
+
+ Package Commands
+ cargo-init(1)
+     Create a new Cargo package in an existing directory.
+
+ cargo-install(1)
+     Build and install a Rust binary.
+
+ cargo-new(1)
+     Create a new Cargo package.
+
+ cargo-search(1)
+     Search packages in crates.io.
+
+ cargo-uninstall(1)
+     Remove a Rust binary.
+
+ Publishing Commands
+ cargo-login(1)
+     Save an API token from the registry locally.
+
+ cargo-logout(1)
+     Remove an API token from the registry locally.
+
+ cargo-owner(1)
+     Manage the owners of a crate on the registry.
+
+ cargo-package(1)
+     Assemble the local package into a distributable tarball.
+
+ cargo-publish(1)
+     Upload a package to the registry.
+
+ cargo-yank(1)
+     Remove a pushed crate from the index.
+
+ General Commands
+ cargo-help(1)
+     Display help information about Cargo.
+
+ cargo-version(1)
+     Show version information.
+
+OPTIONS
+ Special Options
+ -V, --version
+ Print version info and exit. If used with --verbose, prints extra
+ information.
+
+ --list
+ List all installed Cargo subcommands. If used with --verbose, prints
+ extra information.
+
+ --explain code
+ Run rustc --explain CODE which will print out a detailed explanation
+ of an error message (for example, E0004).
+
+ Display Options
+ -v, --verbose
+ Use verbose output. May be specified twice for “very verbose”
+ output which includes extra output such as dependency warnings and
+ build script output. May also be specified with the term.verbose
+ config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ -q, --quiet
+ Do not print cargo log messages. May also be specified with the
+ term.quiet config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ --color when
+ Control when colored output is used. Valid values:
+
+ o auto (default): Automatically detect if color support is
+ available on the terminal.
+
+ o always: Always display colors.
+
+ o never: Never display colors.
+
+ May also be specified with the term.color config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Manifest Options
+ --frozen, --locked
+ Either of these flags requires that the Cargo.lock file is
+ up-to-date. If the lock file is missing, or it needs to be updated,
+ Cargo will exit with an error. The --frozen flag also prevents Cargo
+ from attempting to access the network to determine if it is
+ out-of-date.
+
+ These may be used in environments where you want to assert that the
+ Cargo.lock file is up-to-date (such as a CI build) or want to avoid
+ network access.
+
+ --offline
+ Prevents Cargo from accessing the network for any reason. Without
+ this flag, Cargo will stop with an error if it needs to access the
+ network and the network is not available. With this flag, Cargo will
+ attempt to proceed without the network if possible.
+
+ Beware that this may result in different dependency resolution than
+ online mode. Cargo will restrict itself to crates that are
+ downloaded locally, even if there might be a newer version as
+ indicated in the local copy of the index. See the cargo-fetch(1)
+ command to download dependencies before going offline.
+
+ May also be specified with the net.offline config value
+ <https://doc.rust-lang.org/cargo/reference/config.html>.
+
+ Common Options
+ +toolchain
+ If Cargo has been installed with rustup, and the first argument to
+ cargo begins with +, it will be interpreted as a rustup toolchain
+ name (such as +stable or +nightly). See the rustup documentation
+ <https://rust-lang.github.io/rustup/overrides.html> for more
+ information about how toolchain overrides work.
+
+ --config KEY=VALUE or PATH
+ Overrides a Cargo configuration value. The argument should be in
+ TOML syntax of KEY=VALUE, or provided as a path to an extra
+ configuration file. This flag may be specified multiple times. See
+ the command-line overrides section
+ <https://doc.rust-lang.org/cargo/reference/config.html#command-line-overrides>
+ for more information.
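+
+ For example, to force offline mode (the net.offline configuration
+ value) for a single invocation:
+
+ cargo --config net.offline=true build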
+
+ -C PATH
+ Changes the current working directory before executing any specified
+ operations. This affects things like where cargo looks by default
+ for the project manifest (Cargo.toml), as well as the directories
+ searched for discovering .cargo/config.toml, for example. This
+ option must appear before the command name, for example cargo -C
+ path/to/my-project build.
+
+ This option is only available on the nightly channel
+ <https://doc.rust-lang.org/book/appendix-07-nightly-rust.html> and
+ requires the -Z unstable-options flag to enable (see #10098
+ <https://github.com/rust-lang/cargo/issues/10098>).
+
+ -h, --help
+ Prints help information.
+
+ -Z flag
+ Unstable (nightly-only) flags to Cargo. Run cargo -Z help for
+ details.
+
+ENVIRONMENT
+ See the reference
+ <https://doc.rust-lang.org/cargo/reference/environment-variables.html>
+ for details on environment variables that Cargo reads.
+
+EXIT STATUS
+ o 0: Cargo succeeded.
+
+ o 101: Cargo failed to complete.
+
+FILES
+ ~/.cargo/
+     Default location for Cargo’s “home” directory where it
+ stores various files. The location can be changed with the CARGO_HOME
+ environment variable.
+
+ $CARGO_HOME/bin/
+     Binaries installed by cargo-install(1) will be located here. If
+ using rustup <https://rust-lang.github.io/rustup/>, executables
+ distributed with Rust are also located here.
+
+ $CARGO_HOME/config.toml
+     The global configuration file. See the reference
+ <https://doc.rust-lang.org/cargo/reference/config.html> for more
+ information about configuration files.
+
+ .cargo/config.toml
+     Cargo automatically searches for a file named .cargo/config.toml
+ in the current directory, and all parent directories. These
+ configuration files will be merged with the global configuration file.
+
+ $CARGO_HOME/credentials.toml
+     Private authentication information for logging in to a registry.
+
+ $CARGO_HOME/registry/
+     This directory contains cached downloads of the registry index
+ and any downloaded dependencies.
+
+ $CARGO_HOME/git/
+     This directory contains cached downloads of git dependencies.
+
+ Please note that the internal structure of the $CARGO_HOME directory is
+ not stable yet and may be subject to change.
+
+EXAMPLES
+ 1. Build a local package and all of its dependencies:
+
+ cargo build
+
+ 2. Build a package with optimizations:
+
+ cargo build --release
+
+ 3. Run tests for a cross-compiled target:
+
+ cargo test --target i686-unknown-linux-gnu
+
+ 4. Create a new package that builds an executable:
+
+ cargo new foobar
+
+ 5. Create a package in the current directory:
+
+ mkdir foo && cd foo
+ cargo init .
+
+ 6. Learn about a command’s options and usage:
+
+ cargo help clean
+
+BUGS
+ See <https://github.com/rust-lang/cargo/issues> for issues.
+
+SEE ALSO
+ rustc(1), rustdoc(1)
+
diff --git a/src/tools/cargo/src/doc/man/includes/description-install-root.md b/src/tools/cargo/src/doc/man/includes/description-install-root.md
new file mode 100644
index 000000000..50cf51bae
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/description-install-root.md
@@ -0,0 +1,7 @@
+The installation root is determined, in order of precedence (see the example after this list):
+
+- `--root` option
+- `CARGO_INSTALL_ROOT` environment variable
+- `install.root` Cargo [config value](../reference/config.html)
+- `CARGO_HOME` environment variable
+- `$HOME/.cargo`
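+
+For illustration, the `install.root` entry above might look like this in a
+config file (the path is a placeholder):
+
+```toml
+# .cargo/config.toml
+[install]
+root = "/opt/cargo"   # executables are installed into /opt/cargo/bin
+```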
diff --git a/src/tools/cargo/src/doc/man/includes/description-one-target.md b/src/tools/cargo/src/doc/man/includes/description-one-target.md
new file mode 100644
index 000000000..7af18131f
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/description-one-target.md
@@ -0,0 +1,4 @@
+This command requires that only one target is being compiled when additional
+arguments are provided. If more than one target is available for the current
+package, the filters of `--lib`, `--bin`, etc., must be used to select which
+target is compiled.
diff --git a/src/tools/cargo/src/doc/man/includes/options-display.md b/src/tools/cargo/src/doc/man/includes/options-display.md
new file mode 100644
index 000000000..917dac49c
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-display.md
@@ -0,0 +1,24 @@
+{{#option "`-v`" "`--verbose`"}}
+Use verbose output. May be specified twice for "very verbose" output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the `term.verbose`
+[config value](../reference/config.html).
+{{/option}}
+
+{{#option "`-q`" "`--quiet`"}}
+Do not print cargo log messages.
+May also be specified with the `term.quiet`
+[config value](../reference/config.html).
+{{/option}}
+
+{{#option "`--color` _when_"}}
+Control when colored output is used. Valid values:
+
+- `auto` (default): Automatically detect if color support is available on the
+ terminal.
+- `always`: Always display colors.
+- `never`: Never display colors.
+
+May also be specified with the `term.color`
+[config value](../reference/config.html).
+{{/option}}
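+
+As a sketch, the equivalent settings in a Cargo config file (the values shown
+are the defaults):
+
+```toml
+# .cargo/config.toml
+[term]
+verbose = false
+quiet = false
+color = "auto"
+```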
diff --git a/src/tools/cargo/src/doc/man/includes/options-future-incompat.md b/src/tools/cargo/src/doc/man/includes/options-future-incompat.md
new file mode 100644
index 000000000..3a8a1e7b7
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-future-incompat.md
@@ -0,0 +1,6 @@
+{{#option "`--future-incompat-report`"}}
+Displays a future-incompat report for any future-incompatible warnings
+produced during execution of this command.
+
+See {{man "cargo-report" 1}}.
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-ignore-rust-version.md b/src/tools/cargo/src/doc/man/includes/options-ignore-rust-version.md
new file mode 100644
index 000000000..a151534e9
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-ignore-rust-version.md
@@ -0,0 +1,4 @@
+{{#option "`--ignore-rust-version`"}}
+{{actionverb}} the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project's `rust-version` field.
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-index.md b/src/tools/cargo/src/doc/man/includes/options-index.md
new file mode 100644
index 000000000..b19b98365
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-index.md
@@ -0,0 +1,3 @@
+{{#option "`--index` _index_"}}
+The URL of the registry index to use.
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-jobs.md b/src/tools/cargo/src/doc/man/includes/options-jobs.md
new file mode 100644
index 000000000..274263866
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-jobs.md
@@ -0,0 +1,7 @@
+{{#option "`-j` _N_" "`--jobs` _N_"}}
+Number of parallel jobs to run. May also be specified with the
+`build.jobs` [config value](../reference/config.html). Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+The value should not be 0.
+{{/option}}
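+
+For illustration, a config entry that pins the job count (the number is
+arbitrary):
+
+```toml
+# .cargo/config.toml
+[build]
+jobs = 2   # a negative value such as -2 means "logical CPUs minus two"
+```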
diff --git a/src/tools/cargo/src/doc/man/includes/options-keep-going.md b/src/tools/cargo/src/doc/man/includes/options-keep-going.md
new file mode 100644
index 000000000..034181c0e
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-keep-going.md
@@ -0,0 +1,5 @@
+{{#option "`--keep-going`"}}
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+`-Zunstable-options`.
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-locked.md b/src/tools/cargo/src/doc/man/includes/options-locked.md
new file mode 100644
index 000000000..c9ac9524e
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-locked.md
@@ -0,0 +1,25 @@
+{{#option "`--frozen`" "`--locked`"}}
+Either of these flags requires that the `Cargo.lock` file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The `--frozen` flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.
+
+These may be used in environments where you want to assert that the
+`Cargo.lock` file is up-to-date (such as a CI build) or want to avoid network
+access.
+{{/option}}
+
+{{#option "`--offline`"}}
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the {{man "cargo-fetch" 1}} command to download dependencies before going
+offline.
+
+May also be specified with the `net.offline` [config value](../reference/config.html).
+{{/option}}
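+
+A minimal sketch of the equivalent config entry:
+
+```toml
+# .cargo/config.toml
+[net]
+offline = true
+```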
diff --git a/src/tools/cargo/src/doc/man/includes/options-manifest-path.md b/src/tools/cargo/src/doc/man/includes/options-manifest-path.md
new file mode 100644
index 000000000..b1d6eab76
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-manifest-path.md
@@ -0,0 +1,4 @@
+{{#option "`--manifest-path` _path_" }}
+Path to the `Cargo.toml` file. By default, Cargo searches for the
+`Cargo.toml` file in the current directory or any parent directory.
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-message-format.md b/src/tools/cargo/src/doc/man/includes/options-message-format.md
new file mode 100644
index 000000000..61e970ab7
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-message-format.md
@@ -0,0 +1,21 @@
+{{#option "`--message-format` _fmt_" }}
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:
+
+- `human` (default): Display in a human-readable text format. Conflicts with
+ `short` and `json`.
+- `short`: Emit shorter, human-readable text messages. Conflicts with `human`
+ and `json`.
+- `json`: Emit JSON messages to stdout. See
+ [the reference](../reference/external-tools.html#json-messages)
+ for more details. Conflicts with `human` and `short`.
+- `json-diagnostic-short`: Ensure the `rendered` field of JSON messages contains
+ the "short" rendering from rustc. Cannot be used with `human` or `short`.
+- `json-diagnostic-rendered-ansi`: Ensure the `rendered` field of JSON messages
+ contains embedded ANSI color codes for respecting rustc's default color
+ scheme. Cannot be used with `human` or `short`.
+- `json-render-diagnostics`: Instruct Cargo to not include rustc diagnostics
+ in JSON messages printed, but instead Cargo itself should render the
+ JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others
+ coming from rustc are still emitted. Cannot be used with `human` or `short`.
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-new.md b/src/tools/cargo/src/doc/man/includes/options-new.md
new file mode 100644
index 000000000..e9792f05e
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-new.md
@@ -0,0 +1,39 @@
+{{#options}}
+
+{{#option "`--bin`" }}
+Create a package with a binary target (`src/main.rs`).
+This is the default behavior.
+{{/option}}
+
+{{#option "`--lib`" }}
+Create a package with a library target (`src/lib.rs`).
+{{/option}}
+
+{{#option "`--edition` _edition_" }}
+Specify the Rust edition to use. Default is 2021.
+Possible values: 2015, 2018, 2021
+{{/option}}
+
+{{#option "`--name` _name_" }}
+Set the package name. Defaults to the directory name.
+{{/option}}
+
+{{#option "`--vcs` _vcs_" }}
+Initialize a new VCS repository for the given version control system (git,
+hg, pijul, or fossil) or do not initialize any version control at all
+(none). If not specified, defaults to `git` or the configuration value
+`cargo-new.vcs`, or `none` if already inside a VCS repository.
+{{/option}}
+
+{{#option "`--registry` _registry_" }}
+This sets the `publish` field in `Cargo.toml` to the given registry name
+which will restrict publishing only to that registry.
+
+Registry names are defined in [Cargo config files](../reference/config.html).
+If not specified, the default registry defined by the `registry.default`
+config key is used. If the default registry is not set and `--registry` is not
+used, the `publish` field will not be set which means that publishing will not
+be restricted.
+{{/option}}
+
+{{/options}}
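+
+For the `--vcs` default described above, a minimal sketch of the corresponding
+config entry:
+
+```toml
+# .cargo/config.toml
+[cargo-new]
+vcs = "none"
+```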
diff --git a/src/tools/cargo/src/doc/man/includes/options-profile-legacy-check.md b/src/tools/cargo/src/doc/man/includes/options-profile-legacy-check.md
new file mode 100644
index 000000000..0ec82e693
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-profile-legacy-check.md
@@ -0,0 +1,10 @@
+{{#option "`--profile` _name_" }}
+{{actionverb}} with the given profile.
+
+As a special case, specifying the `test` profile will also enable checking in
+test mode, which checks tests and enables the `test` cfg option. See
+[rustc tests](https://doc.rust-lang.org/rustc/tests/index.html) for more
+detail.
+
+See [the reference](../reference/profiles.html) for more details on profiles.
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-profile.md b/src/tools/cargo/src/doc/man/includes/options-profile.md
new file mode 100644
index 000000000..2452e7b14
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-profile.md
@@ -0,0 +1,4 @@
+{{#option "`--profile` _name_" }}
+{{actionverb}} with the given profile.
+See [the reference](../reference/profiles.html) for more details on profiles.
+{{/option}}
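+
+As an illustrative sketch, a custom profile (the name is hypothetical) defined
+in `Cargo.toml` can then be selected by name with `--profile`:
+
+```toml
+# Cargo.toml
+[profile.release-lto]
+inherits = "release"
+lto = true
+```
+
+Passing `--profile release-lto` would then use those settings.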
diff --git a/src/tools/cargo/src/doc/man/includes/options-registry.md b/src/tools/cargo/src/doc/man/includes/options-registry.md
new file mode 100644
index 000000000..23e170689
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-registry.md
@@ -0,0 +1,6 @@
+{{#option "`--registry` _registry_"}}
+Name of the registry to use. Registry names are defined in [Cargo config
+files](../reference/config.html). If not specified, the default registry is used,
+which is defined by the `registry.default` config key which defaults to
+`crates-io`.
+{{/option}}
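+
+As an illustrative sketch (the registry name and URL are placeholders),
+registries and the default registry are configured like this:
+
+```toml
+# .cargo/config.toml
+[registries.my-registry]
+index = "https://registry.example.com/index"
+
+[registry]
+default = "my-registry"
+```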
diff --git a/src/tools/cargo/src/doc/man/includes/options-release.md b/src/tools/cargo/src/doc/man/includes/options-release.md
new file mode 100644
index 000000000..723dbba9f
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-release.md
@@ -0,0 +1,4 @@
+{{#option "`-r`" "`--release`"}}
+{{actionverb}} optimized artifacts with the `release` profile.
+See also the `--profile` option for choosing a specific profile by name.
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-target-dir.md b/src/tools/cargo/src/doc/man/includes/options-target-dir.md
new file mode 100644
index 000000000..3646e951f
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-target-dir.md
@@ -0,0 +1,13 @@
+{{#option "`--target-dir` _directory_"}}
+Directory for all generated artifacts and intermediate files. May also be
+specified with the `CARGO_TARGET_DIR` environment variable, or the
+`build.target-dir` [config value](../reference/config.html).
+{{#if temp-target-dir}} Defaults to a new temporary folder located in the
+temporary directory of the platform.
+
+When using `--path`, by default it will use the `target` directory in the workspace
+of the local crate unless `--target-dir`
+is specified.
+{{else}} Defaults to `target` in the root of the workspace.
+{{/if}}
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-target-triple.md b/src/tools/cargo/src/doc/man/includes/options-target-triple.md
new file mode 100644
index 000000000..bb180f53d
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-target-triple.md
@@ -0,0 +1,16 @@
+{{#option "`--target` _triple_"}}
+{{actionverb}} for the given architecture.
+{{~#if target-default-to-all-arch}} The default is all architectures.
+{{~else}} The default is the host architecture.
+{{~/if}} The general format of the triple is
+`<arch><sub>-<vendor>-<sys>-<abi>`. Run `rustc --print target-list` for a
+list of supported targets.
+{{~#if multitarget }} This flag may be specified multiple times. {{~/if}}
+
+This may also be specified with the `build.target`
+[config value](../reference/config.html).
+
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+[build cache](../guide/build-cache.html) documentation for more details.
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-targets-bin-auto-built.md b/src/tools/cargo/src/doc/man/includes/options-targets-bin-auto-built.md
new file mode 100644
index 000000000..c2234ab79
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-targets-bin-auto-built.md
@@ -0,0 +1,8 @@
+Binary targets are automatically built if there is an integration test or
+benchmark being selected to {{lower actionverb}}. This allows an integration
+test to execute the binary to exercise and test its behavior.
+The `CARGO_BIN_EXE_<name>`
+[environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates)
+is set when the integration test is built so that it can use the
+[`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the
+executable.
diff --git a/src/tools/cargo/src/doc/man/includes/options-targets-lib-bin.md b/src/tools/cargo/src/doc/man/includes/options-targets-lib-bin.md
new file mode 100644
index 000000000..14342acfa
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-targets-lib-bin.md
@@ -0,0 +1,12 @@
+{{#option "`--lib`" }}
+{{actionverb}} the package's library.
+{{/option}}
+
+{{#option "`--bin` _name_..." }}
+{{actionverb}} the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.
+{{/option}}
+
+{{#option "`--bins`" }}
+{{actionverb}} all binary targets.
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-targets.md b/src/tools/cargo/src/doc/man/includes/options-targets.md
new file mode 100644
index 000000000..3332001b0
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-targets.md
@@ -0,0 +1,57 @@
+Passing target selection flags will {{lower actionverb}} only the specified
+targets.
+
+Note that `--bin`, `--example`, `--test` and `--bench` flags also
+support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
+
+{{#options}}
+
+{{> options-targets-lib-bin }}
+
+{{#option "`--example` _name_..." }}
+{{actionverb}} the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.
+{{/option}}
+
+{{#option "`--examples`" }}
+{{actionverb}} all example targets.
+{{/option}}
+
+{{#option "`--test` _name_..." }}
+{{actionverb}} the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.
+{{/option}}
+
+{{#option "`--tests`" }}
+{{actionverb}} all targets in test mode that have the `test = true` manifest
+flag set. By default this includes the library and binaries built as
+unittests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unittest, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the `test` flag in the
+manifest settings for the target.
+{{/option}}
+
+{{#option "`--bench` _name_..." }}
+{{actionverb}} the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.
+{{/option}}
+
+{{#option "`--benches`" }}
+{{actionverb}} all targets in benchmark mode that have the `bench = true`
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the `bench` flag in the
+manifest settings for the target.
+{{/option}}
+
+{{#option "`--all-targets`" }}
+{{actionverb}} all targets. This is equivalent to specifying `--lib --bins
+--tests --benches --examples`.
+{{/option}}
+
+{{/options}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-test.md b/src/tools/cargo/src/doc/man/includes/options-test.md
new file mode 100644
index 000000000..1d2447e8d
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-test.md
@@ -0,0 +1,14 @@
+{{#options}}
+
+{{#option "`--no-run`" }}
+Compile, but don't run {{nouns}}.
+{{/option}}
+
+{{#option "`--no-fail-fast`" }}
+Run all {{nouns}} regardless of failure. Without this flag, Cargo will exit
+after the first executable fails. The Rust test harness will run all {{nouns}}
+within the executable to completion; this flag only applies to the executable
+as a whole.
+{{/option}}
+
+{{/options}}
diff --git a/src/tools/cargo/src/doc/man/includes/options-timings.md b/src/tools/cargo/src/doc/man/includes/options-timings.md
new file mode 100644
index 000000000..d4e5998b5
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-timings.md
@@ -0,0 +1,16 @@
+{{#option "`--timings=`_fmts_"}}
+Output information about how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma-separated list of output
+formats; `--timings` without an argument will default to `--timings=html`.
+Specifying an output format (rather than the default) is unstable and requires
+`-Zunstable-options`. Valid output formats:
+
+- `html` (unstable, requires `-Zunstable-options`): Write a human-readable file `cargo-timing.html` to the
+ `target/cargo-timings` directory with a report of the compilation. Also write
+ a report to the same directory with a timestamp in the filename if you want
+ to look at older runs. HTML output is suitable for human consumption only,
+ and does not provide machine-readable timing data.
+- `json` (unstable, requires `-Zunstable-options`): Emit machine-readable JSON
+ information about timing information.
+{{/option}}
+
diff --git a/src/tools/cargo/src/doc/man/includes/options-token.md b/src/tools/cargo/src/doc/man/includes/options-token.md
new file mode 100644
index 000000000..855204de2
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/options-token.md
@@ -0,0 +1,11 @@
+{{#option "`--token` _token_" }}
+API token to use when authenticating. This overrides the token stored in
+the credentials file (which is created by {{man "cargo-login" 1}}).
+
+[Cargo config](../reference/config.html) environment variables can be
+used to override the tokens stored in the credentials file. The token for
+crates.io may be specified with the `CARGO_REGISTRY_TOKEN` environment
+variable. Tokens for other registries may be specified with environment
+variables of the form `CARGO_REGISTRIES_NAME_TOKEN` where `NAME` is the name
+of the registry in all capital letters.
+{{/option}}
diff --git a/src/tools/cargo/src/doc/man/includes/section-environment.md b/src/tools/cargo/src/doc/man/includes/section-environment.md
new file mode 100644
index 000000000..aae5f078a
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/section-environment.md
@@ -0,0 +1,4 @@
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
diff --git a/src/tools/cargo/src/doc/man/includes/section-exit-status.md b/src/tools/cargo/src/doc/man/includes/section-exit-status.md
new file mode 100644
index 000000000..a8123366d
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/section-exit-status.md
@@ -0,0 +1,4 @@
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
diff --git a/src/tools/cargo/src/doc/man/includes/section-features.md b/src/tools/cargo/src/doc/man/includes/section-features.md
new file mode 100644
index 000000000..99c13fe1b
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/section-features.md
@@ -0,0 +1,26 @@
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+{{#options}}
+
+{{#option "`-F` _features_" "`--features` _features_" }}
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with `package-name/feature-name` syntax. This flag may
+be specified multiple times, which enables all specified features.
+{{/option}}
+
+{{#option "`--all-features`" }}
+Activate all available features of all selected packages.
+{{/option}}
+
+{{#option "`--no-default-features`" }}
+Do not activate the `default` feature of the selected packages.
+{{/option}}
+
+{{/options}}
diff --git a/src/tools/cargo/src/doc/man/includes/section-options-common.md b/src/tools/cargo/src/doc/man/includes/section-options-common.md
new file mode 100644
index 000000000..5a41169d6
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/section-options-common.md
@@ -0,0 +1,39 @@
+### Common Options
+
+{{#options}}
+
+{{#option "`+`_toolchain_"}}
+If Cargo has been installed with rustup, and the first argument to `cargo`
+begins with `+`, it will be interpreted as a rustup toolchain name (such
+as `+stable` or `+nightly`).
+See the [rustup documentation](https://rust-lang.github.io/rustup/overrides.html)
+for more information about how toolchain overrides work.
+{{/option}}
+
+{{#option "`--config` _KEY=VALUE_ or _PATH_"}}
+Overrides a Cargo configuration value. The argument should be in TOML syntax of `KEY=VALUE`,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the [command-line overrides section](../reference/config.html#command-line-overrides) for more information.
+{{/option}}
+
+{{#option "`-C` _PATH_"}}
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (`Cargo.toml`), as well as
+the directories searched for discovering `.cargo/config.toml`, for example. This option must
+appear before the command name, for example `cargo -C path/to/my-project build`.
+
+This option is only available on the [nightly
+channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html) and
+requires the `-Z unstable-options` flag to enable (see
+[#10098](https://github.com/rust-lang/cargo/issues/10098)).
+{{/option}}
+
+{{#option "`-h`" "`--help`"}}
+Prints help information.
+{{/option}}
+
+{{#option "`-Z` _flag_"}}
+Unstable (nightly-only) flags to Cargo. Run `cargo -Z help` for details.
+{{/option}}
+
+{{/options}}
diff --git a/src/tools/cargo/src/doc/man/includes/section-options-package.md b/src/tools/cargo/src/doc/man/includes/section-options-package.md
new file mode 100644
index 000000000..4fa732dd3
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/section-options-package.md
@@ -0,0 +1,13 @@
+### Package Selection
+
+By default, the package in the current working directory is selected. The `-p`
+flag can be used to choose a different package in a workspace.
+
+{{#options}}
+
+{{#option "`-p` _spec_" "`--package` _spec_" }}
+The package to {{lower actionverb}}. See {{man "cargo-pkgid" 1}} for the SPEC
+format.
+{{/option}}
+
+{{/options}}
diff --git a/src/tools/cargo/src/doc/man/includes/section-package-selection.md b/src/tools/cargo/src/doc/man/includes/section-package-selection.md
new file mode 100644
index 000000000..8d7d62180
--- /dev/null
+++ b/src/tools/cargo/src/doc/man/includes/section-package-selection.md
@@ -0,0 +1,42 @@
+### Package Selection
+
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+`--manifest-path` is not given). If the manifest is the root of a workspace then
+the workspaces default members are selected, otherwise only the package defined
+by the manifest will be selected.
+
+The default members of a workspace can be set explicitly with the
+`workspace.default-members` key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+`--workspace`), and a non-virtual workspace will include only the root crate itself.
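+
+As an illustration (the member paths are hypothetical), a root manifest that
+narrows the default members might look like this:
+
+```toml
+# Cargo.toml at the workspace root
+[workspace]
+members = ["crates/*"]
+default-members = ["crates/cli"]
+```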
+
+{{#options}}
+
+{{#option "`-p` _spec_..." "`--package` _spec_..."}}
+{{actionverb}} only the specified packages. See {{man "cargo-pkgid" 1}} for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.
+{{/option}}
+
+{{#option "`--workspace`" }}
+{{actionverb}} all members in the workspace.
+{{/option}}
+
+{{#unless noall}}
+{{#option "`--all`" }}
+Deprecated alias for `--workspace`.
+{{/option}}
+{{/unless}}
+
+{{#option "`--exclude` _SPEC_..." }}
+Exclude the specified packages. Must be used in conjunction with the
+`--workspace` flag. This flag may be specified multiple times and supports
+common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.
+{{/option}}
+
+{{/options}}
diff --git a/src/tools/cargo/src/doc/semver-check/Cargo.toml b/src/tools/cargo/src/doc/semver-check/Cargo.toml
new file mode 100644
index 000000000..bdfd8d7d7
--- /dev/null
+++ b/src/tools/cargo/src/doc/semver-check/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "semver-check"
+version = "0.1.0"
+authors = ["Eric Huss"]
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+tempfile = "3.1.0"
diff --git a/src/tools/cargo/src/doc/semver-check/src/main.rs b/src/tools/cargo/src/doc/semver-check/src/main.rs
new file mode 100644
index 000000000..51aacbe11
--- /dev/null
+++ b/src/tools/cargo/src/doc/semver-check/src/main.rs
@@ -0,0 +1,289 @@
+//! Test runner for the semver compatibility doc chapter.
+//!
+//! This extracts all the "rust" annotated code blocks and tests that they
+//! either fail or succeed as expected. This also checks that the examples are
+//! formatted correctly.
+//!
+//! An example with the word "MINOR" at the top is expected to successfully
+//! build against the before and after. Otherwise it should fail. A comment of
+//! "// Error:" will check that the given message appears in the error output.
+
+use std::error::Error;
+use std::fs;
+use std::path::Path;
+use std::process::{Command, Output};
+
+fn main() {
+ if let Err(e) = doit() {
+ println!("error: {}", e);
+ std::process::exit(1);
+ }
+}
+
+const SEPARATOR: &str = "///////////////////////////////////////////////////////////";
+
+fn doit() -> Result<(), Box<dyn Error>> {
+ let filename = std::env::args()
+ .nth(1)
+ .unwrap_or_else(|| "../src/reference/semver.md".to_string());
+ let contents = fs::read_to_string(filename)?;
+ let mut lines = contents.lines().enumerate();
+
+ loop {
+ // Find a rust block.
+ let (block_start, run_program, deny_warnings) = loop {
+ match lines.next() {
+ Some((lineno, line)) => {
+ if line.trim().starts_with("```rust") && !line.contains("skip") {
+ break (
+ lineno + 1,
+ line.contains("run-fail"),
+ !line.contains("dont-deny"),
+ );
+ }
+ }
+ None => return Ok(()),
+ }
+ };
+ // Read in the code block.
+ let mut block = Vec::new();
+ loop {
+ match lines.next() {
+ Some((_, line)) => {
+ if line.trim() == "```" {
+ break;
+ }
+ block.push(line);
+ }
+ None => {
+ return Err(format!(
+ "rust block did not end for example starting on line {}",
+ block_start
+ )
+ .into());
+ }
+ }
+ }
+ // Split it into the separate source files.
+ let parts: Vec<_> = block.split(|line| line.trim() == SEPARATOR).collect();
+ if parts.len() != 4 {
+ return Err(format!(
+ "expected 4 sections in example starting on line {}, got {}:\n{:?}",
+ block_start,
+ parts.len(),
+ parts
+ )
+ .into());
+ }
+ let join = |part: &[&str]| {
+ let mut result = String::new();
+ result.push_str("#![allow(unused)]\n");
+ if deny_warnings {
+ result.push_str("#![deny(warnings)]\n");
+ }
+ result.push_str(&part.join("\n"));
+ if !result.ends_with('\n') {
+ result.push('\n');
+ }
+ result
+ };
+ let expect_success = parts[0][0].contains("MINOR");
+ println!("Running test from line {}", block_start);
+
+ let result = run_test(
+ join(parts[1]),
+ join(parts[2]),
+ join(parts[3]),
+ expect_success,
+ run_program,
+ );
+
+ if let Err(e) = result {
+ return Err(format!(
+ "test failed for example starting on line {}: {}",
+ block_start, e
+ )
+ .into());
+ }
+ }
+}
+
+const CRATE_NAME: &str = "updated_crate";
+
+fn run_test(
+ before: String,
+ after: String,
+ example: String,
+ expect_success: bool,
+ run_program: bool,
+) -> Result<(), Box<dyn Error>> {
+ let tempdir = tempfile::TempDir::new()?;
+ let before_p = tempdir.path().join("before.rs");
+ let after_p = tempdir.path().join("after.rs");
+ let example_p = tempdir.path().join("example.rs");
+
+ let check_fn = if run_program {
+ run_check
+ } else {
+ compile_check
+ };
+
+ compile_check(before, &before_p, CRATE_NAME, false, true)?;
+ check_fn(example.clone(), &example_p, "example", true, true)?;
+ compile_check(after, &after_p, CRATE_NAME, false, true)?;
+ check_fn(example, &example_p, "example", true, expect_success)?;
+ Ok(())
+}
+
+fn check_formatting(path: &Path) -> Result<(), Box<dyn Error>> {
+ match Command::new("rustfmt")
+ .args(&["--edition=2018", "--check"])
+ .arg(path)
+ .status()
+ {
+ Ok(status) => {
+ if !status.success() {
+ return Err(format!("failed to run rustfmt: {}", status).into());
+ }
+ Ok(())
+ }
+ Err(e) => Err(format!("failed to run rustfmt: {}", e).into()),
+ }
+}
+
+fn compile(
+ contents: &str,
+ path: &Path,
+ crate_name: &str,
+ extern_path: bool,
+) -> Result<Output, Box<dyn Error>> {
+ let crate_type = if contents.contains("fn main()") {
+ "bin"
+ } else {
+ "rlib"
+ };
+
+ fs::write(path, &contents)?;
+ check_formatting(path)?;
+ let out_dir = path.parent().unwrap();
+ let mut cmd = Command::new("rustc");
+ cmd.args(&[
+ "--edition=2021",
+ "--crate-type",
+ crate_type,
+ "--crate-name",
+ crate_name,
+ "--out-dir",
+ ]);
+ cmd.arg(&out_dir);
+ if extern_path {
+ let epath = out_dir.join(format!("lib{}.rlib", CRATE_NAME));
+ cmd.arg("--extern")
+ .arg(format!("{}={}", CRATE_NAME, epath.display()));
+ }
+ cmd.arg(path);
+ cmd.output().map_err(Into::into)
+}
+
+fn compile_check(
+ mut contents: String,
+ path: &Path,
+ crate_name: &str,
+ extern_path: bool,
+ expect_success: bool,
+) -> Result<(), Box<dyn Error>> {
+ // If the example has an error message, remove it so that it can be
+ // compared with the actual output, and also to avoid issues with rustfmt
+ // moving it around.
+ let expected_error = match contents.find("// Error:") {
+ Some(index) => {
+ let start = contents[..index].rfind(|ch| ch != ' ').unwrap();
+ let end = contents[index..].find('\n').unwrap();
+ let error = contents[index + 9..index + end].trim().to_string();
+ contents.replace_range(start + 1..index + end, "");
+ Some(error)
+ }
+ None => None,
+ };
+
+ let output = compile(&contents, path, crate_name, extern_path)?;
+
+ let stderr = std::str::from_utf8(&output.stderr).unwrap();
+ match (output.status.success(), expect_success) {
+ (true, true) => Ok(()),
+ (true, false) => Err(format!(
+ "expected failure, got success {}\n===== Contents:\n{}\n===== Output:\n{}\n",
+ path.display(),
+ contents,
+ stderr
+ )
+ .into()),
+ (false, true) => Err(format!(
+ "expected success, got error {}\n===== Contents:\n{}\n===== Output:\n{}\n",
+ path.display(),
+ contents,
+ stderr
+ )
+ .into()),
+ (false, false) => {
+ if expected_error.is_none() {
+ return Err("failing test should have an \"// Error:\" annotation ".into());
+ }
+ let expected_error = expected_error.unwrap();
+ if !stderr.contains(&expected_error) {
+ Err(format!(
+ "expected error message not found in compiler output\nExpected: {}\nGot:\n{}\n",
+ expected_error, stderr
+ )
+ .into())
+ } else {
+ Ok(())
+ }
+ }
+ }
+}
+
+fn run_check(
+ contents: String,
+ path: &Path,
+ crate_name: &str,
+ extern_path: bool,
+ expect_success: bool,
+) -> Result<(), Box<dyn Error>> {
+ let compile_output = compile(&contents, path, crate_name, extern_path)?;
+
+ if !compile_output.status.success() {
+ let stderr = std::str::from_utf8(&compile_output.stderr).unwrap();
+ return Err(format!(
+ "expected success, got error {}\n===== Contents:\n{}\n===== Output:\n{}\n",
+ path.display(),
+ contents,
+ stderr
+ )
+ .into());
+ }
+
+ let binary_path = path.parent().unwrap().join(crate_name);
+
+ let output = Command::new(binary_path).output()?;
+
+ let stderr = std::str::from_utf8(&output.stderr).unwrap();
+
+ match (output.status.success(), expect_success) {
+ (true, false) => Err(format!(
+ "expected panic, got success {}\n===== Contents:\n{}\n===== Output:\n{}\n",
+ path.display(),
+ contents,
+ stderr
+ )
+ .into()),
+ (false, true) => Err(format!(
+ "expected success, got panic {}\n===== Contents:\n{}\n===== Output:\n{}\n",
+ path.display(),
+ contents,
+ stderr,
+ )
+ .into()),
+ (_, _) => Ok(()),
+ }
+}
diff --git a/src/tools/cargo/src/doc/src/SUMMARY.md b/src/tools/cargo/src/doc/src/SUMMARY.md
new file mode 100644
index 000000000..273936559
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/SUMMARY.md
@@ -0,0 +1,93 @@
+# Summary
+
+[Introduction](index.md)
+
+* [Getting Started](getting-started/index.md)
+ * [Installation](getting-started/installation.md)
+ * [First Steps with Cargo](getting-started/first-steps.md)
+
+* [Cargo Guide](guide/index.md)
+ * [Why Cargo Exists](guide/why-cargo-exists.md)
+ * [Creating a New Package](guide/creating-a-new-project.md)
+ * [Working on an Existing Package](guide/working-on-an-existing-project.md)
+ * [Dependencies](guide/dependencies.md)
+ * [Package Layout](guide/project-layout.md)
+ * [Cargo.toml vs Cargo.lock](guide/cargo-toml-vs-cargo-lock.md)
+ * [Tests](guide/tests.md)
+ * [Continuous Integration](guide/continuous-integration.md)
+ * [Cargo Home](guide/cargo-home.md)
+ * [Build Cache](guide/build-cache.md)
+
+* [Cargo Reference](reference/index.md)
+ * [Specifying Dependencies](reference/specifying-dependencies.md)
+ * [Overriding Dependencies](reference/overriding-dependencies.md)
+ * [The Manifest Format](reference/manifest.md)
+ * [Cargo Targets](reference/cargo-targets.md)
+ * [Workspaces](reference/workspaces.md)
+ * [Features](reference/features.md)
+ * [Features Examples](reference/features-examples.md)
+ * [Profiles](reference/profiles.md)
+ * [Configuration](reference/config.md)
+ * [Environment Variables](reference/environment-variables.md)
+ * [Build Scripts](reference/build-scripts.md)
+ * [Build Script Examples](reference/build-script-examples.md)
+ * [Publishing on crates.io](reference/publishing.md)
+ * [Package ID Specifications](reference/pkgid-spec.md)
+ * [Source Replacement](reference/source-replacement.md)
+ * [External Tools](reference/external-tools.md)
+ * [Registries](reference/registries.md)
+ * [Running a Registry](reference/running-a-registry.md)
+ * [Registry Index](reference/registry-index.md)
+ * [Registry Web API](reference/registry-web-api.md)
+ * [Dependency Resolution](reference/resolver.md)
+ * [SemVer Compatibility](reference/semver.md)
+ * [Future incompat report](reference/future-incompat-report.md)
+ * [Reporting build timings](reference/timings.md)
+ * [Unstable Features](reference/unstable.md)
+
+* [Cargo Commands](commands/index.md)
+ * [General Commands](commands/general-commands.md)
+ * [cargo](commands/cargo.md)
+ * [cargo help](commands/cargo-help.md)
+ * [cargo version](commands/cargo-version.md)
+ * [Build Commands](commands/build-commands.md)
+ * [cargo bench](commands/cargo-bench.md)
+ * [cargo build](commands/cargo-build.md)
+ * [cargo check](commands/cargo-check.md)
+ * [cargo clean](commands/cargo-clean.md)
+ * [cargo doc](commands/cargo-doc.md)
+ * [cargo fetch](commands/cargo-fetch.md)
+ * [cargo fix](commands/cargo-fix.md)
+ * [cargo run](commands/cargo-run.md)
+ * [cargo rustc](commands/cargo-rustc.md)
+ * [cargo rustdoc](commands/cargo-rustdoc.md)
+ * [cargo test](commands/cargo-test.md)
+ * [cargo report](commands/cargo-report.md)
+ * [Manifest Commands](commands/manifest-commands.md)
+ * [cargo add](commands/cargo-add.md)
+ * [cargo generate-lockfile](commands/cargo-generate-lockfile.md)
+ * [cargo locate-project](commands/cargo-locate-project.md)
+ * [cargo metadata](commands/cargo-metadata.md)
+ * [cargo pkgid](commands/cargo-pkgid.md)
+ * [cargo remove](commands/cargo-remove.md)
+ * [cargo tree](commands/cargo-tree.md)
+ * [cargo update](commands/cargo-update.md)
+ * [cargo vendor](commands/cargo-vendor.md)
+ * [cargo verify-project](commands/cargo-verify-project.md)
+ * [Package Commands](commands/package-commands.md)
+ * [cargo init](commands/cargo-init.md)
+ * [cargo install](commands/cargo-install.md)
+ * [cargo new](commands/cargo-new.md)
+ * [cargo search](commands/cargo-search.md)
+ * [cargo uninstall](commands/cargo-uninstall.md)
+ * [Publishing Commands](commands/publishing-commands.md)
+ * [cargo login](commands/cargo-login.md)
+ * [cargo logout](commands/cargo-logout.md)
+ * [cargo owner](commands/cargo-owner.md)
+ * [cargo package](commands/cargo-package.md)
+ * [cargo publish](commands/cargo-publish.md)
+ * [cargo yank](commands/cargo-yank.md)
+
+* [FAQ](faq.md)
+* [Appendix: Glossary](appendix/glossary.md)
+* [Appendix: Git Authentication](appendix/git-authentication.md)
diff --git a/src/tools/cargo/src/doc/src/appendix/git-authentication.md b/src/tools/cargo/src/doc/src/appendix/git-authentication.md
new file mode 100644
index 000000000..8b2db5cb1
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/appendix/git-authentication.md
@@ -0,0 +1,96 @@
+# Git Authentication
+
+Cargo supports some forms of authentication when using git dependencies and
+registries. This appendix contains some information for setting up git
+authentication in a way that works with Cargo.
+
+If you need other authentication methods, the [`net.git-fetch-with-cli`]
+config value can be set to cause Cargo to execute the `git` executable to
+handle fetching remote repositories instead of using the built-in support.
+This can be enabled with the `CARGO_NET_GIT_FETCH_WITH_CLI=true` environment
+variable.
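+
+A sketch of the equivalent setting in a Cargo config file:
+
+```toml
+# .cargo/config.toml
+[net]
+git-fetch-with-cli = true
+```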
+
+## HTTPS authentication
+
+HTTPS authentication requires the [`credential.helper`] mechanism. There are
+multiple credential helpers, and you specify the one you want to use in your
+global git configuration file.
+
+```ini
+# ~/.gitconfig
+
+[credential]
+helper = store
+```
+
+Cargo does not ask for passwords, so for most helpers you will need to give
+the helper the initial username/password before running Cargo. One way to do
+this is to run `git clone` on the private git repo and enter the
+username/password.
+
+> **Tip:**<br>
+> macOS users may want to consider using the osxkeychain helper.<br>
+> Windows users may want to consider using the [GCM] helper.
+
+> **Note:** Windows users will need to make sure that the `sh` shell is
+> available in their `PATH`. This typically is available with the Git for
+> Windows installation.
+
+## SSH authentication
+
+SSH authentication requires `ssh-agent` to be running to acquire the SSH key.
+Make sure the appropriate environment variables are set up (`SSH_AUTH_SOCK` on
+most Unix-like systems), and that the correct keys are added (with `ssh-add`).
+
+Windows can use Pageant (part of [PuTTY]) or `ssh-agent`.
+To use `ssh-agent`, Cargo needs to use the OpenSSH that is distributed as part
+of Windows, as Cargo does not support the simulated Unix-domain sockets used
+by MinGW or Cygwin.
+More information about installing with Windows can be found at the [Microsoft
+installation documentation] and the page on [key management] has instructions
+on how to start `ssh-agent` and to add keys.
+
+> **Note:** Cargo does not support git's shorthand SSH URLs like
+> `git@example.com:user/repo.git`. Use a full SSH URL like
+> `ssh://git@example.com/user/repo.git`.
+
+> **Note:** SSH configuration files (like OpenSSH's `~/.ssh/config`) are not
+> used by Cargo's built-in SSH library. More advanced requirements should use
+> [`net.git-fetch-with-cli`].
+
+### SSH Known Hosts
+
+When connecting to an SSH host, Cargo must verify the identity of the host
+using "known hosts", which are a list of host keys. Cargo can look for these
+known hosts in OpenSSH-style `known_hosts` files located in their standard
+locations (`.ssh/known_hosts` in your home directory, or
+`/etc/ssh/ssh_known_hosts` on Unix-like platforms or
+`%PROGRAMDATA%\ssh\ssh_known_hosts` on Windows). More information about these
+files can be found in the [sshd man page]. Alternatively, keys may be
+configured in a Cargo configuration file with [`net.ssh.known-hosts`].
+
+When connecting to an SSH host before the known hosts have been configured,
+Cargo will display an error message instructing you how to add the host key.
+This also includes a "fingerprint", a smaller hash of the host key that should
+be easier to verify visually. The server administrator can get
+the fingerprint by running `ssh-keygen` against the public key (for example,
+`ssh-keygen -l -f /etc/ssh/ssh_host_ecdsa_key.pub`). Well-known sites may
+publish their fingerprints on the web; for example GitHub posts theirs at
+<https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/githubs-ssh-key-fingerprints>.
+
+Cargo comes with the host keys for [github.com](https://github.com) built-in.
+If those ever change, you can add the new keys to the config or known_hosts file.
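+
+For illustration, a hypothetical entry in a Cargo config file (the host and
+key data are placeholders):
+
+```toml
+# .cargo/config.toml
+[net.ssh]
+known-hosts = [
+    "example.com ssh-ed25519 AAAAC3NzaC1...placeholder...",
+]
+```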
+
+> **Note:** Cargo doesn't support the `@cert-authority` or `@revoked`
+> markers in `known_hosts` files. To make use of this functionality, use
+> [`net.git-fetch-with-cli`]. This is also a good tip if Cargo's SSH client
+> isn't behaving the way you expect it to.
+
+[`credential.helper`]: https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage
+[`net.git-fetch-with-cli`]: ../reference/config.md#netgit-fetch-with-cli
+[`net.ssh.known-hosts`]: ../reference/config.md#netsshknown-hosts
+[GCM]: https://github.com/microsoft/Git-Credential-Manager-Core/
+[PuTTY]: https://www.chiark.greenend.org.uk/~sgtatham/putty/
+[Microsoft installation documentation]: https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_install_firstuse
+[key management]: https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_keymanagement
+[sshd man page]: https://man.openbsd.org/sshd#SSH_KNOWN_HOSTS_FILE_FORMAT
diff --git a/src/tools/cargo/src/doc/src/appendix/glossary.md b/src/tools/cargo/src/doc/src/appendix/glossary.md
new file mode 100644
index 000000000..143736851
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/appendix/glossary.md
@@ -0,0 +1,274 @@
+# Glossary
+
+### Artifact
+
+An *artifact* is the file or set of files created as a result of the
+compilation process. This includes linkable libraries, executable binaries,
+and generated documentation.
+
+### Cargo
+
+*Cargo* is the Rust [*package manager*](#package-manager), and the primary
+topic of this book.
+
+### Cargo.lock
+
+See [*lock file*](#lock-file).
+
+### Cargo.toml
+
+See [*manifest*](#manifest).
+
+### Crate
+
+A Rust *crate* is either a library or an executable program, referred to as
+either a *library crate* or a *binary crate*, respectively.
+
+Every [target](#target) defined for a Cargo [package](#package) is a *crate*.
+
+Loosely, the term *crate* may refer to either the source code of the target or
+to the compiled artifact that the target produces. It may also refer to a
+compressed package fetched from a [registry](#registry).
+
+The source code for a given crate may be subdivided into [*modules*](#module).
+
+### Edition
+
+A *Rust edition* is a developmental landmark of the Rust language. The
+[edition of a package][edition-field] is specified in the `Cargo.toml`
+[manifest](#manifest), and individual targets can specify which edition they
+use. See the [Edition Guide] for more information.
+
+### Feature
+
+The meaning of *feature* depends on the context:
+
+- A [*feature*][feature] is a named flag which allows for conditional
+ compilation. A feature can refer to an optional dependency, or an arbitrary
+ name defined in a `Cargo.toml` [manifest](#manifest) that can be checked
+ within source code.
+
+- Cargo has [*unstable feature flags*][cargo-unstable] which can be used to
+ enable experimental behavior of Cargo itself.
+
+- The Rust compiler and Rustdoc have their own unstable feature flags (see
+ [The Unstable Book][unstable-book] and [The Rustdoc
+ Book][rustdoc-unstable]).
+
+- CPU targets have [*target features*][target-feature] which specify
+ capabilities of a CPU.
+
+### Index
+
+The *index* is the searchable list of [*crates*](#crate) in a
+[*registry*](#registry).
+
+### Lock file
+
+The `Cargo.lock` *lock file* is a file that captures the exact version of
+every dependency used in a [*workspace*](#workspace) or
+[*package*](#package). It is automatically generated by Cargo. See
+[Cargo.toml vs Cargo.lock].
+
+### Manifest
+
+A [*manifest*][manifest] is a description of a [package](#package) or a
+[workspace](#workspace) in a file named `Cargo.toml`.
+
+A [*virtual manifest*][virtual] is a `Cargo.toml` file that only describes a
+workspace, and does not include a package.
+
+### Member
+
+A *member* is a [*package*](#package) that belongs to a
+[*workspace*](#workspace).
+
+### Module
+
+Rust's module system is used to organize code into logical units called
+*modules*, which provide isolated namespaces within the code.
+
+The source code for a given [crate](#crate) may be subdivided into one or more
+separate modules. This is usually done to organize the code into areas of
+related functionality or to control the visible scope (public/private) of
+symbols within the source (structs, functions, and so on).
+
+A [`Cargo.toml`](#manifest) file is primarily concerned with the
+[package](#package) it defines, its crates, and the packages of the crates on
+which they depend. Nevertheless, you will see the term "module" often when
+working with Rust, so you should understand its relationship to a given crate.
+
+### Package
+
+A *package* is a collection of source files and a `Cargo.toml`
+[*manifest*](#manifest) file which describes the package. A package has a name
+and version which is used for specifying dependencies between packages.
+
+A package contains multiple [*targets*](#target), each of which is a
+[*crate*](#crate). The `Cargo.toml` file describes the type of the crates
+(binary or library) within the package, along with some metadata about each
+one --- how each is to be built, what their direct dependencies are, etc., as
+described throughout this book.
+
+The *package root* is the directory where the package's `Cargo.toml` manifest
+is located. (Compare with [*workspace root*](#workspace).)
+
+The [*package ID specification*][pkgid-spec], or *SPEC*, is a string used to
+uniquely reference a specific version of a package from a specific source.
+
+Small to medium sized Rust projects will only need a single package, though it
+is common for them to have multiple crates.
+
+Larger projects may involve multiple packages, in which case Cargo
+[*workspaces*](#workspace) can be used to manage common dependencies and other
+related metadata between the packages.
+
+### Package manager
+
+Broadly speaking, a *package manager* is a program (or collection of related
+programs) in a software ecosystem that automates the process of obtaining,
+installing, and upgrading artifacts. Within a programming language ecosystem,
+a package manager is a developer-focused tool whose primary functionality is
+to download library artifacts and their dependencies from some central
+repository; this capability is often combined with the ability to perform
+software builds (by invoking the language-specific compiler).
+
+[*Cargo*](#cargo) is the package manager within the Rust ecosystem. Cargo
+downloads your Rust [package](#package)’s dependencies
+([*artifacts*](#artifact) known as [*crates*](#crate)), compiles your
+packages, makes distributable packages, and (optionally) uploads them to
+[crates.io][], the Rust community’s [*package registry*](#registry).
+
+### Package registry
+
+See [*registry*](#registry).
+
+### Project
+
+Another name for a [package](#package).
+
+### Registry
+
+A *registry* is a service that contains a collection of downloadable
+[*crates*](#crate) that can be installed or used as dependencies for a
+[*package*](#package). The default registry in the Rust ecosystem is
+[crates.io](https://crates.io). The registry has an [*index*](#index) which
+contains a list of all crates, and tells Cargo how to download the crates that
+are needed.
+
+### Source
+
+A *source* is a provider that contains [*crates*](#crate) that may be included
+as dependencies for a [*package*](#package). There are several kinds of
+sources:
+
+- **Registry source** --- See [registry](#registry).
+- **Local registry source** --- A set of crates stored as compressed files on
+ the filesystem. See [Local Registry Sources].
+- **Directory source** --- A set of crates stored as uncompressed files on the
+ filesystem. See [Directory Sources].
+- **Path source** --- An individual package located on the filesystem (such as a
+ [path dependency]) or a set of multiple packages (such as [path overrides]).
+- **Git source** --- Packages located in a git repository (such as a [git
+ dependency] or [git source]).
+
+See [Source Replacement] for more information.
+
+### Spec
+
+See [package ID specification](#package).
+
+### Target
+
+The meaning of the term *target* depends on the context:
+
+- **Cargo Target** --- Cargo [*packages*](#package) consist of *targets* which
+ correspond to [*artifacts*](#artifact) that will be produced. Packages can
+ have library, binary, example, test, and benchmark targets. The
+ [list of targets][targets] are configured in the `Cargo.toml`
+ [*manifest*](#manifest), often inferred automatically by the [directory
+ layout] of the source files.
+- **Target Directory** --- Cargo places all built artifacts and intermediate
+ files in the *target* directory. By default this is a directory named
+ `target` at the [*workspace*](#workspace) root, or the package root if not
+ using a workspace. The directory may be changed with the `--target-dir`
+ command-line option, the `CARGO_TARGET_DIR` [environment variable], or the
+ `build.target-dir` [config option].
+- **Target Architecture** --- The OS and machine architecture for the built
+ artifacts are typically referred to as a *target*.
+- **Target Triple** --- A triple is a specific format for specifying a target
+ architecture. Triples may be referred to as a *target triple* which is the
+ architecture for the artifact produced, and the *host triple* which is the
+ architecture that the compiler is running on. The target triple can be
+ specified with the `--target` command-line option or the `build.target`
+ [config option]. The general format of the triple is
+ `<arch><sub>-<vendor>-<sys>-<abi>` where:
+
+ - `arch` = The base CPU architecture, for example `x86_64`, `i686`, `arm`,
+ `thumb`, `mips`, etc.
+ - `sub` = The CPU sub-architecture, for example `arm` has `v7`, `v7s`,
+ `v5te`, etc.
+ - `vendor` = The vendor, for example `unknown`, `apple`, `pc`, `nvidia`, etc.
+ - `sys` = The system name, for example `linux`, `windows`, `darwin`, etc.
+ `none` is typically used for bare-metal without an OS.
+ - `abi` = The ABI, for example `gnu`, `android`, `eabi`, etc.
+
+ Some parameters may be omitted. Run `rustc --print target-list` for a list of
+ supported targets.
+
+### Test Targets
+
+Cargo *test targets* generate binaries which help verify proper operation and
+correctness of code. There are two types of test artifacts:
+
+* **Unit test** --- A *unit test* is an executable binary compiled directly from
+ a library or a binary target. It contains the entire contents of the library
+ or binary code, and runs `#[test]` annotated functions, intended to verify
+ individual units of code.
+* **Integration test target** --- An [*integration test
+ target*][integration-tests] is an executable binary compiled from a *test
+ target* which is a distinct [*crate*](#crate) whose source is located in the
+ `tests` directory or specified by the [`[[test]]` table][targets] in the
+ `Cargo.toml` [*manifest*](#manifest). It is intended to only test the public
+ API of a library, or execute a binary to verify its operation.
+
+### Workspace
+
+A [*workspace*][workspace] is a collection of one or more
+[*packages*](#package) that share common dependency resolution (with a shared
+`Cargo.lock` [*lock file*](#lock-file)), output directory, and various
+settings such as profiles.
+
+A [*virtual workspace*][virtual] is a workspace where the root `Cargo.toml`
+[*manifest*](#manifest) does not define a package, and only lists the
+workspace [*members*](#member).
+
+The *workspace root* is the directory where the workspace's `Cargo.toml`
+manifest is located. (Compare with [*package root*](#package).)
+
+
+[Cargo.toml vs Cargo.lock]: ../guide/cargo-toml-vs-cargo-lock.md
+[Directory Sources]: ../reference/source-replacement.md#directory-sources
+[Local Registry Sources]: ../reference/source-replacement.md#local-registry-sources
+[Source Replacement]: ../reference/source-replacement.md
+[cargo-unstable]: ../reference/unstable.md
+[config option]: ../reference/config.md
+[crates.io]: https://crates.io/
+[directory layout]: ../guide/project-layout.md
+[edition guide]: ../../edition-guide/index.html
+[edition-field]: ../reference/manifest.md#the-edition-field
+[environment variable]: ../reference/environment-variables.md
+[feature]: ../reference/features.md
+[git dependency]: ../reference/specifying-dependencies.md#specifying-dependencies-from-git-repositories
+[git source]: ../reference/source-replacement.md
+[integration-tests]: ../reference/cargo-targets.md#integration-tests
+[manifest]: ../reference/manifest.md
+[path dependency]: ../reference/specifying-dependencies.md#specifying-path-dependencies
+[path overrides]: ../reference/overriding-dependencies.md#paths-overrides
+[pkgid-spec]: ../reference/pkgid-spec.md
+[rustdoc-unstable]: https://doc.rust-lang.org/nightly/rustdoc/unstable-features.html
+[target-feature]: ../../reference/attributes/codegen.html#the-target_feature-attribute
+[targets]: ../reference/cargo-targets.md#configuring-a-target
+[unstable-book]: https://doc.rust-lang.org/nightly/unstable-book/index.html
+[virtual]: ../reference/workspaces.md
+[workspace]: ../reference/workspaces.md
diff --git a/src/tools/cargo/src/doc/src/commands/build-commands.md b/src/tools/cargo/src/doc/src/commands/build-commands.md
new file mode 100644
index 000000000..5cc6fff96
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/build-commands.md
@@ -0,0 +1,13 @@
+# Build Commands
+* [cargo bench](cargo-bench.md)
+* [cargo build](cargo-build.md)
+* [cargo check](cargo-check.md)
+* [cargo clean](cargo-clean.md)
+* [cargo doc](cargo-doc.md)
+* [cargo fetch](cargo-fetch.md)
+* [cargo fix](cargo-fix.md)
+* [cargo report](cargo-report.md)
+* [cargo run](cargo-run.md)
+* [cargo rustc](cargo-rustc.md)
+* [cargo rustdoc](cargo-rustdoc.md)
+* [cargo test](cargo-test.md)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-add.md b/src/tools/cargo/src/doc/src/commands/cargo-add.md
new file mode 100644
index 000000000..89c1cc6f1
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-add.md
@@ -0,0 +1,283 @@
+# cargo-add(1)
+
+
+
+
+## NAME
+
+cargo-add --- Add dependencies to a Cargo.toml manifest file
+
+## SYNOPSIS
+
+`cargo add` [_options_] _crate_...\
+`cargo add` [_options_] `--path` _path_\
+`cargo add` [_options_] `--git` _url_ [_crate_...]
+
+
+## DESCRIPTION
+
+This command can add or modify dependencies.
+
+The source for the dependency can be specified with:
+
+* _crate_`@`_version_: Fetch from a registry with a version constraint of "_version_"
+* `--path` _path_: Fetch from the specified _path_
+* `--git` _url_: Pull from a git repo at _url_
+
+If no source is specified, then a best effort will be made to select one, including:
+
+* Existing dependencies in other tables (like `dev-dependencies`)
+* Workspace members
+* Latest release in the registry
+
+When you add a package that is already present, the existing entry will be updated with the flags specified.
+
+Upon successful invocation, the enabled (`+`) and disabled (`-`) [features] of the specified
+dependency will be listed in the command's output.
+
+[features]: ../reference/features.md
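+
+As a rough sketch (crate names, versions, and the exact formatting written by
+`cargo add` are only illustrative), the three source forms described above
+correspond to manifest entries along these lines:
+
+```toml
+[dependencies]
+regex = "1.8.4"                                  # cargo add regex@1.8.4
+bar = { path = "../bar" }                        # cargo add --path ../bar
+baz = { git = "https://github.com/example/baz" } # cargo add --git <url>
+```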
+
+## OPTIONS
+
+### Source options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-add---git"><a class="option-anchor" href="#option-cargo-add---git"></a><code>--git</code> <em>url</em></dt>
+<dd class="option-desc"><a href="../reference/specifying-dependencies.html#specifying-dependencies-from-git-repositories">Git URL to add the specified crate from</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---branch"><a class="option-anchor" href="#option-cargo-add---branch"></a><code>--branch</code> <em>branch</em></dt>
+<dd class="option-desc">Branch to use when adding from git.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---tag"><a class="option-anchor" href="#option-cargo-add---tag"></a><code>--tag</code> <em>tag</em></dt>
+<dd class="option-desc">Tag to use when adding from git.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---rev"><a class="option-anchor" href="#option-cargo-add---rev"></a><code>--rev</code> <em>sha</em></dt>
+<dd class="option-desc">Specific commit to use when adding from git.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---path"><a class="option-anchor" href="#option-cargo-add---path"></a><code>--path</code> <em>path</em></dt>
+<dd class="option-desc"><a href="../reference/specifying-dependencies.html#specifying-path-dependencies">Filesystem path</a> to local crate to add.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---registry"><a class="option-anchor" href="#option-cargo-add---registry"></a><code>--registry</code> <em>registry</em></dt>
+<dd class="option-desc">Name of the registry to use. Registry names are defined in <a href="../reference/config.html">Cargo config
+files</a>. If not specified, the default registry is used,
+which is defined by the <code>registry.default</code> config key which defaults to
+<code>crates-io</code>.</dd>
+
+
+
+</dl>
+
+### Section options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-add---dev"><a class="option-anchor" href="#option-cargo-add---dev"></a><code>--dev</code></dt>
+<dd class="option-desc">Add as a <a href="../reference/specifying-dependencies.html#development-dependencies">development dependency</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---build"><a class="option-anchor" href="#option-cargo-add---build"></a><code>--build</code></dt>
+<dd class="option-desc">Add as a <a href="../reference/specifying-dependencies.html#build-dependencies">build dependency</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---target"><a class="option-anchor" href="#option-cargo-add---target"></a><code>--target</code> <em>target</em></dt>
+<dd class="option-desc">Add as a dependency to the <a href="../reference/specifying-dependencies.html#platform-specific-dependencies">given target platform</a>.</p>
+<p>To avoid unexpected shell expansions, you may use quotes around each target, e.g., <code>--target 'cfg(unix)'</code>.</dd>
+
+
+</dl>
+
+### Dependency options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-add---dry-run"><a class="option-anchor" href="#option-cargo-add---dry-run"></a><code>--dry-run</code></dt>
+<dd class="option-desc">Don’t actually write the manifest.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---rename"><a class="option-anchor" href="#option-cargo-add---rename"></a><code>--rename</code> <em>name</em></dt>
+<dd class="option-desc"><a href="../reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml">Rename</a> the dependency.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---optional"><a class="option-anchor" href="#option-cargo-add---optional"></a><code>--optional</code></dt>
+<dd class="option-desc">Mark the dependency as <a href="../reference/features.html#optional-dependencies">optional</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---no-optional"><a class="option-anchor" href="#option-cargo-add---no-optional"></a><code>--no-optional</code></dt>
+<dd class="option-desc">Mark the dependency as <a href="../reference/features.html#optional-dependencies">required</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---no-default-features"><a class="option-anchor" href="#option-cargo-add---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Disable the <a href="../reference/features.html#dependency-features">default features</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---default-features"><a class="option-anchor" href="#option-cargo-add---default-features"></a><code>--default-features</code></dt>
+<dd class="option-desc">Re-enable the <a href="../reference/features.html#dependency-features">default features</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-add--F"><a class="option-anchor" href="#option-cargo-add--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-add---features"><a class="option-anchor" href="#option-cargo-add---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of <a href="../reference/features.html#dependency-features">features to
+activate</a>. When adding multiple
+crates, the features for a specific crate may be enabled with
+<code>package-name/feature-name</code> syntax. This flag may be specified multiple times,
+which enables all specified features.</dd>
+
+
+</dl>
+
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-add--v"><a class="option-anchor" href="#option-cargo-add--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-add---verbose"><a class="option-anchor" href="#option-cargo-add---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-add--q"><a class="option-anchor" href="#option-cargo-add--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-add---quiet"><a class="option-anchor" href="#option-cargo-add---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---color"><a class="option-anchor" href="#option-cargo-add---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-add---manifest-path"><a class="option-anchor" href="#option-cargo-add---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-add--p"><a class="option-anchor" href="#option-cargo-add--p"></a><code>-p</code> <em>spec</em></dt>
+<dt class="option-term" id="option-cargo-add---package"><a class="option-anchor" href="#option-cargo-add---package"></a><code>--package</code> <em>spec</em></dt>
+<dd class="option-desc">Add dependencies to only the specified package.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---frozen"><a class="option-anchor" href="#option-cargo-add---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-add---locked"><a class="option-anchor" href="#option-cargo-add---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---offline"><a class="option-anchor" href="#option-cargo-add---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-add-+toolchain"><a class="option-anchor" href="#option-cargo-add-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-add---config"><a class="option-anchor" href="#option-cargo-add---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-add--C"><a class="option-anchor" href="#option-cargo-add--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-add--h"><a class="option-anchor" href="#option-cargo-add--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-add---help"><a class="option-anchor" href="#option-cargo-add---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-add--Z"><a class="option-anchor" href="#option-cargo-add--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Add `regex` as a dependency
+
+ cargo add regex
+
+2. Add `trybuild` as a dev-dependency
+
+ cargo add --dev trybuild
+
+3. Add an older version of `nom` as a dependency
+
+ cargo add nom@5
+
+4. Add support for serializing data structures to json with `derive`s
+
+ cargo add serde serde_json -F serde/derive
+
+5. Add `windows` as a platform specific dependency on `cfg(windows)`
+
+ cargo add windows --target 'cfg(windows)'
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-remove(1)](cargo-remove.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-bench.md b/src/tools/cargo/src/doc/src/commands/cargo-bench.md
new file mode 100644
index 000000000..e7e9b36fb
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-bench.md
@@ -0,0 +1,508 @@
+# cargo-bench(1)
+
+
+
+
+## NAME
+
+cargo-bench --- Execute benchmarks of a package
+
+## SYNOPSIS
+
+`cargo bench` [_options_] [_benchname_] [`--` _bench-options_]
+
+## DESCRIPTION
+
+Compile and execute benchmarks.
+
+The benchmark filtering argument _benchname_ and all the arguments following
+the two dashes (`--`) are passed to the benchmark binaries and thus to
+_libtest_ (rustc's built-in unit-test and micro-benchmarking framework). If
+you are passing arguments to both Cargo and the binary, the ones after `--` go
+to the binary, the ones before go to Cargo. For details about libtest's
+arguments see the output of `cargo bench -- --help` and check out the rustc
+book's chapter on how tests work at
+<https://doc.rust-lang.org/rustc/tests/index.html>.
+
+As an example, this will run only the benchmark named `foo` (and skip other
+similarly named benchmarks like `foobar`):
+
+ cargo bench -- foo --exact
+
+Benchmarks are built with the `--test` option to `rustc` which creates a
+special executable by linking your code with libtest. The executable
+automatically runs all functions annotated with the `#[bench]` attribute.
+Cargo passes the `--bench` flag to the test harness to tell it to run
+only benchmarks.
+
+The libtest harness may be disabled by setting `harness = false` in the target
+manifest settings, in which case your code will need to provide its own `main`
+function to handle running benchmarks.
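+
+As a sketch (the target name and path are only illustrative), a benchmark
+target that provides its own `main` would be declared like this:
+
+```toml
+[[bench]]
+name = "my_benchmark"
+path = "benches/my_benchmark.rs"
+harness = false
+```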
+
+> **Note**: The
+> [`#[bench]` attribute](https://doc.rust-lang.org/nightly/unstable-book/library-features/test.html)
+> is currently unstable and only available on the
+> [nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html).
+> There are some packages available on
+> [crates.io](https://crates.io/keywords/benchmark) that may help with
+> running benchmarks on the stable channel, such as
+> [Criterion](https://crates.io/crates/criterion).
+
+By default, `cargo bench` uses the [`bench` profile], which enables
+optimizations and disables debugging information. If you need to debug a
+benchmark, you can use the `--profile=dev` command-line option to switch to
+the dev profile. You can then run the debug-enabled benchmark within a
+debugger.
+
+[`bench` profile]: ../reference/profiles.html#bench
+
+### Working directory of benchmarks
+
+The working directory of every benchmark is set to the root directory of the
+package the benchmark belongs to.
+Setting the working directory of benchmarks to the package's root directory
+makes it possible for benchmarks to reliably access the package's files using
+relative paths, regardless of where `cargo bench` was executed from.
+
+## OPTIONS
+
+### Benchmark Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-bench---no-run"><a class="option-anchor" href="#option-cargo-bench---no-run"></a><code>--no-run</code></dt>
+<dd class="option-desc">Compile, but don’t run benchmarks.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---no-fail-fast"><a class="option-anchor" href="#option-cargo-bench---no-fail-fast"></a><code>--no-fail-fast</code></dt>
+<dd class="option-desc">Run all benchmarks regardless of failure. Without this flag, Cargo will exit
+after the first executable fails. The Rust test harness will run all benchmarks
+within the executable to completion; this flag only applies to the executable
+as a whole.</dd>
+
+
+</dl>
+
+
+### Package Selection
+
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+`--manifest-path` is not given). If the manifest is the root of a workspace then
+the workspace's default members are selected, otherwise only the package defined
+by the manifest will be selected.
+
+The default members of a workspace can be set explicitly with the
+`workspace.default-members` key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+`--workspace`), and a non-virtual workspace will include only the root crate itself.
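+
+As a sketch (the member names are only illustrative), a root manifest that
+restricts the default selection to a single member might look like this:
+
+```toml
+[workspace]
+members = ["crates/cli", "crates/core"]
+default-members = ["crates/cli"]
+```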
+
+<dl>
+
+<dt class="option-term" id="option-cargo-bench--p"><a class="option-anchor" href="#option-cargo-bench--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-bench---package"><a class="option-anchor" href="#option-cargo-bench---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Benchmark only the specified packages. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---workspace"><a class="option-anchor" href="#option-cargo-bench---workspace"></a><code>--workspace</code></dt>
+<dd class="option-desc">Benchmark all members in the workspace.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-bench---all"><a class="option-anchor" href="#option-cargo-bench---all"></a><code>--all</code></dt>
+<dd class="option-desc">Deprecated alias for <code>--workspace</code>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-bench---exclude"><a class="option-anchor" href="#option-cargo-bench---exclude"></a><code>--exclude</code> <em>SPEC</em>…</dt>
+<dd class="option-desc">Exclude the specified packages. Must be used in conjunction with the
+<code>--workspace</code> flag. This flag may be specified multiple times and supports
+common Unix glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.</dd>
+
+
+</dl>
+
+
+### Target Selection
+
+When no target selection options are given, `cargo bench` will build the
+following targets of the selected packages:
+
+- lib --- used to link with binaries and benchmarks
+- bins (only if benchmark targets are built and required features are
+ available)
+- lib as a benchmark
+- bins as benchmarks
+- benchmark targets
+
+The default behavior can be changed by setting the `bench` flag for the target
+in the manifest settings. Setting examples to `bench = true` will build and
+run the example as a benchmark. Setting targets to `bench = false` will stop
+them from being benchmarked by default. Target selection options that take a
+target by name ignore the `bench` flag and will always benchmark the given
+target.
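+
+As a sketch (the target names are only illustrative), the following manifest
+settings opt an example into benchmarking and opt the library out of it:
+
+```toml
+[[example]]
+name = "my_example"
+path = "examples/my_example.rs"
+bench = true
+
+[lib]
+bench = false
+```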
+
+Binary targets are automatically built if there is an integration test or
+benchmark being selected to benchmark. This allows an integration
+test to execute the binary to exercise and test its behavior.
+The `CARGO_BIN_EXE_<name>`
+[environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates)
+is set when the integration test is built so that it can use the
+[`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the
+executable.
+
+
+Passing target selection flags will benchmark only the specified
+targets.
+
+Note that `--bin`, `--example`, `--test` and `--bench` flags also
+support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-bench---lib"><a class="option-anchor" href="#option-cargo-bench---lib"></a><code>--lib</code></dt>
+<dd class="option-desc">Benchmark the package’s library.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---bin"><a class="option-anchor" href="#option-cargo-bench---bin"></a><code>--bin</code> <em>name</em>…</dt>
+<dd class="option-desc">Benchmark the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---bins"><a class="option-anchor" href="#option-cargo-bench---bins"></a><code>--bins</code></dt>
+<dd class="option-desc">Benchmark all binary targets.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-bench---example"><a class="option-anchor" href="#option-cargo-bench---example"></a><code>--example</code> <em>name</em>…</dt>
+<dd class="option-desc">Benchmark the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---examples"><a class="option-anchor" href="#option-cargo-bench---examples"></a><code>--examples</code></dt>
+<dd class="option-desc">Benchmark all example targets.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---test"><a class="option-anchor" href="#option-cargo-bench---test"></a><code>--test</code> <em>name</em>…</dt>
+<dd class="option-desc">Benchmark the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---tests"><a class="option-anchor" href="#option-cargo-bench---tests"></a><code>--tests</code></dt>
+<dd class="option-desc">Benchmark all targets in test mode that have the <code>test = true</code> manifest
+flag set. By default this includes the library and binaries built as
+unittests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unittest, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the <code>test</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---bench"><a class="option-anchor" href="#option-cargo-bench---bench"></a><code>--bench</code> <em>name</em>…</dt>
+<dd class="option-desc">Benchmark the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---benches"><a class="option-anchor" href="#option-cargo-bench---benches"></a><code>--benches</code></dt>
+<dd class="option-desc">Benchmark all targets in benchmark mode that have the <code>bench = true</code>
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the <code>bench</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---all-targets"><a class="option-anchor" href="#option-cargo-bench---all-targets"></a><code>--all-targets</code></dt>
+<dd class="option-desc">Benchmark all targets. This is equivalent to specifying <code>--lib --bins --tests --benches --examples</code>.</dd>
+
+
+</dl>
+
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-bench--F"><a class="option-anchor" href="#option-cargo-bench--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-bench---features"><a class="option-anchor" href="#option-cargo-bench---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---all-features"><a class="option-anchor" href="#option-cargo-bench---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---no-default-features"><a class="option-anchor" href="#option-cargo-bench---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-bench---target"><a class="option-anchor" href="#option-cargo-bench---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Benchmark for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-bench---profile"><a class="option-anchor" href="#option-cargo-bench---profile"></a><code>--profile</code> <em>name</em></dt>
+<dd class="option-desc">Benchmark with the given profile.
+See <a href="../reference/profiles.html">the reference</a> for more details on profiles.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-bench---ignore-rust-version"><a class="option-anchor" href="#option-cargo-bench---ignore-rust-version"></a><code>--ignore-rust-version</code></dt>
+<dd class="option-desc">Benchmark the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project’s <code>rust-version</code> field.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-bench---timings=fmts"><a class="option-anchor" href="#option-cargo-bench---timings=fmts"></a><code>--timings=</code><em>fmts</em></dt>
+<dd class="option-desc">Output information how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma-separated list of output
+formats; <code>--timings</code> without an argument will default to <code>--timings=html</code>.
+Specifying an output format (rather than the default) is unstable and requires
+<code>-Zunstable-options</code>. Valid output formats:</p>
+<ul>
+<li><code>html</code> (unstable, requires <code>-Zunstable-options</code>): Write a human-readable file <code>cargo-timing.html</code> to the
+<code>target/cargo-timings</code> directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine-readable timing data.</li>
+<li><code>json</code> (unstable, requires <code>-Zunstable-options</code>): Emit machine-readable JSON
+information about timings.</li>
+</ul></dd>
+
+
+
+
+</dl>
+
+### Output Options
+
+<dl>
+<dt class="option-term" id="option-cargo-bench---target-dir"><a class="option-anchor" href="#option-cargo-bench---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+</dl>
+
+### Display Options
+
+By default the Rust test harness hides output from benchmark execution to keep
+results readable. Benchmark output can be recovered (e.g., for debugging) by
+passing `--nocapture` to the benchmark binaries:
+
+ cargo bench -- --nocapture
+
+<dl>
+
+<dt class="option-term" id="option-cargo-bench--v"><a class="option-anchor" href="#option-cargo-bench--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-bench---verbose"><a class="option-anchor" href="#option-cargo-bench---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench--q"><a class="option-anchor" href="#option-cargo-bench--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-bench---quiet"><a class="option-anchor" href="#option-cargo-bench---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---color"><a class="option-anchor" href="#option-cargo-bench---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-bench---message-format"><a class="option-anchor" href="#option-cargo-bench---message-format"></a><code>--message-format</code> <em>fmt</em></dt>
+<dd class="option-desc">The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:</p>
+<ul>
+<li><code>human</code> (default): Display in a human-readable text format. Conflicts with
+<code>short</code> and <code>json</code>.</li>
+<li><code>short</code>: Emit shorter, human-readable text messages. Conflicts with <code>human</code>
+and <code>json</code>.</li>
+<li><code>json</code>: Emit JSON messages to stdout. See
+<a href="../reference/external-tools.html#json-messages">the reference</a>
+for more details. Conflicts with <code>human</code> and <code>short</code>.</li>
+<li><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
+the “short” rendering from rustc. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
+contains embedded ANSI color codes for respecting rustc’s default color
+scheme. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
+in JSON messages printed, but instead Cargo itself should render the
+JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with <code>human</code> or <code>short</code>.</li>
+</ul></dd>
+
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-bench---manifest-path"><a class="option-anchor" href="#option-cargo-bench---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-bench---frozen"><a class="option-anchor" href="#option-cargo-bench---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-bench---locked"><a class="option-anchor" href="#option-cargo-bench---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---offline"><a class="option-anchor" href="#option-cargo-bench---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-bench-+toolchain"><a class="option-anchor" href="#option-cargo-bench-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---config"><a class="option-anchor" href="#option-cargo-bench---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench--C"><a class="option-anchor" href="#option-cargo-bench--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-bench--h"><a class="option-anchor" href="#option-cargo-bench--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-bench---help"><a class="option-anchor" href="#option-cargo-bench---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench--Z"><a class="option-anchor" href="#option-cargo-bench--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+### Miscellaneous Options
+
+The `--jobs` argument affects the building of the benchmark executable but
+does not affect how many threads are used when running the benchmarks. The
+Rust test harness runs benchmarks serially in a single thread.
+
+<dl>
+<dt class="option-term" id="option-cargo-bench--j"><a class="option-anchor" href="#option-cargo-bench--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-bench---jobs"><a class="option-anchor" href="#option-cargo-bench---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+The value should not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-bench---keep-going"><a class="option-anchor" href="#option-cargo-bench---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+</dl>
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Build and execute all the benchmarks of the current package:
+
+ cargo bench
+
+2. Run only a specific benchmark within a specific benchmark target:
+
+ cargo bench --bench bench_name -- modname::some_benchmark
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-test(1)](cargo-test.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-build.md b/src/tools/cargo/src/doc/src/commands/cargo-build.md
new file mode 100644
index 000000000..6e3cf157a
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-build.md
@@ -0,0 +1,446 @@
+# cargo-build(1)
+
+
+
+## NAME
+
+cargo-build --- Compile the current package
+
+## SYNOPSIS
+
+`cargo build` [_options_]
+
+## DESCRIPTION
+
+Compile local packages and all of their dependencies.
+
+## OPTIONS
+
+### Package Selection
+
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+`--manifest-path` is not given). If the manifest is the root of a workspace then
+the workspace's default members are selected, otherwise only the package defined
+by the manifest will be selected.
+
+The default members of a workspace can be set explicitly with the
+`workspace.default-members` key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+`--workspace`), and a non-virtual workspace will include only the root crate itself.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-build--p"><a class="option-anchor" href="#option-cargo-build--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-build---package"><a class="option-anchor" href="#option-cargo-build---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Build only the specified packages. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---workspace"><a class="option-anchor" href="#option-cargo-build---workspace"></a><code>--workspace</code></dt>
+<dd class="option-desc">Build all members in the workspace.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-build---all"><a class="option-anchor" href="#option-cargo-build---all"></a><code>--all</code></dt>
+<dd class="option-desc">Deprecated alias for <code>--workspace</code>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-build---exclude"><a class="option-anchor" href="#option-cargo-build---exclude"></a><code>--exclude</code> <em>SPEC</em>…</dt>
+<dd class="option-desc">Exclude the specified packages. Must be used in conjunction with the
+<code>--workspace</code> flag. This flag may be specified multiple times and supports
+common Unix glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.</dd>
+
+
+</dl>
+
+
+### Target Selection
+
+When no target selection options are given, `cargo build` will build all
+binary and library targets of the selected packages. Binaries are skipped if
+they have `required-features` that are missing.
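+
+As a sketch (the feature and binary names are only illustrative), the
+following binary is skipped by `cargo build` unless the `cli` feature is
+enabled (for example with `--features cli`):
+
+```toml
+[features]
+cli = []
+
+[[bin]]
+name = "my-tool"
+path = "src/bin/my-tool.rs"
+required-features = ["cli"]
+```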
+
+Binary targets are automatically built if there is an integration test or
+benchmark being selected to build. This allows an integration
+test to execute the binary to exercise and test its behavior.
+The `CARGO_BIN_EXE_<name>`
+[environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates)
+is set when the integration test is built so that it can use the
+[`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the
+executable.
+
+
+Passing target selection flags will build only the specified
+targets.
+
+Note that `--bin`, `--example`, `--test` and `--bench` flags also
+support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-build---lib"><a class="option-anchor" href="#option-cargo-build---lib"></a><code>--lib</code></dt>
+<dd class="option-desc">Build the package’s library.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---bin"><a class="option-anchor" href="#option-cargo-build---bin"></a><code>--bin</code> <em>name</em>…</dt>
+<dd class="option-desc">Build the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---bins"><a class="option-anchor" href="#option-cargo-build---bins"></a><code>--bins</code></dt>
+<dd class="option-desc">Build all binary targets.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-build---example"><a class="option-anchor" href="#option-cargo-build---example"></a><code>--example</code> <em>name</em>…</dt>
+<dd class="option-desc">Build the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---examples"><a class="option-anchor" href="#option-cargo-build---examples"></a><code>--examples</code></dt>
+<dd class="option-desc">Build all example targets.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---test"><a class="option-anchor" href="#option-cargo-build---test"></a><code>--test</code> <em>name</em>…</dt>
+<dd class="option-desc">Build the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---tests"><a class="option-anchor" href="#option-cargo-build---tests"></a><code>--tests</code></dt>
+<dd class="option-desc">Build all targets in test mode that have the <code>test = true</code> manifest
+flag set. By default this includes the library and binaries built as
+unittests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unittest, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the <code>test</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---bench"><a class="option-anchor" href="#option-cargo-build---bench"></a><code>--bench</code> <em>name</em>…</dt>
+<dd class="option-desc">Build the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---benches"><a class="option-anchor" href="#option-cargo-build---benches"></a><code>--benches</code></dt>
+<dd class="option-desc">Build all targets in benchmark mode that have the <code>bench = true</code>
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the <code>bench</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---all-targets"><a class="option-anchor" href="#option-cargo-build---all-targets"></a><code>--all-targets</code></dt>
+<dd class="option-desc">Build all targets. This is equivalent to specifying <code>--lib --bins --tests --benches --examples</code>.</dd>
+
+
+</dl>
+
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-build--F"><a class="option-anchor" href="#option-cargo-build--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-build---features"><a class="option-anchor" href="#option-cargo-build---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---all-features"><a class="option-anchor" href="#option-cargo-build---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---no-default-features"><a class="option-anchor" href="#option-cargo-build---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-build---target"><a class="option-anchor" href="#option-cargo-build---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Build for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-build--r"><a class="option-anchor" href="#option-cargo-build--r"></a><code>-r</code></dt>
+<dt class="option-term" id="option-cargo-build---release"><a class="option-anchor" href="#option-cargo-build---release"></a><code>--release</code></dt>
+<dd class="option-desc">Build optimized artifacts with the <code>release</code> profile.
+See also the <code>--profile</code> option for choosing a specific profile by name.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-build---profile"><a class="option-anchor" href="#option-cargo-build---profile"></a><code>--profile</code> <em>name</em></dt>
+<dd class="option-desc">Build with the given profile.
+See <a href="../reference/profiles.html">the reference</a> for more details on profiles.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-build---ignore-rust-version"><a class="option-anchor" href="#option-cargo-build---ignore-rust-version"></a><code>--ignore-rust-version</code></dt>
+<dd class="option-desc">Build the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project’s <code>rust-version</code> field.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-build---timings=fmts"><a class="option-anchor" href="#option-cargo-build---timings=fmts"></a><code>--timings=</code><em>fmts</em></dt>
+<dd class="option-desc">Output information how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma-separated list of output
+formats; <code>--timings</code> without an argument will default to <code>--timings=html</code>.
+Specifying an output format (rather than the default) is unstable and requires
+<code>-Zunstable-options</code>. Valid output formats:</p>
+<ul>
+<li><code>html</code> (unstable, requires <code>-Zunstable-options</code>): Write a human-readable file <code>cargo-timing.html</code> to the
+<code>target/cargo-timings</code> directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine-readable timing data.</li>
+<li><code>json</code> (unstable, requires <code>-Zunstable-options</code>): Emit machine-readable JSON
+information about timings.</li>
+</ul></dd>
+
+
+
+
+</dl>
+
+### Output Options
+
+<dl>
+<dt class="option-term" id="option-cargo-build---target-dir"><a class="option-anchor" href="#option-cargo-build---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-build---out-dir"><a class="option-anchor" href="#option-cargo-build---out-dir"></a><code>--out-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Copy final artifacts to this directory.</p>
+<p>This option is unstable and available only on the
+<a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly channel</a>
+and requires the <code>-Z unstable-options</code> flag to enable.
+See <a href="https://github.com/rust-lang/cargo/issues/6790">https://github.com/rust-lang/cargo/issues/6790</a> for more information.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-build--v"><a class="option-anchor" href="#option-cargo-build--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-build---verbose"><a class="option-anchor" href="#option-cargo-build---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-build--q"><a class="option-anchor" href="#option-cargo-build--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-build---quiet"><a class="option-anchor" href="#option-cargo-build---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---color"><a class="option-anchor" href="#option-cargo-build---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-build---message-format"><a class="option-anchor" href="#option-cargo-build---message-format"></a><code>--message-format</code> <em>fmt</em></dt>
+<dd class="option-desc">The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:</p>
+<ul>
+<li><code>human</code> (default): Display in a human-readable text format. Conflicts with
+<code>short</code> and <code>json</code>.</li>
+<li><code>short</code>: Emit shorter, human-readable text messages. Conflicts with <code>human</code>
+and <code>json</code>.</li>
+<li><code>json</code>: Emit JSON messages to stdout. See
+<a href="../reference/external-tools.html#json-messages">the reference</a>
+for more details. Conflicts with <code>human</code> and <code>short</code>.</li>
+<li><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
+the “short” rendering from rustc. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
+contains embedded ANSI color codes for respecting rustc’s default color
+scheme. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-render-diagnostics</code>: Instruct Cargo not to include rustc diagnostics
+in the JSON messages it prints; instead, Cargo itself renders the
+JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with <code>human</code> or <code>short</code>.</li>
+</ul></dd>
+
+
+
+<dt class="option-term" id="option-cargo-build---build-plan"><a class="option-anchor" href="#option-cargo-build---build-plan"></a><code>--build-plan</code></dt>
+<dd class="option-desc">Outputs a series of JSON messages to stdout that indicate the commands to run
+the build.</p>
+<p>This option is unstable and available only on the
+<a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly channel</a>
+and requires the <code>-Z unstable-options</code> flag to enable.
+See <a href="https://github.com/rust-lang/cargo/issues/5579">https://github.com/rust-lang/cargo/issues/5579</a> for more information.</dd>
+
+</dl>
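+
+As an example of combining the display flags above, a tool consuming Cargo's
+output might request quiet, JSON-only messages, while an interactive session
+might ask for colored, very verbose output:
+
+    cargo build --quiet --message-format=json-render-diagnostics
+    cargo build -vv --color=always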
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-build---manifest-path"><a class="option-anchor" href="#option-cargo-build---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-build---frozen"><a class="option-anchor" href="#option-cargo-build---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-build---locked"><a class="option-anchor" href="#option-cargo-build---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---offline"><a class="option-anchor" href="#option-cargo-build---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
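+
+For instance, one possible workflow for a CI job that must not touch the
+network is to download dependencies first and then build against the pinned
+lock file:
+
+    cargo fetch
+    cargo build --locked --offline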
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-build-+toolchain"><a class="option-anchor" href="#option-cargo-build-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---config"><a class="option-anchor" href="#option-cargo-build---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-build--C"><a class="option-anchor" href="#option-cargo-build--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-build--h"><a class="option-anchor" href="#option-cargo-build--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-build---help"><a class="option-anchor" href="#option-cargo-build---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-build--Z"><a class="option-anchor" href="#option-cargo-build--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
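+
+For example, assuming rustup is installed, a single invocation can select the
+nightly toolchain and override a configuration value on the command line (the
+value and path below are only illustrations):
+
+    cargo +nightly --config build.jobs=2 build
+    cargo +nightly -Z unstable-options -C path/to/my-project build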
+
+
+### Miscellaneous Options
+
+<dl>
+<dt class="option-term" id="option-cargo-build--j"><a class="option-anchor" href="#option-cargo-build--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-build---jobs"><a class="option-anchor" href="#option-cargo-build---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+Should not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---keep-going"><a class="option-anchor" href="#option-cargo-build---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+<dt class="option-term" id="option-cargo-build---future-incompat-report"><a class="option-anchor" href="#option-cargo-build---future-incompat-report"></a><code>--future-incompat-report</code></dt>
+<dd class="option-desc">Displays a future-incompat report for any future-incompatible warnings
+produced during execution of this command</p>
+<p>See <a href="cargo-report.html">cargo-report(1)</a></dd>
+
+
+</dl>
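+
+For example, the negative form of `--jobs` described above can leave two
+logical CPUs free for other work, and a future-incompat report can be requested
+for the same build:
+
+    cargo build --jobs=-2
+    cargo build --future-incompat-report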
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Build the local package and all of its dependencies:
+
+ cargo build
+
+2. Build with optimizations:
+
+ cargo build --release
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-rustc(1)](cargo-rustc.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-check.md b/src/tools/cargo/src/doc/src/commands/cargo-check.md
new file mode 100644
index 000000000..2070293ac
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-check.md
@@ -0,0 +1,427 @@
+# cargo-check(1)
+
+
+
+## NAME
+
+cargo-check --- Check the current package
+
+## SYNOPSIS
+
+`cargo check` [_options_]
+
+## DESCRIPTION
+
+Check a local package and all of its dependencies for errors. This will
+essentially compile the packages without performing the final step of code
+generation, which is faster than running `cargo build`. The compiler will save
+metadata files to disk so that future runs will reuse them if the source has
+not been modified. Some diagnostics and errors are only emitted during code
+generation, so they inherently won't be reported with `cargo check`.
+
+## OPTIONS
+
+### Package Selection
+
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+`--manifest-path` is not given). If the manifest is the root of a workspace then
+the workspace's default members are selected, otherwise only the package defined
+by the manifest will be selected.
+
+The default members of a workspace can be set explicitly with the
+`workspace.default-members` key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+`--workspace`), and a non-virtual workspace will include only the root crate itself.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-check--p"><a class="option-anchor" href="#option-cargo-check--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-check---package"><a class="option-anchor" href="#option-cargo-check---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Check only the specified packages. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---workspace"><a class="option-anchor" href="#option-cargo-check---workspace"></a><code>--workspace</code></dt>
+<dd class="option-desc">Check all members in the workspace.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-check---all"><a class="option-anchor" href="#option-cargo-check---all"></a><code>--all</code></dt>
+<dd class="option-desc">Deprecated alias for <code>--workspace</code>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-check---exclude"><a class="option-anchor" href="#option-cargo-check---exclude"></a><code>--exclude</code> <em>SPEC</em>…</dt>
+<dd class="option-desc">Exclude the specified packages. Must be used in conjunction with the
+<code>--workspace</code> flag. This flag may be specified multiple times and supports
+common Unix glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.</dd>
+
+
+</dl>
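+
+For example, the glob patterns described above must be quoted so that the shell
+passes them to Cargo unchanged (the package names here are placeholders):
+
+    cargo check --package 'foo-*'
+    cargo check --workspace --exclude 'examples-*'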
+
+
+### Target Selection
+
+When no target selection options are given, `cargo check` will check all
+binary and library targets of the selected packages. Binaries are skipped if
+they have `required-features` that are missing.
+
+Passing target selection flags will check only the specified
+targets.
+
+Note that `--bin`, `--example`, `--test` and `--bench` flags also
+support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-check---lib"><a class="option-anchor" href="#option-cargo-check---lib"></a><code>--lib</code></dt>
+<dd class="option-desc">Check the package’s library.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---bin"><a class="option-anchor" href="#option-cargo-check---bin"></a><code>--bin</code> <em>name</em>…</dt>
+<dd class="option-desc">Check the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---bins"><a class="option-anchor" href="#option-cargo-check---bins"></a><code>--bins</code></dt>
+<dd class="option-desc">Check all binary targets.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-check---example"><a class="option-anchor" href="#option-cargo-check---example"></a><code>--example</code> <em>name</em>…</dt>
+<dd class="option-desc">Check the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---examples"><a class="option-anchor" href="#option-cargo-check---examples"></a><code>--examples</code></dt>
+<dd class="option-desc">Check all example targets.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---test"><a class="option-anchor" href="#option-cargo-check---test"></a><code>--test</code> <em>name</em>…</dt>
+<dd class="option-desc">Check the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---tests"><a class="option-anchor" href="#option-cargo-check---tests"></a><code>--tests</code></dt>
+<dd class="option-desc">Check all targets in test mode that have the <code>test = true</code> manifest
+flag set. By default this includes the library and binaries built as
+unittests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unittest, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the <code>test</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---bench"><a class="option-anchor" href="#option-cargo-check---bench"></a><code>--bench</code> <em>name</em>…</dt>
+<dd class="option-desc">Check the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---benches"><a class="option-anchor" href="#option-cargo-check---benches"></a><code>--benches</code></dt>
+<dd class="option-desc">Check all targets in benchmark mode that have the <code>bench = true</code>
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the <code>bench</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---all-targets"><a class="option-anchor" href="#option-cargo-check---all-targets"></a><code>--all-targets</code></dt>
+<dd class="option-desc">Check all targets. This is equivalent to specifying <code>--lib --bins --tests --benches --examples</code>.</dd>
+
+
+</dl>
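+
+For instance, to check every target of the selected packages, or only the
+library and one binary (the binary name is illustrative):
+
+    cargo check --all-targets
+    cargo check --lib --bin my-cli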
+
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-check--F"><a class="option-anchor" href="#option-cargo-check--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-check---features"><a class="option-anchor" href="#option-cargo-check---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---all-features"><a class="option-anchor" href="#option-cargo-check---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---no-default-features"><a class="option-anchor" href="#option-cargo-check---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
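+
+For example, the default features can be replaced with an explicit list,
+including a feature of a workspace member (all feature and package names below
+are placeholders):
+
+    cargo check --no-default-features --features 'serde,net'
+    cargo check -F 'my-member/extra-feature'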
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-check---target"><a class="option-anchor" href="#option-cargo-check---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Check for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-check--r"><a class="option-anchor" href="#option-cargo-check--r"></a><code>-r</code></dt>
+<dt class="option-term" id="option-cargo-check---release"><a class="option-anchor" href="#option-cargo-check---release"></a><code>--release</code></dt>
+<dd class="option-desc">Check optimized artifacts with the <code>release</code> profile.
+See also the <code>--profile</code> option for choosing a specific profile by name.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-check---profile"><a class="option-anchor" href="#option-cargo-check---profile"></a><code>--profile</code> <em>name</em></dt>
+<dd class="option-desc">Check with the given profile.</p>
+<p>As a special case, specifying the <code>test</code> profile will also enable checking in
+test mode, which checks tests and enables the <code>test</code> cfg option.
+See <a href="https://doc.rust-lang.org/rustc/tests/index.html">rustc tests</a> for more
+detail.</p>
+<p>See <a href="../reference/profiles.html">the reference</a> for more details on profiles.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-check---ignore-rust-version"><a class="option-anchor" href="#option-cargo-check---ignore-rust-version"></a><code>--ignore-rust-version</code></dt>
+<dd class="option-desc">Check the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project’s <code>rust-version</code> field.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-check---timings=fmts"><a class="option-anchor" href="#option-cargo-check---timings=fmts"></a><code>--timings=</code><em>fmts</em></dt>
+<dd class="option-desc">Output information how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma-separated list of output
+formats; <code>--timings</code> without an argument will default to <code>--timings=html</code>.
+Specifying an output format (rather than the default) is unstable and requires
+<code>-Zunstable-options</code>. Valid output formats:</p>
+<ul>
+<li><code>html</code> (unstable, requires <code>-Zunstable-options</code>): Write a human-readable file <code>cargo-timing.html</code> to the
+<code>target/cargo-timings</code> directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine-readable timing data.</li>
+<li><code>json</code> (unstable, requires <code>-Zunstable-options</code>): Emit machine-readable JSON
+timing information.</li>
+</ul></dd>
+
+
+
+
+</dl>
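+
+For example, tests can be checked through the `test` profile, and a timing
+report can be generated in the default HTML format:
+
+    cargo check --profile=test
+    cargo check --timings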
+
+### Output Options
+
+<dl>
+<dt class="option-term" id="option-cargo-check---target-dir"><a class="option-anchor" href="#option-cargo-check---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-check--v"><a class="option-anchor" href="#option-cargo-check--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-check---verbose"><a class="option-anchor" href="#option-cargo-check---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-check--q"><a class="option-anchor" href="#option-cargo-check--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-check---quiet"><a class="option-anchor" href="#option-cargo-check---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---color"><a class="option-anchor" href="#option-cargo-check---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-check---message-format"><a class="option-anchor" href="#option-cargo-check---message-format"></a><code>--message-format</code> <em>fmt</em></dt>
+<dd class="option-desc">The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:</p>
+<ul>
+<li><code>human</code> (default): Display in a human-readable text format. Conflicts with
+<code>short</code> and <code>json</code>.</li>
+<li><code>short</code>: Emit shorter, human-readable text messages. Conflicts with <code>human</code>
+and <code>json</code>.</li>
+<li><code>json</code>: Emit JSON messages to stdout. See
+<a href="../reference/external-tools.html#json-messages">the reference</a>
+for more details. Conflicts with <code>human</code> and <code>short</code>.</li>
+<li><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
+the “short” rendering from rustc. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
+contains embedded ANSI color codes for respecting rustc’s default color
+scheme. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-render-diagnostics</code>: Instruct Cargo not to include rustc diagnostics
+in the JSON messages it prints; instead, Cargo itself renders the
+JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with <code>human</code> or <code>short</code>.</li>
+</ul></dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-check---manifest-path"><a class="option-anchor" href="#option-cargo-check---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-check---frozen"><a class="option-anchor" href="#option-cargo-check---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-check---locked"><a class="option-anchor" href="#option-cargo-check---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---offline"><a class="option-anchor" href="#option-cargo-check---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-check-+toolchain"><a class="option-anchor" href="#option-cargo-check-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---config"><a class="option-anchor" href="#option-cargo-check---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-check--C"><a class="option-anchor" href="#option-cargo-check--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-check--h"><a class="option-anchor" href="#option-cargo-check--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-check---help"><a class="option-anchor" href="#option-cargo-check---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-check--Z"><a class="option-anchor" href="#option-cargo-check--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+### Miscellaneous Options
+
+<dl>
+<dt class="option-term" id="option-cargo-check--j"><a class="option-anchor" href="#option-cargo-check--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-check---jobs"><a class="option-anchor" href="#option-cargo-check---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+Should not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---keep-going"><a class="option-anchor" href="#option-cargo-check---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+<dt class="option-term" id="option-cargo-check---future-incompat-report"><a class="option-anchor" href="#option-cargo-check---future-incompat-report"></a><code>--future-incompat-report</code></dt>
+<dd class="option-desc">Displays a future-incompat report for any future-incompatible warnings
+produced during execution of this command</p>
+<p>See <a href="cargo-report.html">cargo-report(1)</a></dd>
+
+
+</dl>
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Check the local package for errors:
+
+ cargo check
+
+2. Check all targets, including unit tests:
+
+ cargo check --all-targets --profile=test
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-clean.md b/src/tools/cargo/src/doc/src/commands/cargo-clean.md
new file mode 100644
index 000000000..bdbcb86d4
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-clean.md
@@ -0,0 +1,205 @@
+# cargo-clean(1)
+
+
+
+## NAME
+
+cargo-clean --- Remove generated artifacts
+
+## SYNOPSIS
+
+`cargo clean` [_options_]
+
+## DESCRIPTION
+
+Remove artifacts from the target directory that Cargo has generated in the
+past.
+
+With no options, `cargo clean` will delete the entire target directory.
+
+## OPTIONS
+
+### Package Selection
+
+When no packages are selected, all packages and all dependencies in the
+workspace are cleaned.
+
+<dl>
+<dt class="option-term" id="option-cargo-clean--p"><a class="option-anchor" href="#option-cargo-clean--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-clean---package"><a class="option-anchor" href="#option-cargo-clean---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Clean only the specified packages. This flag may be specified
+multiple times. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the SPEC format.</dd>
+
+</dl>
+
+### Clean Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-clean---doc"><a class="option-anchor" href="#option-cargo-clean---doc"></a><code>--doc</code></dt>
+<dd class="option-desc">This option will cause <code>cargo clean</code> to remove only the <code>doc</code> directory in
+the target directory.</dd>
+
+
+<dt class="option-term" id="option-cargo-clean---release"><a class="option-anchor" href="#option-cargo-clean---release"></a><code>--release</code></dt>
+<dd class="option-desc">Remove all artifacts in the <code>release</code> directory.</dd>
+
+
+<dt class="option-term" id="option-cargo-clean---profile"><a class="option-anchor" href="#option-cargo-clean---profile"></a><code>--profile</code> <em>name</em></dt>
+<dd class="option-desc">Remove all artifacts in the directory with the given profile name.</dd>
+
+
+<dt class="option-term" id="option-cargo-clean---target-dir"><a class="option-anchor" href="#option-cargo-clean---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-clean---target"><a class="option-anchor" href="#option-cargo-clean---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Clean for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+</dl>
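+
+For instance, to remove only the generated documentation, or only the artifacts
+produced by the `dev` profile:
+
+    cargo clean --doc
+    cargo clean --profile=dev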
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-clean--v"><a class="option-anchor" href="#option-cargo-clean--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-clean---verbose"><a class="option-anchor" href="#option-cargo-clean---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-clean--q"><a class="option-anchor" href="#option-cargo-clean--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-clean---quiet"><a class="option-anchor" href="#option-cargo-clean---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-clean---color"><a class="option-anchor" href="#option-cargo-clean---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-clean---manifest-path"><a class="option-anchor" href="#option-cargo-clean---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-clean---frozen"><a class="option-anchor" href="#option-cargo-clean---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-clean---locked"><a class="option-anchor" href="#option-cargo-clean---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-clean---offline"><a class="option-anchor" href="#option-cargo-clean---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-clean-+toolchain"><a class="option-anchor" href="#option-cargo-clean-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-clean---config"><a class="option-anchor" href="#option-cargo-clean---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-clean--C"><a class="option-anchor" href="#option-cargo-clean--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-clean--h"><a class="option-anchor" href="#option-cargo-clean--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-clean---help"><a class="option-anchor" href="#option-cargo-clean---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-clean--Z"><a class="option-anchor" href="#option-cargo-clean--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Remove the entire target directory:
+
+ cargo clean
+
+2. Remove only the release artifacts:
+
+ cargo clean --release
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-doc.md b/src/tools/cargo/src/doc/src/commands/cargo-doc.md
new file mode 100644
index 000000000..e0e5c8ed2
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-doc.md
@@ -0,0 +1,392 @@
+# cargo-doc(1)
+
+
+
+## NAME
+
+cargo-doc --- Build a package's documentation
+
+## SYNOPSIS
+
+`cargo doc` [_options_]
+
+## DESCRIPTION
+
+Build the documentation for the local package and all dependencies. The output
+is placed in `target/doc` in rustdoc's usual format.
+
+## OPTIONS
+
+### Documentation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-doc---open"><a class="option-anchor" href="#option-cargo-doc---open"></a><code>--open</code></dt>
+<dd class="option-desc">Open the docs in a browser after building them. This will use your default
+browser unless you define another one in the <code>BROWSER</code> environment variable
+or use the <a href="../reference/config.html#docbrowser"><code>doc.browser</code></a> configuration
+option.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---no-deps"><a class="option-anchor" href="#option-cargo-doc---no-deps"></a><code>--no-deps</code></dt>
+<dd class="option-desc">Do not build documentation for dependencies.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---document-private-items"><a class="option-anchor" href="#option-cargo-doc---document-private-items"></a><code>--document-private-items</code></dt>
+<dd class="option-desc">Include non-public items in the documentation. This will be enabled by default if documenting a binary target.</dd>
+
+
+</dl>
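+
+For example, to build and immediately view the documentation for only the
+current package, including private items:
+
+    cargo doc --open --no-deps --document-private-items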
+
+### Package Selection
+
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+`--manifest-path` is not given). If the manifest is the root of a workspace then
+the workspace's default members are selected, otherwise only the package defined
+by the manifest will be selected.
+
+The default members of a workspace can be set explicitly with the
+`workspace.default-members` key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+`--workspace`), and a non-virtual workspace will include only the root crate itself.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-doc--p"><a class="option-anchor" href="#option-cargo-doc--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-doc---package"><a class="option-anchor" href="#option-cargo-doc---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Document only the specified packages. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---workspace"><a class="option-anchor" href="#option-cargo-doc---workspace"></a><code>--workspace</code></dt>
+<dd class="option-desc">Document all members in the workspace.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-doc---all"><a class="option-anchor" href="#option-cargo-doc---all"></a><code>--all</code></dt>
+<dd class="option-desc">Deprecated alias for <code>--workspace</code>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-doc---exclude"><a class="option-anchor" href="#option-cargo-doc---exclude"></a><code>--exclude</code> <em>SPEC</em>…</dt>
+<dd class="option-desc">Exclude the specified packages. Must be used in conjunction with the
+<code>--workspace</code> flag. This flag may be specified multiple times and supports
+common Unix glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.</dd>
+
+
+</dl>
+
+
+### Target Selection
+
+When no target selection options are given, `cargo doc` will document all
+binary and library targets of the selected package. The binary will be skipped
+if its name is the same as the lib target. Binaries are skipped if they have
+`required-features` that are missing.
+
+The default behavior can be changed by setting `doc = false` for the target in
+the manifest settings. Using target selection options will ignore the `doc`
+flag and will always document the given target.
+
+<dl>
+<dt class="option-term" id="option-cargo-doc---lib"><a class="option-anchor" href="#option-cargo-doc---lib"></a><code>--lib</code></dt>
+<dd class="option-desc">Document the package’s library.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---bin"><a class="option-anchor" href="#option-cargo-doc---bin"></a><code>--bin</code> <em>name</em>…</dt>
+<dd class="option-desc">Document the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---bins"><a class="option-anchor" href="#option-cargo-doc---bins"></a><code>--bins</code></dt>
+<dd class="option-desc">Document all binary targets.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-doc---example"><a class="option-anchor" href="#option-cargo-doc---example"></a><code>--example</code> <em>name</em>…</dt>
+<dd class="option-desc">Document the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---examples"><a class="option-anchor" href="#option-cargo-doc---examples"></a><code>--examples</code></dt>
+<dd class="option-desc">Document all example targets.</dd>
+
+
+</dl>
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-doc--F"><a class="option-anchor" href="#option-cargo-doc--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-doc---features"><a class="option-anchor" href="#option-cargo-doc---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---all-features"><a class="option-anchor" href="#option-cargo-doc---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---no-default-features"><a class="option-anchor" href="#option-cargo-doc---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-doc---target"><a class="option-anchor" href="#option-cargo-doc---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Document for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-doc--r"><a class="option-anchor" href="#option-cargo-doc--r"></a><code>-r</code></dt>
+<dt class="option-term" id="option-cargo-doc---release"><a class="option-anchor" href="#option-cargo-doc---release"></a><code>--release</code></dt>
+<dd class="option-desc">Document optimized artifacts with the <code>release</code> profile.
+See also the <code>--profile</code> option for choosing a specific profile by name.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-doc---profile"><a class="option-anchor" href="#option-cargo-doc---profile"></a><code>--profile</code> <em>name</em></dt>
+<dd class="option-desc">Document with the given profile.
+See <a href="../reference/profiles.html">the reference</a> for more details on profiles.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-doc---ignore-rust-version"><a class="option-anchor" href="#option-cargo-doc---ignore-rust-version"></a><code>--ignore-rust-version</code></dt>
+<dd class="option-desc">Document the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project’s <code>rust-version</code> field.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-doc---timings=fmts"><a class="option-anchor" href="#option-cargo-doc---timings=fmts"></a><code>--timings=</code><em>fmts</em></dt>
+<dd class="option-desc">Output information how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma-separated list of output
+formats; <code>--timings</code> without an argument will default to <code>--timings=html</code>.
+Specifying an output format (rather than the default) is unstable and requires
+<code>-Zunstable-options</code>. Valid output formats:</p>
+<ul>
+<li><code>html</code> (unstable, requires <code>-Zunstable-options</code>): Write a human-readable file <code>cargo-timing.html</code> to the
+<code>target/cargo-timings</code> directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine-readable timing data.</li>
+<li><code>json</code> (unstable, requires <code>-Zunstable-options</code>): Emit machine-readable JSON
+timing information.</li>
+</ul></dd>
+
+
+
+
+</dl>
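+
+For instance, documentation can be built for a non-host target (the triple
+below is just one entry from `rustc --print target-list`):
+
+    cargo doc --target wasm32-unknown-unknown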
+
+### Output Options
+
+<dl>
+<dt class="option-term" id="option-cargo-doc---target-dir"><a class="option-anchor" href="#option-cargo-doc---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-doc--v"><a class="option-anchor" href="#option-cargo-doc--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-doc---verbose"><a class="option-anchor" href="#option-cargo-doc---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc--q"><a class="option-anchor" href="#option-cargo-doc--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-doc---quiet"><a class="option-anchor" href="#option-cargo-doc---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---color"><a class="option-anchor" href="#option-cargo-doc---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-doc---message-format"><a class="option-anchor" href="#option-cargo-doc---message-format"></a><code>--message-format</code> <em>fmt</em></dt>
+<dd class="option-desc">The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:</p>
+<ul>
+<li><code>human</code> (default): Display in a human-readable text format. Conflicts with
+<code>short</code> and <code>json</code>.</li>
+<li><code>short</code>: Emit shorter, human-readable text messages. Conflicts with <code>human</code>
+and <code>json</code>.</li>
+<li><code>json</code>: Emit JSON messages to stdout. See
+<a href="../reference/external-tools.html#json-messages">the reference</a>
+for more details. Conflicts with <code>human</code> and <code>short</code>.</li>
+<li><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
+the “short” rendering from rustc. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
+contains embedded ANSI color codes for respecting rustc’s default color
+scheme. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-render-diagnostics</code>: Instruct Cargo not to include rustc diagnostics
+in the JSON messages it prints; instead, Cargo itself renders the
+JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with <code>human</code> or <code>short</code>.</li>
+</ul></dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-doc---manifest-path"><a class="option-anchor" href="#option-cargo-doc---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-doc---frozen"><a class="option-anchor" href="#option-cargo-doc---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-doc---locked"><a class="option-anchor" href="#option-cargo-doc---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---offline"><a class="option-anchor" href="#option-cargo-doc---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-doc-+toolchain"><a class="option-anchor" href="#option-cargo-doc-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---config"><a class="option-anchor" href="#option-cargo-doc---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc--C"><a class="option-anchor" href="#option-cargo-doc--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-doc--h"><a class="option-anchor" href="#option-cargo-doc--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-doc---help"><a class="option-anchor" href="#option-cargo-doc---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc--Z"><a class="option-anchor" href="#option-cargo-doc--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+### Miscellaneous Options
+
+<dl>
+<dt class="option-term" id="option-cargo-doc--j"><a class="option-anchor" href="#option-cargo-doc--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-doc---jobs"><a class="option-anchor" href="#option-cargo-doc---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+The value should not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-doc---keep-going"><a class="option-anchor" href="#option-cargo-doc---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+</dl>
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Build the local package documentation and its dependencies and output to
+ `target/doc`.
+
+ cargo doc
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-rustdoc(1)](cargo-rustdoc.html), [rustdoc(1)](https://doc.rust-lang.org/rustdoc/index.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-fetch.md b/src/tools/cargo/src/doc/src/commands/cargo-fetch.md
new file mode 100644
index 000000000..e6a460795
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-fetch.md
@@ -0,0 +1,175 @@
+# cargo-fetch(1)
+
+
+
+
+## NAME
+
+cargo-fetch --- Fetch dependencies of a package from the network
+
+## SYNOPSIS
+
+`cargo fetch` [_options_]
+
+## DESCRIPTION
+
+If a `Cargo.lock` file is available, this command will ensure that all of the
+git dependencies and/or registry dependencies are downloaded and locally
+available. Subsequent Cargo commands will be able to run offline after a `cargo
+fetch` unless the lock file changes.
+
+If the lock file is not available, then this command will generate the lock
+file before fetching the dependencies.
+
+If `--target` is not specified, then all target dependencies are fetched.
+
+See also the [cargo-prefetch](https://crates.io/crates/cargo-prefetch)
+plugin which adds a command to download popular crates. This may be useful if
+you plan to use Cargo without a network with the `--offline` flag.
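+
+For example, one possible workflow is to fetch while a network connection is
+available and then run later commands offline:
+
+    cargo fetch
+    cargo build --offline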
+
+## OPTIONS
+
+### Fetch options
+
+<dl>
+<dt class="option-term" id="option-cargo-fetch---target"><a class="option-anchor" href="#option-cargo-fetch---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Fetch for the given architecture. The default is all architectures. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-fetch--v"><a class="option-anchor" href="#option-cargo-fetch--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-fetch---verbose"><a class="option-anchor" href="#option-cargo-fetch---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-fetch--q"><a class="option-anchor" href="#option-cargo-fetch--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-fetch---quiet"><a class="option-anchor" href="#option-cargo-fetch---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-fetch---color"><a class="option-anchor" href="#option-cargo-fetch---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-fetch---manifest-path"><a class="option-anchor" href="#option-cargo-fetch---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-fetch---frozen"><a class="option-anchor" href="#option-cargo-fetch---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-fetch---locked"><a class="option-anchor" href="#option-cargo-fetch---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-fetch---offline"><a class="option-anchor" href="#option-cargo-fetch---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-fetch-+toolchain"><a class="option-anchor" href="#option-cargo-fetch-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-fetch---config"><a class="option-anchor" href="#option-cargo-fetch---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-fetch--C"><a class="option-anchor" href="#option-cargo-fetch--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-fetch--h"><a class="option-anchor" href="#option-cargo-fetch--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-fetch---help"><a class="option-anchor" href="#option-cargo-fetch---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-fetch--Z"><a class="option-anchor" href="#option-cargo-fetch--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Fetch all dependencies:
+
+ cargo fetch
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-update(1)](cargo-update.html), [cargo-generate-lockfile(1)](cargo-generate-lockfile.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-fix.md b/src/tools/cargo/src/doc/src/commands/cargo-fix.md
new file mode 100644
index 000000000..1b9ec6a85
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-fix.md
@@ -0,0 +1,505 @@
+# cargo-fix(1)
+
+
+
+## NAME
+
+cargo-fix --- Automatically fix lint warnings reported by rustc
+
+## SYNOPSIS
+
+`cargo fix` [_options_]
+
+## DESCRIPTION
+
+This Cargo subcommand will automatically take rustc's suggestions from
+diagnostics like warnings and apply them to your source code. This is intended
+to help automate tasks that rustc itself already knows how to tell you to fix!
+
+Executing `cargo fix` will, under the hood, execute [cargo-check(1)](cargo-check.html). Any warnings
+applicable to your crate will be automatically fixed (if possible), and all
+remaining warnings will be displayed when the check process is finished. For
+example, if you'd like to apply all fixes to the current package, you can run:
+
+ cargo fix
+
+which behaves the same as `cargo check --all-targets`.
+
+`cargo fix` is only capable of fixing code that is normally compiled with
+`cargo check`. If code is conditionally enabled with optional features, you
+will need to enable those features for that code to be analyzed:
+
+ cargo fix --features foo
+
+Similarly, to fix code behind other `cfg` expressions, such as platform-specific
+code, you will need to pass the corresponding `--target`:
+
+ cargo fix --target x86_64-pc-windows-gnu
+
+If you encounter any problems with `cargo fix` or otherwise have any questions
+or feature requests please don't hesitate to file an issue at
+<https://github.com/rust-lang/cargo>.
+
+### Edition migration
+
+The `cargo fix` subcommand can also be used to migrate a package from one
+[edition] to the next. The general procedure is:
+
+1. Run `cargo fix --edition`. Consider also using the `--all-features` flag if
+ your project has multiple features. You may also want to run `cargo fix
+ --edition` multiple times with different `--target` flags if your project
+ has platform-specific code gated by `cfg` attributes.
+2. Modify `Cargo.toml` to set the [edition field] to the new edition.
+3. Run your project tests to verify that everything still works. If new
+ warnings are issued, you may want to consider running `cargo fix` again
+ (without the `--edition` flag) to apply any suggestions given by the
+ compiler.
+
+And hopefully that's it! Just keep in mind the caveats mentioned above:
+`cargo fix` cannot update code for inactive features or `cfg` expressions.
+Also, in some rare cases the compiler is unable to automatically migrate all
+code to the new edition, and this may require manual changes after building
+with the new edition.
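+
+As a rough sketch of that procedure (the extra flags are optional and depend
+on your project):
+
+    cargo fix --edition --all-features
+    # edit Cargo.toml: set the `edition` field to the new edition
+    cargo test
+    cargo fix   # optionally, to apply any new suggestions from the compiler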
+
+[edition]: https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html
+[edition field]: ../reference/manifest.html#the-edition-field
+
+## OPTIONS
+
+### Fix options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-fix---broken-code"><a class="option-anchor" href="#option-cargo-fix---broken-code"></a><code>--broken-code</code></dt>
+<dd class="option-desc">Fix code even if it already has compiler errors. This is useful if <code>cargo fix</code>
+fails to apply the changes. It will apply the changes and leave the broken
+code in the working directory for you to inspect and manually fix.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---edition"><a class="option-anchor" href="#option-cargo-fix---edition"></a><code>--edition</code></dt>
+<dd class="option-desc">Apply changes that will update the code to the next edition. This will not
+update the edition in the <code>Cargo.toml</code> manifest, which must be updated
+manually after <code>cargo fix --edition</code> has finished.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---edition-idioms"><a class="option-anchor" href="#option-cargo-fix---edition-idioms"></a><code>--edition-idioms</code></dt>
+<dd class="option-desc">Apply suggestions that will update code to the preferred style for the current
+edition.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---allow-no-vcs"><a class="option-anchor" href="#option-cargo-fix---allow-no-vcs"></a><code>--allow-no-vcs</code></dt>
+<dd class="option-desc">Fix code even if a VCS was not detected.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---allow-dirty"><a class="option-anchor" href="#option-cargo-fix---allow-dirty"></a><code>--allow-dirty</code></dt>
+<dd class="option-desc">Fix code even if the working directory has changes.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---allow-staged"><a class="option-anchor" href="#option-cargo-fix---allow-staged"></a><code>--allow-staged</code></dt>
+<dd class="option-desc">Fix code even if the working directory has staged changes.</dd>
+
+
+</dl>
+
+### Package Selection
+
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+`--manifest-path` is not given). If the manifest is the root of a workspace, then
+the workspace's default members are selected; otherwise, only the package defined
+by the manifest will be selected.
+
+The default members of a workspace can be set explicitly with the
+`workspace.default-members` key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+`--workspace`), and a non-virtual workspace will include only the root crate itself.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-fix--p"><a class="option-anchor" href="#option-cargo-fix--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-fix---package"><a class="option-anchor" href="#option-cargo-fix---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Fix only the specified packages. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---workspace"><a class="option-anchor" href="#option-cargo-fix---workspace"></a><code>--workspace</code></dt>
+<dd class="option-desc">Fix all members in the workspace.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-fix---all"><a class="option-anchor" href="#option-cargo-fix---all"></a><code>--all</code></dt>
+<dd class="option-desc">Deprecated alias for <code>--workspace</code>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-fix---exclude"><a class="option-anchor" href="#option-cargo-fix---exclude"></a><code>--exclude</code> <em>SPEC</em>…</dt>
+<dd class="option-desc">Exclude the specified packages. Must be used in conjunction with the
+<code>--workspace</code> flag. This flag may be specified multiple times and supports
+common Unix glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.</dd>
+
+
+</dl>
+
+
+### Target Selection
+
+When no target selection options are given, `cargo fix` will fix all targets
+(`--all-targets` implied). Binaries are skipped if they have
+`required-features` that are missing.
+
+Passing target selection flags will fix only the specified
+targets.
+
+Note that `--bin`, `--example`, `--test` and `--bench` flags also
+support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
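+
+For example, quoting leaves the glob for Cargo to expand rather than the shell
+(the target names are hypothetical):
+
+    cargo fix --bin 'server-*' --test 'integration_*'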
+
+<dl>
+
+<dt class="option-term" id="option-cargo-fix---lib"><a class="option-anchor" href="#option-cargo-fix---lib"></a><code>--lib</code></dt>
+<dd class="option-desc">Fix the package’s library.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---bin"><a class="option-anchor" href="#option-cargo-fix---bin"></a><code>--bin</code> <em>name</em>…</dt>
+<dd class="option-desc">Fix the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---bins"><a class="option-anchor" href="#option-cargo-fix---bins"></a><code>--bins</code></dt>
+<dd class="option-desc">Fix all binary targets.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-fix---example"><a class="option-anchor" href="#option-cargo-fix---example"></a><code>--example</code> <em>name</em>…</dt>
+<dd class="option-desc">Fix the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---examples"><a class="option-anchor" href="#option-cargo-fix---examples"></a><code>--examples</code></dt>
+<dd class="option-desc">Fix all example targets.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---test"><a class="option-anchor" href="#option-cargo-fix---test"></a><code>--test</code> <em>name</em>…</dt>
+<dd class="option-desc">Fix the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---tests"><a class="option-anchor" href="#option-cargo-fix---tests"></a><code>--tests</code></dt>
+<dd class="option-desc">Fix all targets in test mode that have the <code>test = true</code> manifest
+flag set. By default this includes the library and binaries built as
+unit tests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unit test, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the <code>test</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---bench"><a class="option-anchor" href="#option-cargo-fix---bench"></a><code>--bench</code> <em>name</em>…</dt>
+<dd class="option-desc">Fix the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---benches"><a class="option-anchor" href="#option-cargo-fix---benches"></a><code>--benches</code></dt>
+<dd class="option-desc">Fix all targets in benchmark mode that have the <code>bench = true</code>
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the <code>bench</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---all-targets"><a class="option-anchor" href="#option-cargo-fix---all-targets"></a><code>--all-targets</code></dt>
+<dd class="option-desc">Fix all targets. This is equivalent to specifying <code>--lib --bins --tests --benches --examples</code>.</dd>
+
+
+</dl>
+
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-fix--F"><a class="option-anchor" href="#option-cargo-fix--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-fix---features"><a class="option-anchor" href="#option-cargo-fix---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---all-features"><a class="option-anchor" href="#option-cargo-fix---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---no-default-features"><a class="option-anchor" href="#option-cargo-fix---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-fix---target"><a class="option-anchor" href="#option-cargo-fix---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Fix for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-fix--r"><a class="option-anchor" href="#option-cargo-fix--r"></a><code>-r</code></dt>
+<dt class="option-term" id="option-cargo-fix---release"><a class="option-anchor" href="#option-cargo-fix---release"></a><code>--release</code></dt>
+<dd class="option-desc">Fix optimized artifacts with the <code>release</code> profile.
+See also the <code>--profile</code> option for choosing a specific profile by name.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-fix---profile"><a class="option-anchor" href="#option-cargo-fix---profile"></a><code>--profile</code> <em>name</em></dt>
+<dd class="option-desc">Fix with the given profile.</p>
+<p>As a special case, specifying the <code>test</code> profile will also enable checking in
+test mode which will enable checking tests and enable the <code>test</code> cfg option.
+See <a href="https://doc.rust-lang.org/rustc/tests/index.html">rustc tests</a> for more
+detail.</p>
+<p>See <a href="../reference/profiles.html">the reference</a> for more details on profiles.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-fix---ignore-rust-version"><a class="option-anchor" href="#option-cargo-fix---ignore-rust-version"></a><code>--ignore-rust-version</code></dt>
+<dd class="option-desc">Fix the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project’s <code>rust-version</code> field.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-fix---timings=fmts"><a class="option-anchor" href="#option-cargo-fix---timings=fmts"></a><code>--timings=</code><em>fmts</em></dt>
+<dd class="option-desc">Output information how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma-separated list of output
+formats; <code>--timings</code> without an argument will default to <code>--timings=html</code>.
+Specifying an output format (rather than the default) is unstable and requires
+<code>-Zunstable-options</code>. Valid output formats:</p>
+<ul>
+<li><code>html</code> (unstable, requires <code>-Zunstable-options</code>): Write a human-readable file <code>cargo-timing.html</code> to the
+<code>target/cargo-timings</code> directory with a report of the compilation. It also writes
+a report to the same directory with a timestamp in the filename, so you can
+look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine-readable timing data.</li>
+<li><code>json</code> (unstable, requires <code>-Zunstable-options</code>): Emit machine-readable JSON
+information about compilation timing.</li>
+</ul></dd>
+
+
+
+
+</dl>
+
+### Output Options
+
+<dl>
+<dt class="option-term" id="option-cargo-fix---target-dir"><a class="option-anchor" href="#option-cargo-fix---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-fix--v"><a class="option-anchor" href="#option-cargo-fix--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-fix---verbose"><a class="option-anchor" href="#option-cargo-fix---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix--q"><a class="option-anchor" href="#option-cargo-fix--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-fix---quiet"><a class="option-anchor" href="#option-cargo-fix---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---color"><a class="option-anchor" href="#option-cargo-fix---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-fix---message-format"><a class="option-anchor" href="#option-cargo-fix---message-format"></a><code>--message-format</code> <em>fmt</em></dt>
+<dd class="option-desc">The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:</p>
+<ul>
+<li><code>human</code> (default): Display in a human-readable text format. Conflicts with
+<code>short</code> and <code>json</code>.</li>
+<li><code>short</code>: Emit shorter, human-readable text messages. Conflicts with <code>human</code>
+and <code>json</code>.</li>
+<li><code>json</code>: Emit JSON messages to stdout. See
+<a href="../reference/external-tools.html#json-messages">the reference</a>
+for more details. Conflicts with <code>human</code> and <code>short</code>.</li>
+<li><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
+the “short” rendering from rustc. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
+contains embedded ANSI color codes for respecting rustc’s default color
+scheme. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
+in JSON messages printed, but instead Cargo itself should render the
+JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with <code>human</code> or <code>short</code>.</li>
+</ul></dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-fix---manifest-path"><a class="option-anchor" href="#option-cargo-fix---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-fix---frozen"><a class="option-anchor" href="#option-cargo-fix---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-fix---locked"><a class="option-anchor" href="#option-cargo-fix---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---offline"><a class="option-anchor" href="#option-cargo-fix---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-fix-+toolchain"><a class="option-anchor" href="#option-cargo-fix-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---config"><a class="option-anchor" href="#option-cargo-fix---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix--C"><a class="option-anchor" href="#option-cargo-fix--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-fix--h"><a class="option-anchor" href="#option-cargo-fix--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-fix---help"><a class="option-anchor" href="#option-cargo-fix---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix--Z"><a class="option-anchor" href="#option-cargo-fix--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+### Miscellaneous Options
+
+<dl>
+<dt class="option-term" id="option-cargo-fix--j"><a class="option-anchor" href="#option-cargo-fix--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-fix---jobs"><a class="option-anchor" href="#option-cargo-fix---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+The value should not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-fix---keep-going"><a class="option-anchor" href="#option-cargo-fix---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+</dl>
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Apply compiler suggestions to the local package:
+
+ cargo fix
+
+2. Update a package to prepare it for the next edition:
+
+ cargo fix --edition
+
+3. Apply suggested idioms for the current edition:
+
+ cargo fix --edition-idioms
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-check(1)](cargo-check.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-generate-lockfile.md b/src/tools/cargo/src/doc/src/commands/cargo-generate-lockfile.md
new file mode 100644
index 000000000..eb8d2e30e
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-generate-lockfile.md
@@ -0,0 +1,149 @@
+# cargo-generate-lockfile(1)
+
+## NAME
+
+cargo-generate-lockfile --- Generate the lockfile for a package
+
+## SYNOPSIS
+
+`cargo generate-lockfile` [_options_]
+
+## DESCRIPTION
+
+This command will create the `Cargo.lock` lockfile for the current package or
+workspace. If the lockfile already exists, it will be rebuilt with the latest
+available version of every package.
+
+See also [cargo-update(1)](cargo-update.html), which can also create a `Cargo.lock`
+lockfile and provides more options for controlling update behavior.
+
+## OPTIONS
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-generate-lockfile--v"><a class="option-anchor" href="#option-cargo-generate-lockfile--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-generate-lockfile---verbose"><a class="option-anchor" href="#option-cargo-generate-lockfile---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-generate-lockfile--q"><a class="option-anchor" href="#option-cargo-generate-lockfile--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-generate-lockfile---quiet"><a class="option-anchor" href="#option-cargo-generate-lockfile---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-generate-lockfile---color"><a class="option-anchor" href="#option-cargo-generate-lockfile---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-generate-lockfile---manifest-path"><a class="option-anchor" href="#option-cargo-generate-lockfile---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-generate-lockfile---frozen"><a class="option-anchor" href="#option-cargo-generate-lockfile---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-generate-lockfile---locked"><a class="option-anchor" href="#option-cargo-generate-lockfile---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-generate-lockfile---offline"><a class="option-anchor" href="#option-cargo-generate-lockfile---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-generate-lockfile-+toolchain"><a class="option-anchor" href="#option-cargo-generate-lockfile-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-generate-lockfile---config"><a class="option-anchor" href="#option-cargo-generate-lockfile---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-generate-lockfile--C"><a class="option-anchor" href="#option-cargo-generate-lockfile--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-generate-lockfile--h"><a class="option-anchor" href="#option-cargo-generate-lockfile--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-generate-lockfile---help"><a class="option-anchor" href="#option-cargo-generate-lockfile---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-generate-lockfile--Z"><a class="option-anchor" href="#option-cargo-generate-lockfile--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Create or update the lockfile for the current package or workspace:
+
+ cargo generate-lockfile
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-update(1)](cargo-update.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-help.md b/src/tools/cargo/src/doc/src/commands/cargo-help.md
new file mode 100644
index 000000000..db5cb342a
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-help.md
@@ -0,0 +1,26 @@
+# cargo-help(1)
+
+## NAME
+
+cargo-help --- Get help for a Cargo command
+
+## SYNOPSIS
+
+`cargo help` [_subcommand_]
+
+## DESCRIPTION
+
+Prints a help message for the given command.
+
+## EXAMPLES
+
+1. Get help for a command:
+
+ cargo help build
+
+2. Help is also available with the `--help` flag:
+
+ cargo build --help
+
+## SEE ALSO
+[cargo(1)](cargo.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-init.md b/src/tools/cargo/src/doc/src/commands/cargo-init.md
new file mode 100644
index 000000000..c0cf34b51
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-init.md
@@ -0,0 +1,162 @@
+# cargo-init(1)
+
+## NAME
+
+cargo-init --- Create a new Cargo package in an existing directory
+
+## SYNOPSIS
+
+`cargo init` [_options_] [_path_]
+
+## DESCRIPTION
+
+This command will create a new Cargo manifest in the current directory. Give a
+path as an argument to create it in the given directory instead.
+
+If there are typically-named Rust source files already in the directory, those
+will be used. If not, then a sample `src/main.rs` file will be created, or
+`src/lib.rs` if `--lib` is passed.
+
+If the directory is not already in a VCS repository, then a new repository
+is created (see `--vcs` below).
+
+See [cargo-new(1)](cargo-new.html) for a similar command which will create a new package in
+a new directory.
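+
+For example, to initialize a library package in an existing directory (the
+path is illustrative):
+
+    cargo init --lib path/to/existing-dir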
+
+## OPTIONS
+
+### Init Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-init---bin"><a class="option-anchor" href="#option-cargo-init---bin"></a><code>--bin</code></dt>
+<dd class="option-desc">Create a package with a binary target (<code>src/main.rs</code>).
+This is the default behavior.</dd>
+
+
+<dt class="option-term" id="option-cargo-init---lib"><a class="option-anchor" href="#option-cargo-init---lib"></a><code>--lib</code></dt>
+<dd class="option-desc">Create a package with a library target (<code>src/lib.rs</code>).</dd>
+
+
+<dt class="option-term" id="option-cargo-init---edition"><a class="option-anchor" href="#option-cargo-init---edition"></a><code>--edition</code> <em>edition</em></dt>
+<dd class="option-desc">Specify the Rust edition to use. Default is 2021.
+Possible values: 2015, 2018, 2021</dd>
+
+
+<dt class="option-term" id="option-cargo-init---name"><a class="option-anchor" href="#option-cargo-init---name"></a><code>--name</code> <em>name</em></dt>
+<dd class="option-desc">Set the package name. Defaults to the directory name.</dd>
+
+
+<dt class="option-term" id="option-cargo-init---vcs"><a class="option-anchor" href="#option-cargo-init---vcs"></a><code>--vcs</code> <em>vcs</em></dt>
+<dd class="option-desc">Initialize a new VCS repository for the given version control system (git,
+hg, pijul, or fossil) or do not initialize any version control at all
+(none). If not specified, defaults to <code>git</code> or the configuration value
+<code>cargo-new.vcs</code>, or <code>none</code> if already inside a VCS repository.</dd>
+
+
+<dt class="option-term" id="option-cargo-init---registry"><a class="option-anchor" href="#option-cargo-init---registry"></a><code>--registry</code> <em>registry</em></dt>
+<dd class="option-desc">This sets the <code>publish</code> field in <code>Cargo.toml</code> to the given registry name
+which will restrict publishing only to that registry.</p>
+<p>Registry names are defined in <a href="../reference/config.html">Cargo config files</a>.
+If not specified, the default registry defined by the <code>registry.default</code>
+config key is used. If the default registry is not set and <code>--registry</code> is not
+used, the <code>publish</code> field will not be set which means that publishing will not
+be restricted.</dd>
+
+
+</dl>
+
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-init--v"><a class="option-anchor" href="#option-cargo-init--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-init---verbose"><a class="option-anchor" href="#option-cargo-init---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-init--q"><a class="option-anchor" href="#option-cargo-init--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-init---quiet"><a class="option-anchor" href="#option-cargo-init---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-init---color"><a class="option-anchor" href="#option-cargo-init---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-init-+toolchain"><a class="option-anchor" href="#option-cargo-init-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-init---config"><a class="option-anchor" href="#option-cargo-init---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-init--C"><a class="option-anchor" href="#option-cargo-init--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-init--h"><a class="option-anchor" href="#option-cargo-init--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-init---help"><a class="option-anchor" href="#option-cargo-init---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-init--Z"><a class="option-anchor" href="#option-cargo-init--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Create a binary Cargo package in the current directory:
+
+ cargo init
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-new(1)](cargo-new.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-install.md b/src/tools/cargo/src/doc/src/commands/cargo-install.md
new file mode 100644
index 000000000..99697c156
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-install.md
@@ -0,0 +1,452 @@
+# cargo-install(1)
+
+
+
+## NAME
+
+cargo-install --- Build and install a Rust binary
+
+## SYNOPSIS
+
+`cargo install` [_options_] _crate_[@_version_]...\
+`cargo install` [_options_] `--path` _path_\
+`cargo install` [_options_] `--git` _url_ [_crate_...]\
+`cargo install` [_options_] `--list`
+
+## DESCRIPTION
+
+This command manages Cargo's local set of installed binary crates. Only
+packages which have executable `[[bin]]` or `[[example]]` targets can be
+installed, and all executables are installed into the installation root's
+`bin` folder.
+
+The installation root is determined, in order of precedence:
+
+- `--root` option
+- `CARGO_INSTALL_ROOT` environment variable
+- `install.root` Cargo [config value](../reference/config.html)
+- `CARGO_HOME` environment variable
+- `$HOME/.cargo`
+
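+For illustration, a brief sketch of overriding the installation root (the path
+`/opt/cargo-tools` is only an example location); executables end up in the
+root's `bin` folder:
+
+```console
+# Highest precedence: the --root flag
+$ cargo install --root /opt/cargo-tools ripgrep
+
+# Used only when --root is not given
+$ CARGO_INSTALL_ROOT=/opt/cargo-tools cargo install ripgrep
+```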
+
+There are multiple sources from which a crate can be installed. The default
+location is crates.io but the `--git`, `--path`, and `--registry` flags can
+change this source. If the source contains more than one package (such as
+crates.io or a git repository with multiple crates) the _crate_ argument is
+required to indicate which crate should be installed.
+
+Crates from crates.io can optionally specify the version they wish to install
+via the `--version` flag, and similarly packages from git repositories can
+optionally specify the branch, tag, or revision that should be installed. If a
+crate has multiple binaries, the `--bin` argument can selectively install only
+one of them, and if you'd rather install examples, the `--example` argument can
+be used as well.
+
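+As a sketch of these selectors (the version number, branch, crate, and binary
+names below are illustrative, not taken from this document):
+
+```console
+# Install a version matching a requirement from crates.io
+$ cargo install ripgrep --version ~13.0
+
+# Install from a git repository, selecting a branch
+$ cargo install --git https://github.com/BurntSushi/ripgrep --branch master
+
+# Install a single binary from a hypothetical multi-binary crate
+$ cargo install some-tools --bin some-cli
+```
+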
+If the package is already installed, Cargo will reinstall it if the installed
+version does not appear to be up-to-date. If any of the following values
+change, then Cargo will reinstall the package:
+
+- The package version and source.
+- The set of binary names installed.
+- The chosen features.
+- The profile (`--profile`).
+- The target (`--target`).
+
+Installing with `--path` will always build and install, unless there are
+conflicting binaries from another package. The `--force` flag may be used to
+force Cargo to always reinstall the package.
+
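+For example, to force a rebuild and reinstall of a package that is already
+installed:
+
+```console
+$ cargo install --force ripgrep
+```
+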
+If the source is crates.io or `--git`, then by default the crate will be built
+in a temporary target directory. To avoid this, the target directory can be
+specified by setting the `CARGO_TARGET_DIR` environment variable to a relative
+path. In particular, this can be useful for caching build artifacts on
+continuous integration systems.
+
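+A minimal sketch of reusing a build directory on CI (the relative path
+`ci-target` is only an example):
+
+```console
+$ CARGO_TARGET_DIR=ci-target cargo install ripgrep
+```
+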
+### Dealing with the Lockfile
+
+By default, the `Cargo.lock` file that is included with the package will be
+ignored. This means that Cargo will recompute which versions of dependencies
+to use, possibly using newer versions that have been released since the
+package was published. The `--locked` flag can be used to force Cargo to use
+the packaged `Cargo.lock` file if it is available. This may be useful for
+ensuring reproducible builds, to use the exact same set of dependencies that
+were available when the package was published. It may also be useful if a
+newer version of a dependency is published that no longer builds on your
+system, or has other problems. The downside to using `--locked` is that you
+will not receive any fixes or updates to any dependency. Note that Cargo did
+not start publishing `Cargo.lock` files until version 1.37, which means
+packages published with prior versions will not have a `Cargo.lock` file
+available.
+
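+For example, to build with the `Cargo.lock` that was packaged when the crate
+was published, if one is available:
+
+```console
+$ cargo install ripgrep --locked
+```
+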
+### Configuration Discovery
+
+This command operates at the system or user level, not the project level.
+This means that the local [configuration discovery] is ignored.
+Instead, the configuration discovery begins at `$CARGO_HOME/config.toml`.
+If the package is installed with `--path $PATH`, the local configuration
+will be used, beginning discovery at `$PATH/.cargo/config.toml`.
+
+[configuration discovery]: ../reference/config.html#hierarchical-structure
+
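+As an illustrative sketch (the local path is hypothetical), the two discovery
+behaviors described above look like this:
+
+```console
+# Registry install: discovery begins at $CARGO_HOME/config.toml
+$ cargo install ripgrep
+
+# Path install: discovery begins at ./my-crate/.cargo/config.toml
+$ cargo install --path ./my-crate
+```
+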
+## OPTIONS
+
+### Install Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-install---vers"><a class="option-anchor" href="#option-cargo-install---vers"></a><code>--vers</code> <em>version</em></dt>
+<dt class="option-term" id="option-cargo-install---version"><a class="option-anchor" href="#option-cargo-install---version"></a><code>--version</code> <em>version</em></dt>
+<dd class="option-desc">Specify a version to install. This may be a <a href="../reference/specifying-dependencies.md">version
+requirement</a>, like <code>~1.2</code>, to have Cargo
+select the newest version from the given requirement. If the version does not
+have a requirement operator (such as <code>^</code> or <code>~</code>), then it must be in the form
+<em>MAJOR.MINOR.PATCH</em>, and will install exactly that version; it is <em>not</em>
+treated as a caret requirement like Cargo dependencies are.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---git"><a class="option-anchor" href="#option-cargo-install---git"></a><code>--git</code> <em>url</em></dt>
+<dd class="option-desc">Git URL to install the specified crate from.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---branch"><a class="option-anchor" href="#option-cargo-install---branch"></a><code>--branch</code> <em>branch</em></dt>
+<dd class="option-desc">Branch to use when installing from git.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---tag"><a class="option-anchor" href="#option-cargo-install---tag"></a><code>--tag</code> <em>tag</em></dt>
+<dd class="option-desc">Tag to use when installing from git.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---rev"><a class="option-anchor" href="#option-cargo-install---rev"></a><code>--rev</code> <em>sha</em></dt>
+<dd class="option-desc">Specific commit to use when installing from git.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---path"><a class="option-anchor" href="#option-cargo-install---path"></a><code>--path</code> <em>path</em></dt>
+<dd class="option-desc">Filesystem path to local crate to install.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---list"><a class="option-anchor" href="#option-cargo-install---list"></a><code>--list</code></dt>
+<dd class="option-desc">List all installed packages and their versions.</dd>
+
+
+<dt class="option-term" id="option-cargo-install--f"><a class="option-anchor" href="#option-cargo-install--f"></a><code>-f</code></dt>
+<dt class="option-term" id="option-cargo-install---force"><a class="option-anchor" href="#option-cargo-install---force"></a><code>--force</code></dt>
+<dd class="option-desc">Force overwriting existing crates or binaries. This can be used if a package
+has installed a binary with the same name as another package. This is also
+useful if something has changed on the system that you want to rebuild with,
+such as a newer version of <code>rustc</code>.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---no-track"><a class="option-anchor" href="#option-cargo-install---no-track"></a><code>--no-track</code></dt>
+<dd class="option-desc">By default, Cargo keeps track of the installed packages with a metadata file
+stored in the installation root directory. This flag tells Cargo not to use or
+create that file. With this flag, Cargo will refuse to overwrite any existing
+files unless the <code>--force</code> flag is used. This also disables Cargo’s ability to
+protect against multiple concurrent invocations of Cargo installing at the
+same time.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---bin"><a class="option-anchor" href="#option-cargo-install---bin"></a><code>--bin</code> <em>name</em>…</dt>
+<dd class="option-desc">Install only the specified binary.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---bins"><a class="option-anchor" href="#option-cargo-install---bins"></a><code>--bins</code></dt>
+<dd class="option-desc">Install all binaries.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---example"><a class="option-anchor" href="#option-cargo-install---example"></a><code>--example</code> <em>name</em>…</dt>
+<dd class="option-desc">Install only the specified example.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---examples"><a class="option-anchor" href="#option-cargo-install---examples"></a><code>--examples</code></dt>
+<dd class="option-desc">Install all examples.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---root"><a class="option-anchor" href="#option-cargo-install---root"></a><code>--root</code> <em>dir</em></dt>
+<dd class="option-desc">Directory to install packages into.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---registry"><a class="option-anchor" href="#option-cargo-install---registry"></a><code>--registry</code> <em>registry</em></dt>
+<dd class="option-desc">Name of the registry to use. Registry names are defined in <a href="../reference/config.html">Cargo config
+files</a>. If not specified, the default registry is used,
+which is defined by the <code>registry.default</code> config key which defaults to
+<code>crates-io</code>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-install---index"><a class="option-anchor" href="#option-cargo-install---index"></a><code>--index</code> <em>index</em></dt>
+<dd class="option-desc">The URL of the registry index to use.</dd>
+
+
+
+</dl>
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
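+An illustrative sketch (the crate name `foo` and the feature name `net` are
+hypothetical):
+
+```console
+$ cargo install foo --no-default-features --features net
+```
+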
+<dl>
+
+<dt class="option-term" id="option-cargo-install--F"><a class="option-anchor" href="#option-cargo-install--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-install---features"><a class="option-anchor" href="#option-cargo-install---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---all-features"><a class="option-anchor" href="#option-cargo-install---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---no-default-features"><a class="option-anchor" href="#option-cargo-install---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-install---target"><a class="option-anchor" href="#option-cargo-install---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Install for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-install---target-dir"><a class="option-anchor" href="#option-cargo-install---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to a new temporary folder located in the
+temporary directory of the platform.</p>
+<p>When using <code>--path</code>, by default it will use the <code>target</code> directory in the workspace
+of the local crate unless <code>--target-dir</code>
+is specified.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-install---debug"><a class="option-anchor" href="#option-cargo-install---debug"></a><code>--debug</code></dt>
+<dd class="option-desc">Build with the <code>dev</code> profile instead the <code>release</code> profile.
+See also the <code>--profile</code> option for choosing a specific profile by name.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---profile"><a class="option-anchor" href="#option-cargo-install---profile"></a><code>--profile</code> <em>name</em></dt>
+<dd class="option-desc">Install with the given profile.
+See <a href="../reference/profiles.html">the reference</a> for more details on profiles.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-install---ignore-rust-version"><a class="option-anchor" href="#option-cargo-install---ignore-rust-version"></a><code>--ignore-rust-version</code></dt>
+<dd class="option-desc">Install the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project’s <code>rust-version</code> field.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-install---timings=fmts"><a class="option-anchor" href="#option-cargo-install---timings=fmts"></a><code>--timings=</code><em>fmts</em></dt>
+<dd class="option-desc">Output information how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma-separated list of output
+formats; <code>--timings</code> without an argument will default to <code>--timings=html</code>.
+Specifying an output format (rather than the default) is unstable and requires
+<code>-Zunstable-options</code>. Valid output formats:</p>
+<ul>
+<li><code>html</code> (unstable, requires <code>-Zunstable-options</code>): Write a human-readable file <code>cargo-timing.html</code> to the
+<code>target/cargo-timings</code> directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine-readable timing data.</li>
+<li><code>json</code> (unstable, requires <code>-Zunstable-options</code>): Emit machine-readable JSON
+timing information.</li>
+</ul></dd>
+
+
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-install---frozen"><a class="option-anchor" href="#option-cargo-install---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-install---locked"><a class="option-anchor" href="#option-cargo-install---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---offline"><a class="option-anchor" href="#option-cargo-install---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Miscellaneous Options
+
+<dl>
+<dt class="option-term" id="option-cargo-install--j"><a class="option-anchor" href="#option-cargo-install--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-install---jobs"><a class="option-anchor" href="#option-cargo-install---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+The value should not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---keep-going"><a class="option-anchor" href="#option-cargo-install---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-install--v"><a class="option-anchor" href="#option-cargo-install--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-install---verbose"><a class="option-anchor" href="#option-cargo-install---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-install--q"><a class="option-anchor" href="#option-cargo-install--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-install---quiet"><a class="option-anchor" href="#option-cargo-install---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---color"><a class="option-anchor" href="#option-cargo-install---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-install---message-format"><a class="option-anchor" href="#option-cargo-install---message-format"></a><code>--message-format</code> <em>fmt</em></dt>
+<dd class="option-desc">The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:</p>
+<ul>
+<li><code>human</code> (default): Display in a human-readable text format. Conflicts with
+<code>short</code> and <code>json</code>.</li>
+<li><code>short</code>: Emit shorter, human-readable text messages. Conflicts with <code>human</code>
+and <code>json</code>.</li>
+<li><code>json</code>: Emit JSON messages to stdout. See
+<a href="../reference/external-tools.html#json-messages">the reference</a>
+for more details. Conflicts with <code>human</code> and <code>short</code>.</li>
+<li><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
+the “short” rendering from rustc. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
+contains embedded ANSI color codes for respecting rustc’s default color
+scheme. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
+in JSON messages printed, but instead Cargo itself should render the
+JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with <code>human</code> or <code>short</code>.</li>
+</ul></dd>
+
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-install-+toolchain"><a class="option-anchor" href="#option-cargo-install-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-install---config"><a class="option-anchor" href="#option-cargo-install---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-install--C"><a class="option-anchor" href="#option-cargo-install--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-install--h"><a class="option-anchor" href="#option-cargo-install--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-install---help"><a class="option-anchor" href="#option-cargo-install---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-install--Z"><a class="option-anchor" href="#option-cargo-install--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Install or upgrade a package from crates.io:
+
+ cargo install ripgrep
+
+2. Install or reinstall the package in the current directory:
+
+ cargo install --path .
+
+3. View the list of installed packages:
+
+ cargo install --list
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-uninstall(1)](cargo-uninstall.html), [cargo-search(1)](cargo-search.html), [cargo-publish(1)](cargo-publish.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-locate-project.md b/src/tools/cargo/src/doc/src/commands/cargo-locate-project.md
new file mode 100644
index 000000000..00491b398
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-locate-project.md
@@ -0,0 +1,144 @@
+# cargo-locate-project(1)
+
+## NAME
+
+cargo-locate-project --- Print a JSON representation of a Cargo.toml file's location
+
+## SYNOPSIS
+
+`cargo locate-project` [_options_]
+
+## DESCRIPTION
+
+This command will print a JSON object to stdout with the full path to the manifest. The
+manifest is found by searching upward for a file named `Cargo.toml` starting from the current
+working directory.
+
+If the project happens to be a part of a workspace, the manifest of the project, rather than
+the workspace root, is output. This can be overridden by the `--workspace` flag. The workspace
+root is found by traversing further upward or by using the field `package.workspace` after
+locating the manifest of a workspace member.
+
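+For illustration (the path shown is hypothetical), the default JSON output
+places the manifest path under the key `root`, while `--message-format plain`
+prints only the path:
+
+```console
+$ cargo locate-project
+{"root":"/path/to/my-package/Cargo.toml"}
+
+$ cargo locate-project --message-format plain
+/path/to/my-package/Cargo.toml
+```
+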
+## OPTIONS
+
+<dl>
+
+<dt class="option-term" id="option-cargo-locate-project---workspace"><a class="option-anchor" href="#option-cargo-locate-project---workspace"></a><code>--workspace</code></dt>
+<dd class="option-desc">Locate the <code>Cargo.toml</code> at the root of the workspace, as opposed to the current
+workspace member.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-locate-project---message-format"><a class="option-anchor" href="#option-cargo-locate-project---message-format"></a><code>--message-format</code> <em>fmt</em></dt>
+<dd class="option-desc">The representation in which to print the project location. Valid values:</p>
+<ul>
+<li><code>json</code> (default): JSON object with the path under the key “root”.</li>
+<li><code>plain</code>: Just the path.</li>
+</ul></dd>
+
+
+<dt class="option-term" id="option-cargo-locate-project--v"><a class="option-anchor" href="#option-cargo-locate-project--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-locate-project---verbose"><a class="option-anchor" href="#option-cargo-locate-project---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-locate-project--q"><a class="option-anchor" href="#option-cargo-locate-project--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-locate-project---quiet"><a class="option-anchor" href="#option-cargo-locate-project---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-locate-project---color"><a class="option-anchor" href="#option-cargo-locate-project---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-locate-project---manifest-path"><a class="option-anchor" href="#option-cargo-locate-project---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-locate-project-+toolchain"><a class="option-anchor" href="#option-cargo-locate-project-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-locate-project---config"><a class="option-anchor" href="#option-cargo-locate-project---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-locate-project--C"><a class="option-anchor" href="#option-cargo-locate-project--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-locate-project--h"><a class="option-anchor" href="#option-cargo-locate-project--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-locate-project---help"><a class="option-anchor" href="#option-cargo-locate-project---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-locate-project--Z"><a class="option-anchor" href="#option-cargo-locate-project--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Display the path to the manifest based on the current directory:
+
+ cargo locate-project
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-metadata(1)](cargo-metadata.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-login.md b/src/tools/cargo/src/doc/src/commands/cargo-login.md
new file mode 100644
index 000000000..e738dca06
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-login.md
@@ -0,0 +1,130 @@
+# cargo-login(1)
+
+## NAME
+
+cargo-login --- Save an API token from the registry locally
+
+## SYNOPSIS
+
+`cargo login` [_options_] [_token_]
+
+## DESCRIPTION
+
+This command will save the API token to disk so that commands that require
+authentication, such as [cargo-publish(1)](cargo-publish.html), will be automatically
+authenticated. The token is saved in `$CARGO_HOME/credentials.toml`. `CARGO_HOME`
+defaults to `.cargo` in your home directory.
+
+If the _token_ argument is not specified, it will be read from stdin.
+
+The API token for crates.io may be retrieved from <https://crates.io/me>.
+
+Take care to keep the token secret; it should not be shared with anyone else.
+
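+As a sketch of providing the token on stdin rather than as an argument (the
+environment variable name is hypothetical), which keeps it out of shell
+history:
+
+```console
+$ echo "$CRATES_IO_TOKEN" | cargo login
+```
+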
+## OPTIONS
+
+### Login Options
+
+<dl>
+<dt class="option-term" id="option-cargo-login---registry"><a class="option-anchor" href="#option-cargo-login---registry"></a><code>--registry</code> <em>registry</em></dt>
+<dd class="option-desc">Name of the registry to use. Registry names are defined in <a href="../reference/config.html">Cargo config
+files</a>. If not specified, the default registry is used,
+which is defined by the <code>registry.default</code> config key which defaults to
+<code>crates-io</code>.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-login--v"><a class="option-anchor" href="#option-cargo-login--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-login---verbose"><a class="option-anchor" href="#option-cargo-login---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-login--q"><a class="option-anchor" href="#option-cargo-login--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-login---quiet"><a class="option-anchor" href="#option-cargo-login---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-login---color"><a class="option-anchor" href="#option-cargo-login---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-login-+toolchain"><a class="option-anchor" href="#option-cargo-login-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-login---config"><a class="option-anchor" href="#option-cargo-login---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-login--C"><a class="option-anchor" href="#option-cargo-login--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-login--h"><a class="option-anchor" href="#option-cargo-login--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-login---help"><a class="option-anchor" href="#option-cargo-login---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-login--Z"><a class="option-anchor" href="#option-cargo-login--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Save the API token to disk:
+
+ cargo login
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-logout(1)](cargo-logout.html), [cargo-publish(1)](cargo-publish.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-logout.md b/src/tools/cargo/src/doc/src/commands/cargo-logout.md
new file mode 100644
index 000000000..16e393b02
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-logout.md
@@ -0,0 +1,136 @@
+# cargo-logout(1)
+
+## NAME
+
+cargo-logout --- Remove an API token from the registry locally
+
+## SYNOPSIS
+
+`cargo logout` [_options_]
+
+## DESCRIPTION
+
+This command will remove the API token from the local credential storage.
+Credentials are stored in `$CARGO_HOME/credentials.toml` where `$CARGO_HOME`
+defaults to `.cargo` in your home directory.
+
+If `--registry` is not specified, then the credentials for the default
+registry will be removed (configured by
+[`registry.default`](../reference/config.html#registrydefault), which defaults
+to <https://crates.io/>).
+
+This will not revoke the token on the server. If you need to revoke the token,
+visit the registry website and follow its instructions (see
+<https://crates.io/me> to revoke the token for <https://crates.io/>).
+
+## OPTIONS
+
+### Logout Options
+
+<dl>
+<dt class="option-term" id="option-cargo-logout---registry"><a class="option-anchor" href="#option-cargo-logout---registry"></a><code>--registry</code> <em>registry</em></dt>
+<dd class="option-desc">Name of the registry to use. Registry names are defined in <a href="../reference/config.html">Cargo config
+files</a>. If not specified, the default registry is used,
+which is defined by the <code>registry.default</code> config key which defaults to
+<code>crates-io</code>.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-logout--v"><a class="option-anchor" href="#option-cargo-logout--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-logout---verbose"><a class="option-anchor" href="#option-cargo-logout---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-logout--q"><a class="option-anchor" href="#option-cargo-logout--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-logout---quiet"><a class="option-anchor" href="#option-cargo-logout---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-logout---color"><a class="option-anchor" href="#option-cargo-logout---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-logout-+toolchain"><a class="option-anchor" href="#option-cargo-logout-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-logout---config"><a class="option-anchor" href="#option-cargo-logout---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-logout--C"><a class="option-anchor" href="#option-cargo-logout--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-logout--h"><a class="option-anchor" href="#option-cargo-logout--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-logout---help"><a class="option-anchor" href="#option-cargo-logout---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-logout--Z"><a class="option-anchor" href="#option-cargo-logout--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Remove the default registry token:
+
+ cargo logout
+
+2. Remove the token for a specific registry:
+
+ cargo logout --registry my-registry
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-login(1)](cargo-login.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-metadata.md b/src/tools/cargo/src/doc/src/commands/cargo-metadata.md
new file mode 100644
index 000000000..ebde0ea10
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-metadata.md
@@ -0,0 +1,479 @@
+# cargo-metadata(1)
+
+## NAME
+
+cargo-metadata --- Machine-readable metadata about the current package
+
+## SYNOPSIS
+
+`cargo metadata` [_options_]
+
+## DESCRIPTION
+
+Output JSON to stdout containing information about the workspace members and
+resolved dependencies of the current package.
+
+It is recommended to include the `--format-version` flag to future-proof
+your code and ensure the output is in the format you are expecting.
+
+See the [cargo_metadata crate](https://crates.io/crates/cargo_metadata)
+for a Rust API for reading the metadata.
+
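+For example, a typical invocation that pins the schema version and skips
+dependency resolution, suitable for piping into another tool:
+
+```console
+$ cargo metadata --format-version 1 --no-deps
+```
+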
+## OUTPUT FORMAT
+
+The output has the following format:
+
+```javascript
+{
+ /* Array of all packages in the workspace.
+ It also includes all feature-enabled dependencies unless --no-deps is used.
+ */
+ "packages": [
+ {
+ /* The name of the package. */
+ "name": "my-package",
+ /* The version of the package. */
+ "version": "0.1.0",
+ /* The Package ID, a unique identifier for referring to the package. */
+ "id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+ /* The license value from the manifest, or null. */
+ "license": "MIT/Apache-2.0",
+ /* The license-file value from the manifest, or null. */
+ "license_file": "LICENSE",
+ /* The description value from the manifest, or null. */
+ "description": "Package description.",
+ /* The source ID of the package. This represents where
+ a package is retrieved from.
+ This is null for path dependencies and workspace members.
+ For other dependencies, it is a string with the format:
+ - "registry+URL" for registry-based dependencies.
+ Example: "registry+https://github.com/rust-lang/crates.io-index"
+ - "git+URL" for git-based dependencies.
+ Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c"
+ */
+ "source": null,
+ /* Array of dependencies declared in the package's manifest. */
+ "dependencies": [
+ {
+ /* The name of the dependency. */
+ "name": "bitflags",
+ /* The source ID of the dependency. May be null, see
+ description for the package source.
+ */
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ /* The version requirement for the dependency.
+ Dependencies without a version requirement have a value of "*".
+ */
+ "req": "^1.0",
+ /* The dependency kind.
+ "dev", "build", or null for a normal dependency.
+ */
+ "kind": null,
+ /* If the dependency is renamed, this is the new name for
+ the dependency as a string. null if it is not renamed.
+ */
+ "rename": null,
+ /* Boolean of whether or not this is an optional dependency. */
+ "optional": false,
+ /* Boolean of whether or not default features are enabled. */
+ "uses_default_features": true,
+ /* Array of features enabled. */
+ "features": [],
+ /* The target platform for the dependency.
+ null if not a target dependency.
+ */
+ "target": "cfg(windows)",
+ /* The file system path for a local path dependency.
+ not present if not a path dependency.
+ */
+ "path": "/path/to/dep",
+ /* A string of the URL of the registry this dependency is from.
+ If not specified or null, the dependency is from the default
+ registry (crates.io).
+ */
+ "registry": null
+ }
+ ],
+ /* Array of Cargo targets. */
+ "targets": [
+ {
+ /* Array of target kinds.
+ - lib targets list the `crate-type` values from the
+ manifest such as "lib", "rlib", "dylib",
+ "proc-macro", etc. (default ["lib"])
+ - binary is ["bin"]
+ - example is ["example"]
+ - integration test is ["test"]
+ - benchmark is ["bench"]
+ - build script is ["custom-build"]
+ */
+ "kind": [
+ "bin"
+ ],
+ /* Array of crate types.
+ - lib and example libraries list the `crate-type` values
+ from the manifest such as "lib", "rlib", "dylib",
+ "proc-macro", etc. (default ["lib"])
+ - all other target kinds are ["bin"]
+ */
+ "crate_types": [
+ "bin"
+ ],
+ /* The name of the target. */
+ "name": "my-package",
+ /* Absolute path to the root source file of the target. */
+ "src_path": "/path/to/my-package/src/main.rs",
+ /* The Rust edition of the target.
+ Defaults to the package edition.
+ */
+ "edition": "2018",
+ /* Array of required features.
+ This property is not included if no required features are set.
+ */
+ "required-features": ["feat1"],
+ /* Whether the target should be documented by `cargo doc`. */
+ "doc": true,
+ /* Whether or not this target has doc tests enabled, and
+ the target is compatible with doc testing.
+ */
+ "doctest": false,
+ /* Whether or not this target should be built and run with `--test`
+ */
+ "test": true
+ }
+ ],
+ /* Set of features defined for the package.
+ Each feature maps to an array of features or dependencies it
+ enables.
+ */
+ "features": {
+ "default": [
+ "feat1"
+ ],
+ "feat1": [],
+ "feat2": []
+ },
+ /* Absolute path to this package's manifest. */
+ "manifest_path": "/path/to/my-package/Cargo.toml",
+ /* Package metadata.
+ This is null if no metadata is specified.
+ */
+ "metadata": {
+ "docs": {
+ "rs": {
+ "all-features": true
+ }
+ }
+ },
+ /* List of registries to which this package may be published.
+ Publishing is unrestricted if null, and forbidden if an empty array. */
+ "publish": [
+ "crates-io"
+ ],
+ /* Array of authors from the manifest.
+ Empty array if no authors specified.
+ */
+ "authors": [
+ "Jane Doe <user@example.com>"
+ ],
+ /* Array of categories from the manifest. */
+ "categories": [
+ "command-line-utilities"
+ ],
+ /* Optional string that is the default binary picked by cargo run. */
+ "default_run": null,
+ /* Optional string that is the minimum supported rust version */
+ "rust_version": "1.56",
+ /* Array of keywords from the manifest. */
+ "keywords": [
+ "cli"
+ ],
+ /* The readme value from the manifest or null if not specified. */
+ "readme": "README.md",
+ /* The repository value from the manifest or null if not specified. */
+ "repository": "https://github.com/rust-lang/cargo",
+ /* The homepage value from the manifest or null if not specified. */
+ "homepage": "https://rust-lang.org",
+ /* The documentation value from the manifest or null if not specified. */
+ "documentation": "https://doc.rust-lang.org/stable/std",
+ /* The default edition of the package.
+ Note that individual targets may have different editions.
+ */
+ "edition": "2018",
+ /* Optional string that is the name of a native library the package
+ is linking to.
+ */
+ "links": null,
+ }
+ ],
+ /* Array of members of the workspace.
+ Each entry is the Package ID for the package.
+ */
+ "workspace_members": [
+ "my-package 0.1.0 (path+file:///path/to/my-package)",
+ ],
+ // The resolved dependency graph for the entire workspace. The enabled
+ // features are based on the enabled features for the "current" package.
+ // Inactivated optional dependencies are not listed.
+ //
+ // This is null if --no-deps is specified.
+ //
+ // By default, this includes all dependencies for all target platforms.
+ // The `--filter-platform` flag may be used to narrow to a specific
+ // target triple.
+ "resolve": {
+ /* Array of nodes within the dependency graph.
+ Each node is a package.
+ */
+ "nodes": [
+ {
+ /* The Package ID of this node. */
+ "id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+ /* The dependencies of this package, an array of Package IDs. */
+ "dependencies": [
+ "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ /* The dependencies of this package. This is an alternative to
+ "dependencies" which contains additional information. In
+ particular, this handles renamed dependencies.
+ */
+ "deps": [
+ {
+ /* The name of the dependency's library target.
+ If this is a renamed dependency, this is the new
+ name.
+ */
+ "name": "bitflags",
+ /* The Package ID of the dependency. */
+ "pkg": "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ /* Array of dependency kinds. Added in Cargo 1.40. */
+ "dep_kinds": [
+ {
+ /* The dependency kind.
+ "dev", "build", or null for a normal dependency.
+ */
+ "kind": null,
+ /* The target platform for the dependency.
+ null if not a target dependency.
+ */
+ "target": "cfg(windows)"
+ }
+ ]
+ }
+ ],
+ /* Array of features enabled on this package. */
+ "features": [
+ "default"
+ ]
+ }
+ ],
+ /* The root package of the workspace.
+ This is null if this is a virtual workspace. Otherwise it is
+ the Package ID of the root package.
+ */
+ "root": "my-package 0.1.0 (path+file:///path/to/my-package)"
+ },
+ /* The absolute path to the build directory where Cargo places its output. */
+ "target_directory": "/path/to/my-package/target",
+ /* The version of the schema for this metadata structure.
+ This will be changed if incompatible changes are ever made.
+ */
+ "version": 1,
+ /* The absolute path to the root of the workspace. */
+ "workspace_root": "/path/to/my-package"
+ /* Workspace metadata.
+ This is null if no metadata is specified. */
+ "metadata": {
+ "docs": {
+ "rs": {
+ "all-features": true
+ }
+ }
+ }
+}
+```
+
+## OPTIONS
+
+### Output Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-metadata---no-deps"><a class="option-anchor" href="#option-cargo-metadata---no-deps"></a><code>--no-deps</code></dt>
+<dd class="option-desc">Output information only about the workspace members and don’t fetch
+dependencies.</dd>
+
+
+<dt class="option-term" id="option-cargo-metadata---format-version"><a class="option-anchor" href="#option-cargo-metadata---format-version"></a><code>--format-version</code> <em>version</em></dt>
+<dd class="option-desc">Specify the version of the output format to use. Currently <code>1</code> is the only
+possible value.</dd>
+
+
+<dt class="option-term" id="option-cargo-metadata---filter-platform"><a class="option-anchor" href="#option-cargo-metadata---filter-platform"></a><code>--filter-platform</code> <em>triple</em></dt>
+<dd class="option-desc">This filters the <code>resolve</code> output to only include dependencies for the
+given <a href="../appendix/glossary.html#target">target triple</a>.
+Without this flag, the resolve includes all targets.</p>
+<p>Note that the dependencies listed in the “packages” array still include all
+dependencies. Each package definition is intended to be an unaltered
+reproduction of the information within <code>Cargo.toml</code>.</dd>
+
+
+</dl>
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-metadata--F"><a class="option-anchor" href="#option-cargo-metadata--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-metadata---features"><a class="option-anchor" href="#option-cargo-metadata---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-metadata---all-features"><a class="option-anchor" href="#option-cargo-metadata---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-metadata---no-default-features"><a class="option-anchor" href="#option-cargo-metadata---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-metadata--v"><a class="option-anchor" href="#option-cargo-metadata--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-metadata---verbose"><a class="option-anchor" href="#option-cargo-metadata---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-metadata--q"><a class="option-anchor" href="#option-cargo-metadata--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-metadata---quiet"><a class="option-anchor" href="#option-cargo-metadata---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-metadata---color"><a class="option-anchor" href="#option-cargo-metadata---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-metadata---manifest-path"><a class="option-anchor" href="#option-cargo-metadata---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-metadata---frozen"><a class="option-anchor" href="#option-cargo-metadata---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-metadata---locked"><a class="option-anchor" href="#option-cargo-metadata---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-metadata---offline"><a class="option-anchor" href="#option-cargo-metadata---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-metadata-+toolchain"><a class="option-anchor" href="#option-cargo-metadata-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-metadata---config"><a class="option-anchor" href="#option-cargo-metadata---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-metadata--C"><a class="option-anchor" href="#option-cargo-metadata--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-metadata--h"><a class="option-anchor" href="#option-cargo-metadata--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-metadata---help"><a class="option-anchor" href="#option-cargo-metadata---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-metadata--Z"><a class="option-anchor" href="#option-cargo-metadata--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Output JSON about the current package:
+
+ cargo metadata --format-version=1
+
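+2. Output JSON limited to dependencies that are resolved for a specific
+   platform (the target triple shown is only an example):
+
+       cargo metadata --format-version=1 --filter-platform x86_64-unknown-linux-gnu
+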
+## SEE ALSO
+[cargo(1)](cargo.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-new.md b/src/tools/cargo/src/doc/src/commands/cargo-new.md
new file mode 100644
index 000000000..144b6f2eb
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-new.md
@@ -0,0 +1,157 @@
+# cargo-new(1)
+
+## NAME
+
+cargo-new --- Create a new Cargo package
+
+## SYNOPSIS
+
+`cargo new` [_options_] _path_
+
+## DESCRIPTION
+
+This command will create a new Cargo package in the given directory. This
+includes a simple template with a `Cargo.toml` manifest, a sample source file,
+and a VCS ignore file. If the directory is not already in a VCS repository,
+then a new repository is created (see `--vcs` below).
+
+See [cargo-init(1)](cargo-init.html) for a similar command which will create a new manifest
+in an existing directory.
+
+## OPTIONS
+
+### New Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-new---bin"><a class="option-anchor" href="#option-cargo-new---bin"></a><code>--bin</code></dt>
+<dd class="option-desc">Create a package with a binary target (<code>src/main.rs</code>).
+This is the default behavior.</dd>
+
+
+<dt class="option-term" id="option-cargo-new---lib"><a class="option-anchor" href="#option-cargo-new---lib"></a><code>--lib</code></dt>
+<dd class="option-desc">Create a package with a library target (<code>src/lib.rs</code>).</dd>
+
+
+<dt class="option-term" id="option-cargo-new---edition"><a class="option-anchor" href="#option-cargo-new---edition"></a><code>--edition</code> <em>edition</em></dt>
+<dd class="option-desc">Specify the Rust edition to use. Default is 2021.
+Possible values: 2015, 2018, 2021</dd>
+
+
+<dt class="option-term" id="option-cargo-new---name"><a class="option-anchor" href="#option-cargo-new---name"></a><code>--name</code> <em>name</em></dt>
+<dd class="option-desc">Set the package name. Defaults to the directory name.</dd>
+
+
+<dt class="option-term" id="option-cargo-new---vcs"><a class="option-anchor" href="#option-cargo-new---vcs"></a><code>--vcs</code> <em>vcs</em></dt>
+<dd class="option-desc">Initialize a new VCS repository for the given version control system (git,
+hg, pijul, or fossil) or do not initialize any version control at all
+(none). If not specified, defaults to <code>git</code> or the configuration value
+<code>cargo-new.vcs</code>, or <code>none</code> if already inside a VCS repository.</dd>
+
+
+<dt class="option-term" id="option-cargo-new---registry"><a class="option-anchor" href="#option-cargo-new---registry"></a><code>--registry</code> <em>registry</em></dt>
+<dd class="option-desc">This sets the <code>publish</code> field in <code>Cargo.toml</code> to the given registry name,
+which will restrict publishing only to that registry.</p>
+<p>Registry names are defined in <a href="../reference/config.html">Cargo config files</a>.
+If not specified, the default registry defined by the <code>registry.default</code>
+config key is used. If the default registry is not set and <code>--registry</code> is not
+used, the <code>publish</code> field will not be set which means that publishing will not
+be restricted.</dd>
+
+
+</dl>
+
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-new--v"><a class="option-anchor" href="#option-cargo-new--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-new---verbose"><a class="option-anchor" href="#option-cargo-new---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-new--q"><a class="option-anchor" href="#option-cargo-new--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-new---quiet"><a class="option-anchor" href="#option-cargo-new---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-new---color"><a class="option-anchor" href="#option-cargo-new---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-new-+toolchain"><a class="option-anchor" href="#option-cargo-new-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-new---config"><a class="option-anchor" href="#option-cargo-new---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-new--C"><a class="option-anchor" href="#option-cargo-new--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-new--h"><a class="option-anchor" href="#option-cargo-new--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-new---help"><a class="option-anchor" href="#option-cargo-new---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-new--Z"><a class="option-anchor" href="#option-cargo-new--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Create a binary Cargo package in the given directory:
+
+ cargo new foo
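+
+2. Create a library package instead of a binary one:
+
+       cargo new --lib bar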
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-init(1)](cargo-init.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-owner.md b/src/tools/cargo/src/doc/src/commands/cargo-owner.md
new file mode 100644
index 000000000..caf16f4b2
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-owner.md
@@ -0,0 +1,176 @@
+# cargo-owner(1)
+
+## NAME
+
+cargo-owner --- Manage the owners of a crate on the registry
+
+## SYNOPSIS
+
+`cargo owner` [_options_] `--add` _login_ [_crate_]\
+`cargo owner` [_options_] `--remove` _login_ [_crate_]\
+`cargo owner` [_options_] `--list` [_crate_]
+
+## DESCRIPTION
+
+This command will modify the owners for a crate on the registry. Owners of a
+crate can upload new versions and yank old versions. Non-team owners can also
+modify the set of owners, so take care!
+
+This command requires you to be authenticated with either the `--token` option
+or using [cargo-login(1)](cargo-login.html).
+
+If the crate name is not specified, it will use the package name from the
+current directory.
+
+See [the reference](../reference/publishing.html#cargo-owner) for more
+information about owners and publishing.
+
+## OPTIONS
+
+### Owner Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-owner--a"><a class="option-anchor" href="#option-cargo-owner--a"></a><code>-a</code></dt>
+<dt class="option-term" id="option-cargo-owner---add"><a class="option-anchor" href="#option-cargo-owner---add"></a><code>--add</code> <em>login</em>…</dt>
+<dd class="option-desc">Invite the given user or team as an owner.</dd>
+
+
+<dt class="option-term" id="option-cargo-owner--r"><a class="option-anchor" href="#option-cargo-owner--r"></a><code>-r</code></dt>
+<dt class="option-term" id="option-cargo-owner---remove"><a class="option-anchor" href="#option-cargo-owner---remove"></a><code>--remove</code> <em>login</em>…</dt>
+<dd class="option-desc">Remove the given user or team as an owner.</dd>
+
+
+<dt class="option-term" id="option-cargo-owner--l"><a class="option-anchor" href="#option-cargo-owner--l"></a><code>-l</code></dt>
+<dt class="option-term" id="option-cargo-owner---list"><a class="option-anchor" href="#option-cargo-owner---list"></a><code>--list</code></dt>
+<dd class="option-desc">List owners of a crate.</dd>
+
+
+<dt class="option-term" id="option-cargo-owner---token"><a class="option-anchor" href="#option-cargo-owner---token"></a><code>--token</code> <em>token</em></dt>
+<dd class="option-desc">API token to use when authenticating. This overrides the token stored in
+the credentials file (which is created by <a href="cargo-login.html">cargo-login(1)</a>).</p>
+<p><a href="../reference/config.html">Cargo config</a> environment variables can be
+used to override the tokens stored in the credentials file. The token for
+crates.io may be specified with the <code>CARGO_REGISTRY_TOKEN</code> environment
+variable. Tokens for other registries may be specified with environment
+variables of the form <code>CARGO_REGISTRIES_NAME_TOKEN</code> where <code>NAME</code> is the name
+of the registry in all capital letters.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-owner---index"><a class="option-anchor" href="#option-cargo-owner---index"></a><code>--index</code> <em>index</em></dt>
+<dd class="option-desc">The URL of the registry index to use.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-owner---registry"><a class="option-anchor" href="#option-cargo-owner---registry"></a><code>--registry</code> <em>registry</em></dt>
+<dd class="option-desc">Name of the registry to use. Registry names are defined in <a href="../reference/config.html">Cargo config
+files</a>. If not specified, the default registry is used,
+which is defined by the <code>registry.default</code> config key which defaults to
+<code>crates-io</code>.</dd>
+
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-owner--v"><a class="option-anchor" href="#option-cargo-owner--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-owner---verbose"><a class="option-anchor" href="#option-cargo-owner---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-owner--q"><a class="option-anchor" href="#option-cargo-owner--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-owner---quiet"><a class="option-anchor" href="#option-cargo-owner---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-owner---color"><a class="option-anchor" href="#option-cargo-owner---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-owner-+toolchain"><a class="option-anchor" href="#option-cargo-owner-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-owner---config"><a class="option-anchor" href="#option-cargo-owner---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-owner--C"><a class="option-anchor" href="#option-cargo-owner--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-owner--h"><a class="option-anchor" href="#option-cargo-owner--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-owner---help"><a class="option-anchor" href="#option-cargo-owner---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-owner--Z"><a class="option-anchor" href="#option-cargo-owner--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. List owners of a package:
+
+ cargo owner --list foo
+
+2. Invite an owner to a package:
+
+ cargo owner --add username foo
+
+3. Remove an owner from a package:
+
+ cargo owner --remove username foo
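+
+4. Invite a team as an owner; on crates.io, teams use the
+   `github:org-name:team-name` syntax (the names below are placeholders):
+
+       cargo owner --add github:my-org:my-team foo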
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-login(1)](cargo-login.html), [cargo-publish(1)](cargo-publish.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-package.md b/src/tools/cargo/src/doc/src/commands/cargo-package.md
new file mode 100644
index 000000000..776b150cf
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-package.md
@@ -0,0 +1,334 @@
+# cargo-package(1)
+
+
+
+
+## NAME
+
+cargo-package --- Assemble the local package into a distributable tarball
+
+## SYNOPSIS
+
+`cargo package` [_options_]
+
+## DESCRIPTION
+
+This command will create a distributable, compressed `.crate` file with the
+source code of the package in the current directory. The resulting file will
+be stored in the `target/package` directory. This performs the following
+steps:
+
+1. Load the current workspace and perform some basic checks.
+ - Path dependencies are not allowed unless they have a version key. Cargo
+ will ignore the path key for dependencies in published packages.
+ `dev-dependencies` do not have this restriction.
+2. Create the compressed `.crate` file.
+ - The original `Cargo.toml` file is rewritten and normalized.
+ - `[patch]`, `[replace]`, and `[workspace]` sections are removed from the
+ manifest.
+ - `Cargo.lock` is automatically included if the package contains an
+ executable binary or example target. [cargo-install(1)](cargo-install.html) will use the
+ packaged lock file if the `--locked` flag is used.
+ - A `.cargo_vcs_info.json` file is included that contains information
+ about the current VCS checkout hash if available (not included with
+ `--allow-dirty`).
+3. Extract the `.crate` file and build it to verify it can build.
+ - This will rebuild your package from scratch to ensure that it can be
+ built from a pristine state. The `--no-verify` flag can be used to skip
+ this step.
+4. Check that build scripts did not modify any source files.
+
+The list of files included can be controlled with the `include` and `exclude`
+fields in the manifest.
+
+See [the reference](../reference/publishing.html) for more details about
+packaging and publishing.
+
+### .cargo_vcs_info.json format
+
+Cargo will generate a `.cargo_vcs_info.json` file in the following format:
+
+```javascript
+{
+ "git": {
+ "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302"
+ },
+ "path_in_vcs": ""
+}
+```
+
+`path_in_vcs` will be set to a repo-relative path for packages
+in subdirectories of the version control repository.
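+
+For instance, a package located at `crates/foo` inside its repository would
+produce a file like the following (the hash is the same illustrative value as
+above):
+
+```javascript
+{
+  "git": {
+    "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302"
+  },
+  "path_in_vcs": "crates/foo"
+}
+```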
+
+## OPTIONS
+
+### Package Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-package--l"><a class="option-anchor" href="#option-cargo-package--l"></a><code>-l</code></dt>
+<dt class="option-term" id="option-cargo-package---list"><a class="option-anchor" href="#option-cargo-package---list"></a><code>--list</code></dt>
+<dd class="option-desc">Print files included in a package without making one.</dd>
+
+
+<dt class="option-term" id="option-cargo-package---no-verify"><a class="option-anchor" href="#option-cargo-package---no-verify"></a><code>--no-verify</code></dt>
+<dd class="option-desc">Don’t verify the contents by building them.</dd>
+
+
+<dt class="option-term" id="option-cargo-package---no-metadata"><a class="option-anchor" href="#option-cargo-package---no-metadata"></a><code>--no-metadata</code></dt>
+<dd class="option-desc">Ignore warnings about a lack of human-usable metadata (such as the description
+or the license).</dd>
+
+
+<dt class="option-term" id="option-cargo-package---allow-dirty"><a class="option-anchor" href="#option-cargo-package---allow-dirty"></a><code>--allow-dirty</code></dt>
+<dd class="option-desc">Allow working directories with uncommitted VCS changes to be packaged.</dd>
+
+
+</dl>
+
+### Package Selection
+
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+`--manifest-path` is not given). If the manifest is the root of a workspace, then
+the workspace's default members are selected; otherwise, only the package defined
+by the manifest will be selected.
+
+The default members of a workspace can be set explicitly with the
+`workspace.default-members` key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+`--workspace`), and a non-virtual workspace will include only the root crate itself.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-package--p"><a class="option-anchor" href="#option-cargo-package--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-package---package"><a class="option-anchor" href="#option-cargo-package---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Package only the specified packages. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.</dd>
+
+
+<dt class="option-term" id="option-cargo-package---workspace"><a class="option-anchor" href="#option-cargo-package---workspace"></a><code>--workspace</code></dt>
+<dd class="option-desc">Package all members in the workspace.</dd>
+
+
+
+
+<dt class="option-term" id="option-cargo-package---exclude"><a class="option-anchor" href="#option-cargo-package---exclude"></a><code>--exclude</code> <em>SPEC</em>…</dt>
+<dd class="option-desc">Exclude the specified packages. Must be used in conjunction with the
+<code>--workspace</code> flag. This flag may be specified multiple times and supports
+common Unix glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.</dd>
+
+
+</dl>
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-package---target"><a class="option-anchor" href="#option-cargo-package---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Package for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-package---target-dir"><a class="option-anchor" href="#option-cargo-package---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+
+</dl>
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-package--F"><a class="option-anchor" href="#option-cargo-package--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-package---features"><a class="option-anchor" href="#option-cargo-package---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-package---all-features"><a class="option-anchor" href="#option-cargo-package---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-package---no-default-features"><a class="option-anchor" href="#option-cargo-package---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Manifest Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-package---manifest-path"><a class="option-anchor" href="#option-cargo-package---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-package---frozen"><a class="option-anchor" href="#option-cargo-package---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-package---locked"><a class="option-anchor" href="#option-cargo-package---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-package---offline"><a class="option-anchor" href="#option-cargo-package---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Miscellaneous Options
+
+<dl>
+<dt class="option-term" id="option-cargo-package--j"><a class="option-anchor" href="#option-cargo-package--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-package---jobs"><a class="option-anchor" href="#option-cargo-package---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+Should not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-package---keep-going"><a class="option-anchor" href="#option-cargo-package---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-package--v"><a class="option-anchor" href="#option-cargo-package--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-package---verbose"><a class="option-anchor" href="#option-cargo-package---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-package--q"><a class="option-anchor" href="#option-cargo-package--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-package---quiet"><a class="option-anchor" href="#option-cargo-package---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-package---color"><a class="option-anchor" href="#option-cargo-package---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-package-+toolchain"><a class="option-anchor" href="#option-cargo-package-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-package---config"><a class="option-anchor" href="#option-cargo-package---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-package--C"><a class="option-anchor" href="#option-cargo-package--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-package--h"><a class="option-anchor" href="#option-cargo-package--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-package---help"><a class="option-anchor" href="#option-cargo-package---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-package--Z"><a class="option-anchor" href="#option-cargo-package--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Create a compressed `.crate` file of the current package:
+
+ cargo package
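+
+2. List the files that would be included in the archive without creating it:
+
+       cargo package --list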
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-publish(1)](cargo-publish.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-pkgid.md b/src/tools/cargo/src/doc/src/commands/cargo-pkgid.md
new file mode 100644
index 000000000..d7e507506
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-pkgid.md
@@ -0,0 +1,190 @@
+# cargo-pkgid(1)
+
+## NAME
+
+cargo-pkgid --- Print a fully qualified package specification
+
+## SYNOPSIS
+
+`cargo pkgid` [_options_] [_spec_]
+
+## DESCRIPTION
+
+Given a _spec_ argument, print out the fully qualified package ID specifier
+for a package or dependency in the current workspace. This command will
+generate an error if _spec_ is ambiguous as to which package it refers to in
+the dependency graph. If no _spec_ is given, then the specifier for the local
+package is printed.
+
+This command requires that a lockfile is available and dependencies have been
+fetched.
+
+A package specifier consists of a name, version, and source URL. You are
+allowed to use partial specifiers to succinctly match a specific package as
+long as it matches only one package. The format of a _spec_ can be one of the
+following:
+
+SPEC Structure | Example SPEC
+---------------------------|--------------
+_name_ | `bitflags`
+_name_`@`_version_ | `bitflags@1.0.4`
+_url_ | `https://github.com/rust-lang/cargo`
+_url_`#`_version_ | `https://github.com/rust-lang/cargo#0.33.0`
+_url_`#`_name_ | `https://github.com/rust-lang/crates.io-index#bitflags`
+_url_`#`_name_`@`_version_ | `https://github.com/rust-lang/cargo#crates-io@0.21.0`
+
+## OPTIONS
+
+### Package Selection
+
+<dl>
+
+<dt class="option-term" id="option-cargo-pkgid--p"><a class="option-anchor" href="#option-cargo-pkgid--p"></a><code>-p</code> <em>spec</em></dt>
+<dt class="option-term" id="option-cargo-pkgid---package"><a class="option-anchor" href="#option-cargo-pkgid---package"></a><code>--package</code> <em>spec</em></dt>
+<dd class="option-desc">Get the package ID for the given package instead of the current package.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-pkgid--v"><a class="option-anchor" href="#option-cargo-pkgid--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-pkgid---verbose"><a class="option-anchor" href="#option-cargo-pkgid---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-pkgid--q"><a class="option-anchor" href="#option-cargo-pkgid--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-pkgid---quiet"><a class="option-anchor" href="#option-cargo-pkgid---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-pkgid---color"><a class="option-anchor" href="#option-cargo-pkgid---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-pkgid---manifest-path"><a class="option-anchor" href="#option-cargo-pkgid---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-pkgid---frozen"><a class="option-anchor" href="#option-cargo-pkgid---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-pkgid---locked"><a class="option-anchor" href="#option-cargo-pkgid---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-pkgid---offline"><a class="option-anchor" href="#option-cargo-pkgid---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-pkgid-+toolchain"><a class="option-anchor" href="#option-cargo-pkgid-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-pkgid---config"><a class="option-anchor" href="#option-cargo-pkgid---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-pkgid--C"><a class="option-anchor" href="#option-cargo-pkgid--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-pkgid--h"><a class="option-anchor" href="#option-cargo-pkgid--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-pkgid---help"><a class="option-anchor" href="#option-cargo-pkgid---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-pkgid--Z"><a class="option-anchor" href="#option-cargo-pkgid--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Retrieve package specification for `foo` package:
+
+ cargo pkgid foo
+
+2. Retrieve package specification for version 1.0.0 of `foo`:
+
+ cargo pkgid foo@1.0.0
+
+3. Retrieve package specification for `foo` from crates.io:
+
+ cargo pkgid https://github.com/rust-lang/crates.io-index#foo
+
+4. Retrieve package specification for `foo` from a local package:
+
+ cargo pkgid file:///path/to/local/package#foo
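+
+5. Feed the fully qualified specifier into another command that accepts a
+   package spec, for example to update only that dependency (assuming a
+   POSIX shell):
+
+       cargo update -p "$(cargo pkgid foo)"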
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-generate-lockfile(1)](cargo-generate-lockfile.html), [cargo-metadata(1)](cargo-metadata.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-publish.md b/src/tools/cargo/src/doc/src/commands/cargo-publish.md
new file mode 100644
index 000000000..1f4fbebb8
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-publish.md
@@ -0,0 +1,300 @@
+# cargo-publish(1)
+
+
+
+## NAME
+
+cargo-publish --- Upload a package to the registry
+
+## SYNOPSIS
+
+`cargo publish` [_options_]
+
+## DESCRIPTION
+
+This command will create a distributable, compressed `.crate` file with the
+source code of the package in the current directory and upload it to a
+registry. The default registry is <https://crates.io>. This performs the
+following steps:
+
+1. Performs a few checks, including:
+ - Checks the `package.publish` key in the manifest for restrictions on
+ which registries you are allowed to publish to.
+2. Create a `.crate` file by following the steps in [cargo-package(1)](cargo-package.html).
+3. Upload the crate to the registry. Note that the server will perform
+ additional checks on the crate.
+
+This command requires you to be authenticated with either the `--token` option
+or using [cargo-login(1)](cargo-login.html).
+
+See [the reference](../reference/publishing.html) for more details about
+packaging and publishing.
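+
+A common workflow is to run every check without actually uploading first, for
+example:
+
+    cargo publish --dry-run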
+
+## OPTIONS
+
+### Publish Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-publish---dry-run"><a class="option-anchor" href="#option-cargo-publish---dry-run"></a><code>--dry-run</code></dt>
+<dd class="option-desc">Perform all checks without uploading.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish---token"><a class="option-anchor" href="#option-cargo-publish---token"></a><code>--token</code> <em>token</em></dt>
+<dd class="option-desc">API token to use when authenticating. This overrides the token stored in
+the credentials file (which is created by <a href="cargo-login.html">cargo-login(1)</a>).</p>
+<p><a href="../reference/config.html">Cargo config</a> environment variables can be
+used to override the tokens stored in the credentials file. The token for
+crates.io may be specified with the <code>CARGO_REGISTRY_TOKEN</code> environment
+variable. Tokens for other registries may be specified with environment
+variables of the form <code>CARGO_REGISTRIES_NAME_TOKEN</code> where <code>NAME</code> is the name
+of the registry in all capital letters.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-publish---no-verify"><a class="option-anchor" href="#option-cargo-publish---no-verify"></a><code>--no-verify</code></dt>
+<dd class="option-desc">Don’t verify the contents by building them.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish---allow-dirty"><a class="option-anchor" href="#option-cargo-publish---allow-dirty"></a><code>--allow-dirty</code></dt>
+<dd class="option-desc">Allow working directories with uncommitted VCS changes to be packaged.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish---index"><a class="option-anchor" href="#option-cargo-publish---index"></a><code>--index</code> <em>index</em></dt>
+<dd class="option-desc">The URL of the registry index to use.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-publish---registry"><a class="option-anchor" href="#option-cargo-publish---registry"></a><code>--registry</code> <em>registry</em></dt>
+<dd class="option-desc">Name of the registry to publish to. Registry names are defined in <a href="../reference/config.html">Cargo
+config files</a>. If not specified, and there is a
+<a href="../reference/manifest.html#the-publish-field"><code>package.publish</code></a> field in
+<code>Cargo.toml</code> with a single registry, then it will publish to that registry.
+Otherwise it will use the default registry, which is defined by the
+<a href="../reference/config.html#registrydefault"><code>registry.default</code></a> config key
+which defaults to <code>crates-io</code>.</dd>
+
+
+</dl>
+
+### Package Selection
+
+By default, the package in the current working directory is selected. The `-p`
+flag can be used to choose a different package in a workspace.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-publish--p"><a class="option-anchor" href="#option-cargo-publish--p"></a><code>-p</code> <em>spec</em></dt>
+<dt class="option-term" id="option-cargo-publish---package"><a class="option-anchor" href="#option-cargo-publish---package"></a><code>--package</code> <em>spec</em></dt>
+<dd class="option-desc">The package to publish. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the SPEC
+format.</dd>
+
+
+</dl>
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-publish---target"><a class="option-anchor" href="#option-cargo-publish---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Publish for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-publish---target-dir"><a class="option-anchor" href="#option-cargo-publish---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+
+</dl>
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-publish--F"><a class="option-anchor" href="#option-cargo-publish--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-publish---features"><a class="option-anchor" href="#option-cargo-publish---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish---all-features"><a class="option-anchor" href="#option-cargo-publish---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish---no-default-features"><a class="option-anchor" href="#option-cargo-publish---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Manifest Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-publish---manifest-path"><a class="option-anchor" href="#option-cargo-publish---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-publish---frozen"><a class="option-anchor" href="#option-cargo-publish---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-publish---locked"><a class="option-anchor" href="#option-cargo-publish---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish---offline"><a class="option-anchor" href="#option-cargo-publish---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Miscellaneous Options
+
+<dl>
+<dt class="option-term" id="option-cargo-publish--j"><a class="option-anchor" href="#option-cargo-publish--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-publish---jobs"><a class="option-anchor" href="#option-cargo-publish---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+It should not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish---keep-going"><a class="option-anchor" href="#option-cargo-publish---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-publish--v"><a class="option-anchor" href="#option-cargo-publish--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-publish---verbose"><a class="option-anchor" href="#option-cargo-publish---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish--q"><a class="option-anchor" href="#option-cargo-publish--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-publish---quiet"><a class="option-anchor" href="#option-cargo-publish---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish---color"><a class="option-anchor" href="#option-cargo-publish---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-publish-+toolchain"><a class="option-anchor" href="#option-cargo-publish-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish---config"><a class="option-anchor" href="#option-cargo-publish---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish--C"><a class="option-anchor" href="#option-cargo-publish--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-publish--h"><a class="option-anchor" href="#option-cargo-publish--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-publish---help"><a class="option-anchor" href="#option-cargo-publish---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-publish--Z"><a class="option-anchor" href="#option-cargo-publish--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Publish the current package:
+
+ cargo publish
+
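+2. Publish a single package from a workspace by selecting it with `-p`
+   (the name `my-crate` below is only a placeholder for one of your workspace members):
+
+       cargo publish -p my-crate
+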
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-package(1)](cargo-package.html), [cargo-login(1)](cargo-login.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-remove.md b/src/tools/cargo/src/doc/src/commands/cargo-remove.md
new file mode 100644
index 000000000..571c41075
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-remove.md
@@ -0,0 +1,194 @@
+# cargo-remove(1)
+
+
+
+## NAME
+
+cargo-remove --- Remove dependencies from a Cargo.toml manifest file
+
+## SYNOPSIS
+
+`cargo remove` [_options_] _dependency_...
+
+## DESCRIPTION
+
+Remove one or more dependencies from a `Cargo.toml` manifest.
+
+## OPTIONS
+
+### Section options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-remove---dev"><a class="option-anchor" href="#option-cargo-remove---dev"></a><code>--dev</code></dt>
+<dd class="option-desc">Remove as a <a href="../reference/specifying-dependencies.html#development-dependencies">development dependency</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-remove---build"><a class="option-anchor" href="#option-cargo-remove---build"></a><code>--build</code></dt>
+<dd class="option-desc">Remove as a <a href="../reference/specifying-dependencies.html#build-dependencies">build dependency</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-remove---target"><a class="option-anchor" href="#option-cargo-remove---target"></a><code>--target</code> <em>target</em></dt>
+<dd class="option-desc">Remove as a dependency to the <a href="../reference/specifying-dependencies.html#platform-specific-dependencies">given target platform</a>.</p>
+<p>To avoid unexpected shell expansions, you may use quotes around each target, e.g., <code>--target 'cfg(unix)'</code>.</dd>
+
+
+</dl>
+
+### Miscellaneous Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-remove---dry-run"><a class="option-anchor" href="#option-cargo-remove---dry-run"></a><code>--dry-run</code></dt>
+<dd class="option-desc">Don’t actually write to the manifest.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-remove--v"><a class="option-anchor" href="#option-cargo-remove--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-remove---verbose"><a class="option-anchor" href="#option-cargo-remove---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-remove--q"><a class="option-anchor" href="#option-cargo-remove--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-remove---quiet"><a class="option-anchor" href="#option-cargo-remove---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-remove---color"><a class="option-anchor" href="#option-cargo-remove---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-remove---manifest-path"><a class="option-anchor" href="#option-cargo-remove---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-remove---frozen"><a class="option-anchor" href="#option-cargo-remove---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-remove---locked"><a class="option-anchor" href="#option-cargo-remove---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-remove---offline"><a class="option-anchor" href="#option-cargo-remove---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Package Selection
+
+<dl>
+
+<dt class="option-term" id="option-cargo-remove--p"><a class="option-anchor" href="#option-cargo-remove--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-remove---package"><a class="option-anchor" href="#option-cargo-remove---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Package to remove from.</dd>
+
+
+</dl>
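+
+For example, to remove a dependency from one particular workspace member rather
+than from the package in the current directory (the member name `my-member` is
+only a placeholder):
+
+    cargo remove -p my-member regex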
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-remove-+toolchain"><a class="option-anchor" href="#option-cargo-remove-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-remove---config"><a class="option-anchor" href="#option-cargo-remove---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-remove--C"><a class="option-anchor" href="#option-cargo-remove--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-remove--h"><a class="option-anchor" href="#option-cargo-remove--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-remove---help"><a class="option-anchor" href="#option-cargo-remove---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-remove--Z"><a class="option-anchor" href="#option-cargo-remove--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Remove `regex` as a dependency:
+
+ cargo remove regex
+
+2. Remove `trybuild` as a dev-dependency:
+
+ cargo remove --dev trybuild
+
+3. Remove `nom` from the `x86_64-pc-windows-gnu` dependencies table:
+
+ cargo remove --target x86_64-pc-windows-gnu nom
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-add(1)](cargo-add.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-report.md b/src/tools/cargo/src/doc/src/commands/cargo-report.md
new file mode 100644
index 000000000..130449d12
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-report.md
@@ -0,0 +1,43 @@
+# cargo-report(1)
+
+## NAME
+
+cargo-report --- Generate and display various kinds of reports
+
+## SYNOPSIS
+
+`cargo report` _type_ [_options_]
+
+## DESCRIPTION
+
+Displays a report of the given _type_ --- currently, only `future-incompat` is supported.
+
+## OPTIONS
+
+<dl>
+
+<dt class="option-term" id="option-cargo-report---id"><a class="option-anchor" href="#option-cargo-report---id"></a><code>--id</code> <em>id</em></dt>
+<dd class="option-desc">Show the report with the specified Cargo-generated id</dd>
+
+
+<dt class="option-term" id="option-cargo-report--p"><a class="option-anchor" href="#option-cargo-report--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-report---package"><a class="option-anchor" href="#option-cargo-report---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Only display a report for the specified package</dd>
+
+
+</dl>
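+
+When several reports have been saved, a specific one can be selected by the id
+that Cargo printed when the report was generated (the id `1` below is only a
+placeholder):
+
+    cargo report future-incompat --id 1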
+
+## EXAMPLES
+
+1. Display the latest future-incompat report:
+
+ cargo report future-incompat
+
+2. Display the latest future-incompat report for a specific package:
+
+ cargo report future-incompat --package my-dep:0.0.1
+
+## SEE ALSO
+[Future incompat report](../reference/future-incompat-report.html)
+
+[cargo(1)](cargo.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-run.md b/src/tools/cargo/src/doc/src/commands/cargo-run.md
new file mode 100644
index 000000000..f6f5ec2a3
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-run.md
@@ -0,0 +1,337 @@
+# cargo-run(1)
+
+
+## NAME
+
+cargo-run --- Run the current package
+
+## SYNOPSIS
+
+`cargo run` [_options_] [`--` _args_]
+
+## DESCRIPTION
+
+Run a binary or example of the local package.
+
+All the arguments following the two dashes (`--`) are passed to the binary to
+run. If you're passing arguments to both Cargo and the binary, the ones after
+`--` go to the binary, the ones before go to Cargo.
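+
+For example, in the following invocation `--release` is handled by Cargo while
+`--port 8080` is forwarded untouched to the compiled binary (the binary-side
+arguments are placeholders):
+
+    cargo run --release -- --port 8080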
+
+Unlike [cargo-test(1)](cargo-test.html) and [cargo-bench(1)](cargo-bench.html), `cargo run` sets the
+working directory of the binary executed to the current working directory, same
+as if it was executed in the shell directly.
+
+## OPTIONS
+
+### Package Selection
+
+By default, the package in the current working directory is selected. The `-p`
+flag can be used to choose a different package in a workspace.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-run--p"><a class="option-anchor" href="#option-cargo-run--p"></a><code>-p</code> <em>spec</em></dt>
+<dt class="option-term" id="option-cargo-run---package"><a class="option-anchor" href="#option-cargo-run---package"></a><code>--package</code> <em>spec</em></dt>
+<dd class="option-desc">The package to run. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the SPEC
+format.</dd>
+
+
+</dl>
+
+
+### Target Selection
+
+When no target selection options are given, `cargo run` will run the binary
+target. If there are multiple binary targets, you must pass a target flag to
+choose one. Or, the `default-run` field may be specified in the `[package]`
+section of `Cargo.toml` to choose the name of the binary to run by default.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-run---bin"><a class="option-anchor" href="#option-cargo-run---bin"></a><code>--bin</code> <em>name</em></dt>
+<dd class="option-desc">Run the specified binary.</dd>
+
+
+<dt class="option-term" id="option-cargo-run---example"><a class="option-anchor" href="#option-cargo-run---example"></a><code>--example</code> <em>name</em></dt>
+<dd class="option-desc">Run the specified example.</dd>
+
+
+</dl>
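+
+For example, if the package defines several binaries, one can be selected
+explicitly (the target name `my-tool` is only a placeholder):
+
+    cargo run --bin my-tool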
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-run--F"><a class="option-anchor" href="#option-cargo-run--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-run---features"><a class="option-anchor" href="#option-cargo-run---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-run---all-features"><a class="option-anchor" href="#option-cargo-run---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-run---no-default-features"><a class="option-anchor" href="#option-cargo-run---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
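+
+As an illustration of the two syntaxes (the feature and member names are only
+placeholders), features may be listed directly or qualified with the name of a
+workspace member:
+
+    cargo run --features "json,tracing"
+    cargo run -F my-member/extra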
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-run---target"><a class="option-anchor" href="#option-cargo-run---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Run for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-run--r"><a class="option-anchor" href="#option-cargo-run--r"></a><code>-r</code></dt>
+<dt class="option-term" id="option-cargo-run---release"><a class="option-anchor" href="#option-cargo-run---release"></a><code>--release</code></dt>
+<dd class="option-desc">Run optimized artifacts with the <code>release</code> profile.
+See also the <code>--profile</code> option for choosing a specific profile by name.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-run---profile"><a class="option-anchor" href="#option-cargo-run---profile"></a><code>--profile</code> <em>name</em></dt>
+<dd class="option-desc">Run with the given profile.
+See <a href="../reference/profiles.html">the reference</a> for more details on profiles.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-run---ignore-rust-version"><a class="option-anchor" href="#option-cargo-run---ignore-rust-version"></a><code>--ignore-rust-version</code></dt>
+<dd class="option-desc">Run the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project’s <code>rust-version</code> field.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-run---timings=fmts"><a class="option-anchor" href="#option-cargo-run---timings=fmts"></a><code>--timings=</code><em>fmts</em></dt>
+<dd class="option-desc">Output information how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma-separated list of output
+formats; <code>--timings</code> without an argument will default to <code>--timings=html</code>.
+Specifying an output format (rather than the default) is unstable and requires
+<code>-Zunstable-options</code>. Valid output formats:</p>
+<ul>
+<li><code>html</code> (unstable, requires <code>-Zunstable-options</code>): Write a human-readable file <code>cargo-timing.html</code> to the
+<code>target/cargo-timings</code> directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine-readable timing data.</li>
+<li><code>json</code> (unstable, requires <code>-Zunstable-options</code>): Emit machine-readable JSON
+timing information.</li>
+</ul></dd>
+
+
+
+
+</dl>
+
+### Output Options
+
+<dl>
+<dt class="option-term" id="option-cargo-run---target-dir"><a class="option-anchor" href="#option-cargo-run---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-run--v"><a class="option-anchor" href="#option-cargo-run--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-run---verbose"><a class="option-anchor" href="#option-cargo-run---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-run--q"><a class="option-anchor" href="#option-cargo-run--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-run---quiet"><a class="option-anchor" href="#option-cargo-run---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-run---color"><a class="option-anchor" href="#option-cargo-run---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-run---message-format"><a class="option-anchor" href="#option-cargo-run---message-format"></a><code>--message-format</code> <em>fmt</em></dt>
+<dd class="option-desc">The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:</p>
+<ul>
+<li><code>human</code> (default): Display in a human-readable text format. Conflicts with
+<code>short</code> and <code>json</code>.</li>
+<li><code>short</code>: Emit shorter, human-readable text messages. Conflicts with <code>human</code>
+and <code>json</code>.</li>
+<li><code>json</code>: Emit JSON messages to stdout. See
+<a href="../reference/external-tools.html#json-messages">the reference</a>
+for more details. Conflicts with <code>human</code> and <code>short</code>.</li>
+<li><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
+the “short” rendering from rustc. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
+contains embedded ANSI color codes for respecting rustc’s default color
+scheme. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
+in JSON messages printed, but instead Cargo itself should render the
+JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with <code>human</code> or <code>short</code>.</li>
+</ul></dd>
+
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-run---manifest-path"><a class="option-anchor" href="#option-cargo-run---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-run---frozen"><a class="option-anchor" href="#option-cargo-run---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-run---locked"><a class="option-anchor" href="#option-cargo-run---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-run---offline"><a class="option-anchor" href="#option-cargo-run---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-run-+toolchain"><a class="option-anchor" href="#option-cargo-run-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-run---config"><a class="option-anchor" href="#option-cargo-run---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-run--C"><a class="option-anchor" href="#option-cargo-run--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-run--h"><a class="option-anchor" href="#option-cargo-run--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-run---help"><a class="option-anchor" href="#option-cargo-run---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-run--Z"><a class="option-anchor" href="#option-cargo-run--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+### Miscellaneous Options
+
+<dl>
+<dt class="option-term" id="option-cargo-run--j"><a class="option-anchor" href="#option-cargo-run--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-run---jobs"><a class="option-anchor" href="#option-cargo-run---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+It should not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-run---keep-going"><a class="option-anchor" href="#option-cargo-run---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+</dl>
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Build the local package and run its main target (assuming only one binary):
+
+ cargo run
+
+2. Run an example with extra arguments:
+
+ cargo run --example exname -- --exoption exarg1 exarg2
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-rustc.md b/src/tools/cargo/src/doc/src/commands/cargo-rustc.md
new file mode 100644
index 000000000..946298af9
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-rustc.md
@@ -0,0 +1,445 @@
+# cargo-rustc(1)
+
+
+
+## NAME
+
+cargo-rustc --- Compile the current package, and pass extra options to the compiler
+
+## SYNOPSIS
+
+`cargo rustc` [_options_] [`--` _args_]
+
+## DESCRIPTION
+
+The specified target for the current package (or package specified by `-p` if
+provided) will be compiled along with all of its dependencies. The specified
+_args_ will all be passed to the final compiler invocation, not any of the
+dependencies. Note that the compiler will still unconditionally receive
+arguments such as `-L`, `--extern`, and `--crate-type`, and the specified
+_args_ will simply be added to the compiler invocation.
+
+See <https://doc.rust-lang.org/rustc/index.html> for documentation on rustc
+flags.
+
+This command requires that only one target is being compiled when additional
+arguments are provided. If more than one target is available for the current
+package, the filters of `--lib`, `--bin`, etc., must be used to select which
+target is compiled.
+
+To pass flags to all compiler processes spawned by Cargo, use the `RUSTFLAGS`
+[environment variable](../reference/environment-variables.html) or the
+`build.rustflags` [config value](../reference/config.html).
+
+## OPTIONS
+
+### Package Selection
+
+By default, the package in the current working directory is selected. The `-p`
+flag can be used to choose a different package in a workspace.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustc--p"><a class="option-anchor" href="#option-cargo-rustc--p"></a><code>-p</code> <em>spec</em></dt>
+<dt class="option-term" id="option-cargo-rustc---package"><a class="option-anchor" href="#option-cargo-rustc---package"></a><code>--package</code> <em>spec</em></dt>
+<dd class="option-desc">The package to build. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the SPEC
+format.</dd>
+
+
+</dl>
+
+
+### Target Selection
+
+When no target selection options are given, `cargo rustc` will build all
+binary and library targets of the selected package.
+
+Binary targets are automatically built if there is an integration test or
+benchmark being selected to build. This allows an integration
+test to execute the binary to exercise and test its behavior.
+The `CARGO_BIN_EXE_<name>`
+[environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates)
+is set when the integration test is built so that it can use the
+[`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the
+executable.
+
+
+Passing target selection flags will build only the specified
+targets.
+
+Note that `--bin`, `--example`, `--test` and `--bench` flags also
+support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
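+
+For example, a quoted glob keeps the shell from expanding the pattern before
+Cargo sees it (the target name pattern below is only a placeholder):
+
+    cargo rustc --bin 'cli-*'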
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustc---lib"><a class="option-anchor" href="#option-cargo-rustc---lib"></a><code>--lib</code></dt>
+<dd class="option-desc">Build the package’s library.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---bin"><a class="option-anchor" href="#option-cargo-rustc---bin"></a><code>--bin</code> <em>name</em>…</dt>
+<dd class="option-desc">Build the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---bins"><a class="option-anchor" href="#option-cargo-rustc---bins"></a><code>--bins</code></dt>
+<dd class="option-desc">Build all binary targets.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustc---example"><a class="option-anchor" href="#option-cargo-rustc---example"></a><code>--example</code> <em>name</em>…</dt>
+<dd class="option-desc">Build the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---examples"><a class="option-anchor" href="#option-cargo-rustc---examples"></a><code>--examples</code></dt>
+<dd class="option-desc">Build all example targets.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---test"><a class="option-anchor" href="#option-cargo-rustc---test"></a><code>--test</code> <em>name</em>…</dt>
+<dd class="option-desc">Build the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---tests"><a class="option-anchor" href="#option-cargo-rustc---tests"></a><code>--tests</code></dt>
+<dd class="option-desc">Build all targets in test mode that have the <code>test = true</code> manifest
+flag set. By default this includes the library and binaries built as
+unit tests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unit test, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the <code>test</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---bench"><a class="option-anchor" href="#option-cargo-rustc---bench"></a><code>--bench</code> <em>name</em>…</dt>
+<dd class="option-desc">Build the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---benches"><a class="option-anchor" href="#option-cargo-rustc---benches"></a><code>--benches</code></dt>
+<dd class="option-desc">Build all targets in benchmark mode that have the <code>bench = true</code>
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the <code>bench</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---all-targets"><a class="option-anchor" href="#option-cargo-rustc---all-targets"></a><code>--all-targets</code></dt>
+<dd class="option-desc">Build all targets. This is equivalent to specifying <code>--lib --bins --tests --benches --examples</code>.</dd>
+
+
+</dl>
+
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustc--F"><a class="option-anchor" href="#option-cargo-rustc--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-rustc---features"><a class="option-anchor" href="#option-cargo-rustc---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---all-features"><a class="option-anchor" href="#option-cargo-rustc---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---no-default-features"><a class="option-anchor" href="#option-cargo-rustc---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustc---target"><a class="option-anchor" href="#option-cargo-rustc---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Build for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustc--r"><a class="option-anchor" href="#option-cargo-rustc--r"></a><code>-r</code></dt>
+<dt class="option-term" id="option-cargo-rustc---release"><a class="option-anchor" href="#option-cargo-rustc---release"></a><code>--release</code></dt>
+<dd class="option-desc">Build optimized artifacts with the <code>release</code> profile.
+See also the <code>--profile</code> option for choosing a specific profile by name.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustc---profile"><a class="option-anchor" href="#option-cargo-rustc---profile"></a><code>--profile</code> <em>name</em></dt>
+<dd class="option-desc">Build with the given profile.</p>
+<p>The <code>rustc</code> subcommand will treat the following named profiles with special behaviors:</p>
+<ul>
+<li><code>check</code> — Builds in the same way as the <a href="cargo-check.html">cargo-check(1)</a> command with
+the <code>dev</code> profile.</li>
+<li><code>test</code> — Builds in the same way as the <a href="cargo-test.html">cargo-test(1)</a> command,
+enabling building in test mode, which enables tests and sets the <code>test</code>
+cfg option. See <a href="https://doc.rust-lang.org/rustc/tests/index.html">rustc
+tests</a> for more detail.</li>
+<li><code>bench</code> — Builds in the same way as the <a href="cargo-bench.html">cargo-bench(1)</a> command,
+similar to the <code>test</code> profile.</li>
+</ul>
+<p>See <a href="../reference/profiles.html">the reference</a> for more details on profiles.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---ignore-rust-version"><a class="option-anchor" href="#option-cargo-rustc---ignore-rust-version"></a><code>--ignore-rust-version</code></dt>
+<dd class="option-desc">Build the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project’s <code>rust-version</code> field.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustc---timings=fmts"><a class="option-anchor" href="#option-cargo-rustc---timings=fmts"></a><code>--timings=</code><em>fmts</em></dt>
+<dd class="option-desc">Output information how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma-separated list of output
+formats; <code>--timings</code> without an argument will default to <code>--timings=html</code>.
+Specifying an output format (rather than the default) is unstable and requires
+<code>-Zunstable-options</code>. Valid output formats:</p>
+<ul>
+<li><code>html</code> (unstable, requires <code>-Zunstable-options</code>): Write a human-readable file <code>cargo-timing.html</code> to the
+<code>target/cargo-timings</code> directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine-readable timing data.</li>
+<li><code>json</code> (unstable, requires <code>-Zunstable-options</code>): Emit machine-readable JSON
+timing information.</li>
+</ul></dd>
+
+
+
+
+<dt class="option-term" id="option-cargo-rustc---crate-type"><a class="option-anchor" href="#option-cargo-rustc---crate-type"></a><code>--crate-type</code> <em>crate-type</em></dt>
+<dd class="option-desc">Build for the given crate type. This flag accepts a comma-separated list of
+one or more crate types, whose allowed values are the same as the <code>crate-type</code>
+field in the manifest for configuring a Cargo target. See
+<a href="../reference/cargo-targets.html#the-crate-type-field"><code>crate-type</code> field</a>
+for possible values.</p>
+<p>If the manifest contains a list, and <code>--crate-type</code> is provided,
+the command-line argument value will override what is in the manifest.</p>
+<p>This flag only works when building a <code>lib</code> or <code>example</code> library target.</dd>
+
+
+</dl>
+
+### Output Options
+
+<dl>
+<dt class="option-term" id="option-cargo-rustc---target-dir"><a class="option-anchor" href="#option-cargo-rustc---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustc--v"><a class="option-anchor" href="#option-cargo-rustc--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-rustc---verbose"><a class="option-anchor" href="#option-cargo-rustc---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc--q"><a class="option-anchor" href="#option-cargo-rustc--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-rustc---quiet"><a class="option-anchor" href="#option-cargo-rustc---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---color"><a class="option-anchor" href="#option-cargo-rustc---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustc---message-format"><a class="option-anchor" href="#option-cargo-rustc---message-format"></a><code>--message-format</code> <em>fmt</em></dt>
+<dd class="option-desc">The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:</p>
+<ul>
+<li><code>human</code> (default): Display in a human-readable text format. Conflicts with
+<code>short</code> and <code>json</code>.</li>
+<li><code>short</code>: Emit shorter, human-readable text messages. Conflicts with <code>human</code>
+and <code>json</code>.</li>
+<li><code>json</code>: Emit JSON messages to stdout. See
+<a href="../reference/external-tools.html#json-messages">the reference</a>
+for more details. Conflicts with <code>human</code> and <code>short</code>.</li>
+<li><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
+the “short” rendering from rustc. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
+contains embedded ANSI color codes for respecting rustc’s default color
+scheme. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
+in JSON messages printed, but instead Cargo itself should render the
+JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with <code>human</code> or <code>short</code>.</li>
+</ul></dd>
+
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustc---manifest-path"><a class="option-anchor" href="#option-cargo-rustc---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustc---frozen"><a class="option-anchor" href="#option-cargo-rustc---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-rustc---locked"><a class="option-anchor" href="#option-cargo-rustc---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---offline"><a class="option-anchor" href="#option-cargo-rustc---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustc-+toolchain"><a class="option-anchor" href="#option-cargo-rustc-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---config"><a class="option-anchor" href="#option-cargo-rustc---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc--C"><a class="option-anchor" href="#option-cargo-rustc--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc--h"><a class="option-anchor" href="#option-cargo-rustc--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-rustc---help"><a class="option-anchor" href="#option-cargo-rustc---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc--Z"><a class="option-anchor" href="#option-cargo-rustc--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+### Miscellaneous Options
+
+<dl>
+<dt class="option-term" id="option-cargo-rustc--j"><a class="option-anchor" href="#option-cargo-rustc--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-rustc---jobs"><a class="option-anchor" href="#option-cargo-rustc---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+The value must not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---keep-going"><a class="option-anchor" href="#option-cargo-rustc---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustc---future-incompat-report"><a class="option-anchor" href="#option-cargo-rustc---future-incompat-report"></a><code>--future-incompat-report</code></dt>
+<dd class="option-desc">Displays a future-incompat report for any future-incompatible warnings
+produced during execution of this command.</p>
+<p>See <a href="cargo-report.html">cargo-report(1)</a>.</dd>
+
+
+</dl>
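+
+As a sketch of the negative `--jobs` form described above: on a machine with 8
+logical CPUs, the following would cap the build at 7 parallel jobs (8 plus -1).
+The CPU count is only an assumption for the example:
+
+    cargo rustc --jobs=-1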
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Check if your package (not including dependencies) uses unsafe code:
+
+ cargo rustc --lib -- -D unsafe-code
+
+2. Try an experimental flag on the nightly compiler, such as this which prints
+ the size of every type:
+
+ cargo rustc --lib -- -Z print-type-sizes
+
+3. Override the `crate-type` field in `Cargo.toml` with a command-line option:
+
+ cargo rustc --lib --crate-type lib,cdylib
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html), [rustc(1)](https://doc.rust-lang.org/rustc/index.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-rustdoc.md b/src/tools/cargo/src/doc/src/commands/cargo-rustdoc.md
new file mode 100644
index 000000000..8467da2a3
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-rustdoc.md
@@ -0,0 +1,410 @@
+# cargo-rustdoc(1)
+
+
+
+## NAME
+
+cargo-rustdoc --- Build a package's documentation, using specified custom flags
+
+## SYNOPSIS
+
+`cargo rustdoc` [_options_] [`--` _args_]
+
+## DESCRIPTION
+
+The specified target for the current package (or package specified by `-p` if
+provided) will be documented with the specified _args_ being passed to the
+final rustdoc invocation. Dependencies will not be documented as part of this
+command. Note that rustdoc will still unconditionally receive arguments such
+as `-L`, `--extern`, and `--crate-type`, and the specified _args_ will simply
+be added to the rustdoc invocation.
+
+See <https://doc.rust-lang.org/rustdoc/index.html> for documentation on rustdoc
+flags.
+
+This command requires that only one target is being compiled when additional
+arguments are provided. If more than one target is available for the current
+package the filters of `--lib`, `--bin`, etc, must be used to select which
+target is compiled.
+
+To pass flags to all rustdoc processes spawned by Cargo, use the
+`RUSTDOCFLAGS` [environment variable](../reference/environment-variables.html)
+or the `build.rustdocflags` [config value](../reference/config.html).
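+
+As a sketch of the difference, using the stable `--document-private-items`
+rustdoc flag purely as an illustration: passing it after `--` affects only the
+target selected by `cargo rustdoc`,
+
+    cargo rustdoc -- --document-private-items
+
+while setting the environment variable applies it to every rustdoc process
+Cargo spawns:
+
+    RUSTDOCFLAGS="--document-private-items" cargo doc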
+
+## OPTIONS
+
+### Documentation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustdoc---open"><a class="option-anchor" href="#option-cargo-rustdoc---open"></a><code>--open</code></dt>
+<dd class="option-desc">Open the docs in a browser after building them. This will use your default
+browser unless you define another one in the <code>BROWSER</code> environment variable
+or use the <a href="../reference/config.html#docbrowser"><code>doc.browser</code></a> configuration
+option.</dd>
+
+
+</dl>
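+
+As an illustration of the `--open` flag above, the browser used for a single
+invocation can be overridden through the `BROWSER` environment variable (the
+browser name here is only an example):
+
+    BROWSER=firefox cargo rustdoc --open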
+
+### Package Selection
+
+By default, the package in the current working directory is selected. The `-p`
+flag can be used to choose a different package in a workspace.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustdoc--p"><a class="option-anchor" href="#option-cargo-rustdoc--p"></a><code>-p</code> <em>spec</em></dt>
+<dt class="option-term" id="option-cargo-rustdoc---package"><a class="option-anchor" href="#option-cargo-rustdoc---package"></a><code>--package</code> <em>spec</em></dt>
+<dd class="option-desc">The package to document. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the SPEC
+format.</dd>
+
+
+</dl>
+
+
+### Target Selection
+
+When no target selection options are given, `cargo rustdoc` will document all
+binary and library targets of the selected package. The binary will be skipped
+if its name is the same as the lib target. Binaries are skipped if they have
+`required-features` that are missing.
+
+Passing target selection flags will document only the specified
+targets.
+
+Note that `--bin`, `--example`, `--test` and `--bench` flags also
+support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
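+
+For instance, quoting keeps the shell from expanding the pattern before Cargo
+sees it. The binary name below is hypothetical, and because extra rustdoc
+arguments require a single target, the pattern is assumed to match exactly one
+binary:
+
+    cargo rustdoc --bin 'my-cli*' -- --document-private-items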
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustdoc---lib"><a class="option-anchor" href="#option-cargo-rustdoc---lib"></a><code>--lib</code></dt>
+<dd class="option-desc">Document the package’s library.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---bin"><a class="option-anchor" href="#option-cargo-rustdoc---bin"></a><code>--bin</code> <em>name</em>…</dt>
+<dd class="option-desc">Document the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---bins"><a class="option-anchor" href="#option-cargo-rustdoc---bins"></a><code>--bins</code></dt>
+<dd class="option-desc">Document all binary targets.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustdoc---example"><a class="option-anchor" href="#option-cargo-rustdoc---example"></a><code>--example</code> <em>name</em>…</dt>
+<dd class="option-desc">Document the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---examples"><a class="option-anchor" href="#option-cargo-rustdoc---examples"></a><code>--examples</code></dt>
+<dd class="option-desc">Document all example targets.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---test"><a class="option-anchor" href="#option-cargo-rustdoc---test"></a><code>--test</code> <em>name</em>…</dt>
+<dd class="option-desc">Document the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---tests"><a class="option-anchor" href="#option-cargo-rustdoc---tests"></a><code>--tests</code></dt>
+<dd class="option-desc">Document all targets in test mode that have the <code>test = true</code> manifest
+flag set. By default this includes the library and binaries built as
+unittests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unittest, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the <code>test</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---bench"><a class="option-anchor" href="#option-cargo-rustdoc---bench"></a><code>--bench</code> <em>name</em>…</dt>
+<dd class="option-desc">Document the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---benches"><a class="option-anchor" href="#option-cargo-rustdoc---benches"></a><code>--benches</code></dt>
+<dd class="option-desc">Document all targets in benchmark mode that have the <code>bench = true</code>
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the <code>bench</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---all-targets"><a class="option-anchor" href="#option-cargo-rustdoc---all-targets"></a><code>--all-targets</code></dt>
+<dd class="option-desc">Document all targets. This is equivalent to specifying <code>--lib --bins --tests --benches --examples</code>.</dd>
+
+
+</dl>
+
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustdoc--F"><a class="option-anchor" href="#option-cargo-rustdoc--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-rustdoc---features"><a class="option-anchor" href="#option-cargo-rustdoc---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---all-features"><a class="option-anchor" href="#option-cargo-rustdoc---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---no-default-features"><a class="option-anchor" href="#option-cargo-rustdoc---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustdoc---target"><a class="option-anchor" href="#option-cargo-rustdoc---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Document for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustdoc--r"><a class="option-anchor" href="#option-cargo-rustdoc--r"></a><code>-r</code></dt>
+<dt class="option-term" id="option-cargo-rustdoc---release"><a class="option-anchor" href="#option-cargo-rustdoc---release"></a><code>--release</code></dt>
+<dd class="option-desc">Document optimized artifacts with the <code>release</code> profile.
+See also the <code>--profile</code> option for choosing a specific profile by name.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustdoc---profile"><a class="option-anchor" href="#option-cargo-rustdoc---profile"></a><code>--profile</code> <em>name</em></dt>
+<dd class="option-desc">Document with the given profile.
+See <a href="../reference/profiles.html">the reference</a> for more details on profiles.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustdoc---ignore-rust-version"><a class="option-anchor" href="#option-cargo-rustdoc---ignore-rust-version"></a><code>--ignore-rust-version</code></dt>
+<dd class="option-desc">Document the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project’s <code>rust-version</code> field.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustdoc---timings=fmts"><a class="option-anchor" href="#option-cargo-rustdoc---timings=fmts"></a><code>--timings=</code><em>fmts</em></dt>
+<dd class="option-desc">Output information how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma-separated list of output
+formats; <code>--timings</code> without an argument will default to <code>--timings=html</code>.
+Specifying an output format (rather than the default) is unstable and requires
+<code>-Zunstable-options</code>. Valid output formats:</p>
+<ul>
+<li><code>html</code> (unstable, requires <code>-Zunstable-options</code>): Write a human-readable file <code>cargo-timing.html</code> to the
+<code>target/cargo-timings</code> directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine-readable timing data.</li>
+<li><code>json</code> (unstable, requires <code>-Zunstable-options</code>): Emit machine-readable JSON
+timing information.</li>
+</ul></dd>
+
+
+
+
+</dl>
+
+### Output Options
+
+<dl>
+<dt class="option-term" id="option-cargo-rustdoc---target-dir"><a class="option-anchor" href="#option-cargo-rustdoc---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-rustdoc--v"><a class="option-anchor" href="#option-cargo-rustdoc--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-rustdoc---verbose"><a class="option-anchor" href="#option-cargo-rustdoc---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc--q"><a class="option-anchor" href="#option-cargo-rustdoc--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-rustdoc---quiet"><a class="option-anchor" href="#option-cargo-rustdoc---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---color"><a class="option-anchor" href="#option-cargo-rustdoc---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustdoc---message-format"><a class="option-anchor" href="#option-cargo-rustdoc---message-format"></a><code>--message-format</code> <em>fmt</em></dt>
+<dd class="option-desc">The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:</p>
+<ul>
+<li><code>human</code> (default): Display in a human-readable text format. Conflicts with
+<code>short</code> and <code>json</code>.</li>
+<li><code>short</code>: Emit shorter, human-readable text messages. Conflicts with <code>human</code>
+and <code>json</code>.</li>
+<li><code>json</code>: Emit JSON messages to stdout. See
+<a href="../reference/external-tools.html#json-messages">the reference</a>
+for more details. Conflicts with <code>human</code> and <code>short</code>.</li>
+<li><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
+the “short” rendering from rustc. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
+contains embedded ANSI color codes for respecting rustc’s default color
+scheme. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
+in JSON messages printed, but instead Cargo itself should render the
+JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with <code>human</code> or <code>short</code>.</li>
+</ul></dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo-rustdoc---manifest-path"><a class="option-anchor" href="#option-cargo-rustdoc---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-rustdoc---frozen"><a class="option-anchor" href="#option-cargo-rustdoc---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-rustdoc---locked"><a class="option-anchor" href="#option-cargo-rustdoc---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---offline"><a class="option-anchor" href="#option-cargo-rustdoc---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-rustdoc-+toolchain"><a class="option-anchor" href="#option-cargo-rustdoc-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---config"><a class="option-anchor" href="#option-cargo-rustdoc---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc--C"><a class="option-anchor" href="#option-cargo-rustdoc--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc--h"><a class="option-anchor" href="#option-cargo-rustdoc--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-rustdoc---help"><a class="option-anchor" href="#option-cargo-rustdoc---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc--Z"><a class="option-anchor" href="#option-cargo-rustdoc--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+### Miscellaneous Options
+
+<dl>
+<dt class="option-term" id="option-cargo-rustdoc--j"><a class="option-anchor" href="#option-cargo-rustdoc--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-rustdoc---jobs"><a class="option-anchor" href="#option-cargo-rustdoc---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+The value must not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-rustdoc---keep-going"><a class="option-anchor" href="#option-cargo-rustdoc---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+</dl>
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Build documentation with custom CSS included from a given file:
+
+ cargo rustdoc --lib -- --extend-css extra.css
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-doc(1)](cargo-doc.html), [rustdoc(1)](https://doc.rust-lang.org/rustdoc/index.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-search.md b/src/tools/cargo/src/doc/src/commands/cargo-search.md
new file mode 100644
index 000000000..72e2accf3
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-search.md
@@ -0,0 +1,134 @@
+# cargo-search(1)
+
+## NAME
+
+cargo-search --- Search packages in crates.io
+
+## SYNOPSIS
+
+`cargo search` [_options_] [_query_...]
+
+## DESCRIPTION
+
+This performs a textual search for crates on <https://crates.io>. The matching
+crates will be displayed along with their description in TOML format suitable
+for copying into a `Cargo.toml` manifest.
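+
+For example, searching for `serde` prints lines that can be pasted into a
+`[dependencies]` table; the version numbers and descriptions below are only
+illustrative:
+
+    $ cargo search serde
+    serde = "1.0"        # A generic serialization/deserialization framework
+    serde_json = "1.0"   # A JSON serialization file format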
+
+## OPTIONS
+
+### Search Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-search---limit"><a class="option-anchor" href="#option-cargo-search---limit"></a><code>--limit</code> <em>limit</em></dt>
+<dd class="option-desc">Limit the number of results (default: 10, max: 100).</dd>
+
+
+<dt class="option-term" id="option-cargo-search---index"><a class="option-anchor" href="#option-cargo-search---index"></a><code>--index</code> <em>index</em></dt>
+<dd class="option-desc">The URL of the registry index to use.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-search---registry"><a class="option-anchor" href="#option-cargo-search---registry"></a><code>--registry</code> <em>registry</em></dt>
+<dd class="option-desc">Name of the registry to use. Registry names are defined in <a href="../reference/config.html">Cargo config
+files</a>. If not specified, the default registry is used,
+which is defined by the <code>registry.default</code> config key which defaults to
+<code>crates-io</code>.</dd>
+
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-search--v"><a class="option-anchor" href="#option-cargo-search--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-search---verbose"><a class="option-anchor" href="#option-cargo-search---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-search--q"><a class="option-anchor" href="#option-cargo-search--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-search---quiet"><a class="option-anchor" href="#option-cargo-search---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-search---color"><a class="option-anchor" href="#option-cargo-search---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-search-+toolchain"><a class="option-anchor" href="#option-cargo-search-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-search---config"><a class="option-anchor" href="#option-cargo-search---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-search--C"><a class="option-anchor" href="#option-cargo-search--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-search--h"><a class="option-anchor" href="#option-cargo-search--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-search---help"><a class="option-anchor" href="#option-cargo-search---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-search--Z"><a class="option-anchor" href="#option-cargo-search--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Search for a package from crates.io:
+
+ cargo search serde
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-install(1)](cargo-install.html), [cargo-publish(1)](cargo-publish.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-test.md b/src/tools/cargo/src/doc/src/commands/cargo-test.md
new file mode 100644
index 000000000..2967d7381
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-test.md
@@ -0,0 +1,545 @@
+# cargo-test(1)
+
+
+
+
+## NAME
+
+cargo-test --- Execute unit and integration tests of a package
+
+## SYNOPSIS
+
+`cargo test` [_options_] [_testname_] [`--` _test-options_]
+
+## DESCRIPTION
+
+Compile and execute unit, integration, and documentation tests.
+
+The test filtering argument `TESTNAME` and all the arguments following the two
+dashes (`--`) are passed to the test binaries and thus to _libtest_ (rustc's
+built-in unit-test and micro-benchmarking framework). If you're passing
+arguments to both Cargo and the binary, the ones after `--` go to the binary,
+the ones before go to Cargo. For details about libtest's arguments see the
+output of `cargo test -- --help` and check out the rustc book's chapter on
+how tests work at <https://doc.rust-lang.org/rustc/tests/index.html>.
+
+As an example, this will filter for tests with `foo` in their name and run them
+on 3 threads in parallel:
+
+ cargo test foo -- --test-threads 3
+
+Tests are built with the `--test` option to `rustc` which creates a special
+executable by linking your code with libtest. The executable automatically
+runs all functions annotated with the `#[test]` attribute in multiple threads.
+`#[bench]` annotated functions will also be run with one iteration to verify
+that they are functional.
+
+If the package contains multiple test targets, each target is compiled into a
+separate executable as described above, and the executables are then run serially.
+
+The libtest harness may be disabled by setting `harness = false` in the target
+manifest settings, in which case your code will need to provide its own `main`
+function to handle running tests.
+
+### Documentation tests
+
+Documentation tests are also run by default, which is handled by `rustdoc`. It
+extracts code samples from documentation comments of the library target, and
+then executes them.
+
+Unlike normal test targets, each code block is compiled on the fly into its own
+doctest executable with `rustc`. These executables run in parallel in
+separate processes. The compilation of a code block is in fact part of a test
+function controlled by libtest, so some options such as `--jobs` might not
+take effect. Note that this execution model of doctests is not guaranteed
+and may change in the future; beware of depending on it.
+
+See the [rustdoc book](https://doc.rust-lang.org/rustdoc/) for more information
+on writing doc tests.
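+
+To run only the documentation tests and skip all other test targets, use the
+`--doc` flag described below:
+
+    cargo test --doc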
+
+### Working directory of tests
+
+The working directory of every test is set to the root directory of the package
+the test belongs to.
+Setting the working directory of tests to the package's root directory makes it
+possible for tests to reliably access the package's files using relative paths,
+regardless of where `cargo test` was executed from.
+
+## OPTIONS
+
+### Test Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-test---no-run"><a class="option-anchor" href="#option-cargo-test---no-run"></a><code>--no-run</code></dt>
+<dd class="option-desc">Compile, but don’t run tests.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---no-fail-fast"><a class="option-anchor" href="#option-cargo-test---no-fail-fast"></a><code>--no-fail-fast</code></dt>
+<dd class="option-desc">Run all tests regardless of failure. Without this flag, Cargo will exit
+after the first executable fails. The Rust test harness will run all tests
+within the executable to completion; this flag only applies to the executable
+as a whole.</dd>
+
+
+</dl>
+
+
+### Package Selection
+
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+`--manifest-path` is not given). If the manifest is the root of a workspace then
+the workspace's default members are selected; otherwise, only the package defined
+by the manifest will be selected.
+
+The default members of a workspace can be set explicitly with the
+`workspace.default-members` key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+`--workspace`), and a non-virtual workspace will include only the root crate itself.
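+
+As a sketch of how the selection flags combine (the member name is
+hypothetical), the following tests every workspace member except one:
+
+    cargo test --workspace --exclude my-experimental-crate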
+
+<dl>
+
+<dt class="option-term" id="option-cargo-test--p"><a class="option-anchor" href="#option-cargo-test--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-test---package"><a class="option-anchor" href="#option-cargo-test---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Test only the specified packages. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---workspace"><a class="option-anchor" href="#option-cargo-test---workspace"></a><code>--workspace</code></dt>
+<dd class="option-desc">Test all members in the workspace.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-test---all"><a class="option-anchor" href="#option-cargo-test---all"></a><code>--all</code></dt>
+<dd class="option-desc">Deprecated alias for <code>--workspace</code>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-test---exclude"><a class="option-anchor" href="#option-cargo-test---exclude"></a><code>--exclude</code> <em>SPEC</em>…</dt>
+<dd class="option-desc">Exclude the specified packages. Must be used in conjunction with the
+<code>--workspace</code> flag. This flag may be specified multiple times and supports
+common Unix glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.</dd>
+
+
+</dl>
+
+
+### Target Selection
+
+When no target selection options are given, `cargo test` will build the
+following targets of the selected packages:
+
+- lib --- used to link with binaries, examples, integration tests, and doc tests
+- bins (only if integration tests are built and required features are
+ available)
+- examples --- to ensure they compile
+- lib as a unit test
+- bins as unit tests
+- integration tests
+- doc tests for the lib target
+
+The default behavior can be changed by setting the `test` flag for the target
+in the manifest settings. Setting examples to `test = true` will build and run
+the example as a test. Setting targets to `test = false` will stop them from
+being tested by default. Target selection options that take a target by name
+ignore the `test` flag and will always test the given target.
+
+Doc tests for libraries may be disabled by setting `doctest = false` for the
+library in the manifest.
+
+Binary targets are automatically built if there is an integration test or
+benchmark being selected to test. This allows an integration
+test to execute the binary to exercise and test its behavior.
+The `CARGO_BIN_EXE_<name>`
+[environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates)
+is set when the integration test is built so that it can use the
+[`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the
+executable.
+
+
+Passing target selection flags will test only the specified
+targets.
+
+Note that `--bin`, `--example`, `--test` and `--bench` flags also
+support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
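+
+For instance, to run every integration test target matching a pattern without
+the shell expanding it first (the target names are hypothetical):
+
+    cargo test --test 'int_*'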
+
+<dl>
+
+<dt class="option-term" id="option-cargo-test---lib"><a class="option-anchor" href="#option-cargo-test---lib"></a><code>--lib</code></dt>
+<dd class="option-desc">Test the package’s library.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---bin"><a class="option-anchor" href="#option-cargo-test---bin"></a><code>--bin</code> <em>name</em>…</dt>
+<dd class="option-desc">Test the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---bins"><a class="option-anchor" href="#option-cargo-test---bins"></a><code>--bins</code></dt>
+<dd class="option-desc">Test all binary targets.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-test---example"><a class="option-anchor" href="#option-cargo-test---example"></a><code>--example</code> <em>name</em>…</dt>
+<dd class="option-desc">Test the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---examples"><a class="option-anchor" href="#option-cargo-test---examples"></a><code>--examples</code></dt>
+<dd class="option-desc">Test all example targets.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---test"><a class="option-anchor" href="#option-cargo-test---test"></a><code>--test</code> <em>name</em>…</dt>
+<dd class="option-desc">Test the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---tests"><a class="option-anchor" href="#option-cargo-test---tests"></a><code>--tests</code></dt>
+<dd class="option-desc">Test all targets in test mode that have the <code>test = true</code> manifest
+flag set. By default this includes the library and binaries built as
+unittests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unittest, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the <code>test</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---bench"><a class="option-anchor" href="#option-cargo-test---bench"></a><code>--bench</code> <em>name</em>…</dt>
+<dd class="option-desc">Test the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---benches"><a class="option-anchor" href="#option-cargo-test---benches"></a><code>--benches</code></dt>
+<dd class="option-desc">Test all targets in benchmark mode that have the <code>bench = true</code>
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the <code>bench</code> flag in the
+manifest settings for the target.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---all-targets"><a class="option-anchor" href="#option-cargo-test---all-targets"></a><code>--all-targets</code></dt>
+<dd class="option-desc">Test all targets. This is equivalent to specifying <code>--lib --bins --tests --benches --examples</code>.</dd>
+
+
+</dl>
+
+
+<dl>
+
+<dt class="option-term" id="option-cargo-test---doc"><a class="option-anchor" href="#option-cargo-test---doc"></a><code>--doc</code></dt>
+<dd class="option-desc">Test only the library’s documentation. This cannot be mixed with other
+target options.</dd>
+
+
+</dl>
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
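+
+For example, a sketch of enabling one feature of the current package and one
+feature of a workspace member (both feature names are hypothetical):
+
+    cargo test --features 'fast-math,my-member/extra-checks'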
+
+<dl>
+
+<dt class="option-term" id="option-cargo-test--F"><a class="option-anchor" href="#option-cargo-test--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-test---features"><a class="option-anchor" href="#option-cargo-test---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---all-features"><a class="option-anchor" href="#option-cargo-test---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---no-default-features"><a class="option-anchor" href="#option-cargo-test---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Compilation Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-test---target"><a class="option-anchor" href="#option-cargo-test---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Test for the given architecture. The default is the host architecture. The general format of the triple is
+<code>&lt;arch&gt;&lt;sub&gt;-&lt;vendor&gt;-&lt;sys&gt;-&lt;abi&gt;</code>. Run <code>rustc --print target-list</code> for a
+list of supported targets. This flag may be specified multiple times.</p>
+<p>This may also be specified with the <code>build.target</code>
+<a href="../reference/config.html">config value</a>.</p>
+<p>Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+<a href="../guide/build-cache.html">build cache</a> documentation for more details.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-test--r"><a class="option-anchor" href="#option-cargo-test--r"></a><code>-r</code></dt>
+<dt class="option-term" id="option-cargo-test---release"><a class="option-anchor" href="#option-cargo-test---release"></a><code>--release</code></dt>
+<dd class="option-desc">Test optimized artifacts with the <code>release</code> profile.
+See also the <code>--profile</code> option for choosing a specific profile by name.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-test---profile"><a class="option-anchor" href="#option-cargo-test---profile"></a><code>--profile</code> <em>name</em></dt>
+<dd class="option-desc">Test with the given profile.
+See <a href="../reference/profiles.html">the reference</a> for more details on profiles.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-test---ignore-rust-version"><a class="option-anchor" href="#option-cargo-test---ignore-rust-version"></a><code>--ignore-rust-version</code></dt>
+<dd class="option-desc">Test the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project’s <code>rust-version</code> field.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-test---timings=fmts"><a class="option-anchor" href="#option-cargo-test---timings=fmts"></a><code>--timings=</code><em>fmts</em></dt>
+<dd class="option-desc">Output information how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma-separated list of output
+formats; <code>--timings</code> without an argument will default to <code>--timings=html</code>.
+Specifying an output format (rather than the default) is unstable and requires
+<code>-Zunstable-options</code>. Valid output formats:</p>
+<ul>
+<li><code>html</code> (unstable, requires <code>-Zunstable-options</code>): Write a human-readable file <code>cargo-timing.html</code> to the
+<code>target/cargo-timings</code> directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine-readable timing data.</li>
+<li><code>json</code> (unstable, requires <code>-Zunstable-options</code>): Emit machine-readable JSON
+timing information.</li>
+</ul></dd>
+
+
+
+
+</dl>
+
+### Output Options
+
+<dl>
+<dt class="option-term" id="option-cargo-test---target-dir"><a class="option-anchor" href="#option-cargo-test---target-dir"></a><code>--target-dir</code> <em>directory</em></dt>
+<dd class="option-desc">Directory for all generated artifacts and intermediate files. May also be
+specified with the <code>CARGO_TARGET_DIR</code> environment variable, or the
+<code>build.target-dir</code> <a href="../reference/config.html">config value</a>.
+Defaults to <code>target</code> in the root of the workspace.</dd>
+
+
+</dl>
+
+### Display Options
+
+By default the Rust test harness hides output from test execution to keep
+results readable. Test output can be recovered (e.g., for debugging) by passing
+`--nocapture` to the test binaries:
+
+ cargo test -- --nocapture
+
+<dl>
+
+<dt class="option-term" id="option-cargo-test--v"><a class="option-anchor" href="#option-cargo-test--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-test---verbose"><a class="option-anchor" href="#option-cargo-test---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-test--q"><a class="option-anchor" href="#option-cargo-test--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-test---quiet"><a class="option-anchor" href="#option-cargo-test---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---color"><a class="option-anchor" href="#option-cargo-test---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-test---message-format"><a class="option-anchor" href="#option-cargo-test---message-format"></a><code>--message-format</code> <em>fmt</em></dt>
+<dd class="option-desc">The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:</p>
+<ul>
+<li><code>human</code> (default): Display in a human-readable text format. Conflicts with
+<code>short</code> and <code>json</code>.</li>
+<li><code>short</code>: Emit shorter, human-readable text messages. Conflicts with <code>human</code>
+and <code>json</code>.</li>
+<li><code>json</code>: Emit JSON messages to stdout. See
+<a href="../reference/external-tools.html#json-messages">the reference</a>
+for more details. Conflicts with <code>human</code> and <code>short</code>.</li>
+<li><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
+the “short” rendering from rustc. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
+contains embedded ANSI color codes for respecting rustc’s default color
+scheme. Cannot be used with <code>human</code> or <code>short</code>.</li>
+<li><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
+in JSON messages printed, but instead Cargo itself should render the
+JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with <code>human</code> or <code>short</code>.</li>
+</ul></dd>
+
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-test---manifest-path"><a class="option-anchor" href="#option-cargo-test---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-test---frozen"><a class="option-anchor" href="#option-cargo-test---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-test---locked"><a class="option-anchor" href="#option-cargo-test---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---offline"><a class="option-anchor" href="#option-cargo-test---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-test-+toolchain"><a class="option-anchor" href="#option-cargo-test-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---config"><a class="option-anchor" href="#option-cargo-test---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-test--C"><a class="option-anchor" href="#option-cargo-test--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-test--h"><a class="option-anchor" href="#option-cargo-test--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-test---help"><a class="option-anchor" href="#option-cargo-test---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-test--Z"><a class="option-anchor" href="#option-cargo-test--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+### Miscellaneous Options
+
+The `--jobs` argument affects the building of the test executable but does not
+affect how many threads are used when running the tests. The Rust test harness
+includes an option to control the number of threads used:
+
+ cargo test -j 2 -- --test-threads=2
+
+<dl>
+
+<dt class="option-term" id="option-cargo-test--j"><a class="option-anchor" href="#option-cargo-test--j"></a><code>-j</code> <em>N</em></dt>
+<dt class="option-term" id="option-cargo-test---jobs"><a class="option-anchor" href="#option-cargo-test---jobs"></a><code>--jobs</code> <em>N</em></dt>
+<dd class="option-desc">Number of parallel jobs to run. May also be specified with the
+<code>build.jobs</code> <a href="../reference/config.html">config value</a>. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value
+(see the example after this list). It should not be 0.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---keep-going"><a class="option-anchor" href="#option-cargo-test---keep-going"></a><code>--keep-going</code></dt>
+<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+<code>-Zunstable-options</code>.</dd>
+
+
+<dt class="option-term" id="option-cargo-test---future-incompat-report"><a class="option-anchor" href="#option-cargo-test---future-incompat-report"></a><code>--future-incompat-report</code></dt>
+<dd class="option-desc">Displays a future-incompat report for any future-incompatible warnings
+produced during execution of this command.</p>
+<p>See <a href="cargo-report.html">cargo-report(1)</a></dd>
+
+
+
+</dl>
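+
+As a worked illustration of the negative `--jobs` form above (assuming, purely
+hypothetically, a machine with 8 logical CPUs), `--jobs=-2` caps compilation at
+8 − 2 = 6 parallel jobs, while the number of test threads is still chosen
+separately through the harness flag; the `--jobs=-2` spelling is used here to
+keep the negative number unambiguous on the command line:
+
+    cargo test --jobs=-2 -- --test-threads=4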
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Execute all the unit and integration tests of the current package:
+
+ cargo test
+
+2. Run only tests whose names match against a filter string:
+
+ cargo test name_filter
+
+3. Run only a specific test within a specific integration test:
+
+ cargo test --test int_test_name -- modname::test_name
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-bench(1)](cargo-bench.html), [types of tests](../reference/cargo-targets.html#tests), [how to write tests](https://doc.rust-lang.org/rustc/tests/index.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-tree.md b/src/tools/cargo/src/doc/src/commands/cargo-tree.md
new file mode 100644
index 000000000..9a9aa9f0c
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-tree.md
@@ -0,0 +1,436 @@
+# cargo-tree(1)
+
+
+
+## NAME
+
+cargo-tree --- Display a tree visualization of a dependency graph
+
+## SYNOPSIS
+
+`cargo tree` [_options_]
+
+## DESCRIPTION
+
+This command will display a tree of dependencies to the terminal. An example
+of a simple project that depends on the "rand" package:
+
+```
+myproject v0.1.0 (/myproject)
+└── rand v0.7.3
+ ├── getrandom v0.1.14
+ │ ├── cfg-if v0.1.10
+ │ └── libc v0.2.68
+ ├── libc v0.2.68 (*)
+ ├── rand_chacha v0.2.2
+ │ ├── ppv-lite86 v0.2.6
+ │ └── rand_core v0.5.1
+ │ └── getrandom v0.1.14 (*)
+ └── rand_core v0.5.1 (*)
+[build-dependencies]
+└── cc v1.0.50
+```
+
+Packages marked with `(*)` have been "de-duplicated". The dependencies for the
+package have already been shown elsewhere in the graph, and so are not
+repeated. Use the `--no-dedupe` option to repeat the duplicates.
+
+The `-e` flag can be used to select the dependency kinds to display. The
+"features" kind changes the output to display the features enabled by
+each dependency. For example, `cargo tree -e features`:
+
+```
+myproject v0.1.0 (/myproject)
+└── log feature "serde"
+ └── log v0.4.8
+ ├── serde v1.0.106
+ └── cfg-if feature "default"
+ └── cfg-if v0.1.10
+```
+
+In this tree, `myproject` depends on `log` with the `serde` feature. `log` in
+turn depends on `cfg-if` with "default" features. When using `-e features` it
+can be helpful to use the `-i` flag to show how the features flow into a package.
+See the examples below for more detail.
+
+### Feature Unification
+
+This command shows a graph much closer to the feature-unified graph Cargo will
+build, rather than what you list in `Cargo.toml`. For instance, if you specify
+the same dependency in both `[dependencies]` and `[dev-dependencies]` with
+different features enabled, this command may merge all of those features and
+show a `(*)` on one of the occurrences to indicate the duplicate.
+
+As a result, for a mostly equivalent overview of what `cargo build` does,
+`cargo tree -e normal,build` is pretty close; for a mostly equivalent overview
+of what `cargo test` does, `cargo tree` is pretty close. However, it does not
+guarantee an exact match with what Cargo is going to build, since
+compilation is complex and depends on many different factors.
+
+To learn more about feature unification, check out this
+[dedicated section](../reference/features.html#feature-unification).
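+
+As a small, hypothetical illustration of this merging, consider a manifest that
+enables different features for the same dependency in the two tables:
+
+```
+# Illustrative Cargo.toml snippet (not from a real project)
+[dependencies]
+serde = { version = "1.0", features = ["derive"] }
+
+[dev-dependencies]
+serde = { version = "1.0", features = ["rc"] }
+```
+
+Running `cargo tree -e features` on such a project would typically show `serde`
+with the union of `derive` and `rc`, marking a repeated occurrence with `(*)`
+rather than listing two separate feature sets.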
+
+## OPTIONS
+
+### Tree Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-tree--i"><a class="option-anchor" href="#option-cargo-tree--i"></a><code>-i</code> <em>spec</em></dt>
+<dt class="option-term" id="option-cargo-tree---invert"><a class="option-anchor" href="#option-cargo-tree---invert"></a><code>--invert</code> <em>spec</em></dt>
+<dd class="option-desc">Show the reverse dependencies for the given package. This flag will invert
+the tree and display the packages that depend on the given package.</p>
+<p>Note that in a workspace, by default it will only display the package’s
+reverse dependencies inside the tree of the workspace member in the current
+directory. The <code>--workspace</code> flag can be used to extend it so that it will
+show the package’s reverse dependencies across the entire workspace. The <code>-p</code>
+flag can be used to display the package’s reverse dependencies only within the
+subtree of the package given to <code>-p</code>.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree---prune"><a class="option-anchor" href="#option-cargo-tree---prune"></a><code>--prune</code> <em>spec</em></dt>
+<dd class="option-desc">Prune the given package from the display of the dependency tree.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree---depth"><a class="option-anchor" href="#option-cargo-tree---depth"></a><code>--depth</code> <em>depth</em></dt>
+<dd class="option-desc">Maximum display depth of the dependency tree. A depth of 1 displays the direct
+dependencies, for example.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree---no-dedupe"><a class="option-anchor" href="#option-cargo-tree---no-dedupe"></a><code>--no-dedupe</code></dt>
+<dd class="option-desc">Do not de-duplicate repeated dependencies. Usually, when a package has already
+displayed its dependencies, further occurrences will not re-display its
+dependencies, and will include a <code>(*)</code> to indicate it has already been shown.
+This flag will cause those duplicates to be repeated.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree--d"><a class="option-anchor" href="#option-cargo-tree--d"></a><code>-d</code></dt>
+<dt class="option-term" id="option-cargo-tree---duplicates"><a class="option-anchor" href="#option-cargo-tree---duplicates"></a><code>--duplicates</code></dt>
+<dd class="option-desc">Show only dependencies which come in multiple versions (implies <code>--invert</code>).
+When used with the <code>-p</code> flag, only shows duplicates within the subtree of the
+given package.</p>
+<p>It can be beneficial for build times and executable sizes to avoid building
+the same package multiple times. This flag can help identify the offending
+packages. You can then investigate if the package that depends on the
+duplicate with the older version can be updated to the newer version so that
+only one instance is built.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree--e"><a class="option-anchor" href="#option-cargo-tree--e"></a><code>-e</code> <em>kinds</em></dt>
+<dt class="option-term" id="option-cargo-tree---edges"><a class="option-anchor" href="#option-cargo-tree---edges"></a><code>--edges</code> <em>kinds</em></dt>
+<dd class="option-desc">The dependency kinds to display. Takes a comma separated list of values:</p>
+<ul>
+<li><code>all</code> — Show all edge kinds.</li>
+<li><code>normal</code> — Show normal dependencies.</li>
+<li><code>build</code> — Show build dependencies.</li>
+<li><code>dev</code> — Show development dependencies.</li>
+<li><code>features</code> — Show features enabled by each dependency. If this is the only
+kind given, then it will automatically include the other dependency kinds.</li>
+<li><code>no-normal</code> — Do not include normal dependencies.</li>
+<li><code>no-build</code> — Do not include build dependencies.</li>
+<li><code>no-dev</code> — Do not include development dependencies.</li>
+<li><code>no-proc-macro</code> — Do not include procedural macro dependencies.</li>
+</ul>
+<p>The <code>normal</code>, <code>build</code>, <code>dev</code>, and <code>all</code> dependency kinds cannot be mixed with
+<code>no-normal</code>, <code>no-build</code>, or <code>no-dev</code> dependency kinds.</p>
+<p>The default is <code>normal,build,dev</code>.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree---target"><a class="option-anchor" href="#option-cargo-tree---target"></a><code>--target</code> <em>triple</em></dt>
+<dd class="option-desc">Filter dependencies matching the given <a href="../appendix/glossary.html#target">target triple</a>.
+The default is the host platform. Use the value <code>all</code> to include <em>all</em> targets.</dd>
+
+
+</dl>
+
+### Tree Formatting Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-tree---charset"><a class="option-anchor" href="#option-cargo-tree---charset"></a><code>--charset</code> <em>charset</em></dt>
+<dd class="option-desc">Chooses the character set to use for the tree. Valid values are “utf8” or
+“ascii”. Default is “utf8”.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree--f"><a class="option-anchor" href="#option-cargo-tree--f"></a><code>-f</code> <em>format</em></dt>
+<dt class="option-term" id="option-cargo-tree---format"><a class="option-anchor" href="#option-cargo-tree---format"></a><code>--format</code> <em>format</em></dt>
+<dd class="option-desc">Set the format string for each package. The default is “{p}”.</p>
+<p>This is an arbitrary string which will be used to display each package. The following
+strings will be replaced with the corresponding value:</p>
+<ul>
+<li><code>{p}</code> — The package name.</li>
+<li><code>{l}</code> — The package license.</li>
+<li><code>{r}</code> — The package repository URL.</li>
+<li><code>{f}</code> — Comma-separated list of package features that are enabled.</li>
+<li><code>{lib}</code> — The name, as used in a <code>use</code> statement, of the package’s library.</li>
+</ul></dd>
+
+
+<dt class="option-term" id="option-cargo-tree---prefix"><a class="option-anchor" href="#option-cargo-tree---prefix"></a><code>--prefix</code> <em>prefix</em></dt>
+<dd class="option-desc">Sets how each line is displayed. The <em>prefix</em> value can be one of:</p>
+<ul>
+<li><code>indent</code> (default) — Shows each line indented as a tree.</li>
+<li><code>depth</code> — Show as a list, with the numeric depth printed before each entry.</li>
+<li><code>none</code> — Show as a flat list.</li>
+</ul></dd>
+
+
+</dl>
+
+### Package Selection
+
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+`--manifest-path` is not given). If the manifest is the root of a workspace, then
+the workspace's default members are selected; otherwise, only the package defined
+by the manifest will be selected.
+
+The default members of a workspace can be set explicitly with the
+`workspace.default-members` key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+`--workspace`), and a non-virtual workspace will include only the root crate itself.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-tree--p"><a class="option-anchor" href="#option-cargo-tree--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-tree---package"><a class="option-anchor" href="#option-cargo-tree---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Display only the specified packages. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree---workspace"><a class="option-anchor" href="#option-cargo-tree---workspace"></a><code>--workspace</code></dt>
+<dd class="option-desc">Display all members in the workspace.</dd>
+
+
+
+
+<dt class="option-term" id="option-cargo-tree---exclude"><a class="option-anchor" href="#option-cargo-tree---exclude"></a><code>--exclude</code> <em>SPEC</em>…</dt>
+<dd class="option-desc">Exclude the specified packages. Must be used in conjunction with the
+<code>--workspace</code> flag. This flag may be specified multiple times and supports
+common Unix glob patterns like <code>*</code>, <code>?</code> and <code>[]</code>. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.</dd>
+
+
+</dl>
+
+
+### Manifest Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-tree---manifest-path"><a class="option-anchor" href="#option-cargo-tree---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-tree---frozen"><a class="option-anchor" href="#option-cargo-tree---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-tree---locked"><a class="option-anchor" href="#option-cargo-tree---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree---offline"><a class="option-anchor" href="#option-cargo-tree---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Feature Selection
+
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the `default` feature is activated for every
+selected package.
+
+See [the features documentation](../reference/features.html#command-line-feature-options)
+for more details.
+
+<dl>
+
+<dt class="option-term" id="option-cargo-tree--F"><a class="option-anchor" href="#option-cargo-tree--F"></a><code>-F</code> <em>features</em></dt>
+<dt class="option-term" id="option-cargo-tree---features"><a class="option-anchor" href="#option-cargo-tree---features"></a><code>--features</code> <em>features</em></dt>
+<dd class="option-desc">Space or comma separated list of features to activate. Features of workspace
+members may be enabled with <code>package-name/feature-name</code> syntax. This flag may
+be specified multiple times, which enables all specified features.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree---all-features"><a class="option-anchor" href="#option-cargo-tree---all-features"></a><code>--all-features</code></dt>
+<dd class="option-desc">Activate all available features of all selected packages.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree---no-default-features"><a class="option-anchor" href="#option-cargo-tree---no-default-features"></a><code>--no-default-features</code></dt>
+<dd class="option-desc">Do not activate the <code>default</code> feature of the selected packages.</dd>
+
+
+</dl>
+
+
+### Display Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-tree--v"><a class="option-anchor" href="#option-cargo-tree--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-tree---verbose"><a class="option-anchor" href="#option-cargo-tree---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree--q"><a class="option-anchor" href="#option-cargo-tree--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-tree---quiet"><a class="option-anchor" href="#option-cargo-tree---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree---color"><a class="option-anchor" href="#option-cargo-tree---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-tree-+toolchain"><a class="option-anchor" href="#option-cargo-tree-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree---config"><a class="option-anchor" href="#option-cargo-tree---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree--C"><a class="option-anchor" href="#option-cargo-tree--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-tree--h"><a class="option-anchor" href="#option-cargo-tree--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-tree---help"><a class="option-anchor" href="#option-cargo-tree---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-tree--Z"><a class="option-anchor" href="#option-cargo-tree--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Display the tree for the package in the current directory:
+
+ cargo tree
+
+2. Display all the packages that depend on the `syn` package:
+
+ cargo tree -i syn
+
+3. Show the features enabled on each package:
+
+ cargo tree --format "{p} {f}"
+
+4. Show all packages that are built multiple times. This can happen if multiple
+ semver-incompatible versions appear in the tree (like 1.0.0 and 2.0.0).
+
+ cargo tree -d
+
+5. Explain why features are enabled for the `syn` package:
+
+ cargo tree -e features -i syn
+
+ The `-e features` flag is used to show features. The `-i` flag is used to
+ invert the graph so that it displays the packages that depend on `syn`. An
+ example of what this would display:
+
+ ```
+ syn v1.0.17
+ ├── syn feature "clone-impls"
+ │ └── syn feature "default"
+ │ └── rustversion v1.0.2
+ │ └── rustversion feature "default"
+ │ └── myproject v0.1.0 (/myproject)
+ │ └── myproject feature "default" (command-line)
+ ├── syn feature "default" (*)
+ ├── syn feature "derive"
+ │ └── syn feature "default" (*)
+ ├── syn feature "full"
+ │ └── rustversion v1.0.2 (*)
+ ├── syn feature "parsing"
+ │ └── syn feature "default" (*)
+ ├── syn feature "printing"
+ │ └── syn feature "default" (*)
+ ├── syn feature "proc-macro"
+ │ └── syn feature "default" (*)
+ └── syn feature "quote"
+ ├── syn feature "printing" (*)
+ └── syn feature "proc-macro" (*)
+ ```
+
+ To read this graph, you can follow the chain for each feature from the root
+ to see why it is included. For example, the "full" feature is added by the
+ `rustversion` crate which is included from `myproject` (with the default
+ features), and `myproject` is the package selected on the command-line. All
+ of the other `syn` features are added by the "default" feature ("quote" is
+ added by "printing" and "proc-macro", both of which are default features).
+
+ If you're having difficulty cross-referencing the de-duplicated `(*)`
+ entries, try with the `--no-dedupe` flag to get the full output.
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-metadata(1)](cargo-metadata.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-uninstall.md b/src/tools/cargo/src/doc/src/commands/cargo-uninstall.md
new file mode 100644
index 000000000..e9c73b0cd
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-uninstall.md
@@ -0,0 +1,144 @@
+# cargo-uninstall(1)
+
+## NAME
+
+cargo-uninstall --- Remove a Rust binary
+
+## SYNOPSIS
+
+`cargo uninstall` [_options_] [_spec_...]
+
+## DESCRIPTION
+
+This command removes a package installed with [cargo-install(1)](cargo-install.html). The _spec_
+argument is a package ID specification of the package to remove (see
+[cargo-pkgid(1)](cargo-pkgid.html)).
+
+By default all binaries are removed for a crate, but the `--bin` and
+`--example` flags can be used to remove only particular binaries.
+
+The installation root is determined, in order of precedence:
+
+- `--root` option
+- `CARGO_INSTALL_ROOT` environment variable
+- `install.root` Cargo [config value](../reference/config.html)
+- `CARGO_HOME` environment variable
+- `$HOME/.cargo`
+
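+For example, a hypothetical `.cargo/config.toml` that pins the install root (the
+same location could equally be set through `CARGO_INSTALL_ROOT` or `--root`):
+
+```
+# Illustrative config: binaries are installed to, and uninstalled
+# from, /opt/cargo-tools/bin
+[install]
+root = "/opt/cargo-tools"
+```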
+
+## OPTIONS
+
+### Install Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-uninstall--p"><a class="option-anchor" href="#option-cargo-uninstall--p"></a><code>-p</code></dt>
+<dt class="option-term" id="option-cargo-uninstall---package"><a class="option-anchor" href="#option-cargo-uninstall---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Package to uninstall.</dd>
+
+
+<dt class="option-term" id="option-cargo-uninstall---bin"><a class="option-anchor" href="#option-cargo-uninstall---bin"></a><code>--bin</code> <em>name</em>…</dt>
+<dd class="option-desc">Only uninstall the binary <em>name</em>.</dd>
+
+
+<dt class="option-term" id="option-cargo-uninstall---root"><a class="option-anchor" href="#option-cargo-uninstall---root"></a><code>--root</code> <em>dir</em></dt>
+<dd class="option-desc">Directory to uninstall packages from.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-uninstall--v"><a class="option-anchor" href="#option-cargo-uninstall--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-uninstall---verbose"><a class="option-anchor" href="#option-cargo-uninstall---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-uninstall--q"><a class="option-anchor" href="#option-cargo-uninstall--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-uninstall---quiet"><a class="option-anchor" href="#option-cargo-uninstall---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-uninstall---color"><a class="option-anchor" href="#option-cargo-uninstall---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-uninstall-+toolchain"><a class="option-anchor" href="#option-cargo-uninstall-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-uninstall---config"><a class="option-anchor" href="#option-cargo-uninstall---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-uninstall--C"><a class="option-anchor" href="#option-cargo-uninstall--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-uninstall--h"><a class="option-anchor" href="#option-cargo-uninstall--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-uninstall---help"><a class="option-anchor" href="#option-cargo-uninstall---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-uninstall--Z"><a class="option-anchor" href="#option-cargo-uninstall--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Uninstall a previously installed package.
+
+ cargo uninstall ripgrep
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-install(1)](cargo-install.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-update.md b/src/tools/cargo/src/doc/src/commands/cargo-update.md
new file mode 100644
index 000000000..3cfd69282
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-update.md
@@ -0,0 +1,197 @@
+# cargo-update(1)
+
+## NAME
+
+cargo-update --- Update dependencies as recorded in the local lock file
+
+## SYNOPSIS
+
+`cargo update` [_options_]
+
+## DESCRIPTION
+
+This command will update dependencies in the `Cargo.lock` file to the latest
+version. If the `Cargo.lock` file does not exist, it will be created with the
+latest available versions.
+
+## OPTIONS
+
+### Update Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-update--p"><a class="option-anchor" href="#option-cargo-update--p"></a><code>-p</code> <em>spec</em>…</dt>
+<dt class="option-term" id="option-cargo-update---package"><a class="option-anchor" href="#option-cargo-update---package"></a><code>--package</code> <em>spec</em>…</dt>
+<dd class="option-desc">Update only the specified packages. This flag may be specified
+multiple times. See <a href="cargo-pkgid.html">cargo-pkgid(1)</a> for the SPEC format.</p>
+<p>If packages are specified with the <code>-p</code> flag, then a conservative update of
+the lockfile will be performed. This means that only the dependency specified
+by SPEC will be updated. Its transitive dependencies will be updated only if
+SPEC cannot be updated without updating dependencies. All other dependencies
+will remain locked at their currently recorded versions.</p>
+<p>If <code>-p</code> is not specified, all dependencies are updated.</dd>
+
+
+<dt class="option-term" id="option-cargo-update---aggressive"><a class="option-anchor" href="#option-cargo-update---aggressive"></a><code>--aggressive</code></dt>
+<dd class="option-desc">When used with <code>-p</code>, dependencies of <em>spec</em> are forced to update as well.
+Cannot be used with <code>--precise</code>.</dd>
+
+
+<dt class="option-term" id="option-cargo-update---precise"><a class="option-anchor" href="#option-cargo-update---precise"></a><code>--precise</code> <em>precise</em></dt>
+<dd class="option-desc">When used with <code>-p</code>, allows you to specify a specific version number to set
+the package to. If the package comes from a git repository, this can be a git
+revision (such as a SHA hash or tag).</dd>
+
+
+<dt class="option-term" id="option-cargo-update--w"><a class="option-anchor" href="#option-cargo-update--w"></a><code>-w</code></dt>
+<dt class="option-term" id="option-cargo-update---workspace"><a class="option-anchor" href="#option-cargo-update---workspace"></a><code>--workspace</code></dt>
+<dd class="option-desc">Attempt to update only packages defined in the workspace. Other packages
+are updated only if they don’t already exist in the lockfile. This
+option is useful for updating <code>Cargo.lock</code> after you’ve changed version
+numbers in <code>Cargo.toml</code>.</dd>
+
+
+<dt class="option-term" id="option-cargo-update---dry-run"><a class="option-anchor" href="#option-cargo-update---dry-run"></a><code>--dry-run</code></dt>
+<dd class="option-desc">Displays what would be updated, but doesn’t actually write the lockfile.</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+<dt class="option-term" id="option-cargo-update--v"><a class="option-anchor" href="#option-cargo-update--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-update---verbose"><a class="option-anchor" href="#option-cargo-update---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-update--q"><a class="option-anchor" href="#option-cargo-update--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-update---quiet"><a class="option-anchor" href="#option-cargo-update---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-update---color"><a class="option-anchor" href="#option-cargo-update---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-update---manifest-path"><a class="option-anchor" href="#option-cargo-update---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-update---frozen"><a class="option-anchor" href="#option-cargo-update---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-update---locked"><a class="option-anchor" href="#option-cargo-update---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-update---offline"><a class="option-anchor" href="#option-cargo-update---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-update-+toolchain"><a class="option-anchor" href="#option-cargo-update-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-update---config"><a class="option-anchor" href="#option-cargo-update---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-update--C"><a class="option-anchor" href="#option-cargo-update--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-update--h"><a class="option-anchor" href="#option-cargo-update--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-update---help"><a class="option-anchor" href="#option-cargo-update---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-update--Z"><a class="option-anchor" href="#option-cargo-update--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Update all dependencies in the lockfile:
+
+ cargo update
+
+2. Update only specific dependencies:
+
+ cargo update -p foo -p bar
+
+3. Set a specific dependency to a specific version:
+
+ cargo update -p foo --precise 1.2.3
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-generate-lockfile(1)](cargo-generate-lockfile.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-vendor.md b/src/tools/cargo/src/doc/src/commands/cargo-vendor.md
new file mode 100644
index 000000000..cf47fc256
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-vendor.md
@@ -0,0 +1,194 @@
+# cargo-vendor(1)
+
+## NAME
+
+cargo-vendor --- Vendor all dependencies locally
+
+## SYNOPSIS
+
+`cargo vendor` [_options_] [_path_]
+
+## DESCRIPTION
+
+This cargo subcommand will vendor all crates.io and git dependencies for a
+project into the specified directory at `<path>`. After this command completes,
+the vendor directory specified by `<path>` will contain all remote sources for
+the specified dependencies. Additional manifests beyond the default one can be
+
+The `cargo vendor` command will also print out the configuration necessary
+to use the vendored sources, which you will need to add to `.cargo/config.toml`.
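+
+The printed configuration typically looks like the following sketch (the exact
+directory name follows the `<path>` you passed, `vendor` by default):
+
+```
+[source.crates-io]
+replace-with = "vendored-sources"
+
+[source.vendored-sources]
+directory = "vendor"
+```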
+
+## OPTIONS
+
+### Vendor Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-vendor--s"><a class="option-anchor" href="#option-cargo-vendor--s"></a><code>-s</code> <em>manifest</em></dt>
+<dt class="option-term" id="option-cargo-vendor---sync"><a class="option-anchor" href="#option-cargo-vendor---sync"></a><code>--sync</code> <em>manifest</em></dt>
+<dd class="option-desc">Specify an extra <code>Cargo.toml</code> manifest of another workspace which should also be
+vendored and synced to the output. May be specified multiple times.</dd>
+
+
+<dt class="option-term" id="option-cargo-vendor---no-delete"><a class="option-anchor" href="#option-cargo-vendor---no-delete"></a><code>--no-delete</code></dt>
+<dd class="option-desc">Don’t delete the “vendor” directory when vendoring, but rather keep all
+existing contents of the vendor directory.</dd>
+
+
+<dt class="option-term" id="option-cargo-vendor---respect-source-config"><a class="option-anchor" href="#option-cargo-vendor---respect-source-config"></a><code>--respect-source-config</code></dt>
+<dd class="option-desc">Instead of ignoring the <code>[source]</code> configuration in <code>.cargo/config.toml</code> (the default behavior),
+read it and use it when downloading crates from crates.io, for example.</dd>
+
+
+<dt class="option-term" id="option-cargo-vendor---versioned-dirs"><a class="option-anchor" href="#option-cargo-vendor---versioned-dirs"></a><code>--versioned-dirs</code></dt>
+<dd class="option-desc">Normally versions are only added to disambiguate multiple versions of the
+same package. This option causes all directories in the “vendor” directory
+to be versioned, which makes it easier to track the history of vendored
+packages over time, and can help with the performance of re-vendoring when
+only a subset of the packages have changed.</dd>
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-vendor---manifest-path"><a class="option-anchor" href="#option-cargo-vendor---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-vendor---frozen"><a class="option-anchor" href="#option-cargo-vendor---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-vendor---locked"><a class="option-anchor" href="#option-cargo-vendor---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-vendor---offline"><a class="option-anchor" href="#option-cargo-vendor---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Display Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-vendor--v"><a class="option-anchor" href="#option-cargo-vendor--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-vendor---verbose"><a class="option-anchor" href="#option-cargo-vendor---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-vendor--q"><a class="option-anchor" href="#option-cargo-vendor--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-vendor---quiet"><a class="option-anchor" href="#option-cargo-vendor---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-vendor---color"><a class="option-anchor" href="#option-cargo-vendor---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-vendor-+toolchain"><a class="option-anchor" href="#option-cargo-vendor-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-vendor---config"><a class="option-anchor" href="#option-cargo-vendor---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-vendor--C"><a class="option-anchor" href="#option-cargo-vendor--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-vendor--h"><a class="option-anchor" href="#option-cargo-vendor--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-vendor---help"><a class="option-anchor" href="#option-cargo-vendor---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-vendor--Z"><a class="option-anchor" href="#option-cargo-vendor--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Vendor all dependencies into a local "vendor" folder
+
+ cargo vendor
+
+2. Vendor all dependencies into a local "third-party/vendor" folder
+
+ cargo vendor third-party/vendor
+
+3. Vendor the current workspace as well as another to "vendor"
+
+ cargo vendor -s ../path/to/Cargo.toml
+
+## SEE ALSO
+[cargo(1)](cargo.html)
+
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-verify-project.md b/src/tools/cargo/src/doc/src/commands/cargo-verify-project.md
new file mode 100644
index 000000000..2a1d5d950
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-verify-project.md
@@ -0,0 +1,154 @@
+# cargo-verify-project(1)
+
+## NAME
+
+cargo-verify-project --- Check correctness of crate manifest
+
+## SYNOPSIS
+
+`cargo verify-project` [_options_]
+
+## DESCRIPTION
+
+This command will parse the local manifest and check its validity. It emits a
+JSON object with the result. A successful validation will display:
+
+ {"success":"true"}
+
+An invalid workspace will display:
+
+ {"invalid":"human-readable error message"}
+
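+Because the outcome is also reflected in the exit status (see below), the command
+can be used directly in scripts; a minimal shell sketch:
+
+    cargo verify-project --quiet || echo "manifest is invalid"
+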
+## OPTIONS
+
+### Display Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-verify-project--v"><a class="option-anchor" href="#option-cargo-verify-project--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-verify-project---verbose"><a class="option-anchor" href="#option-cargo-verify-project---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-verify-project--q"><a class="option-anchor" href="#option-cargo-verify-project--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-verify-project---quiet"><a class="option-anchor" href="#option-cargo-verify-project---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-verify-project---color"><a class="option-anchor" href="#option-cargo-verify-project---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-verify-project---manifest-path"><a class="option-anchor" href="#option-cargo-verify-project---manifest-path"></a><code>--manifest-path</code> <em>path</em></dt>
+<dd class="option-desc">Path to the <code>Cargo.toml</code> file. By default, Cargo searches for the
+<code>Cargo.toml</code> file in the current directory or any parent directory.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-verify-project---frozen"><a class="option-anchor" href="#option-cargo-verify-project---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo-verify-project---locked"><a class="option-anchor" href="#option-cargo-verify-project---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo-verify-project---offline"><a class="option-anchor" href="#option-cargo-verify-project---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-verify-project-+toolchain"><a class="option-anchor" href="#option-cargo-verify-project-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-verify-project---config"><a class="option-anchor" href="#option-cargo-verify-project---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-verify-project--C"><a class="option-anchor" href="#option-cargo-verify-project--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-verify-project--h"><a class="option-anchor" href="#option-cargo-verify-project--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-verify-project---help"><a class="option-anchor" href="#option-cargo-verify-project---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-verify-project--Z"><a class="option-anchor" href="#option-cargo-verify-project--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: The workspace is OK.
+* `1`: The workspace is invalid.
+
+## EXAMPLES
+
+1. Check the current workspace for errors:
+
+ cargo verify-project
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-package(1)](cargo-package.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-version.md b/src/tools/cargo/src/doc/src/commands/cargo-version.md
new file mode 100644
index 000000000..e9e12a05f
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-version.md
@@ -0,0 +1,42 @@
+# cargo-version(1)
+
+## NAME
+
+cargo-version --- Show version information
+
+## SYNOPSIS
+
+`cargo version` [_options_]
+
+## DESCRIPTION
+
+Displays the version of Cargo.
+
+## OPTIONS
+
+<dl>
+
+<dt class="option-term" id="option-cargo-version--v"><a class="option-anchor" href="#option-cargo-version--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-version---verbose"><a class="option-anchor" href="#option-cargo-version---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Display additional version information.</dd>
+
+
+</dl>
+
+## EXAMPLES
+
+1. Display the version:
+
+ cargo version
+
+2. The version is also available via flags:
+
+ cargo --version
+ cargo -V
+
+3. Display extra version information:
+
+ cargo -Vv
+
+## SEE ALSO
+[cargo(1)](cargo.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-yank.md b/src/tools/cargo/src/doc/src/commands/cargo-yank.md
new file mode 100644
index 000000000..18565d2dc
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo-yank.md
@@ -0,0 +1,164 @@
+# cargo-yank(1)
+
+## NAME
+
+cargo-yank --- Remove a pushed crate from the index
+
+## SYNOPSIS
+
+`cargo yank` [_options_] _crate_@_version_\
+`cargo yank` [_options_] `--version` _version_ [_crate_]
+
+## DESCRIPTION
+
+The yank command removes a previously published crate's version from the
+server's index. This command does not delete any data, and the crate will
+still be available for download via the registry's download link.
+
+Note that existing crates locked to a yanked version will still be able to
+download the yanked version to use it. Cargo will, however, not allow any new
+crates to be locked to any yanked version.
+
+This command requires you to be authenticated either with the `--token` option
+or by using [cargo-login(1)](cargo-login.html).
+
+If the crate name is not specified, it will use the package name from the
+current directory.
+
+## OPTIONS
+
+### Yank Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-yank---vers"><a class="option-anchor" href="#option-cargo-yank---vers"></a><code>--vers</code> <em>version</em></dt>
+<dt class="option-term" id="option-cargo-yank---version"><a class="option-anchor" href="#option-cargo-yank---version"></a><code>--version</code> <em>version</em></dt>
+<dd class="option-desc">The version to yank or un-yank.</dd>
+
+
+<dt class="option-term" id="option-cargo-yank---undo"><a class="option-anchor" href="#option-cargo-yank---undo"></a><code>--undo</code></dt>
+<dd class="option-desc">Undo a yank, putting a version back into the index.</dd>
+
+
+<dt class="option-term" id="option-cargo-yank---token"><a class="option-anchor" href="#option-cargo-yank---token"></a><code>--token</code> <em>token</em></dt>
+<dd class="option-desc">API token to use when authenticating. This overrides the token stored in
+the credentials file (which is created by <a href="cargo-login.html">cargo-login(1)</a>).</p>
+<p><a href="../reference/config.html">Cargo config</a> environment variables can be
+used to override the tokens stored in the credentials file. The token for
+crates.io may be specified with the <code>CARGO_REGISTRY_TOKEN</code> environment
+variable. Tokens for other registries may be specified with environment
+variables of the form <code>CARGO_REGISTRIES_NAME_TOKEN</code> where <code>NAME</code> is the name
+of the registry in all capital letters.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-yank---index"><a class="option-anchor" href="#option-cargo-yank---index"></a><code>--index</code> <em>index</em></dt>
+<dd class="option-desc">The URL of the registry index to use.</dd>
+
+
+
+<dt class="option-term" id="option-cargo-yank---registry"><a class="option-anchor" href="#option-cargo-yank---registry"></a><code>--registry</code> <em>registry</em></dt>
+<dd class="option-desc">Name of the registry to use. Registry names are defined in <a href="../reference/config.html">Cargo config
+files</a>. If not specified, the default registry is used,
+which is defined by the <code>registry.default</code> config key which defaults to
+<code>crates-io</code>.</dd>
+
+
+
+</dl>
+
+### Display Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-yank--v"><a class="option-anchor" href="#option-cargo-yank--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo-yank---verbose"><a class="option-anchor" href="#option-cargo-yank---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-yank--q"><a class="option-anchor" href="#option-cargo-yank--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo-yank---quiet"><a class="option-anchor" href="#option-cargo-yank---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo-yank---color"><a class="option-anchor" href="#option-cargo-yank---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-yank-+toolchain"><a class="option-anchor" href="#option-cargo-yank-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo-yank---config"><a class="option-anchor" href="#option-cargo-yank---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo-yank--C"><a class="option-anchor" href="#option-cargo-yank--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo-yank--h"><a class="option-anchor" href="#option-cargo-yank--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo-yank---help"><a class="option-anchor" href="#option-cargo-yank---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo-yank--Z"><a class="option-anchor" href="#option-cargo-yank--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## EXAMPLES
+
+1. Yank a crate from the index:
+
+ cargo yank foo@1.0.7
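+
+2. Undo a yank, putting the version back into the index:
+
+       cargo yank --undo foo@1.0.7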
+
+## SEE ALSO
+[cargo(1)](cargo.html), [cargo-login(1)](cargo-login.html), [cargo-publish(1)](cargo-publish.html)
diff --git a/src/tools/cargo/src/doc/src/commands/cargo.md b/src/tools/cargo/src/doc/src/commands/cargo.md
new file mode 100644
index 000000000..b1b07bc70
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/cargo.md
@@ -0,0 +1,335 @@
+# cargo(1)
+
+## NAME
+
+cargo --- The Rust package manager
+
+## SYNOPSIS
+
+`cargo` [_options_] _command_ [_args_]\
+`cargo` [_options_] `--version`\
+`cargo` [_options_] `--list`\
+`cargo` [_options_] `--help`\
+`cargo` [_options_] `--explain` _code_
+
+## DESCRIPTION
+
+This program is a package manager and build tool for the Rust language,
+available at <https://rust-lang.org>.
+
+## COMMANDS
+
+### Build Commands
+
+[cargo-bench(1)](cargo-bench.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Execute benchmarks of a package.
+
+[cargo-build(1)](cargo-build.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Compile a package.
+
+[cargo-check(1)](cargo-check.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Check a local package and all of its dependencies for errors.
+
+[cargo-clean(1)](cargo-clean.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Remove artifacts that Cargo has generated in the past.
+
+[cargo-doc(1)](cargo-doc.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Build a package's documentation.
+
+[cargo-fetch(1)](cargo-fetch.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Fetch dependencies of a package from the network.
+
+[cargo-fix(1)](cargo-fix.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Automatically fix lint warnings reported by rustc.
+
+[cargo-run(1)](cargo-run.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Run a binary or example of the local package.
+
+[cargo-rustc(1)](cargo-rustc.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Compile a package, and pass extra options to the compiler.
+
+[cargo-rustdoc(1)](cargo-rustdoc.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Build a package's documentation, using specified custom flags.
+
+[cargo-test(1)](cargo-test.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Execute unit and integration tests of a package.
+
+### Manifest Commands
+
+[cargo-generate-lockfile(1)](cargo-generate-lockfile.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Generate `Cargo.lock` for a project.
+
+[cargo-locate-project(1)](cargo-locate-project.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Print a JSON representation of a `Cargo.toml` file's location.
+
+[cargo-metadata(1)](cargo-metadata.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Output the resolved dependencies of a package in machine-readable format.
+
+[cargo-pkgid(1)](cargo-pkgid.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Print a fully qualified package specification.
+
+[cargo-tree(1)](cargo-tree.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Display a tree visualization of a dependency graph.
+
+[cargo-update(1)](cargo-update.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Update dependencies as recorded in the local lock file.
+
+[cargo-vendor(1)](cargo-vendor.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Vendor all dependencies locally.
+
+[cargo-verify-project(1)](cargo-verify-project.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Check correctness of crate manifest.
+
+### Package Commands
+
+[cargo-init(1)](cargo-init.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Create a new Cargo package in an existing directory.
+
+[cargo-install(1)](cargo-install.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Build and install a Rust binary.
+
+[cargo-new(1)](cargo-new.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Create a new Cargo package.
+
+[cargo-search(1)](cargo-search.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Search packages in crates.io.
+
+[cargo-uninstall(1)](cargo-uninstall.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Remove a Rust binary.
+
+### Publishing Commands
+
+[cargo-login(1)](cargo-login.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Save an API token from the registry locally.
+
+[cargo-logout(1)](cargo-logout.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Remove an API token from the registry locally.
+
+[cargo-owner(1)](cargo-owner.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Manage the owners of a crate on the registry.
+
+[cargo-package(1)](cargo-package.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Assemble the local package into a distributable tarball.
+
+[cargo-publish(1)](cargo-publish.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Upload a package to the registry.
+
+[cargo-yank(1)](cargo-yank.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Remove a pushed crate from the index.
+
+### General Commands
+
+[cargo-help(1)](cargo-help.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Display help information about Cargo.
+
+[cargo-version(1)](cargo-version.html)\
+&nbsp;&nbsp;&nbsp;&nbsp;Show version information.
+
+## OPTIONS
+
+### Special Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo--V"><a class="option-anchor" href="#option-cargo--V"></a><code>-V</code></dt>
+<dt class="option-term" id="option-cargo---version"><a class="option-anchor" href="#option-cargo---version"></a><code>--version</code></dt>
+<dd class="option-desc">Print version info and exit. If used with <code>--verbose</code>, prints extra
+information.</dd>
+
+
+<dt class="option-term" id="option-cargo---list"><a class="option-anchor" href="#option-cargo---list"></a><code>--list</code></dt>
+<dd class="option-desc">List all installed Cargo subcommands. If used with <code>--verbose</code>, prints extra
+information.</dd>
+
+
+<dt class="option-term" id="option-cargo---explain"><a class="option-anchor" href="#option-cargo---explain"></a><code>--explain</code> <em>code</em></dt>
+<dd class="option-desc">Run <code>rustc --explain CODE</code> which will print out a detailed explanation of an
+error message (for example, <code>E0004</code>).</dd>
+
+
+</dl>
+
+### Display Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo--v"><a class="option-anchor" href="#option-cargo--v"></a><code>-v</code></dt>
+<dt class="option-term" id="option-cargo---verbose"><a class="option-anchor" href="#option-cargo---verbose"></a><code>--verbose</code></dt>
+<dd class="option-desc">Use verbose output. May be specified twice for “very verbose” output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the <code>term.verbose</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo--q"><a class="option-anchor" href="#option-cargo--q"></a><code>-q</code></dt>
+<dt class="option-term" id="option-cargo---quiet"><a class="option-anchor" href="#option-cargo---quiet"></a><code>--quiet</code></dt>
+<dd class="option-desc">Do not print cargo log messages.
+May also be specified with the <code>term.quiet</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+<dt class="option-term" id="option-cargo---color"><a class="option-anchor" href="#option-cargo---color"></a><code>--color</code> <em>when</em></dt>
+<dd class="option-desc">Control when colored output is used. Valid values:</p>
+<ul>
+<li><code>auto</code> (default): Automatically detect if color support is available on the
+terminal.</li>
+<li><code>always</code>: Always display colors.</li>
+<li><code>never</code>: Never display colors.</li>
+</ul>
+<p>May also be specified with the <code>term.color</code>
+<a href="../reference/config.html">config value</a>.</dd>
+
+
+
+</dl>
+
+### Manifest Options
+
+<dl>
+<dt class="option-term" id="option-cargo---frozen"><a class="option-anchor" href="#option-cargo---frozen"></a><code>--frozen</code></dt>
+<dt class="option-term" id="option-cargo---locked"><a class="option-anchor" href="#option-cargo---locked"></a><code>--locked</code></dt>
+<dd class="option-desc">Either of these flags requires that the <code>Cargo.lock</code> file is
+up-to-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The <code>--frozen</code> flag also prevents Cargo from
+attempting to access the network to determine if it is out-of-date.</p>
+<p>These may be used in environments where you want to assert that the
+<code>Cargo.lock</code> file is up-to-date (such as a CI build) or want to avoid network
+access.</dd>
+
+
+<dt class="option-term" id="option-cargo---offline"><a class="option-anchor" href="#option-cargo---offline"></a><code>--offline</code></dt>
+<dd class="option-desc">Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.</p>
+<p>Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the <a href="cargo-fetch.html">cargo-fetch(1)</a> command to download dependencies before going
+offline.</p>
+<p>May also be specified with the <code>net.offline</code> <a href="../reference/config.html">config value</a>.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+
+<dt class="option-term" id="option-cargo-+toolchain"><a class="option-anchor" href="#option-cargo-+toolchain"></a><code>+</code><em>toolchain</em></dt>
+<dd class="option-desc">If Cargo has been installed with rustup, and the first argument to <code>cargo</code>
+begins with <code>+</code>, it will be interpreted as a rustup toolchain name (such
+as <code>+stable</code> or <code>+nightly</code>).
+See the <a href="https://rust-lang.github.io/rustup/overrides.html">rustup documentation</a>
+for more information about how toolchain overrides work.</dd>
+
+
+<dt class="option-term" id="option-cargo---config"><a class="option-anchor" href="#option-cargo---config"></a><code>--config</code> <em>KEY=VALUE</em> or <em>PATH</em></dt>
+<dd class="option-desc">Overrides a Cargo configuration value. The argument should be in TOML syntax of <code>KEY=VALUE</code>,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the <a href="../reference/config.html#command-line-overrides">command-line overrides section</a> for more information.</dd>
+
+
+<dt class="option-term" id="option-cargo--C"><a class="option-anchor" href="#option-cargo--C"></a><code>-C</code> <em>PATH</em></dt>
+<dd class="option-desc">Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (<code>Cargo.toml</code>), as well as
+the directories searched for discovering <code>.cargo/config.toml</code>, for example. This option must
+appear before the command name, for example <code>cargo -C path/to/my-project build</code>.</p>
+<p>This option is only available on the <a href="https://doc.rust-lang.org/book/appendix-07-nightly-rust.html">nightly
+channel</a> and
+requires the <code>-Z unstable-options</code> flag to enable (see
+<a href="https://github.com/rust-lang/cargo/issues/10098">#10098</a>).</dd>
+
+
+<dt class="option-term" id="option-cargo--h"><a class="option-anchor" href="#option-cargo--h"></a><code>-h</code></dt>
+<dt class="option-term" id="option-cargo---help"><a class="option-anchor" href="#option-cargo---help"></a><code>--help</code></dt>
+<dd class="option-desc">Prints help information.</dd>
+
+
+<dt class="option-term" id="option-cargo--Z"><a class="option-anchor" href="#option-cargo--Z"></a><code>-Z</code> <em>flag</em></dt>
+<dd class="option-desc">Unstable (nightly-only) flags to Cargo. Run <code>cargo -Z help</code> for details.</dd>
+
+
+</dl>
+
+
+## ENVIRONMENT
+
+See [the reference](../reference/environment-variables.html) for
+details on environment variables that Cargo reads.
+
+
+## EXIT STATUS
+
+* `0`: Cargo succeeded.
+* `101`: Cargo failed to complete.
+
+
+## FILES
+
+`~/.cargo/`\
+&nbsp;&nbsp;&nbsp;&nbsp;Default location for Cargo's "home" directory where it
+stores various files. The location can be changed with the `CARGO_HOME`
+environment variable.
+
+`$CARGO_HOME/bin/`\
+&nbsp;&nbsp;&nbsp;&nbsp;Binaries installed by [cargo-install(1)](cargo-install.html) will be located here. If using
+[rustup], executables distributed with Rust are also located here.
+
+`$CARGO_HOME/config.toml`\
+&nbsp;&nbsp;&nbsp;&nbsp;The global configuration file. See [the reference](../reference/config.html)
+for more information about configuration files.
+
+`.cargo/config.toml`\
+&nbsp;&nbsp;&nbsp;&nbsp;Cargo automatically searches for a file named `.cargo/config.toml` in the
+current directory, and all parent directories. These configuration files
+will be merged with the global configuration file.
+
+`$CARGO_HOME/credentials.toml`\
+&nbsp;&nbsp;&nbsp;&nbsp;Private authentication information for logging in to a registry.
+
+`$CARGO_HOME/registry/`\
+&nbsp;&nbsp;&nbsp;&nbsp;This directory contains cached downloads of the registry index and any
+downloaded dependencies.
+
+`$CARGO_HOME/git/`\
+&nbsp;&nbsp;&nbsp;&nbsp;This directory contains cached downloads of git dependencies.
+
+Please note that the internal structure of the `$CARGO_HOME` directory is not
+stable yet and may be subject to change.
+
+[rustup]: https://rust-lang.github.io/rustup/
+
+## EXAMPLES
+
+1. Build a local package and all of its dependencies:
+
+ cargo build
+
+2. Build a package with optimizations:
+
+ cargo build --release
+
+3. Run tests for a cross-compiled target:
+
+ cargo test --target i686-unknown-linux-gnu
+
+4. Create a new package that builds an executable:
+
+ cargo new foobar
+
+5. Create a package in the current directory:
+
+ mkdir foo && cd foo
+ cargo init .
+
+6. Learn about a command's options and usage:
+
+ cargo help clean
+
+## BUGS
+
+See <https://github.com/rust-lang/cargo/issues> for issues.
+
+## SEE ALSO
+[rustc(1)](https://doc.rust-lang.org/rustc/index.html), [rustdoc(1)](https://doc.rust-lang.org/rustdoc/index.html)
diff --git a/src/tools/cargo/src/doc/src/commands/general-commands.md b/src/tools/cargo/src/doc/src/commands/general-commands.md
new file mode 100644
index 000000000..eaad4fbc3
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/general-commands.md
@@ -0,0 +1,4 @@
+# General Commands
+* [cargo](cargo.md)
+* [cargo help](cargo-help.md)
+* [cargo version](cargo-version.md)
diff --git a/src/tools/cargo/src/doc/src/commands/index.md b/src/tools/cargo/src/doc/src/commands/index.md
new file mode 100644
index 000000000..362a53ec0
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/index.md
@@ -0,0 +1,6 @@
+# Cargo Commands
+* [General Commands](general-commands.md)
+* [Build Commands](build-commands.md)
+* [Manifest Commands](manifest-commands.md)
+* [Package Commands](package-commands.md)
+* [Publishing Commands](publishing-commands.md)
diff --git a/src/tools/cargo/src/doc/src/commands/manifest-commands.md b/src/tools/cargo/src/doc/src/commands/manifest-commands.md
new file mode 100644
index 000000000..98a82d8aa
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/manifest-commands.md
@@ -0,0 +1,11 @@
+# Manifest Commands
+* [cargo add](cargo-add.md)
+* [cargo generate-lockfile](cargo-generate-lockfile.md)
+* [cargo locate-project](cargo-locate-project.md)
+* [cargo metadata](cargo-metadata.md)
+* [cargo pkgid](cargo-pkgid.md)
+* [cargo remove](cargo-remove.md)
+* [cargo tree](cargo-tree.md)
+* [cargo update](cargo-update.md)
+* [cargo vendor](cargo-vendor.md)
+* [cargo verify-project](cargo-verify-project.md)
diff --git a/src/tools/cargo/src/doc/src/commands/package-commands.md b/src/tools/cargo/src/doc/src/commands/package-commands.md
new file mode 100644
index 000000000..783abaff8
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/package-commands.md
@@ -0,0 +1,6 @@
+# Package Commands
+* [cargo init](cargo-init.md)
+* [cargo install](cargo-install.md)
+* [cargo new](cargo-new.md)
+* [cargo search](cargo-search.md)
+* [cargo uninstall](cargo-uninstall.md)
diff --git a/src/tools/cargo/src/doc/src/commands/publishing-commands.md b/src/tools/cargo/src/doc/src/commands/publishing-commands.md
new file mode 100644
index 000000000..81d440151
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/commands/publishing-commands.md
@@ -0,0 +1,7 @@
+# Publishing Commands
+* [cargo login](cargo-login.md)
+* [cargo logout](cargo-logout.md)
+* [cargo owner](cargo-owner.md)
+* [cargo package](cargo-package.md)
+* [cargo publish](cargo-publish.md)
+* [cargo yank](cargo-yank.md)
diff --git a/src/tools/cargo/src/doc/src/faq.md b/src/tools/cargo/src/doc/src/faq.md
new file mode 100644
index 000000000..8a8840b8f
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/faq.md
@@ -0,0 +1,261 @@
+## Frequently Asked Questions
+
+### Is the plan to use GitHub as a package repository?
+
+No. The plan for Cargo is to use [crates.io], like npm or Rubygems do with
+[npmjs.com][1] and [rubygems.org][3].
+
+We plan to support git repositories as a source of packages forever,
+because they can be used for early development and temporary patches,
+even when people use the registry as the primary source of packages.
+
+### Why build crates.io rather than use GitHub as a registry?
+
+We think that it’s very important to support multiple ways to download
+packages, including downloading from GitHub and copying packages into
+your package itself.
+
+That said, we think that [crates.io] offers a number of important benefits, and
+will likely become the primary way that people download packages in Cargo.
+
+For precedent, both Node.js’s [npm][1] and Ruby’s [bundler][2] support both a
+central registry model and a Git-based model, and most packages
+are downloaded through the registry in those ecosystems, with an
+important minority of packages making use of git-based packages.
+
+[1]: https://www.npmjs.com
+[2]: https://bundler.io
+[3]: https://rubygems.org
+
+Some of the advantages that make a central registry popular in other
+languages include:
+
+* **Discoverability**. A central registry provides an easy place to look
+ for existing packages. Combined with tagging, this also makes it
+ possible for a registry to provide ecosystem-wide information, such as a
+ list of the most popular or most-depended-on packages.
+* **Speed**. A central registry makes it possible to easily fetch just
+ the metadata for packages quickly and efficiently, and then to
+ efficiently download just the published package, and not other bloat
+ that happens to exist in the repository. This adds up to a significant
+ improvement in the speed of dependency resolution and fetching. As
+ dependency graphs scale up, downloading all of the git repositories bogs
+ down fast. Also remember that not everybody has a high-speed,
+ low-latency Internet connection.
+
+### Will Cargo work with C code (or other languages)?
+
+Yes!
+
+Cargo handles compiling Rust code, but we know that many Rust packages
+link against C code. We also know that there are decades of tooling
+built up around compiling languages other than Rust.
+
+Our solution: Cargo allows a package to [specify a script](reference/build-scripts.md)
+(written in Rust) to run before invoking `rustc`. Rust is leveraged to
+implement platform-specific configuration and refactor out common build
+functionality among packages.
+
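+For instance, a build script is just a `build.rs` file at the package root. A
+minimal sketch that links the final artifact against a system C library might
+look like this (the library name here is purely illustrative):
+
+```rust
+// build.rs
+fn main() {
+    // Ask Cargo to link against the system zlib.
+    println!("cargo:rustc-link-lib=z");
+    // Re-run this script only when it changes, rather than on every build.
+    println!("cargo:rerun-if-changed=build.rs");
+}
+```
+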
+### Can Cargo be used inside of `make` (or `ninja`, or ...)
+
+Indeed. While we intend Cargo to be useful as a standalone way to
+compile Rust packages at the top-level, we know that some people will
+want to invoke Cargo from other build tools.
+
+We have designed Cargo to work well in those contexts, paying attention
+to things like error codes and machine-readable output modes. We still
+have some work to do on those fronts, but using Cargo in the context of
+conventional scripts is something we designed for from the beginning and
+will continue to prioritize.
+
+### Does Cargo handle multi-platform packages or cross-compilation?
+
+Rust itself provides facilities for configuring sections of code based
+on the platform. Cargo also supports [platform-specific
+dependencies][target-deps], and we plan to support more per-platform
+configuration in `Cargo.toml` in the future.
+
+[target-deps]: reference/specifying-dependencies.md#platform-specific-dependencies
+
+In the longer-term, we’re looking at ways to conveniently cross-compile
+packages using Cargo.
+
+### Does Cargo support environments, like `production` or `test`?
+
+We support environments through the use of [profiles] to support:
+
+[profiles]: reference/profiles.md
+
+* environment-specific flags (like `-g --opt-level=0` for development
+ and `--opt-level=3` for production).
+* environment-specific dependencies (like `hamcrest` for test assertions).
+* environment-specific `#[cfg]`
+* a `cargo test` command
+
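+As a small illustration, these settings live under `[profile.*]` sections of
+`Cargo.toml`, for example:
+
+```toml
+[profile.dev]
+opt-level = 0      # fast, unoptimized builds while developing
+
+[profile.release]
+opt-level = 3      # optimized builds for production
+```
+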
+### Does Cargo work on Windows?
+
+Yes!
+
+All commits to Cargo are required to pass the local test suite on Windows.
+If you encounter an issue while running on Windows, we consider it a bug, so [please file an
+issue][cargo-issues].
+
+[cargo-issues]: https://github.com/rust-lang/cargo/issues
+
+### Why do binaries have `Cargo.lock` in version control, but not libraries?
+
+The purpose of a `Cargo.lock` lockfile is to describe the state of the world at
+the time of a successful build. Cargo uses the lockfile to provide
+deterministic builds on different times and different systems, by ensuring that
+the exact same dependencies and versions are used as when the `Cargo.lock` file
+was originally generated.
+
+This property is most desirable for applications and packages which are at the
+very end of the dependency chain (binaries). As a result, it is recommended that
+all binaries check in their `Cargo.lock`.
+
+For libraries the situation is somewhat different. A library is not only used by
+the library developers, but also by any downstream consumers of the library. Users
+dependent on the library will not inspect the library’s `Cargo.lock` (even if it
+exists). This is precisely because a library should **not** be deterministically
+recompiled for all users of the library.
+
+If a library ends up being used transitively by several dependencies, it’s
+likely that just a single copy of the library is desired (based on semver
+compatibility). If Cargo used all of the dependencies' `Cargo.lock` files,
+then multiple copies of the library could be used, perhaps even resulting in a
+version conflict.
+
+In other words, libraries specify SemVer requirements for their dependencies but
+cannot see the full picture. Only end products like binaries have a full
+picture to decide what versions of dependencies should be used.
+
+### Can libraries use `*` as a version for their dependencies?
+
+**As of January 22nd, 2016, [crates.io] rejects all packages (not just libraries)
+with wildcard dependency constraints.**
+
+While libraries _can_, strictly speaking, use `*`, they should not. A version requirement
+of `*` says “This will work with every version ever”, which is never going
+to be true. Libraries should always specify the range that they do work with,
+even if it’s something as general as “every 1.x.y version”.
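+
+For instance, instead of a wildcard requirement, a library can state the range
+it supports explicitly (the crate name is illustrative):
+
+```toml
+[dependencies]
+regex = "1"   # any 1.x.y version, rather than regex = "*"
+```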
+
+### Why `Cargo.toml`?
+
+As one of the most frequent interactions with Cargo, the question of why the
+configuration file is named `Cargo.toml` arises from time to time. The leading
+capital-`C` was chosen to ensure that the manifest was grouped with other
+similar configuration files in directory listings. Sorting files often puts
+capital letters before lowercase letters, ensuring files like `Makefile` and
+`Cargo.toml` are placed together. The trailing `.toml` was chosen to emphasize
+the fact that the file is in the [TOML configuration
+format](https://toml.io/).
+
+Cargo does not allow other names such as `cargo.toml` or `Cargofile`, so that a
+Cargo package can always be identified by a single, well-known filename. Allowing
+many possible names has historically led to confusion where one case was handled
+but others were accidentally forgotten.
+
+[crates.io]: https://crates.io/
+
+### How can Cargo work offline?
+
+Cargo is often used in situations with limited or no network access such as
+airplanes, CI environments, or embedded in large production deployments. Users
+are often surprised when Cargo attempts to fetch resources from the network, and
+hence the request for Cargo to work offline comes up frequently.
+
+Cargo, at its heart, will not attempt to access the network unless told to do
+so. That is, if no crates come from crates.io, a git repository, or some other
+network location, Cargo will never attempt to make a network connection. As a
+result, if Cargo attempts to touch the network, then it's because it needs to
+fetch a required resource.
+
+Cargo is also quite aggressive about caching information to minimize the amount
+of network activity. It guarantees, for example, that if `cargo build` (or
+an equivalent) runs to completion, the next `cargo build` will not touch the
+network so long as `Cargo.toml` has not been modified in the
+meantime. This avoidance of the network boils down to a `Cargo.lock` existing
+and a populated cache of the crates reflected in the lock file. If either of
+these components is missing, it is required for the build to succeed and
+must be fetched remotely.
+
+As of Rust 1.11.0, Cargo understands a new flag, `--frozen`, which is an
+assertion that it shouldn't touch the network. When passed, Cargo will
+immediately return an error if it would otherwise attempt a network request.
+The error should include contextual information about why the network request is
+being made in the first place, to help with debugging. Note that this flag *does
+not change the behavior of Cargo*, it simply asserts that Cargo shouldn't touch
+the network as a previous command has been run to ensure that network activity
+shouldn't be necessary.
+
+The `--offline` flag was added in Rust 1.36.0. This flag tells Cargo to not
+access the network, and try to proceed with available cached data if possible.
+You can use [`cargo fetch`] in one project to download dependencies before
+going offline, and then use those same dependencies in another project with
+the `--offline` flag (or [configuration value][offline config]).
+
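+A minimal sketch of that workflow:
+
+```console
+$ cargo fetch             # while online: download and cache all dependencies
+$ cargo build --offline   # later: build using only the cached crates
+```
+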
+For more information about vendoring, see documentation on [source
+replacement][replace].
+
+[replace]: reference/source-replacement.md
+[`cargo fetch`]: commands/cargo-fetch.md
+[offline config]: reference/config.md#netoffline
+
+### Why is Cargo rebuilding my code?
+
+Cargo is responsible for incrementally compiling crates in your project. This
+means that if you type `cargo build` twice, the second run shouldn't rebuild your
+crates.io dependencies, for example. Nevertheless, bugs arise and Cargo can
+sometimes rebuild code when you're not expecting it!
+
+We've long [wanted to provide better diagnostics about
+this](https://github.com/rust-lang/cargo/issues/2904) but unfortunately haven't
+been able to make progress on that issue in quite some time. In the meantime,
+however, you can debug a rebuild at least a little by setting the `CARGO_LOG`
+environment variable:
+
+```sh
+$ CARGO_LOG=cargo::core::compiler::fingerprint=info cargo build
+```
+
+This will cause Cargo to print out a lot of information about diagnostics and
+rebuilding. This can often contain clues as to why your project is getting
+rebuilt, although you'll often need to connect some dots yourself since this
+output isn't super easy to read just yet. Note that `CARGO_LOG` needs to be
+set for the command that rebuilds when you think it should not. Unfortunately,
+Cargo currently has no way of answering "why was that rebuilt?" after the fact.
+
+Some issues we've seen historically which can cause crates to get rebuilt are:
+
+* A build script prints `cargo:rerun-if-changed=foo` where `foo` is a file that
+ doesn't exist and nothing generates it. In this case Cargo will keep running
+ the build script thinking it will generate the file but nothing ever does. The
+ fix is to avoid printing `rerun-if-changed` in this scenario.
+
+* Two successive Cargo builds may differ in the set of features enabled for some
+ dependencies. For example if the first build command builds the whole
+ workspace and the second command builds only one crate, this may cause a
+ dependency on crates.io to have a different set of features enabled, causing
+ it and everything that depends on it to get rebuilt. There's unfortunately not
+ really a great fix for this, although if possible it's best to have the set of
+ features enabled on a crate constant regardless of what you're building in
+ your workspace.
+
+* Some filesystems exhibit unusual behavior around timestamps. Cargo primarily
+ uses timestamps on files to govern whether rebuilding needs to happen, but if
+ you're using a nonstandard filesystem it may be affecting the timestamps
+ somehow (e.g. truncating them, causing them to drift, etc). In this scenario,
+ feel free to open an issue and we can see if we can accommodate the filesystem
+ somehow.
+
+* A concurrent build process is either deleting artifacts or modifying files.
+ Sometimes you might have a background process that either tries to build or
+  check your project. Such a process may delete build artifacts or touch files
+  (perhaps only by accident), which can make rebuilds look spurious. The best
+  fix here is to arrange for the background process to avoid clashing with your
+  work.
+
+If after trying to debug your issue, however, you're still running into problems
+then feel free to [open an
+issue](https://github.com/rust-lang/cargo/issues/new)!
diff --git a/src/tools/cargo/src/doc/src/getting-started/first-steps.md b/src/tools/cargo/src/doc/src/getting-started/first-steps.md
new file mode 100644
index 000000000..15bb4bdc7
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/getting-started/first-steps.md
@@ -0,0 +1,82 @@
+## First Steps with Cargo
+
+This section provides a quick sense for the `cargo` command line tool. We
+demonstrate its ability to generate a new [***package***][def-package] for us,
+its ability to compile the [***crate***][def-crate] within the package, and
+its ability to run the resulting program.
+
+To start a new package with Cargo, use `cargo new`:
+
+```console
+$ cargo new hello_world
+```
+
+Cargo defaults to `--bin` to make a binary program. To make a library, we
+would pass `--lib`, instead.
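+
+For example, to start a library instead (the package name is illustrative):
+
+```console
+$ cargo new my_library --lib
+```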
+
+Let’s check out what Cargo has generated for us:
+
+```console
+$ cd hello_world
+$ tree .
+.
+├── Cargo.toml
+└── src
+ └── main.rs
+
+1 directory, 2 files
+```
+
+This is all we need to get started. First, let’s check out `Cargo.toml`:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+```
+
+This is called a [***manifest***][def-manifest], and it contains all of the
+metadata that Cargo needs to compile your package.
+
+Here’s what’s in `src/main.rs`:
+
+```rust
+fn main() {
+ println!("Hello, world!");
+}
+```
+
+Cargo generated a “hello world” program for us, otherwise known as a
+[***binary crate***][def-crate]. Let’s compile it:
+
+```console
+$ cargo build
+ Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+And then run it:
+
+```console
+$ ./target/debug/hello_world
+Hello, world!
+```
+
+We can also use `cargo run` to compile and then run it, all in one step:
+
+```console
+$ cargo run
+ Fresh hello_world v0.1.0 (file:///path/to/package/hello_world)
+ Running `target/hello_world`
+Hello, world!
+```
+
+### Going further
+
+For more details on using Cargo, check out the [Cargo Guide](../guide/index.md)
+
+[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)'
+[def-manifest]: ../appendix/glossary.md#manifest '"manifest" (glossary entry)'
+[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
diff --git a/src/tools/cargo/src/doc/src/getting-started/index.md b/src/tools/cargo/src/doc/src/getting-started/index.md
new file mode 100644
index 000000000..710e9943b
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/getting-started/index.md
@@ -0,0 +1,9 @@
+## Getting Started
+
+To get started with Cargo, install Cargo (and Rust) and set up your first
+[*crate*][def-crate].
+
+* [Installation](installation.md)
+* [First steps with Cargo](first-steps.md)
+
+[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)'
diff --git a/src/tools/cargo/src/doc/src/getting-started/installation.md b/src/tools/cargo/src/doc/src/getting-started/installation.md
new file mode 100644
index 000000000..7cccf9ee5
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/getting-started/installation.md
@@ -0,0 +1,38 @@
+## Installation
+
+### Install Rust and Cargo
+
+The easiest way to get Cargo is to install the current stable release of [Rust]
+by using [rustup]. Installing Rust using `rustup` will also install `cargo`.
+
+On Linux and macOS systems, this is done as follows:
+
+```console
+curl https://sh.rustup.rs -sSf | sh
+```
+
+It will download a script, and start the installation. If everything goes well,
+you’ll see this appear:
+
+```console
+Rust is installed now. Great!
+```
+
+On Windows, download and run [rustup-init.exe]. It will start the installation
+in a console and present the above message on success.
+
+After this, you can use the `rustup` command to also install `beta` or `nightly`
+channels for Rust and Cargo.
+
+For other installation options and information, visit the
+[install][install-rust] page of the Rust website.
+
+### Build and Install Cargo from Source
+
+Alternatively, you can [build Cargo from source][compiling-from-source].
+
+[rust]: https://www.rust-lang.org/
+[rustup]: https://rustup.rs/
+[rustup-init.exe]: https://win.rustup.rs/
+[install-rust]: https://www.rust-lang.org/tools/install
+[compiling-from-source]: https://github.com/rust-lang/cargo#compiling-from-source
diff --git a/src/tools/cargo/src/doc/src/guide/build-cache.md b/src/tools/cargo/src/doc/src/guide/build-cache.md
new file mode 100644
index 000000000..a79b41dba
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/guide/build-cache.md
@@ -0,0 +1,108 @@
+## Build cache
+
+Cargo stores the output of a build into the "target" directory. By default,
+this is the directory named `target` in the root of your
+[*workspace*][def-workspace]. To change the location, you can set the
+`CARGO_TARGET_DIR` [environment variable], the [`build.target-dir`] config
+value, or the `--target-dir` command-line flag.
+
+The directory layout depends on whether or not you are using the `--target`
+flag to build for a specific platform. If `--target` is not specified, Cargo
+runs in a mode where it builds for the host architecture. The output goes into
+the root of the target directory, with each [profile] stored in a separate
+subdirectory:
+
+Directory | Description
+----------|------------
+<code style="white-space: nowrap">target/debug/</code> | Contains output for the `dev` profile.
+<code style="white-space: nowrap">target/release/</code> | Contains output for the `release` profile (with the `--release` option).
+<code style="white-space: nowrap">target/foo/</code> | Contains build output for the `foo` profile (with the `--profile=foo` option).
+
+For historical reasons, the `dev` and `test` profiles are stored in the
+`debug` directory, and the `release` and `bench` profiles are stored in the
+`release` directory. User-defined profiles are stored in a directory with the
+same name as the profile.
+
+When building for another target with `--target`, the output is placed in a
+directory with the name of the [target]:
+
+Directory | Example
+----------|--------
+<code style="white-space: nowrap">target/&lt;triple&gt;/debug/</code> | <code style="white-space: nowrap">target/thumbv7em-none-eabihf/debug/</code>
+<code style="white-space: nowrap">target/&lt;triple&gt;/release/</code> | <code style="white-space: nowrap">target/thumbv7em-none-eabihf/release/</code>
+
+> **Note**: When not using `--target`, this has a consequence that Cargo will
+> share your dependencies with build scripts and proc macros. [`RUSTFLAGS`]
+> will be shared with every `rustc` invocation. With the `--target` flag,
+> build scripts and proc macros are built separately (for the host
+> architecture), and do not share `RUSTFLAGS`.
+
+Within the profile directory (such as `debug` or `release`), artifacts are
+placed into the following directories:
+
+Directory | Description
+----------|------------
+<code style="white-space: nowrap">target/debug/</code> | Contains the output of the package being built (the [binary executables] and [library targets]).
+<code style="white-space: nowrap">target/debug/examples/</code> | Contains [example targets].
+
+Some commands place their output in dedicated directories in the top level of
+the `target` directory:
+
+Directory | Description
+----------|------------
+<code style="white-space: nowrap">target/doc/</code> | Contains rustdoc documentation ([`cargo doc`]).
+<code style="white-space: nowrap">target/package/</code> | Contains the output of the [`cargo package`] and [`cargo publish`] commands.
+
+Cargo also creates several other directories and files needed for the build
+process. Their layout is considered internal to Cargo, and is subject to
+change. Some of these directories are:
+
+Directory | Description
+----------|------------
+<code style="white-space: nowrap">target/debug/deps/</code> | Dependencies and other artifacts.
+<code style="white-space: nowrap">target/debug/incremental/</code> | `rustc` [incremental output], a cache used to speed up subsequent builds.
+<code style="white-space: nowrap">target/debug/build/</code> | Output from [build scripts].
+
+### Dep-info files
+
+Next to each compiled artifact is a file called a "dep info" file with a `.d`
+suffix. This file is a Makefile-like syntax that indicates all of the file
+dependencies required to rebuild the artifact. These are intended to be used
+with external build systems so that they can detect if Cargo needs to be
+re-executed. The paths in the file are absolute by default. See the
+[`build.dep-info-basedir`] config option to use relative paths.
+
+```Makefile
+# Example dep-info file found in target/debug/foo.d
+/path/to/myproj/target/debug/foo: /path/to/myproj/src/lib.rs /path/to/myproj/src/main.rs
+```
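+
+As an illustration, an external `make`-based build could include this file so
+that Cargo is re-run only when the recorded sources change. This is a sketch,
+and it assumes [`build.dep-info-basedir`] has been set so that the recorded
+paths are relative to the project root:
+
+```Makefile
+# Rebuild the binary through Cargo whenever a recorded source file changes.
+# (The recipe line must be indented with a tab.)
+target/debug/foo:
+	cargo build
+
+# Pull in the prerequisites that Cargo recorded for target/debug/foo.
+-include target/debug/foo.d
+```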
+
+### Shared cache
+
+A third party tool, [sccache], can be used to share built dependencies across
+different workspaces.
+
+To set up `sccache`, install it with `cargo install sccache` and set the
+`RUSTC_WRAPPER` environment variable to `sccache` before invoking Cargo. If
+you use bash, it makes sense to add `export RUSTC_WRAPPER=sccache` to
+`.bashrc`. Alternatively, you can set [`build.rustc-wrapper`] in the [Cargo
+configuration][config]. Refer to the sccache documentation for more details.
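+
+For example, a minimal setup might look like this (a sketch; adapt the shell
+syntax to your environment):
+
+```console
+$ cargo install sccache
+$ export RUSTC_WRAPPER=sccache
+$ cargo build
+```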
+
+[`RUSTFLAGS`]: ../reference/config.md#buildrustflags
+[`build.dep-info-basedir`]: ../reference/config.md#builddep-info-basedir
+[`build.rustc-wrapper`]: ../reference/config.md#buildrustc-wrapper
+[`build.target-dir`]: ../reference/config.md#buildtarget-dir
+[`cargo doc`]: ../commands/cargo-doc.md
+[`cargo package`]: ../commands/cargo-package.md
+[`cargo publish`]: ../commands/cargo-publish.md
+[build scripts]: ../reference/build-scripts.md
+[config]: ../reference/config.md
+[def-workspace]: ../appendix/glossary.md#workspace '"workspace" (glossary entry)'
+[target]: ../appendix/glossary.md#target '"target" (glossary entry)'
+[environment variable]: ../reference/environment-variables.md
+[incremental output]: ../reference/profiles.md#incremental
+[sccache]: https://github.com/mozilla/sccache
+[profile]: ../reference/profiles.md
+[binary executables]: ../reference/cargo-targets.md#binaries
+[library targets]: ../reference/cargo-targets.md#library
+[example targets]: ../reference/cargo-targets.md#examples
diff --git a/src/tools/cargo/src/doc/src/guide/cargo-home.md b/src/tools/cargo/src/doc/src/guide/cargo-home.md
new file mode 100644
index 000000000..9e6740f10
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/guide/cargo-home.md
@@ -0,0 +1,93 @@
+## Cargo Home
+
+The "Cargo home" functions as a download and source cache.
+When building a [crate][def-crate], Cargo stores downloaded build dependencies in the Cargo home.
+You can alter the location of the Cargo home by setting the `CARGO_HOME` [environment variable][env].
+The [home](https://crates.io/crates/home) crate provides an API for getting this location if you need this information inside your Rust crate.
+By default, the Cargo home is located in `$HOME/.cargo/`.
+
+Please note that the internal structure of the Cargo home is not stabilized and may be subject to change at any time.
+
+The Cargo home consists of the following components:
+
+## Files:
+
+* `config.toml`
+ Cargo's global configuration file, see the [config entry in the reference][config].
+
+* `credentials.toml`
+ Private login credentials from [`cargo login`] in order to log in to a [registry][def-registry].
+
+* `.crates.toml`, `.crates2.json`
+ These hidden files contain [package][def-package] information of crates installed via [`cargo install`]. Do NOT edit by hand!
+
+## Directories:
+
+* `bin`
+The bin directory contains executables of crates that were installed via [`cargo install`] or [`rustup`](https://rust-lang.github.io/rustup/).
+To make these binaries accessible, add the path of the directory to your `$PATH` environment variable.
+
+* `git`
+  Git sources are stored here:
+
+ * `git/db`
+ When a crate depends on a git repository, Cargo clones the repo as a bare repo into this directory and updates it if necessary.
+
+ * `git/checkouts`
+ If a git source is used, the required commit of the repo is checked out from the bare repo inside `git/db` into this directory.
+ This provides the compiler with the actual files contained in the repo of the commit specified for that dependency.
+ Multiple checkouts of different commits of the same repo are possible.
+
+* `registry`
+ Packages and metadata of crate registries (such as [crates.io](https://crates.io/)) are located here.
+
+ * `registry/index`
+ The index is a bare git repository which contains the metadata (versions, dependencies etc) of all available crates of a registry.
+
+ * `registry/cache`
+ Downloaded dependencies are stored in the cache. The crates are compressed gzip archives named with a `.crate` extension.
+
+ * `registry/src`
+ If a downloaded `.crate` archive is required by a package, it is unpacked into `registry/src` folder where rustc will find the `.rs` files.
+
+
+## Caching the Cargo home in CI
+
+To avoid redownloading all crate dependencies during continuous integration, you can cache the `$CARGO_HOME` directory.
+However, caching the entire directory is often inefficient as it will contain downloaded sources twice.
+If we depend on a crate such as `serde 1.0.92` and cache the entire `$CARGO_HOME`, we would actually cache the sources twice: the `serde-1.0.92.crate` inside `registry/cache` and the extracted `.rs` files of serde inside `registry/src`.
+That can unnecessarily slow down the build as downloading, extracting, recompressing and reuploading the cache to the CI servers can take some time.
+
+If you wish to cache binaries installed with [`cargo install`], you need to cache the `bin/` folder and the `.crates.toml` and `.crates2.json` files.
+
+It should be sufficient to cache the following files and directories across builds:
+
+* `.crates.toml`
+* `.crates2.json`
+* `bin/`
+* `registry/index/`
+* `registry/cache/`
+* `git/db/`
+
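+As an illustration, on GitHub Actions this could be expressed with the
+`actions/cache` action (a sketch; the cache key below is an assumption, and
+other CI systems have their own equivalents):
+
+```yaml
+- uses: actions/cache@v3
+  with:
+    path: |
+      ~/.cargo/.crates.toml
+      ~/.cargo/.crates2.json
+      ~/.cargo/bin/
+      ~/.cargo/registry/index/
+      ~/.cargo/registry/cache/
+      ~/.cargo/git/db/
+    key: cargo-home-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}
+```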
+
+
+## Vendoring all dependencies of a project
+
+See the [`cargo vendor`] subcommand.
+
+
+
+## Clearing the cache
+
+In theory, you can always remove any part of the cache and Cargo will do its best to restore sources if a crate needs them, either by re-extracting an archive, checking out a bare repo, or simply re-downloading the sources from the web.
+
+Alternatively, the [cargo-cache](https://crates.io/crates/cargo-cache) crate provides a simple CLI tool to clear only selected parts of the cache or show the sizes of its components on the command line.
+
+[`cargo install`]: ../commands/cargo-install.md
+[`cargo login`]: ../commands/cargo-login.md
+[`cargo vendor`]: ../commands/cargo-vendor.md
+[config]: ../reference/config.md
+[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)'
+[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
+[def-registry]: ../appendix/glossary.md#registry '"registry" (glossary entry)'
+[env]: ../reference/environment-variables.md
diff --git a/src/tools/cargo/src/doc/src/guide/cargo-toml-vs-cargo-lock.md b/src/tools/cargo/src/doc/src/guide/cargo-toml-vs-cargo-lock.md
new file mode 100644
index 000000000..9b0426684
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/guide/cargo-toml-vs-cargo-lock.md
@@ -0,0 +1,107 @@
+## Cargo.toml vs Cargo.lock
+
+`Cargo.toml` and `Cargo.lock` serve two different purposes. Before we talk
+about them, here’s a summary:
+
+* `Cargo.toml` is about describing your dependencies in a broad sense, and is
+ written by you.
+* `Cargo.lock` contains exact information about your dependencies. It is
+ maintained by Cargo and should not be manually edited.
+
+If you’re building a non-end product, such as a Rust library that other Rust
+[packages][def-package] will depend on, put `Cargo.lock` in your
+`.gitignore`. If you’re building an end product, such as an executable like a
+command-line tool or an application, or a system library with a crate-type of
+`staticlib` or `cdylib`, check `Cargo.lock` into `git`. If you're curious
+about why that is, see
+["Why do binaries have `Cargo.lock` in version control, but not libraries?" in the
+FAQ](../faq.md#why-do-binaries-have-cargolock-in-version-control-but-not-libraries).
+
+Let’s dig in a little bit more.
+
+`Cargo.toml` is a [**manifest**][def-manifest] file in which we can specify a
+bunch of different metadata about our package. For example, we can say that we
+depend on another package:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+
+[dependencies]
+regex = { git = "https://github.com/rust-lang/regex.git" }
+```
+
+This package has a single dependency, on the `regex` library. We’ve stated in
+this case that we’re relying on a particular Git repository that lives on
+GitHub. Since we haven’t specified any other information, Cargo assumes that
+we intend to use the latest commit on the `master` branch to build our package.
+
+Sound good? Well, there’s one problem: If you build this package today, and
+then you send a copy to me, and I build this package tomorrow, something bad
+could happen. There could be more commits to `regex` in the meantime, and my
+build would include new commits while yours would not. Therefore, we would
+get different builds. This would be bad because we want reproducible builds.
+
+We could fix this problem by defining a specific `rev` value in our `Cargo.toml`,
+so Cargo could know exactly which revision to use when building the package:
+
+```toml
+[dependencies]
+regex = { git = "https://github.com/rust-lang/regex.git", rev = "9f9f693" }
+```
+
+Now our builds will be the same. But there’s a big drawback: now we have to
+manually think about SHA-1s every time we want to update our library. This is
+both tedious and error prone.
+
+Enter the `Cargo.lock`. Because of its existence, we don’t need to manually
+keep track of the exact revisions: Cargo will do it for us. When we have a
+manifest like this:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+
+[dependencies]
+regex = { git = "https://github.com/rust-lang/regex.git" }
+```
+
+Cargo will take the latest commit and write that information out into our
+`Cargo.lock` when we build for the first time. That file will look like this:
+
+```toml
+[[package]]
+name = "hello_world"
+version = "0.1.0"
+dependencies = [
+ "regex 1.5.0 (git+https://github.com/rust-lang/regex.git#9f9f693768c584971a4d53bc3c586c33ed3a6831)",
+]
+
+[[package]]
+name = "regex"
+version = "1.5.0"
+source = "git+https://github.com/rust-lang/regex.git#9f9f693768c584971a4d53bc3c586c33ed3a6831"
+```
+
+You can see that there’s a lot more information here, including the exact
+revision we used to build. Now when you give your package to someone else,
+they’ll use the exact same SHA, even though we didn’t specify it in our
+`Cargo.toml`.
+
+When we’re ready to opt in to a new version of the library, Cargo can
+re-calculate the dependencies and update things for us:
+
+```console
+$ cargo update # updates all dependencies
+$ cargo update -p regex # updates just “regex”
+```
+
+This will write out a new `Cargo.lock` with the new version information. Note
+that the argument to `cargo update` is actually a
+[Package ID Specification](../reference/pkgid-spec.md) and `regex` is just a
+short specification.
+
+[def-manifest]: ../appendix/glossary.md#manifest '"manifest" (glossary entry)'
+[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
diff --git a/src/tools/cargo/src/doc/src/guide/continuous-integration.md b/src/tools/cargo/src/doc/src/guide/continuous-integration.md
new file mode 100644
index 000000000..38d8ae20e
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/guide/continuous-integration.md
@@ -0,0 +1,125 @@
+## Continuous Integration
+
+### Travis CI
+
+To test your [package][def-package] on Travis CI, here is a sample
+`.travis.yml` file:
+
+```yaml
+language: rust
+rust:
+ - stable
+ - beta
+ - nightly
+matrix:
+ allow_failures:
+ - rust: nightly
+```
+
+This will test all three release channels, but any breakage in nightly
+will not fail your overall build. Please see the [Travis CI Rust
+documentation](https://docs.travis-ci.com/user/languages/rust/) for more
+information.
+
+### GitHub Actions
+
+To test your package on GitHub Actions, here is a sample `.github/workflows/ci.yml` file:
+
+```yaml
+name: Cargo Build & Test
+
+on:
+ push:
+ pull_request:
+
+env:
+ CARGO_TERM_COLOR: always
+
+jobs:
+ build_and_test:
+ name: Rust project - latest
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ toolchain:
+ - stable
+ - beta
+ - nightly
+ steps:
+ - uses: actions/checkout@v3
+ - run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }}
+ - run: cargo build --verbose
+ - run: cargo test --verbose
+
+```
+
+This will test all three release channels (note that a failure in any toolchain version will fail the entire job). You can also click `"Actions" > "new workflow"` in the GitHub UI and select Rust to add the [default configuration](https://github.com/actions/starter-workflows/blob/main/ci/rust.yml) to your repo. See the [GitHub Actions documentation](https://docs.github.com/en/actions) for more information.
+
+### GitLab CI
+
+To test your package on GitLab CI, here is a sample `.gitlab-ci.yml` file:
+
+```yaml
+stages:
+ - build
+
+rust-latest:
+ stage: build
+ image: rust:latest
+ script:
+ - cargo build --verbose
+ - cargo test --verbose
+
+rust-nightly:
+ stage: build
+ image: rustlang/rust:nightly
+ script:
+ - cargo build --verbose
+ - cargo test --verbose
+ allow_failure: true
+```
+
+This will test on the stable channel and nightly channel, but any
+breakage in nightly will not fail your overall build. Please see the
+[GitLab CI documentation](https://docs.gitlab.com/ce/ci/yaml/index.html) for more
+information.
+
+### builds.sr.ht
+
+To test your package on sr.ht, here is a sample `.build.yml` file.
+Be sure to change `<your repo>` and `<your project>` to the repo to clone and
+the directory where it was cloned.
+
+```yaml
+image: archlinux
+packages:
+ - rustup
+sources:
+ - <your repo>
+tasks:
+ - setup: |
+ rustup toolchain install nightly stable
+ cd <your project>/
+ rustup run stable cargo fetch
+ - stable: |
+ rustup default stable
+ cd <your project>/
+ cargo build --verbose
+ cargo test --verbose
+ - nightly: |
+ rustup default nightly
+ cd <your project>/
+ cargo build --verbose ||:
+ cargo test --verbose ||:
+ - docs: |
+ cd <your project>/
+ rustup run stable cargo doc --no-deps
+ rustup run nightly cargo doc --no-deps ||:
+```
+
+This will test and build documentation on the stable channel and nightly
+channel, but any breakage in nightly will not fail your overall build. Please
+see the [builds.sr.ht documentation](https://man.sr.ht/builds.sr.ht/) for more
+information.
+
+[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
diff --git a/src/tools/cargo/src/doc/src/guide/creating-a-new-project.md b/src/tools/cargo/src/doc/src/guide/creating-a-new-project.md
new file mode 100644
index 000000000..e0daefc6b
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/guide/creating-a-new-project.md
@@ -0,0 +1,97 @@
+## Creating a New Package
+
+To start a new [package][def-package] with Cargo, use `cargo new`:
+
+```console
+$ cargo new hello_world --bin
+```
+
+We’re passing `--bin` because we’re making a binary program: if we
+were making a library, we’d pass `--lib`. This also initializes a new `git`
+repository by default. If you don't want it to do that, pass `--vcs none`.
+
+Let’s check out what Cargo has generated for us:
+
+```console
+$ cd hello_world
+$ tree .
+.
+├── Cargo.toml
+└── src
+ └── main.rs
+
+1 directory, 2 files
+```
+
+Let’s take a closer look at `Cargo.toml`:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+
+```
+
+This is called a [***manifest***][def-manifest], and it contains all of the
+metadata that Cargo needs to compile your package. This file is written in the
+[TOML] format (pronounced /tɑməl/).
+
+Here’s what’s in `src/main.rs`:
+
+```rust
+fn main() {
+ println!("Hello, world!");
+}
+```
+
+Cargo generated a “hello world” program for us, otherwise known as a
+[*binary crate*][def-crate]. Let’s compile it:
+
+```console
+$ cargo build
+ Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+And then run it:
+
+```console
+$ ./target/debug/hello_world
+Hello, world!
+```
+
+We can also use `cargo run` to compile and then run it, all in one step (You
+won't see the `Compiling` line if you have not made any changes since you last
+compiled):
+
+```console
+$ cargo run
+ Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+ Running `target/debug/hello_world`
+Hello, world!
+```
+
+You’ll now notice a new file, `Cargo.lock`. It contains information about our
+dependencies. Since we don’t have any yet, it’s not very interesting.
+
+Once you’re ready for release, you can use `cargo build --release` to compile
+your files with optimizations turned on:
+
+```console
+$ cargo build --release
+ Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+`cargo build --release` puts the resulting binary in `target/release` instead of
+`target/debug`.
+
+Compiling in debug mode is the default for development. Compilation time is
+shorter since the compiler doesn't do optimizations, but the code will run
+slower. Release mode takes longer to compile, but the code will run faster.
+
+[TOML]: https://toml.io/
+[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)'
+[def-manifest]: ../appendix/glossary.md#manifest '"manifest" (glossary entry)'
+[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
diff --git a/src/tools/cargo/src/doc/src/guide/dependencies.md b/src/tools/cargo/src/doc/src/guide/dependencies.md
new file mode 100644
index 000000000..94419f15b
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/guide/dependencies.md
@@ -0,0 +1,93 @@
+## Dependencies
+
+[crates.io] is the Rust community's central [*package registry*][def-package-registry]
+that serves as a location to discover and download
+[packages][def-package]. `cargo` is configured to use it by default to find
+requested packages.
+
+To depend on a library hosted on [crates.io], add it to your `Cargo.toml`.
+
+[crates.io]: https://crates.io/
+
+### Adding a dependency
+
+If your `Cargo.toml` doesn't already have a `[dependencies]` section, add
+that, then list the [crate][def-crate] name and version that you would like to
+use. This example adds a dependency of the `time` crate:
+
+```toml
+[dependencies]
+time = "0.1.12"
+```
+
+The version string is a [SemVer] version requirement. The [specifying
+dependencies](../reference/specifying-dependencies.md) docs have more information about
+the options you have here.
+
+[SemVer]: https://semver.org
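+
+For example, a bare version like `"0.1.12"` is a caret requirement, so the
+following two lines mean the same thing (shown here purely as an illustration):
+
+```toml
+[dependencies]
+time = "0.1.12"    # the default (caret) requirement
+# time = "^0.1.12" # the same requirement written explicitly
+```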
+
+If we also wanted to add a dependency on the `regex` crate, we would not need
+to add a separate `[dependencies]` section for each crate; all dependencies
+are listed together under that one section. Here's what your whole
+`Cargo.toml` file would look like with dependencies on the `time` and `regex`
+crates:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+time = "0.1.12"
+regex = "0.1.41"
+```
+
+Re-run `cargo build`, and Cargo will fetch the new dependencies and all of
+their dependencies, compile them all, and update the `Cargo.lock`:
+
+```console
+$ cargo build
+ Updating crates.io index
+ Downloading memchr v0.1.5
+ Downloading libc v0.1.10
+ Downloading regex-syntax v0.2.1
+ Downloading memchr v0.1.5
+ Downloading aho-corasick v0.3.0
+ Downloading regex v0.1.41
+ Compiling memchr v0.1.5
+ Compiling libc v0.1.10
+ Compiling regex-syntax v0.2.1
+ Compiling memchr v0.1.5
+ Compiling aho-corasick v0.3.0
+ Compiling regex v0.1.41
+ Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+Our `Cargo.lock` contains the exact information about which revision of all of
+these dependencies we used.
+
+Now, if `regex` gets updated, we will still build with the same revision until
+we choose to `cargo update`.
+
+You can now use the `regex` library in `main.rs`.
+
+```rust,ignore
+use regex::Regex;
+
+fn main() {
+ let re = Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap();
+ println!("Did our date match? {}", re.is_match("2014-01-01"));
+}
+```
+
+Running it will show:
+
+```console
+$ cargo run
+ Running `target/hello_world`
+Did our date match? true
+```
+
+[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)'
+[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
+[def-package-registry]: ../appendix/glossary.md#package-registry '"package-registry" (glossary entry)'
diff --git a/src/tools/cargo/src/doc/src/guide/index.md b/src/tools/cargo/src/doc/src/guide/index.md
new file mode 100644
index 000000000..fe6d86a39
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/guide/index.md
@@ -0,0 +1,15 @@
+## Cargo Guide
+
+This guide will give you all that you need to know about how to use Cargo to
+develop Rust packages.
+
+* [Why Cargo Exists](why-cargo-exists.md)
+* [Creating a New Package](creating-a-new-project.md)
+* [Working on an Existing Cargo Package](working-on-an-existing-project.md)
+* [Dependencies](dependencies.md)
+* [Package Layout](project-layout.md)
+* [Cargo.toml vs Cargo.lock](cargo-toml-vs-cargo-lock.md)
+* [Tests](tests.md)
+* [Continuous Integration](continuous-integration.md)
+* [Cargo Home](cargo-home.md)
+* [Build Cache](build-cache.md)
diff --git a/src/tools/cargo/src/doc/src/guide/project-layout.md b/src/tools/cargo/src/doc/src/guide/project-layout.md
new file mode 100644
index 000000000..a3ce3f8a7
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/guide/project-layout.md
@@ -0,0 +1,61 @@
+## Package Layout
+
+Cargo uses conventions for file placement to make it easy to dive into a new
+Cargo [package][def-package]:
+
+```text
+.
+├── Cargo.lock
+├── Cargo.toml
+├── src/
+│   ├── lib.rs
+│   ├── main.rs
+│   └── bin/
+│       ├── named-executable.rs
+│       ├── another-executable.rs
+│       └── multi-file-executable/
+│           ├── main.rs
+│           └── some_module.rs
+├── benches/
+│   ├── large-input.rs
+│   └── multi-file-bench/
+│       ├── main.rs
+│       └── bench_module.rs
+├── examples/
+│   ├── simple.rs
+│   └── multi-file-example/
+│       ├── main.rs
+│       └── ex_module.rs
+└── tests/
+    ├── some-integration-tests.rs
+    └── multi-file-test/
+        ├── main.rs
+        └── test_module.rs
+```
+
+* `Cargo.toml` and `Cargo.lock` are stored in the root of your package (*package
+ root*).
+* Source code goes in the `src` directory.
+* The default library file is `src/lib.rs`.
+* The default executable file is `src/main.rs`.
+ * Other executables can be placed in `src/bin/`.
+* Benchmarks go in the `benches` directory.
+* Examples go in the `examples` directory.
+* Integration tests go in the `tests` directory.
+
+If a binary, example, bench, or integration test consists of multiple source
+files, place a `main.rs` file along with the extra [*modules*][def-module]
+within a subdirectory of the `src/bin`, `examples`, `benches`, or `tests`
+directory. The name of the executable will be the directory name.
+
+You can learn more about Rust's module system in [the book][book-modules].
+
+See [Configuring a target] for more details on manually configuring targets.
+See [Target auto-discovery] for more information on controlling how Cargo
+automatically infers target names.
+
+[book-modules]: ../../book/ch07-00-managing-growing-projects-with-packages-crates-and-modules.html
+[Configuring a target]: ../reference/cargo-targets.md#configuring-a-target
+[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
+[def-module]: ../appendix/glossary.md#module '"module" (glossary entry)'
+[Target auto-discovery]: ../reference/cargo-targets.md#target-auto-discovery
diff --git a/src/tools/cargo/src/doc/src/guide/tests.md b/src/tools/cargo/src/doc/src/guide/tests.md
new file mode 100644
index 000000000..402e8e35c
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/guide/tests.md
@@ -0,0 +1,44 @@
+## Tests
+
+Cargo can run your tests with the `cargo test` command. Cargo looks for tests
+to run in two places: in each of your `src` files and any tests in `tests/`.
+Tests in your `src` files should be unit tests and [documentation tests].
+Tests in `tests/` should be integration-style tests. As such, you’ll need to
+import your crates into the files in `tests`.
+
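+For example, an integration test for the `hello_world` package from the earlier
+chapters might look like the following. This is only an illustrative sketch: it
+assumes the package also has a library target (`src/lib.rs`) exposing a
+`message` function, which the guide's example does not actually define.
+
+```rust,ignore
+// tests/smoke.rs
+
+// Integration tests are compiled as separate crates, so the package's library
+// must be brought in explicitly. (Assumes `src/lib.rs` defines
+// `pub fn message() -> &'static str`.)
+use hello_world::message;
+
+#[test]
+fn message_is_not_empty() {
+    assert!(!message().is_empty());
+}
+```
+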
+Here's an example of running `cargo test` in our [package][def-package], which
+currently has no tests:
+
+```console
+$ cargo test
+ Compiling regex v1.5.0 (https://github.com/rust-lang/regex.git#9f9f693)
+ Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+ Running target/test/hello_world-9c2b65bbb79eabce
+
+running 0 tests
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
+```
+
+If our package had tests, we would see more output with the correct number of
+tests.
+
+You can also run a specific test by passing a filter:
+
+```console
+$ cargo test foo
+```
+
+This will run any test with `foo` in its name.
+
+`cargo test` runs additional checks as well. It will compile any examples
+you’ve included to ensure they still compile. It also runs documentation
+tests to ensure your code samples from documentation comments compile.
+Please see the [testing guide][testing] in the Rust documentation for a general
+view of writing and organizing tests. See [Cargo Targets: Tests] to learn more
+about different styles of tests in Cargo.
+
+[documentation tests]: ../../rustdoc/write-documentation/documentation-tests.html
+[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
+[testing]: ../../book/ch11-00-testing.html
+[Cargo Targets: Tests]: ../reference/cargo-targets.html#tests
diff --git a/src/tools/cargo/src/doc/src/guide/why-cargo-exists.md b/src/tools/cargo/src/doc/src/guide/why-cargo-exists.md
new file mode 100644
index 000000000..02b222f01
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/guide/why-cargo-exists.md
@@ -0,0 +1,65 @@
+## Why Cargo Exists
+
+### Preliminaries
+
+In Rust, as you may know, a library or executable program is called a
+[*crate*][def-crate]. Crates are compiled using the Rust compiler,
+`rustc`. When starting with Rust, the first source code most people encounter
+is that of the venerable “hello world” program, which they compile by invoking
+`rustc` directly:
+
+```console
+$ rustc hello.rs
+$ ./hello
+Hello, world!
+```
+
+Note that the above command required that we specify the file name
+explicitly. If we were to directly use `rustc` to compile a different program,
+a different command line invocation would be required. If we needed to specify
+any specific compiler flags or include external dependencies, then the
+needed command would be even more specific (and elaborate).
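+
+For instance, an invocation that turns on optimizations and pulls in a single
+pre-compiled external library might look something like the following (the
+paths and library here are purely hypothetical, shown only to illustrate the
+growing complexity):
+
+```console
+$ rustc hello.rs --edition 2021 -O -L deps/ --extern regex=deps/libregex.rlib
+```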
+
+Furthermore, most non-trivial programs will likely have dependencies on
+external libraries, and will therefore also depend transitively on *their*
+dependencies. Obtaining the correct versions of all the necessary dependencies
+and keeping them up to date would be laborious and error-prone if done by
+hand.
+
+Rather than work only with crates and `rustc`, we can avoid the manual tedium
+involved with performing the above tasks by introducing a higher-level
+["*package*"][def-package] abstraction and by using a
+[*package manager*][def-package-manager].
+
+### Enter: Cargo
+
+*Cargo* is the Rust package manager. It is a tool that allows Rust
+[*packages*][def-package] to declare their various dependencies and ensure
+that you’ll always get a repeatable build.
+
+To accomplish this goal, Cargo does four things:
+
+* Introduces two metadata files with various bits of package information.
+* Fetches and builds your package’s dependencies.
+* Invokes `rustc` or another build tool with the correct parameters to build
+ your package.
+* Introduces conventions to make working with Rust packages easier.
+
+To a large extent, Cargo normalizes the commands needed to build a given
+program or library; this is one aspect of the above-mentioned conventions. As
+we show later, the same command can be used to build different
+[*artifacts*][def-artifact], regardless of their names. Rather than invoke
+`rustc` directly, we can instead invoke something generic such as `cargo
+build` and let cargo worry about constructing the correct `rustc`
+invocation. Furthermore, Cargo will automatically fetch from a
+[*registry*][def-registry] any dependencies we have defined for our artifact,
+and arrange for them to be incorporated into our build as needed.
+
+It is only a slight exaggeration to say that once you know how to build one
+Cargo-based project, you know how to build *all* of them.
+
+[def-artifact]: ../appendix/glossary.md#artifact '"artifact" (glossary entry)'
+[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)'
+[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
+[def-package-manager]: ../appendix/glossary.md#package-manager '"package manager" (glossary entry)'
+[def-registry]: ../appendix/glossary.md#registry '"registry" (glossary entry)'
diff --git a/src/tools/cargo/src/doc/src/guide/working-on-an-existing-project.md b/src/tools/cargo/src/doc/src/guide/working-on-an-existing-project.md
new file mode 100644
index 000000000..f9c26cd90
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/guide/working-on-an-existing-project.md
@@ -0,0 +1,24 @@
+## Working on an Existing Cargo Package
+
+If you download an existing [package][def-package] that uses Cargo, it’s
+really easy to get going.
+
+First, get the package from somewhere. In this example, we’ll use `regex`
+cloned from its repository on GitHub:
+
+```console
+$ git clone https://github.com/rust-lang/regex.git
+$ cd regex
+```
+
+To build, use `cargo build`:
+
+```console
+$ cargo build
+ Compiling regex v1.5.0 (file:///path/to/package/regex)
+```
+
+This will fetch all of the dependencies and then build them, along with the
+package.
+
+[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
diff --git a/src/tools/cargo/src/doc/src/images/Cargo-Logo-Small.png b/src/tools/cargo/src/doc/src/images/Cargo-Logo-Small.png
new file mode 100644
index 000000000..e3a99208c
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/images/Cargo-Logo-Small.png
Binary files differ
diff --git a/src/tools/cargo/src/doc/src/images/auth-level-acl.png b/src/tools/cargo/src/doc/src/images/auth-level-acl.png
new file mode 100644
index 000000000..e7bc25180
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/images/auth-level-acl.png
Binary files differ
diff --git a/src/tools/cargo/src/doc/src/images/build-info.png b/src/tools/cargo/src/doc/src/images/build-info.png
new file mode 100644
index 000000000..961b3e2f5
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/images/build-info.png
Binary files differ
diff --git a/src/tools/cargo/src/doc/src/images/build-unit-time.png b/src/tools/cargo/src/doc/src/images/build-unit-time.png
new file mode 100644
index 000000000..943084c5b
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/images/build-unit-time.png
Binary files differ
diff --git a/src/tools/cargo/src/doc/src/images/cargo-concurrency-over-time.png b/src/tools/cargo/src/doc/src/images/cargo-concurrency-over-time.png
new file mode 100644
index 000000000..274a6167d
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/images/cargo-concurrency-over-time.png
Binary files differ
diff --git a/src/tools/cargo/src/doc/src/images/org-level-acl.png b/src/tools/cargo/src/doc/src/images/org-level-acl.png
new file mode 100644
index 000000000..ed5aa882a
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/images/org-level-acl.png
Binary files differ
diff --git a/src/tools/cargo/src/doc/src/images/winapi-features.svg b/src/tools/cargo/src/doc/src/images/winapi-features.svg
new file mode 100644
index 000000000..32327ad1d
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/images/winapi-features.svg
@@ -0,0 +1,3 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg xmlns="http://www.w3.org/2000/svg" style="background-color: rgb(255, 255, 255);" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" width="501px" height="151px" viewBox="-0.5 -0.5 501 151"><defs/><g><rect x="170" y="30" width="90" height="30" rx="4.5" ry="4.5" fill="#ffffff" stroke="#000000" pointer-events="all"/><g transform="translate(-0.5 -0.5)"><switch><foreignObject style="overflow: visible; text-align: left;" pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 88px; height: 1px; padding-top: 45px; margin-left: 171px;"><div style="box-sizing: border-box; font-size: 0; text-align: center; "><div style="display: inline-block; font-size: 12px; font-family: Helvetica; color: #000000; line-height: 1.2; pointer-events: all; white-space: normal; word-wrap: normal; "><font style="font-size: 16px"><b>foo</b></font></div></div></div></foreignObject><text x="215" y="49" fill="#000000" font-family="Helvetica" font-size="12px" text-anchor="middle">foo</text></switch></g><rect x="170" y="90" width="90" height="30" rx="4.5" ry="4.5" fill="#ffffff" stroke="#000000" pointer-events="all"/><g transform="translate(-0.5 -0.5)"><switch><foreignObject style="overflow: visible; text-align: left;" pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 88px; height: 1px; padding-top: 105px; margin-left: 171px;"><div style="box-sizing: border-box; font-size: 0; text-align: center; "><div style="display: inline-block; font-size: 12px; font-family: Helvetica; color: #000000; line-height: 1.2; pointer-events: all; white-space: normal; word-wrap: normal; "><font style="font-size: 16px"><b>bar</b></font></div></div></div></foreignObject><text x="215" y="109" fill="#000000" font-family="Helvetica" font-size="12px" text-anchor="middle">bar</text></switch></g><rect x="400" y="10" width="90" height="130" rx="13.5" ry="13.5" fill="#ffffff" stroke="#000000" pointer-events="all"/><g transform="translate(-0.5 -0.5)"><switch><foreignObject style="overflow: visible; text-align: left;" pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 88px; height: 1px; padding-top: 75px; margin-left: 401px;"><div style="box-sizing: border-box; font-size: 0; text-align: center; "><div style="display: inline-block; font-size: 12px; font-family: Helvetica; color: #000000; line-height: 1.2; pointer-events: all; white-space: normal; word-wrap: normal; "><div style="font-size: 16px" align="center"><b>winapi</b></div><div align="left"><br /></div><div align="left">features:</div><div align="left">• fileapi<br />• handleapi<br />• std<div>• winnt<br /></div></div></div></div></div></foreignObject><text x="445" y="79" fill="#000000" font-family="Helvetica" font-size="12px" text-anchor="middle">winapi...</text></switch></g><path d="M 260 45 L 360 45 Q 370 45 377.07 52.07 L 395.5 70.5" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 399.21 74.21 L 391.78 71.73 L 395.5 70.5 L 396.73 66.78 Z" fill="#000000" stroke="#000000" 
stroke-miterlimit="10" pointer-events="all"/><g transform="translate(-0.5 -0.5)"><switch><foreignObject style="overflow: visible; text-align: left;" pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 46px; margin-left: 319px;"><div style="box-sizing: border-box; font-size: 0; text-align: center; "><div style="display: inline-block; font-size: 11px; font-family: Helvetica; color: #000000; line-height: 1.2; pointer-events: all; background-color: #ffffff; white-space: nowrap; ">fileapi, handleapi</div></div></div></foreignObject><text x="319" y="49" fill="#000000" font-family="Helvetica" font-size="11px" text-anchor="middle">fileapi, handleapi</text></switch></g><path d="M 260 105 L 360 105 Q 370 105 377.07 97.93 L 395.5 79.5" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 399.21 75.79 L 396.73 83.22 L 395.5 79.5 L 391.78 78.27 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="all"/><g transform="translate(-0.5 -0.5)"><switch><foreignObject style="overflow: visible; text-align: left;" pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 106px; margin-left: 319px;"><div style="box-sizing: border-box; font-size: 0; text-align: center; "><div style="display: inline-block; font-size: 11px; font-family: Helvetica; color: #000000; line-height: 1.2; pointer-events: all; background-color: #ffffff; white-space: nowrap; ">std, winnt</div></div></div></foreignObject><text x="319" y="109" fill="#000000" font-family="Helvetica" font-size="11px" text-anchor="middle">std, winnt</text></switch></g><rect x="10" y="55" width="110" height="35" rx="5.25" ry="5.25" fill="#ffffff" stroke="#000000" pointer-events="all"/><g transform="translate(-0.5 -0.5)"><switch><foreignObject style="overflow: visible; text-align: left;" pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 108px; height: 1px; padding-top: 73px; margin-left: 11px;"><div style="box-sizing: border-box; font-size: 0; text-align: center; "><div style="display: inline-block; font-size: 12px; font-family: Helvetica; color: #000000; line-height: 1.2; pointer-events: all; white-space: normal; word-wrap: normal; "><div style="font-size: 16px"><b>my-package</b></div></div></div></div></foreignObject><text x="65" y="76" fill="#000000" font-family="Helvetica" font-size="12px" text-anchor="middle">my-package</text></switch></g><path d="M 120 72.5 L 134.12 53.09 Q 140 45 150 45 L 163.63 45" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 168.88 45 L 161.88 48.5 L 163.63 45 L 161.88 41.5 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="all"/><path d="M 120 72.5 L 134.76 96.48 Q 140 105 150 105 L 163.63 105" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 168.88 105 L 161.88 108.5 L 163.63 105 L 161.88 101.5 Z" fill="#000000" stroke="#000000" 
stroke-miterlimit="10" pointer-events="all"/></g><switch><g requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"/><a transform="translate(0,-5)" xlink:href="https://www.diagrams.net/doc/faq/svg-export-text-problems" target="_blank"><text text-anchor="middle" font-size="10px" x="50%" y="100%">Viewer does not support full SVG 1.1</text></a></switch></svg> \ No newline at end of file
diff --git a/src/tools/cargo/src/doc/src/index.md b/src/tools/cargo/src/doc/src/index.md
new file mode 100644
index 000000000..223600c8b
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/index.md
@@ -0,0 +1,49 @@
+# The Cargo Book
+
+![Cargo Logo](images/Cargo-Logo-Small.png)
+
+Cargo is the [Rust] [*package manager*][def-package-manager]. Cargo downloads your Rust [package][def-package]'s
+dependencies, compiles your packages, makes distributable packages, and uploads them to
+[crates.io], the Rust community’s [*package registry*][def-package-registry]. You can contribute
+to this book on [GitHub].
+
+
+### Sections
+
+**[Getting Started](getting-started/index.md)**
+
+To get started with Cargo, install Cargo (and Rust) and set up your first
+[*crate*][def-crate].
+
+**[Cargo Guide](guide/index.md)**
+
+The guide will give you all you need to know about how to use Cargo to develop
+Rust packages.
+
+**[Cargo Reference](reference/index.md)**
+
+The reference covers the details of various areas of Cargo.
+
+**[Cargo Commands](commands/index.md)**
+
+The commands will let you interact with Cargo using its command-line interface.
+
+**[Frequently Asked Questions](faq.md)**
+
+**Appendices:**
+* [Glossary](appendix/glossary.md)
+* [Git Authentication](appendix/git-authentication.md)
+
+**Other Documentation:**
+* [Changelog](https://github.com/rust-lang/cargo/blob/master/CHANGELOG.md)
+ --- Detailed notes about changes in Cargo in each release.
+* [Rust documentation website](https://doc.rust-lang.org/) --- Links to official
+ Rust documentation and tools.
+
+[def-crate]: ./appendix/glossary.md#crate '"crate" (glossary entry)'
+[def-package]: ./appendix/glossary.md#package '"package" (glossary entry)'
+[def-package-manager]: ./appendix/glossary.md#package-manager '"package manager" (glossary entry)'
+[def-package-registry]: ./appendix/glossary.md#package-registry '"package registry" (glossary entry)'
+[rust]: https://www.rust-lang.org/
+[crates.io]: https://crates.io/
+[GitHub]: https://github.com/rust-lang/cargo/tree/master/src/doc
diff --git a/src/tools/cargo/src/doc/src/reference/build-script-examples.md b/src/tools/cargo/src/doc/src/reference/build-script-examples.md
new file mode 100644
index 000000000..5e8fae5bb
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/build-script-examples.md
@@ -0,0 +1,506 @@
+## Build Script Examples
+
+The following sections illustrate some examples of writing build scripts.
+
+Some common build script functionality can be found via crates on [crates.io].
+Check out the [`build-dependencies`
+keyword](https://crates.io/keywords/build-dependencies) to see what is
+available. The following is a sample of some popular crates[^†]:
+
+* [`bindgen`](https://crates.io/crates/bindgen) --- Automatically generate Rust
+ FFI bindings to C libraries.
+* [`cc`](https://crates.io/crates/cc) --- Compiles C/C++/assembly.
+* [`pkg-config`](https://crates.io/crates/pkg-config) --- Detect system
+ libraries using the `pkg-config` utility.
+* [`cmake`](https://crates.io/crates/cmake) --- Runs the `cmake` build tool to build a native library.
+* [`autocfg`](https://crates.io/crates/autocfg),
+ [`rustc_version`](https://crates.io/crates/rustc_version),
+ [`version_check`](https://crates.io/crates/version_check) --- These crates
+ provide ways to implement conditional compilation based on the current
+ `rustc` such as the version of the compiler.
+
+[^†]: This list is not an endorsement. Evaluate your dependencies to see which
+is right for your project.
+
+### Code generation
+
+Some Cargo packages need to have code generated just before they are compiled
+for various reasons. Here we’ll walk through a simple example in which the
+build script generates a file of code that the package then includes and calls.
+
+First, let’s take a look at the directory structure of this package:
+
+```text
+.
+├── Cargo.toml
+├── build.rs
+└── src
+ └── main.rs
+
+1 directory, 3 files
+```
+
+Here we can see that we have a `build.rs` build script and our binary in
+`main.rs`. This package has a basic manifest:
+
+```toml
+# Cargo.toml
+
+[package]
+name = "hello-from-generated-code"
+version = "0.1.0"
+edition = "2021"
+```
+
+Let’s see what’s inside the build script:
+
+```rust,no_run
+// build.rs
+
+use std::env;
+use std::fs;
+use std::path::Path;
+
+fn main() {
+ let out_dir = env::var_os("OUT_DIR").unwrap();
+ let dest_path = Path::new(&out_dir).join("hello.rs");
+ fs::write(
+ &dest_path,
+ "pub fn message() -> &'static str {
+ \"Hello, World!\"
+ }
+ "
+ ).unwrap();
+ println!("cargo:rerun-if-changed=build.rs");
+}
+```
+
+There’s a couple of points of note here:
+
+* The script uses the `OUT_DIR` environment variable to discover where the
+ output files should be located. It can use the process’ current working
+ directory to find where the input files should be located, but in this case we
+ don’t have any input files.
+* In general, build scripts should not modify any files outside of `OUT_DIR`.
+  It may seem fine at first blush, but it does cause problems when you use
+  such a crate as a dependency, because there's an *implicit* invariant that
+  sources in `.cargo/registry` should be immutable. `cargo` won't allow such
+  scripts when packaging.
+* This script is relatively simple as it just writes out a small generated file.
+ One could imagine that other more fanciful operations could take place such as
+ generating a Rust module from a C header file or another language definition,
+ for example.
+* The [`rerun-if-changed` instruction](build-scripts.md#rerun-if-changed)
+ tells Cargo that the build script only needs to re-run if the build script
+ itself changes. Without this line, Cargo will automatically run the build
+ script if any file in the package changes. If your code generation uses some
+ input files, this is where you would print a list of each of those files.
+
+Next, let’s peek at the program itself:
+
+```rust,ignore
+// src/main.rs
+
+include!(concat!(env!("OUT_DIR"), "/hello.rs"));
+
+fn main() {
+ println!("{}", message());
+}
+```
+
+This is where the real magic happens. The program uses the rustc-defined
+[`include!` macro][include-macro] in combination with the
+[`concat!`][concat-macro] and [`env!`][env-macro] macros to include the
+generated file (`hello.rs`) into the crate’s compilation.
+
+Using the structure shown here, crates can include any number of generated files
+from the build script itself.
+
+[include-macro]: ../../std/macro.include.html
+[concat-macro]: ../../std/macro.concat.html
+[env-macro]: ../../std/macro.env.html
+
+### Building a native library
+
+Sometimes it’s necessary to build some native C or C++ code as part of a
+package. This is another excellent use case of leveraging the build script to
+build a native library before the Rust crate itself. As an example, we’ll create
+a Rust library which calls into C to print “Hello, World!”.
+
+Like above, let’s first take a look at the package layout:
+
+```text
+.
+├── Cargo.toml
+├── build.rs
+└── src
+ ├── hello.c
+ └── main.rs
+
+1 directory, 4 files
+```
+
+Pretty similar to before! Next, the manifest:
+
+```toml
+# Cargo.toml
+
+[package]
+name = "hello-world-from-c"
+version = "0.1.0"
+edition = "2021"
+```
+
+For now we’re not going to use any build dependencies, so let’s take a look at
+the build script now:
+
+```rust,no_run
+// build.rs
+
+use std::process::Command;
+use std::env;
+use std::path::Path;
+
+fn main() {
+ let out_dir = env::var("OUT_DIR").unwrap();
+
+ // Note that there are a number of downsides to this approach, the comments
+ // below detail how to improve the portability of these commands.
+ Command::new("gcc").args(&["src/hello.c", "-c", "-fPIC", "-o"])
+ .arg(&format!("{}/hello.o", out_dir))
+ .status().unwrap();
+ Command::new("ar").args(&["crus", "libhello.a", "hello.o"])
+ .current_dir(&Path::new(&out_dir))
+ .status().unwrap();
+
+ println!("cargo:rustc-link-search=native={}", out_dir);
+ println!("cargo:rustc-link-lib=static=hello");
+ println!("cargo:rerun-if-changed=src/hello.c");
+}
+```
+
+This build script starts out by compiling our C file into an object file (by
+invoking `gcc`) and then converting this object file into a static library (by
+invoking `ar`). The final step is to feed this information back to Cargo,
+telling it that our output is in `out_dir` and that the compiler should link
+the crate to `libhello.a` statically via the `-l static=hello` flag.
+
+Note that there are a number of drawbacks to this hard-coded approach:
+
+* The `gcc` command itself is not portable across platforms. For example, it’s
+  unlikely that Windows platforms have `gcc`, and not all Unix platforms have
+  `gcc` either. The `ar` command is in a similar situation.
+* These commands do not take cross-compilation into account. If we’re
+  cross-compiling for a platform such as Android, it’s unlikely that `gcc` will
+  produce an ARM executable.
+
+Not to fear, though, this is where a `build-dependencies` entry would help!
+The Cargo ecosystem has a number of packages to make this sort of task much
+easier, portable, and standardized. Let's try the [`cc`
+crate](https://crates.io/crates/cc) from [crates.io]. First, add it to the
+`build-dependencies` in `Cargo.toml`:
+
+```toml
+[build-dependencies]
+cc = "1.0"
+```
+
+And rewrite the build script to use this crate:
+
+```rust,ignore
+// build.rs
+
+fn main() {
+ cc::Build::new()
+ .file("src/hello.c")
+ .compile("hello");
+ println!("cargo:rerun-if-changed=src/hello.c");
+}
+```
+
+The [`cc` crate] abstracts a range of build script requirements for C code:
+
+* It invokes the appropriate compiler (MSVC for Windows, `gcc` for MinGW, `cc`
+ for Unix platforms, etc.).
+* It takes the `TARGET` variable into account by passing appropriate flags to
+ the compiler being used.
+* Other environment variables, such as `OPT_LEVEL`, `DEBUG`, etc., are all
+ handled automatically.
+* The stdout output and `OUT_DIR` locations are also handled by the `cc`
+ library.
+
+Here we can start to see some of the major benefits of farming as much
+functionality as possible out to common build dependencies rather than
+duplicating logic across all build scripts!
+
+Back to the case study though, let’s take a quick look at the contents of the
+`src` directory:
+
+```c
+// src/hello.c
+
+#include <stdio.h>
+
+void hello() {
+ printf("Hello, World!\n");
+}
+```
+
+```rust,ignore
+// src/main.rs
+
+// Note the lack of the `#[link]` attribute. We’re delegating the responsibility
+// of selecting what to link over to the build script rather than hard-coding
+// it in the source file.
+extern { fn hello(); }
+
+fn main() {
+ unsafe { hello(); }
+}
+```
+
+And there we go! This should complete our example of building some C code from a
+Cargo package using the build script itself. It also shows why using a build
+dependency can be crucial in many situations, and can even make the script much more concise!
+
+We’ve also seen a brief example of how a build script can use a crate as a
+dependency purely for the build process and not for the crate itself at runtime.
+
+[`cc` crate]: https://crates.io/crates/cc
+
+### Linking to system libraries
+
+This example demonstrates how to link a system library and how the build
+script is used to support this use case.
+
+Quite frequently, a Rust crate wants to link to a native library provided on
+the system, either to bind its functionality or just to use it as part of an
+implementation detail. Doing this in a platform-agnostic fashion is a nuanced
+problem, so where possible it is best to farm the work out to existing crates
+to make things as easy as possible for consumers.
+
+For this example, we will be creating a binding to the system's zlib library.
+This is a library that is commonly found on most Unix-like systems that
+provides data compression. This is already wrapped up in the [`libz-sys`
+crate], but for this example, we'll do an extremely simplified version. Check
+out [the source code][libz-source] for the full example.
+
+To make it easy to find the location of the library, we will use the
+[`pkg-config` crate]. This crate uses the system's `pkg-config` utility to
+discover information about a library. It will automatically tell Cargo what is
+needed to link the library. This will likely only work on Unix-like systems
+with `pkg-config` installed. Let's start by setting up the manifest:
+
+```toml
+# Cargo.toml
+
+[package]
+name = "libz-sys"
+version = "0.1.0"
+edition = "2021"
+links = "z"
+
+[build-dependencies]
+pkg-config = "0.3.16"
+```
+
+Take note that we included the `links` key in the `package` table. This tells
+Cargo that we are linking to the `libz` library. See ["Using another sys
+crate"](#using-another-sys-crate) for an example that will leverage this.
+
+The build script is fairly simple:
+
+```rust,ignore
+// build.rs
+
+fn main() {
+ pkg_config::Config::new().probe("zlib").unwrap();
+ println!("cargo:rerun-if-changed=build.rs");
+}
+```
+
+Let's round out the example with a basic FFI binding:
+
+```rust,ignore
+// src/lib.rs
+
+use std::os::raw::{c_uint, c_ulong};
+
+extern "C" {
+ pub fn crc32(crc: c_ulong, buf: *const u8, len: c_uint) -> c_ulong;
+}
+
+#[test]
+fn test_crc32() {
+ let s = "hello";
+ unsafe {
+ assert_eq!(crc32(0, s.as_ptr(), s.len() as c_uint), 0x3610a686);
+ }
+}
+```
+
+Run `cargo build -vv` to see the output from the build script. On a system
+with `libz` already installed, it may look something like this:
+
+```text
+[libz-sys 0.1.0] cargo:rustc-link-search=native=/usr/lib
+[libz-sys 0.1.0] cargo:rustc-link-lib=z
+[libz-sys 0.1.0] cargo:rerun-if-changed=build.rs
+```
+
+Nice! `pkg-config` did all the work of finding the library and telling Cargo
+where it is.
+
+It is not unusual for packages to include the source for the library, and
+build it statically if it is not found on the system, or if a feature or
+environment variable is set. For example, the real [`libz-sys` crate] checks the
+environment variable `LIBZ_SYS_STATIC` or the `static` feature to build it
+from source instead of using the system library. Check out [the
+source][libz-source] for a more complete example.
+
+[`libz-sys` crate]: https://crates.io/crates/libz-sys
+[`pkg-config` crate]: https://crates.io/crates/pkg-config
+[libz-source]: https://github.com/rust-lang/libz-sys
+
+### Using another `sys` crate
+
+When using the `links` key, crates may set metadata that can be read by other
+crates that depend on it. This provides a mechanism to communicate information
+between crates. In this example, we'll be creating a C library that makes use
+of zlib from the real [`libz-sys` crate].
+
+If you have a C library that depends on zlib, you can leverage the [`libz-sys`
+crate] to automatically find it or build it. This is great for cross-platform
+support, such as Windows where zlib is not usually installed. `libz-sys` [sets
+the `include`
+metadata](https://github.com/rust-lang/libz-sys/blob/3c594e677c79584500da673f918c4d2101ac97a1/build.rs#L156)
+to tell other packages where to find the header files for zlib. Our build
+script can read that metadata with the `DEP_Z_INCLUDE` environment variable.
+Here's an example:
+
+```toml
+# Cargo.toml
+
+[package]
+name = "zuser"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+libz-sys = "1.0.25"
+
+[build-dependencies]
+cc = "1.0.46"
+```
+
+Here we have included `libz-sys` which will ensure that there is only one
+`libz` used in the final library, and give us access to it from our build
+script:
+
+```rust,ignore
+// build.rs
+
+fn main() {
+ let mut cfg = cc::Build::new();
+ cfg.file("src/zuser.c");
+ if let Some(include) = std::env::var_os("DEP_Z_INCLUDE") {
+ cfg.include(include);
+ }
+ cfg.compile("zuser");
+ println!("cargo:rerun-if-changed=src/zuser.c");
+}
+```
+
+With `libz-sys` doing all the heavy lifting, the C source code may now include
+the zlib header, and it should find the header, even on systems where it isn't
+already installed.
+
+```c
+// src/zuser.c
+
+#include "zlib.h"
+
+// … rest of code that makes use of zlib.
+```
+
+### Conditional compilation
+
+A build script may emit [`rustc-cfg` instructions] which can enable conditions
+that can be checked at compile time. In this example, we'll take a look at how
+the [`openssl` crate] uses this to support multiple versions of the OpenSSL
+library.
+
+The [`openssl-sys` crate] implements building and linking the OpenSSL library.
+It supports multiple different implementations (like LibreSSL) and multiple
+versions. It makes use of the `links` key so that it may pass information to
+other build scripts. One of the things it passes is the `version_number` key,
+which is the version of OpenSSL that was detected. The code in the build
+script looks something [like
+this](https://github.com/sfackler/rust-openssl/blob/dc72a8e2c429e46c275e528b61a733a66e7877fc/openssl-sys/build/main.rs#L216):
+
+```rust,ignore
+println!("cargo:version_number={:x}", openssl_version);
+```
+
+This instruction causes the `DEP_OPENSSL_VERSION_NUMBER` environment variable
+to be set in any crates that directly depend on `openssl-sys`.
+
+The `openssl` crate, which provides the higher-level interface, specifies
+`openssl-sys` as a dependency. The `openssl` build script can read the
+version information generated by the `openssl-sys` build script with the
+`DEP_OPENSSL_VERSION_NUMBER` environment variable. It uses this to generate
+some [`cfg`
+values](https://github.com/sfackler/rust-openssl/blob/dc72a8e2c429e46c275e528b61a733a66e7877fc/openssl/build.rs#L18-L36):
+
+```rust,ignore
+// (portion of build.rs)
+
+if let Ok(version) = env::var("DEP_OPENSSL_VERSION_NUMBER") {
+ let version = u64::from_str_radix(&version, 16).unwrap();
+
+ if version >= 0x1_00_01_00_0 {
+ println!("cargo:rustc-cfg=ossl101");
+ }
+ if version >= 0x1_00_02_00_0 {
+ println!("cargo:rustc-cfg=ossl102");
+ }
+ if version >= 0x1_01_00_00_0 {
+ println!("cargo:rustc-cfg=ossl110");
+ }
+ if version >= 0x1_01_00_07_0 {
+ println!("cargo:rustc-cfg=ossl110g");
+ }
+ if version >= 0x1_01_01_00_0 {
+ println!("cargo:rustc-cfg=ossl111");
+ }
+}
+```
+
+These `cfg` values can then be used with the [`cfg` attribute] or the [`cfg`
+macro] to conditionally include code. For example, SHA3 support was added in
+OpenSSL 1.1.1, so it is [conditionally
+excluded](https://github.com/sfackler/rust-openssl/blob/dc72a8e2c429e46c275e528b61a733a66e7877fc/openssl/src/hash.rs#L67-L85)
+for older versions:
+
+```rust,ignore
+// (portion of openssl crate)
+
+#[cfg(ossl111)]
+pub fn sha3_224() -> MessageDigest {
+ unsafe { MessageDigest(ffi::EVP_sha3_224()) }
+}
+```
+
+Of course, one should be careful when using this, since it makes the resulting
+binary even more dependent on the build environment. In this example, if the
+binary is distributed to another system, it may not have the exact same shared
+libraries, which could cause problems.
+
+[`cfg` attribute]: ../../reference/conditional-compilation.md#the-cfg-attribute
+[`cfg` macro]: ../../std/macro.cfg.html
+[`rustc-cfg` instructions]: build-scripts.md#rustc-cfg
+[`openssl` crate]: https://crates.io/crates/openssl
+[`openssl-sys` crate]: https://crates.io/crates/openssl-sys
+
+[crates.io]: https://crates.io/
diff --git a/src/tools/cargo/src/doc/src/reference/build-scripts.md b/src/tools/cargo/src/doc/src/reference/build-scripts.md
new file mode 100644
index 000000000..68e8d404f
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/build-scripts.md
@@ -0,0 +1,485 @@
+## Build Scripts
+
+Some packages need to compile third-party non-Rust code, for example C
+libraries. Other packages need to link to C libraries which can either be
+located on the system or possibly need to be built from source. Others still
+need facilities for functionality such as code generation before building (think
+parser generators).
+
+Cargo does not aim to replace other tools that are well-optimized for these
+tasks, but it does integrate with them with custom build scripts. Placing a
+file named `build.rs` in the root of a package will cause Cargo to compile
+that script and execute it just before building the package.
+
+```rust,ignore
+// Example custom build script.
+fn main() {
+ // Tell Cargo that if the given file changes, to rerun this build script.
+ println!("cargo:rerun-if-changed=src/hello.c");
+ // Use the `cc` crate to build a C file and statically link it.
+ cc::Build::new()
+ .file("src/hello.c")
+ .compile("hello");
+}
+```
+
+Some example use cases of build scripts are:
+
+* Building a bundled C library.
+* Finding a C library on the host system.
+* Generating a Rust module from a specification.
+* Performing any platform-specific configuration needed for the crate.
+
+The sections below describe how build scripts work, and the [examples
+chapter](build-script-examples.md) shows a variety of examples on how to write
+scripts.
+
+> Note: The [`package.build` manifest key](manifest.md#package-build) can be
+> used to change the name of the build script, or disable it entirely.
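+>
+> For example (a minimal sketch):
+>
+> ```toml
+> [package]
+> name = "example"
+> version = "0.1.0"
+> build = "my_build.rs"  # use a script with a custom name
+> # build = false        # or opt out of the build script entirely
+> ```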
+
+### Life Cycle of a Build Script
+
+Just before a package is built, Cargo will compile a build script into an
+executable (if it has not already been built). It will then run the script,
+which may perform any number of tasks. The script may communicate with Cargo
+by printing specially formatted commands prefixed with `cargo:` to stdout.
+
+The build script will be rebuilt if any of its source files or dependencies
+change.
+
+By default, Cargo will re-run the build script if any of the files in the
+package changes. Typically it is best to use the `rerun-if` commands,
+described in the [change detection](#change-detection) section below, to
+narrow the focus of what triggers a build script to run again.
+
+Once the build script successfully finishes executing, the rest of the package
+will be compiled. Scripts should exit with a non-zero exit code to halt the
+build if there is an error, in which case the build script's output will be
+displayed on the terminal.
+
+### Inputs to the Build Script
+
+When the build script is run, there are a number of inputs to the build script,
+all passed in the form of [environment variables][build-env].
+
+In addition to environment variables, the build script’s current directory is
+the source directory of the build script’s package.
+
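+As a small sketch, a build script can read these variables with
+`std::env::var`; `TARGET`, used below, is one of the variables Cargo sets:
+
+```rust,ignore
+// build.rs
+
+fn main() {
+    // Cargo provides the target triple being compiled for via `TARGET`.
+    let target = std::env::var("TARGET").unwrap();
+    // Lines that do not start with `cargo:` are ignored by Cargo (but captured
+    // in the build output log), so this is purely informational.
+    println!("build script running for target: {target}");
+}
+```
+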
+[build-env]: environment-variables.md#environment-variables-cargo-sets-for-build-scripts
+
+### Outputs of the Build Script
+
+Build scripts may save any output files or intermediate artifacts in the
+directory specified in the [`OUT_DIR` environment variable][build-env]. Scripts
+should not modify any files outside of that directory.
+
+Build scripts communicate with Cargo by printing to stdout. Cargo will
+interpret each line that starts with `cargo:` as an instruction that will
+influence compilation of the package. All other lines are ignored.
+
+> Note: The order of `cargo:` instructions printed by the build script *may*
+> affect the order of arguments that `cargo` passes to `rustc`. In turn, the
+> order of arguments passed to `rustc` may affect the order of arguments passed
+> to the linker. Therefore, you will want to pay attention to the order of the
+> build script's instructions. For example, if object `foo` needs to link against
+> library `bar`, you may want to make sure that library `bar`'s
+> [`cargo:rustc-link-lib`](#rustc-link-lib) instruction appears *after*
+> instructions to link object `foo`.
+
+The output of the script is hidden from the terminal during normal
+compilation. If you would like to see the output directly in your terminal,
+invoke Cargo as "very verbose" with the `-vv` flag. This only happens when the
+build script is run. If Cargo determines nothing has changed, it will not
+re-run the script, see [change detection](#change-detection) below for more.
+
+All the lines printed to stdout by a build script are written to a file like
+`target/debug/build/<pkg>/output` (the precise location may depend on your
+configuration). The stderr output is also saved in that same directory.
+
+The following is a summary of the instructions that Cargo recognizes, with each
+one detailed below.
+
+* [`cargo:rerun-if-changed=PATH`](#rerun-if-changed) --- Tells Cargo when to
+ re-run the script.
+* [`cargo:rerun-if-env-changed=VAR`](#rerun-if-env-changed) --- Tells Cargo when
+ to re-run the script.
+* [`cargo:rustc-link-arg=FLAG`](#rustc-link-arg) --- Passes custom flags to a
+ linker for benchmarks, binaries, `cdylib` crates, examples, and tests.
+* [`cargo:rustc-link-arg-bin=BIN=FLAG`](#rustc-link-arg-bin) --- Passes custom
+ flags to a linker for the binary `BIN`.
+* [`cargo:rustc-link-arg-bins=FLAG`](#rustc-link-arg-bins) --- Passes custom
+ flags to a linker for binaries.
+* [`cargo:rustc-link-arg-tests=FLAG`](#rustc-link-arg-tests) --- Passes custom
+ flags to a linker for tests.
+* [`cargo:rustc-link-arg-examples=FLAG`](#rustc-link-arg-examples) --- Passes custom
+ flags to a linker for examples.
+* [`cargo:rustc-link-arg-benches=FLAG`](#rustc-link-arg-benches) --- Passes custom
+ flags to a linker for benchmarks.
+* [`cargo:rustc-link-lib=LIB`](#rustc-link-lib) --- Adds a library to
+ link.
+* [`cargo:rustc-link-search=[KIND=]PATH`](#rustc-link-search) --- Adds to the
+ library search path.
+* [`cargo:rustc-flags=FLAGS`](#rustc-flags) --- Passes certain flags to the
+ compiler.
+* [`cargo:rustc-cfg=KEY[="VALUE"]`](#rustc-cfg) --- Enables compile-time `cfg`
+ settings.
+* [`cargo:rustc-env=VAR=VALUE`](#rustc-env) --- Sets an environment variable.
+* [`cargo:rustc-cdylib-link-arg=FLAG`](#rustc-cdylib-link-arg) --- Passes custom
+ flags to a linker for cdylib crates.
+* [`cargo:warning=MESSAGE`](#cargo-warning) --- Displays a warning on the
+ terminal.
+* [`cargo:KEY=VALUE`](#the-links-manifest-key) --- Metadata, used by `links`
+ scripts.
+
+
+<a id="rustc-link-arg"></a>
+#### `cargo:rustc-link-arg=FLAG`
+
+The `rustc-link-arg` instruction tells Cargo to pass the [`-C link-arg=FLAG`
+option][link-arg] to the compiler, but only when building supported targets
+(benchmarks, binaries, `cdylib` crates, examples, and tests). Its usage is
+highly platform specific. It is useful to set the shared library version or
+linker script.
+
+[link-arg]: ../../rustc/codegen-options/index.md#link-arg
+
+<a id="rustc-link-arg-bin"></a>
+#### `cargo:rustc-link-arg-bin=BIN=FLAG`
+
+The `rustc-link-arg-bin` instruction tells Cargo to pass the [`-C
+link-arg=FLAG` option][link-arg] to the compiler, but only when building
+the binary target with name `BIN`. Its usage is highly platform specific. It is useful
+to set a linker script or other linker options.
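+
+A minimal sketch of its use from a build script (the binary name and linker
+script here are hypothetical):
+
+```rust,ignore
+// build.rs
+
+fn main() {
+    // Pass a custom linker script only when linking the `my-bin` binary target.
+    println!("cargo:rustc-link-arg-bin=my-bin=-Tlink.x");
+}
+```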
+
+
+<a id="rustc-link-arg-bins"></a>
+#### `cargo:rustc-link-arg-bins=FLAG`
+
+The `rustc-link-arg-bins` instruction tells Cargo to pass the [`-C
+link-arg=FLAG` option][link-arg] to the compiler, but only when building a
+binary target. Its usage is highly platform specific. It is useful
+to set a linker script or other linker options.
+
+
+<a id="rustc-link-lib"></a>
+#### `cargo:rustc-link-lib=LIB`
+
+The `rustc-link-lib` instruction tells Cargo to link the given library using
+the compiler's [`-l` flag][option-link]. This is typically used to link a
+native library using [FFI].
+
+The `LIB` string is passed directly to rustc, so it supports any syntax that
+`-l` does. \
+Currently the full supported syntax for `LIB` is `[KIND[:MODIFIERS]=]NAME[:RENAME]`.
+
+The `-l` flag is only passed to the library target of the package, unless
+there is no library target, in which case it is passed to all targets. This is
+done because all other targets have an implicit dependency on the library
+target, and the given library to link should only be included once. This means
+that if a package has both a library and a binary target, the *library* has
+access to the symbols from the given lib, and the binary should access them
+through the library target's public API.
+
+The optional `KIND` may be one of `dylib`, `static`, or `framework`. See the
+[rustc book][option-link] for more detail.
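+
+For example, a build script might link a native static library it just built
+or located; a sketch, with the library name `foo` purely illustrative:
+
+```rust
+// In build.rs (sketch): the library name is illustrative.
+fn main() {
+    // Link libfoo statically into the package's library target.
+    println!("cargo:rustc-link-lib=static=foo");
+}
+```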
+
+[option-link]: ../../rustc/command-line-arguments.md#option-l-link-lib
+[FFI]: ../../nomicon/ffi.md
+
+
+<a id="rustc-link-arg-tests"></a>
+#### `cargo:rustc-link-arg-tests=FLAG`
+
+The `rustc-link-arg-tests` instruction tells Cargo to pass the [`-C
+link-arg=FLAG` option][link-arg] to the compiler, but only when building a
+tests target.
+
+
+<a id="rustc-link-arg-examples"></a>
+#### `cargo:rustc-link-arg-examples=FLAG`
+
+The `rustc-link-arg-examples` instruction tells Cargo to pass the [`-C
+link-arg=FLAG` option][link-arg] to the compiler, but only when building an examples
+target.
+
+<a id="rustc-link-arg-benches"></a>
+#### `cargo:rustc-link-arg-benches=FLAG`
+
+The `rustc-link-arg-benches` instruction tells Cargo to pass the [`-C
+link-arg=FLAG` option][link-arg] to the compiler, but only when building a benchmark
+target.
+
+<a id="rustc-link-search"></a>
+#### `cargo:rustc-link-search=[KIND=]PATH`
+
+The `rustc-link-search` instruction tells Cargo to pass the [`-L`
+flag][option-search] to the compiler to add a directory to the library search
+path.
+
+The optional `KIND` may be one of `dependency`, `crate`, `native`,
+`framework`, or `all`. See the [rustc book][option-search] for more detail.
+
+These paths are also added to the [dynamic library search path environment
+variable](environment-variables.md#dynamic-library-paths) if they are within
+the `OUT_DIR`. Depending on this behavior is discouraged since this makes it
+difficult to use the resulting binary. In general, it is best to avoid
+creating dynamic libraries in a build script (using existing system libraries
+is fine).
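+
+For example, a build script that compiles a static library into `OUT_DIR`
+might emit the following; a sketch, with the library name illustrative:
+
+```rust
+// In build.rs (sketch): the library name is illustrative.
+fn main() {
+    let out_dir = std::env::var("OUT_DIR").unwrap();
+    // Add OUT_DIR to the native library search path...
+    println!("cargo:rustc-link-search=native={out_dir}");
+    // ...and link the library expected to be found there.
+    println!("cargo:rustc-link-lib=static=foo");
+}
+```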
+
+[option-search]: ../../rustc/command-line-arguments.md#option-l-search-path
+
+<a id="rustc-flags"></a>
+#### `cargo:rustc-flags=FLAGS`
+
+The `rustc-flags` instruction tells Cargo to pass the given space-separated
+flags to the compiler. This only allows the `-l` and `-L` flags, and is
+equivalent to using [`rustc-link-lib`](#rustc-link-lib) and
+[`rustc-link-search`](#rustc-link-search).
+
+<a id="rustc-cfg"></a>
+#### `cargo:rustc-cfg=KEY[="VALUE"]`
+
+The `rustc-cfg` instruction tells Cargo to pass the given value to the
+[`--cfg` flag][option-cfg] to the compiler. This may be used for compile-time
+detection of features to enable [conditional compilation].
+
+Note that this does *not* affect Cargo's dependency resolution. This cannot be
+used to enable an optional dependency, or enable other Cargo features.
+
+Be aware that [Cargo features] use the form `feature="foo"`. `cfg` values
+passed with this flag are not restricted to that form, and may provide just a
+single identifier, or any arbitrary key/value pair. For example, emitting
+`cargo:rustc-cfg=abc` will then allow code to use `#[cfg(abc)]` (note the lack
+of `feature=`). Or an arbitrary key/value pair may be used with an `=` symbol
+like `cargo:rustc-cfg=my_component="foo"`. The key should be a Rust
+identifier, and the value should be a string.
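+
+For example, a build script might enable a cfg after some probe succeeds; a
+sketch, with the `has_avx2` name purely illustrative:
+
+```rust
+// build.rs (sketch): the cfg name `has_avx2` is illustrative.
+fn main() {
+    // A real script would probe the target or environment first.
+    println!("cargo:rustc-cfg=has_avx2");
+}
+
+// Crate code can then be gated on the cfg:
+// #[cfg(has_avx2)]
+// fn fast_path() { /* ... */ }
+```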
+
+[cargo features]: features.md
+[conditional compilation]: ../../reference/conditional-compilation.md
+[option-cfg]: ../../rustc/command-line-arguments.md#option-cfg
+
+<a id="rustc-env"></a>
+#### `cargo:rustc-env=VAR=VALUE`
+
+The `rustc-env` instruction tells Cargo to set the given environment variable
+when compiling the package. The value can be then retrieved by the [`env!`
+macro][env-macro] in the compiled crate. This is useful for embedding
+additional metadata in the crate's code, such as the hash of git HEAD or the
+unique identifier of a continuous integration server.
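+
+A sketch, with the variable name and value purely illustrative:
+
+```rust
+// build.rs (sketch): the variable name and value are illustrative.
+fn main() {
+    // A real script might embed `git rev-parse HEAD` output or a CI build id.
+    println!("cargo:rustc-env=APP_BUILD_ID=local-dev");
+}
+
+// In the crate, the value is then available at compile time:
+// const BUILD_ID: &str = env!("APP_BUILD_ID");
+```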
+
+See also the [environment variables automatically included by
+Cargo][env-cargo].
+
+> **Note**: These environment variables are also set when running an
+> executable with `cargo run` or `cargo test`. However, this usage is
+> discouraged since it ties the executable to Cargo's execution environment.
+> Normally, these environment variables should only be checked at compile-time
+> with the `env!` macro.
+
+[env-macro]: ../../std/macro.env.html
+[env-cargo]: environment-variables.md#environment-variables-cargo-sets-for-crates
+
+<a id="rustc-cdylib-link-arg"></a>
+#### `cargo:rustc-cdylib-link-arg=FLAG`
+
+The `rustc-cdylib-link-arg` instruction tells Cargo to pass the [`-C
+link-arg=FLAG` option][link-arg] to the compiler, but only when building a
+`cdylib` library target. Its usage is highly platform specific. It is useful
+to set the shared library version or the runtime-path.
+
+
+<a id="cargo-warning"></a>
+#### `cargo:warning=MESSAGE`
+
+The `warning` instruction tells Cargo to display a warning after the build
+script has finished running. Warnings are only shown for `path` dependencies
+(that is, those you're working on locally), so for example warnings printed
+out in [crates.io] crates are not emitted by default. The `-vv` "very verbose"
+flag may be used to have Cargo display warnings for all crates.
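+
+For example (the message is illustrative):
+
+```rust
+// In build.rs:
+fn main() {
+    println!("cargo:warning=libfoo not found; building the bundled copy instead");
+}
+```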
+
+### Build Dependencies
+
+Build scripts are also allowed to have dependencies on other Cargo-based crates.
+Dependencies are declared through the `build-dependencies` section of the
+manifest.
+
+```toml
+[build-dependencies]
+cc = "1.0.46"
+```
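+
+With that declaration, the build script could, for example, use the `cc` crate
+to compile a bundled C file; a sketch, with an illustrative source path:
+
+```rust
+// build.rs (sketch): the C source path is illustrative.
+fn main() {
+    println!("cargo:rerun-if-changed=src/shim.c");
+    // `cc` compiles the file and emits the necessary link instructions itself.
+    cc::Build::new().file("src/shim.c").compile("shim");
+}
+```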
+
+The build script **does not** have access to the dependencies listed in the
+`dependencies` or `dev-dependencies` section (they’re not built yet!). Also,
+build dependencies are not available to the package itself unless also
+explicitly added in the `[dependencies]` table.
+
+It is recommended to carefully consider each dependency you add, weighing
+against the impact on compile time, licensing, maintenance, etc. Cargo will
+attempt to reuse a dependency if it is shared between build dependencies and
+normal dependencies. However, this is not always possible, for example when
+cross-compiling, so keep the impact on compile time in mind.
+
+### Change Detection
+
+When rebuilding a package, Cargo does not necessarily know if the build script
+needs to be run again. By default, it takes a conservative approach of always
+re-running the build script if any file within the package is changed (or the
+list of files controlled by the [`exclude` and `include` fields]). For most
+cases, this is not a good choice, so it is recommended that every build script
+emit at least one of the `rerun-if` instructions (described below). If these
+are emitted, then Cargo will only re-run the script if the given value has
+changed. If Cargo is re-running the build scripts of your own crate or a
+dependency and you don't know why, see ["Why is Cargo rebuilding my code?" in the
+FAQ](../faq.md#why-is-cargo-rebuilding-my-code).
+
+[`exclude` and `include` fields]: manifest.md#the-exclude-and-include-fields
+
+<a id="rerun-if-changed"></a>
+#### `cargo:rerun-if-changed=PATH`
+
+The `rerun-if-changed` instruction tells Cargo to re-run the build script if
+the file at the given path has changed. Currently, Cargo only uses the
+filesystem last-modified "mtime" timestamp to determine if the file has
+changed. It compares against an internal cached timestamp of when the build
+script last ran.
+
+If the path points to a directory, it will scan the entire directory for
+any modifications.
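+
+For example (the paths are illustrative):
+
+```rust
+// In build.rs:
+fn main() {
+    // Re-run only when the wrapper header or the headers directory changes.
+    println!("cargo:rerun-if-changed=wrapper.h");
+    println!("cargo:rerun-if-changed=include/");
+}
+```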
+
+If the build script inherently does not need to re-run under any circumstance,
+then emitting `cargo:rerun-if-changed=build.rs` is a simple way to prevent it
+from being re-run (otherwise, the default if no `rerun-if` instructions are
+emitted is to scan the entire package directory for changes). Cargo
+automatically handles whether or not the script itself needs to be recompiled,
+and of course the script will be re-run after it has been recompiled.
+Otherwise, specifying `build.rs` is redundant and unnecessary.
+
+<a id="rerun-if-env-changed"></a>
+#### `cargo:rerun-if-env-changed=NAME`
+
+The `rerun-if-env-changed` instruction tells Cargo to re-run the build script
+if the value of an environment variable of the given name has changed.
+
+Note that the environment variables here are intended for global environment
+variables like `CC` and such; it is not necessary to use this for environment
+variables like `TARGET` that Cargo sets.
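+
+For example, a script that invokes the system C compiler might emit:
+
+```rust
+// In build.rs:
+fn main() {
+    // Re-run when the C compiler selection changes.
+    println!("cargo:rerun-if-env-changed=CC");
+}
+```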
+
+
+### The `links` Manifest Key
+
+The `package.links` key may be set in the `Cargo.toml` manifest to declare
+that the package links with the given native library. The purpose of this
+manifest key is to give Cargo an understanding about the set of native
+dependencies that a package has, as well as providing a principled system of
+passing metadata between package build scripts.
+
+```toml
+[package]
+# ...
+links = "foo"
+```
+
+This manifest states that the package links to the `libfoo` native library.
+When using the `links` key, the package must have a build script, and the
+build script should use the [`rustc-link-lib` instruction](#rustc-link-lib) to
+link the library.
+
+Primarily, Cargo requires that there is at most one package per `links` value.
+In other words, it is forbidden to have two packages link to the same native
+library. This helps prevent duplicate symbols between crates. Note, however,
+that there are [conventions in place](#-sys-packages) to alleviate this.
+
+As mentioned above in the output format, each build script can generate an
+arbitrary set of metadata in the form of key-value pairs. This metadata is
+passed to the build scripts of **dependent** packages. For example, if the
+package `bar` depends on `foo`, then if `foo` generates `key=value` as part of
+its build script metadata, then the build script of `bar` will have the
+environment variable `DEP_FOO_KEY=value` set. See the ["Using another `sys`
+crate"][using-another-sys] example for how this can be used.
+
+Note that metadata is only passed to immediate dependents, not transitive
+dependents.
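+
+As a rough sketch, assuming a dependency `foo` with `links = "foo"` whose
+build script printed `cargo:include=/usr/include/foo` (the key and path are
+illustrative), the dependent package `bar` could read it like this:
+
+```rust
+// bar/build.rs (sketch): assumes foo's build script emitted `cargo:include=…`.
+fn main() {
+    // Cargo exposes foo's `key=value` metadata as DEP_FOO_<KEY> variables here.
+    let foo_include = std::env::var("DEP_FOO_INCLUDE")
+        .expect("set by Cargo from foo's build-script metadata");
+    // A real script might pass this path to a C compiler invocation.
+    let _ = foo_include;
+}
+```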
+
+[using-another-sys]: build-script-examples.md#using-another-sys-crate
+
+### `*-sys` Packages
+
+Some Cargo packages that link to system libraries have a naming convention of
+having a `-sys` suffix. Any package named `foo-sys` should provide two major
+pieces of functionality:
+
+* The library crate should link to the native library `libfoo`. This will often
+ probe the current system for `libfoo` before resorting to building from
+ source.
+* The library crate should provide **declarations** for types and functions in
+ `libfoo`, but **not** higher-level abstractions.
+
+The set of `*-sys` packages provides a common set of dependencies for linking
+to native libraries. There are a number of benefits earned from having this
+convention of native-library-related packages:
+
+* Common dependencies on `foo-sys` alleviates the rule about one package per
+ value of `links`.
+* Other `-sys` packages can take advantage of the `DEP_NAME_KEY=value`
+ environment variables to better integrate with other packages. See the
+ ["Using another `sys` crate"][using-another-sys] example.
+* A common dependency allows centralizing logic on discovering `libfoo` itself
+ (or building it from source).
+* These dependencies are easily [overridable](#overriding-build-scripts).
+
+It is common to have a companion package without the `-sys` suffix that
+provides safe, high-level abstractions on top of the sys package. For
+example, the [`git2` crate] provides a high-level interface to the
+[`libgit2-sys` crate].
+
+[`git2` crate]: https://crates.io/crates/git2
+[`libgit2-sys` crate]: https://crates.io/crates/libgit2-sys
+
+### Overriding Build Scripts
+
+If a manifest contains a `links` key, then Cargo supports overriding the build
+script specified with a custom library. The purpose of this functionality is to
+prevent running the build script in question altogether and instead supply the
+metadata ahead of time.
+
+To override a build script, place the following configuration in any acceptable [`config.toml`](config.md) file.
+
+```toml
+[target.x86_64-unknown-linux-gnu.foo]
+rustc-link-lib = ["foo"]
+rustc-link-search = ["/path/to/foo"]
+rustc-flags = "-L /some/path"
+rustc-cfg = ['key="value"']
+rustc-env = {key = "value"}
+rustc-cdylib-link-arg = ["…"]
+metadata_key1 = "value"
+metadata_key2 = "value"
+```
+
+With this configuration, if a package declares that it links to `foo` then the
+build script will **not** be compiled or run, and the metadata specified will
+be used instead.
+
+The `warning`, `rerun-if-changed`, and `rerun-if-env-changed` keys should not
+be used and will be ignored.
+
+### Jobserver
+
+Cargo and `rustc` use the [jobserver protocol], developed for GNU make, to
+coordinate concurrency across processes. It is essentially a semaphore that
+controls the number of jobs running concurrently. The concurrency may be set
+with the `--jobs` flag, which defaults to the number of logical CPUs.
+
+Each build script inherits one job slot from Cargo, and should endeavor to
+only use one CPU while it runs. If the script wants to use more CPUs in
+parallel, it should use the [`jobserver` crate] to coordinate with Cargo.
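+
+A rough sketch of that coordination, assuming `jobserver` is declared as a
+build dependency:
+
+```rust
+// build.rs (sketch): assumes `jobserver` is listed in [build-dependencies].
+fn main() {
+    // Inherit the jobserver that Cargo passes down to build scripts.
+    let client = unsafe { jobserver::Client::from_env() }
+        .expect("build scripts normally inherit a jobserver from Cargo");
+    // Acquire a token before spawning an extra parallel job; releasing the
+    // token (by dropping it) returns the slot to Cargo.
+    let token = client.acquire().expect("failed to acquire a job token");
+    // ... run one additional unit of parallel work here ...
+    drop(token);
+}
+```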
+
+As an example, the [`cc` crate] may enable the optional `parallel` feature
+which will use the jobserver protocol to attempt to build multiple C files
+at the same time.
+
+[`cc` crate]: https://crates.io/crates/cc
+[`jobserver` crate]: https://crates.io/crates/jobserver
+[jobserver protocol]: http://make.mad-scientist.net/papers/jobserver-implementation/
+[crates.io]: https://crates.io/
diff --git a/src/tools/cargo/src/doc/src/reference/cargo-targets.md b/src/tools/cargo/src/doc/src/reference/cargo-targets.md
new file mode 100644
index 000000000..7aea15109
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/cargo-targets.md
@@ -0,0 +1,389 @@
+## Cargo Targets
+
+Cargo packages consist of *targets* which correspond to source files which can
+be compiled into a crate. Packages can have [library](#library),
+[binary](#binaries), [example](#examples), [test](#tests), and
+[benchmark](#benchmarks) targets. The list of targets can be configured in the
+`Cargo.toml` manifest, often [inferred automatically](#target-auto-discovery)
+by the [directory layout][package layout] of the source files.
+
+See [Configuring a target](#configuring-a-target) below for details on
+configuring the settings for a target.
+
+### Library
+
+The library target defines a "library" that can be used and linked by other
+libraries and executables. The filename defaults to `src/lib.rs`, and the name
+of the library defaults to the name of the package. A package can have only
+one library. The settings for the library can be [customized] in the `[lib]`
+table in `Cargo.toml`.
+
+```toml
+# Example of customizing the library in Cargo.toml.
+[lib]
+crate-type = ["cdylib"]
+bench = false
+```
+
+### Binaries
+
+Binary targets are executable programs that can be run after being compiled.
+The default binary source file is `src/main.rs`, and the binary's name defaults
+to the name of the package. Additional binaries are stored in the [`src/bin/`
+directory][package layout]. The settings for each binary can be [customized]
+in the `[[bin]]` tables in `Cargo.toml`.
+
+Binaries can use the public API of the package's library. They are also linked
+with the [`[dependencies]`][dependencies] defined in `Cargo.toml`.
+
+You can run individual binaries with the [`cargo run`] command with the `--bin
+<bin-name>` option. [`cargo install`] can be used to copy the executable to a
+common location.
+
+```toml
+# Example of customizing binaries in Cargo.toml.
+[[bin]]
+name = "cool-tool"
+test = false
+bench = false
+
+[[bin]]
+name = "frobnicator"
+required-features = ["frobnicate"]
+```
+
+### Examples
+
+Files located under the [`examples` directory][package layout] are example
+uses of the functionality provided by the library. When compiled, they are
+placed in the [`target/debug/examples` directory][build cache].
+
+Examples can use the public API of the package's library. They are also linked
+with the [`[dependencies]`][dependencies] and
+[`[dev-dependencies]`][dev-dependencies] defined in `Cargo.toml`.
+
+By default, examples are executable binaries (with a `main()` function). You
+can specify the [`crate-type` field](#the-crate-type-field) to make an example
+be compiled as a library:
+
+```toml
+[[example]]
+name = "foo"
+crate-type = ["staticlib"]
+```
+
+You can run individual executable examples with the [`cargo run`] command with
+the `--example <example-name>` option. Library examples can be built with
+[`cargo build`] with the `--example <example-name>` option. [`cargo install`]
+with the `--example <example-name>` option can be used to copy executable
+binaries to a common location. Examples are compiled by [`cargo test`] by
+default to protect them from bit-rotting. Set [the `test`
+field](#the-test-field) to `true` if you have `#[test]` functions in the
+example that you want to run with [`cargo test`].
+
+### Tests
+
+There are two styles of tests within a Cargo project:
+
+* *Unit tests* which are functions marked with the [`#[test]`
+ attribute][test-attribute] located within your library or binaries (or any
+ target enabled with [the `test` field](#the-test-field)). These tests have
+ access to private APIs located within the target they are defined in.
+* *Integration tests*, each of which is a separate executable binary that also
+ contains `#[test]` functions, is linked with the project's library, and has
+ access to its *public* API.
+
+Tests are run with the [`cargo test`] command. By default, Cargo and `rustc`
+use the [libtest harness] which is responsible for collecting functions
+annotated with the [`#[test]` attribute][test-attribute] and executing them in
+parallel, reporting the success and failure of each test. See [the `harness`
+field](#the-harness-field) if you want to use a different harness or test
+strategy.
+
+> **Note**: There is another special style of test in Cargo:
+> [documentation tests][documentation examples].
+> They are handled by `rustdoc` and have a slightly different execution model.
+> For more information, please see [`cargo test`][cargo-test-documentation-tests].
+
+[libtest harness]: ../../rustc/tests/index.html
+[cargo-test-documentation-tests]: ../commands/cargo-test.md#documentation-tests
+
+#### Integration tests
+
+Files located under the [`tests` directory][package layout] are integration
+tests. When you run [`cargo test`], Cargo will compile each of these files as
+a separate crate, and execute them.
+
+Integration tests can use the public API of the package's library. They are
+also linked with the [`[dependencies]`][dependencies] and
+[`[dev-dependencies]`][dev-dependencies] defined in `Cargo.toml`.
+
+If you want to share code among multiple integration tests, you can place it
+in a separate module such as `tests/common/mod.rs` and then put `mod common;`
+in each test to import it.
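+
+For example (names are illustrative):
+
+```rust
+// tests/my_integration_test.rs (sketch): assumes shared helpers live in
+// `tests/common/mod.rs` and expose a `setup` function.
+mod common;
+
+#[test]
+fn basic() {
+    common::setup();
+    // ... exercise the library's public API ...
+}
+```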
+
+Each integration test results in a separate executable binary, and [`cargo
+test`] will run them serially. In some cases this can be inefficient, as it
+can take longer to compile, and may not make full use of multiple CPUs when
+running the tests. If you have a lot of integration tests, you may want to
+consider creating a single integration test, and split the tests into multiple
+modules. The libtest harness will automatically find all of the `#[test]`
+annotated functions and run them in parallel. You can pass module names to
+[`cargo test`] to only run the tests within that module.
+
+Binary targets are automatically built if there is an integration test. This
+allows an integration test to execute the binary to exercise and test its
+behavior. The `CARGO_BIN_EXE_<name>` [environment variable] is set when the
+integration test is built so that it can use the [`env` macro] to locate the
+executable.
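+
+For example, an integration test could run the `cool-tool` binary shown
+earlier; a sketch, reusing that illustrative name:
+
+```rust
+// tests/cli.rs (sketch): reuses the illustrative `cool-tool` binary name.
+use std::process::Command;
+
+#[test]
+fn prints_help() {
+    // CARGO_BIN_EXE_<name> is set by Cargo when the integration test is built.
+    let exe = env!("CARGO_BIN_EXE_cool-tool");
+    let status = Command::new(exe).arg("--help").status().unwrap();
+    assert!(status.success());
+}
+```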
+
+[environment variable]: environment-variables.md#environment-variables-cargo-sets-for-crates
+[`env` macro]: ../../std/macro.env.html
+
+### Benchmarks
+
+Benchmarks provide a way to test the performance of your code using the
+[`cargo bench`] command. They follow the same structure as [tests](#tests),
+with each benchmark function annotated with the `#[bench]` attribute.
+Similarly to tests:
+
+* Benchmarks are placed in the [`benches` directory][package layout].
+* Benchmark functions defined in libraries and binaries have access to the
+ *private* API within the target they are defined in. Benchmarks in the
+ `benches` directory may use the *public* API.
+* [The `bench` field](#the-bench-field) can be used to define which targets
+ are benchmarked by default.
+* [The `harness` field](#the-harness-field) can be used to disable the
+ built-in harness.
+
+> **Note**: The [`#[bench]`
+> attribute](../../unstable-book/library-features/test.html) is currently
+> unstable and only available on the [nightly channel]. There are some
+> packages available on [crates.io](https://crates.io/keywords/benchmark) that
+> may help with running benchmarks on the stable channel, such as
+> [Criterion](https://crates.io/crates/criterion).
+
+### Configuring a target
+
+All of the `[lib]`, `[[bin]]`, `[[example]]`, `[[test]]`, and `[[bench]]`
+sections in `Cargo.toml` support similar configuration for specifying how a
+target should be built. The double-bracket sections like `[[bin]]` are
+[TOML arrays of tables](https://toml.io/en/v1.0.0-rc.3#array-of-tables),
+which means you can write more than one `[[bin]]` section to make several
+executables in your crate. You can only specify one library, so `[lib]` is a
+normal TOML table.
+
+The following is an overview of the TOML settings for each target, with each
+field described in detail below.
+
+```toml
+[lib]
+name = "foo" # The name of the target.
+path = "src/lib.rs" # The source file of the target.
+test = true # Is tested by default.
+doctest = true # Documentation examples are tested by default.
+bench = true # Is benchmarked by default.
+doc = true # Is documented by default.
+plugin = false # Used as a compiler plugin (deprecated).
+proc-macro = false # Set to `true` for a proc-macro library.
+harness = true # Use libtest harness.
+edition = "2015" # The edition of the target.
+crate-type = ["lib"] # The crate types to generate.
+required-features = [] # Features required to build this target (N/A for lib).
+```
+
+#### The `name` field
+
+The `name` field specifies the name of the target, which corresponds to the
+filename of the artifact that will be generated. For a library, this is the
+crate name that dependencies will use to reference it.
+
+For the `[lib]` and the default binary (`src/main.rs`), this defaults to the
+name of the package, with any dashes replaced with underscores. For other
+[auto discovered](#target-auto-discovery) targets, it defaults to the
+directory or file name.
+
+This is required for all targets except `[lib]`.
+
+#### The `path` field
+
+The `path` field specifies where the source for the crate is located, relative
+to the `Cargo.toml` file.
+
+If not specified, the [inferred path](#target-auto-discovery) is used based on
+the target name.
+
+#### The `test` field
+
+The `test` field indicates whether or not the target is tested by default by
+[`cargo test`]. The default is `true` for lib, bins, and tests.
+
+> **Note**: Examples are built by [`cargo test`] by default to ensure they
+> continue to compile, but they are not *tested* by default. Setting `test =
+> true` for an example will also build it as a test and run any
+> [`#[test]`][test-attribute] functions defined in the example.
+
+#### The `doctest` field
+
+The `doctest` field indicates whether or not [documentation examples] are
+tested by default by [`cargo test`]. This is only relevant for libraries; it
+has no effect on other sections. The default is `true` for the library.
+
+#### The `bench` field
+
+The `bench` field indicates whether or not the target is benchmarked by
+default by [`cargo bench`]. The default is `true` for lib, bins, and
+benchmarks.
+
+#### The `doc` field
+
+The `doc` field indicates whether or not the target is included in the
+documentation generated by [`cargo doc`] by default. The default is `true` for
+libraries and binaries.
+
+> **Note**: The binary will be skipped if its name is the same as the lib
+> target.
+
+#### The `plugin` field
+
+This field is used for `rustc` plugins, which are being deprecated.
+
+#### The `proc-macro` field
+
+The `proc-macro` field indicates that the library is a [procedural macro]
+([reference][proc-macro-reference]). This is only valid for the `[lib]`
+target.
+
+#### The `harness` field
+
+The `harness` field indicates that the [`--test` flag] will be passed to
+`rustc`, which will automatically include the libtest library, the driver
+for collecting and running tests marked with the [`#[test]`
+attribute][test-attribute] or benchmarks with the `#[bench]` attribute. The
+default is `true` for all targets.
+
+If set to `false`, then you are responsible for defining a `main()` function
+to run tests and benchmarks.
+
+Tests have the [`cfg(test)` conditional expression][cfg-test] enabled whether
+or not the harness is enabled.
+
+#### The `edition` field
+
+The `edition` field defines the [Rust edition] the target will use. If not
+specified, it defaults to the [`edition` field][package-edition] for the
+`[package]`. This field should usually not be set, and is only intended for
+advanced scenarios such as incrementally transitioning a large package to a
+new edition.
+
+#### The `crate-type` field
+
+The `crate-type` field defines the [crate types] that will be generated by the
+target. It is an array of strings, allowing you to specify multiple crate
+types for a single target. This can only be specified for libraries and
+examples. Binaries, tests, and benchmarks are always the "bin" crate type. The
+defaults are:
+
+Target | Crate Type
+-------|-----------
+Normal library | `"lib"`
+Proc-macro library | `"proc-macro"`
+Example | `"bin"`
+
+The available options are `bin`, `lib`, `rlib`, `dylib`, `cdylib`,
+`staticlib`, and `proc-macro`. You can read more about the different crate
+types in the [Rust Reference Manual][crate types].
+
+#### The `required-features` field
+
+The `required-features` field specifies which [features] the target needs in
+order to be built. If any of the required features are not enabled, the
+target will be skipped. This is only relevant for the `[[bin]]`, `[[bench]]`,
+`[[test]]`, and `[[example]]` sections; it has no effect on `[lib]`.
+
+```toml
+[features]
+# ...
+postgres = []
+sqlite = []
+tools = []
+
+[[bin]]
+name = "my-pg-tool"
+required-features = ["postgres", "tools"]
+```
+
+
+### Target auto-discovery
+
+By default, Cargo automatically determines the targets to build based on the
+[layout of the files][package layout] on the filesystem. The target
+configuration tables, such as `[lib]`, `[[bin]]`, `[[test]]`, `[[bench]]`, or
+`[[example]]`, can be used to add additional targets that don't follow the
+standard directory layout.
+
+The automatic target discovery can be disabled so that only manually
+configured targets will be built. Setting the keys `autobins`, `autoexamples`,
+`autotests`, or `autobenches` to `false` in the `[package]` section will
+disable auto-discovery of the corresponding target type.
+
+```toml
+[package]
+# ...
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
+```
+
+Disabling automatic discovery should only be needed for specialized
+situations. For example, if you have a library where you want a *module* named
+`bin`, this would present a problem because Cargo would usually attempt to
+compile anything in the `bin` directory as an executable. Here is a sample
+layout of this scenario:
+
+```text
+├── Cargo.toml
+└── src
+    ├── lib.rs
+    └── bin
+       └── mod.rs
+```
+
+To prevent Cargo from inferring `src/bin/mod.rs` as an executable, set
+`autobins = false` in `Cargo.toml` to disable auto-discovery:
+
+```toml
+[package]
+# …
+autobins = false
+```
+
+> **Note**: For packages with the 2015 edition, the default for auto-discovery
+> is `false` if at least one target is manually defined in `Cargo.toml`.
+> Beginning with the 2018 edition, the default is always `true`.
+
+
+[Build cache]: ../guide/build-cache.md
+[Rust Edition]: ../../edition-guide/index.html
+[`--test` flag]: ../../rustc/command-line-arguments.html#option-test
+[`cargo bench`]: ../commands/cargo-bench.md
+[`cargo build`]: ../commands/cargo-build.md
+[`cargo doc`]: ../commands/cargo-doc.md
+[`cargo install`]: ../commands/cargo-install.md
+[`cargo run`]: ../commands/cargo-run.md
+[`cargo test`]: ../commands/cargo-test.md
+[cfg-test]: ../../reference/conditional-compilation.html#test
+[crate types]: ../../reference/linkage.html
+[crates.io]: https://crates.io/
+[customized]: #configuring-a-target
+[dependencies]: specifying-dependencies.md
+[dev-dependencies]: specifying-dependencies.md#development-dependencies
+[documentation examples]: ../../rustdoc/documentation-tests.html
+[features]: features.md
+[nightly channel]: ../../book/appendix-07-nightly-rust.html
+[package layout]: ../guide/project-layout.md
+[package-edition]: manifest.md#the-edition-field
+[proc-macro-reference]: ../../reference/procedural-macros.html
+[procedural macro]: ../../book/ch19-06-macros.html
+[test-attribute]: ../../reference/attributes/testing.html#the-test-attribute
diff --git a/src/tools/cargo/src/doc/src/reference/config.md b/src/tools/cargo/src/doc/src/reference/config.md
new file mode 100644
index 000000000..c57a45f67
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/config.md
@@ -0,0 +1,1219 @@
+## Configuration
+
+This document explains how Cargo’s configuration system works, as well as
+available keys or configuration. For configuration of a package through its
+manifest, see the [manifest format](manifest.md).
+
+### Hierarchical structure
+
+Cargo allows local configuration for a particular package as well as global
+configuration. It looks for configuration files in the current directory and
+all parent directories. If, for example, Cargo were invoked in
+`/projects/foo/bar/baz`, then the following configuration files would be
+probed for and unified in this order:
+
+* `/projects/foo/bar/baz/.cargo/config.toml`
+* `/projects/foo/bar/.cargo/config.toml`
+* `/projects/foo/.cargo/config.toml`
+* `/projects/.cargo/config.toml`
+* `/.cargo/config.toml`
+* `$CARGO_HOME/config.toml` which defaults to:
+ * Windows: `%USERPROFILE%\.cargo\config.toml`
+ * Unix: `$HOME/.cargo/config.toml`
+
+With this structure, you can specify configuration per-package, and even
+possibly check it into version control. You can also specify personal defaults
+with a configuration file in your home directory.
+
+If a key is specified in multiple config files, the values will get merged
+together. Numbers, strings, and booleans will use the value from the deeper
+config directory, with deeper directories taking precedence over ancestor
+directories and the home directory having the lowest priority. Arrays will be
+joined together.
+
+At present, when being invoked from a workspace, Cargo does not read config
+files from crates within the workspace. That is, if a workspace has two crates in
+it, named `/projects/foo/bar/baz/mylib` and `/projects/foo/bar/baz/mybin`, and
+there are Cargo configs at `/projects/foo/bar/baz/mylib/.cargo/config.toml`
+and `/projects/foo/bar/baz/mybin/.cargo/config.toml`, Cargo does not read
+those configuration files if it is invoked from the workspace root
+(`/projects/foo/bar/baz/`).
+
+> **Note:** Cargo also reads config files without the `.toml` extension, such as
+> `.cargo/config`. Support for the `.toml` extension was added in version 1.39
+> and is the preferred form. If both files exist, Cargo will use the file
+> without the extension.
+
+### Configuration format
+
+Configuration files are written in the [TOML format][toml] (like the
+manifest), with simple key-value pairs inside of sections (tables). The
+following is a quick overview of all settings, with detailed descriptions
+found below.
+
+```toml
+paths = ["/path/to/override"] # path dependency overrides
+
+[alias] # command aliases
+b = "build"
+c = "check"
+t = "test"
+r = "run"
+rr = "run --release"
+recursive_example = "rr --example recursions"
+space_example = ["run", "--release", "--", "\"command list\""]
+
+[build]
+jobs = 1 # number of parallel jobs, defaults to # of CPUs
+rustc = "rustc" # the rust compiler tool
+rustc-wrapper = "…" # run this wrapper instead of `rustc`
+rustc-workspace-wrapper = "…" # run this wrapper instead of `rustc` for workspace members
+rustdoc = "rustdoc" # the doc generator tool
+target = "triple" # build for the target triple (ignored by `cargo install`)
+target-dir = "target" # path of where to place all generated artifacts
+rustflags = ["…", "…"] # custom flags to pass to all compiler invocations
+rustdocflags = ["…", "…"] # custom flags to pass to rustdoc
+incremental = true # whether or not to enable incremental compilation
+dep-info-basedir = "…" # path for the base directory for targets in depfiles
+
+[doc]
+browser = "chromium" # browser to use with `cargo doc --open`,
+ # overrides the `BROWSER` environment variable
+
+[env]
+# Set ENV_VAR_NAME=value for any process run by Cargo
+ENV_VAR_NAME = "value"
+# Set even if already present in environment
+ENV_VAR_NAME_2 = { value = "value", force = true }
+# Value is relative to .cargo directory containing `config.toml`, make absolute
+ENV_VAR_NAME_3 = { value = "relative/path", relative = true }
+
+[future-incompat-report]
+frequency = 'always' # when to display a notification about a future incompat report
+
+[cargo-new]
+vcs = "none" # VCS to use ('git', 'hg', 'pijul', 'fossil', 'none')
+
+[http]
+debug = false # HTTP debugging
+proxy = "host:port" # HTTP proxy in libcurl format
+ssl-version = "tlsv1.3" # TLS version to use
+ssl-version.max = "tlsv1.3" # maximum TLS version
+ssl-version.min = "tlsv1.1" # minimum TLS version
+timeout = 30 # timeout for each HTTP request, in seconds
+low-speed-limit = 10 # network timeout threshold (bytes/sec)
+cainfo = "cert.pem" # path to Certificate Authority (CA) bundle
+check-revoke = true # check for SSL certificate revocation
+multiplexing = true # HTTP/2 multiplexing
+user-agent = "…" # the user-agent header
+
+[install]
+root = "/some/path" # `cargo install` destination directory
+
+[net]
+retry = 3 # network retries
+git-fetch-with-cli = true # use the `git` executable for git operations
+offline = true # do not access the network
+
+[net.ssh]
+known-hosts = ["..."] # known SSH host keys
+
+[patch.<registry>]
+# Same keys as for [patch] in Cargo.toml
+
+[profile.<name>] # Modify profile settings via config.
+inherits = "dev" # Inherits settings from [profile.dev].
+opt-level = 0 # Optimization level.
+debug = true # Include debug info.
+split-debuginfo = '...' # Debug info splitting behavior.
+debug-assertions = true # Enables debug assertions.
+overflow-checks = true # Enables runtime integer overflow checks.
+lto = false # Sets link-time optimization.
+panic = 'unwind' # The panic strategy.
+incremental = true # Incremental compilation.
+codegen-units = 16 # Number of code generation units.
+rpath = false # Sets the rpath linking option.
+[profile.<name>.build-override] # Overrides build-script settings.
+# Same keys for a normal profile.
+[profile.<name>.package.<name>] # Override profile for a package.
+# Same keys for a normal profile (minus `panic`, `lto`, and `rpath`).
+
+[registries.<name>] # registries other than crates.io
+index = "…" # URL of the registry index
+token = "…" # authentication token for the registry
+
+[registry]
+default = "…" # name of the default registry
+token = "…" # authentication token for crates.io
+
+[source.<name>] # source definition and replacement
+replace-with = "…" # replace this source with the given named source
+directory = "…" # path to a directory source
+registry = "…" # URL to a registry source
+local-registry = "…" # path to a local registry source
+git = "…" # URL of a git repository source
+branch = "…" # branch name for the git repository
+tag = "…" # tag name for the git repository
+rev = "…" # revision for the git repository
+
+[target.<triple>]
+linker = "…" # linker to use
+runner = "…" # wrapper to run executables
+rustflags = ["…", "…"] # custom flags for `rustc`
+
+[target.<cfg>]
+runner = "…" # wrapper to run executables
+rustflags = ["…", "…"] # custom flags for `rustc`
+
+[target.<triple>.<links>] # `links` build script override
+rustc-link-lib = ["foo"]
+rustc-link-search = ["/path/to/foo"]
+rustc-flags = ["-L", "/some/path"]
+rustc-cfg = ['key="value"']
+rustc-env = {key = "value"}
+rustc-cdylib-link-arg = ["…"]
+metadata_key1 = "value"
+metadata_key2 = "value"
+
+[term]
+quiet = false # whether cargo output is quiet
+verbose = false # whether cargo provides verbose output
+color = 'auto' # whether cargo colorizes output
+progress.when = 'auto' # whether cargo shows progress bar
+progress.width = 80 # width of progress bar
+```
+
+### Environment variables
+
+Cargo can also be configured through environment variables in addition to the
+TOML configuration files. For each configuration key of the form `foo.bar` the
+environment variable `CARGO_FOO_BAR` can also be used to define the value.
+Keys are converted to uppercase, and dots and dashes are converted to underscores.
+For example the `target.x86_64-unknown-linux-gnu.runner` key can also be
+defined by the `CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER` environment
+variable.
+
+Environment variables will take precedence over TOML configuration files.
+Currently only integer, boolean, string, and some array values can be defined
+via environment variables. The [descriptions below](#configuration-keys)
+indicate which keys support environment variables; keys without such a note
+cannot be set via the environment due to [technical issues](https://github.com/rust-lang/cargo/issues/5416).
+
+In addition to the system above, Cargo recognizes a few other specific
+[environment variables][env].
+
+### Command-line overrides
+
+Cargo also accepts arbitrary configuration overrides through the
+`--config` command-line option. The argument should use the TOML syntax
+`KEY=VALUE`:
+
+```console
+cargo --config net.git-fetch-with-cli=true fetch
+```
+
+The `--config` option may be specified multiple times, in which case the
+values are merged in left-to-right order, using the same merging logic
+that is used when multiple configuration files apply. Configuration
+values specified this way take precedence over environment variables,
+which take precedence over configuration files.
+
+Some examples of what it looks like using Bourne shell syntax:
+
+```console
+# Most shells will require escaping.
+cargo --config http.proxy=\"http://example.com\" …
+
+# Spaces may be used.
+cargo --config "net.git-fetch-with-cli = true" …
+
+# TOML array example. Single quotes make it easier to read and write.
+cargo --config 'build.rustdocflags = ["--html-in-header", "header.html"]' …
+
+# Example of a complex TOML key.
+cargo --config "target.'cfg(all(target_arch = \"arm\", target_os = \"none\"))'.runner = 'my-runner'" …
+
+# Example of overriding a profile setting.
+cargo --config profile.dev.package.image.opt-level=3 …
+```
+
+The `--config` option can also be used to pass paths to extra
+configuration files that Cargo should use for a specific invocation.
+Options from configuration files loaded this way follow the same
+precedence rules as other options specified directly with `--config`.
+
+### Config-relative paths
+
+Paths in config files may be absolute, relative, or a bare name without any path separators.
+Paths for executables without a path separator will use the `PATH` environment variable to search for the executable.
+Paths for non-executables will be relative to where the config value is defined.
+
+In particular, rules are:
+
+* For environment variables, paths are relative to the current working directory.
+* For config values loaded directly from the [`--config KEY=VALUE`](#command-line-overrides) option,
+ paths are relative to the current working directory.
+* For config files, paths are relative to the parent directory of the directory where the config files were defined,
+ regardless of whether those files come from the [hierarchical probing](#hierarchical-structure)
+ or the [`--config <path>`](#command-line-overrides) option.
+
+> **Note:** To maintain consistency with existing `.cargo/config.toml` probing behavior,
+> it is by design that a path in a config file passed via `--config <path>`
+> is also relative to two levels up from the config file itself.
+>
+> To avoid unexpected results, the rule of thumb is putting your extra config files
+> at the same level of discovered `.cargo/config.toml` in your project.
+> For instance, given a project `/my/project`,
+> it is recommended to put config files under `/my/project/.cargo`
+> or a new directory at the same level, such as `/my/project/.config`.
+
+```toml
+# Relative path examples.
+
+[target.x86_64-unknown-linux-gnu]
+runner = "foo" # Searches `PATH` for `foo`.
+
+[source.vendored-sources]
+# Directory is relative to the parent where `.cargo/config.toml` is located.
+# For example, `/my/project/.cargo/config.toml` would result in `/my/project/vendor`.
+directory = "vendor"
+```
+
+### Executable paths with arguments
+
+Some Cargo commands invoke external programs, which can be configured as a path
+and some number of arguments.
+
+The value may be an array of strings like `['/path/to/program', 'somearg']` or
+a space-separated string like `'/path/to/program somearg'`. If the path to the
+executable contains a space, the list form must be used.
+
+If Cargo is passing other arguments to the program such as a path to open or
+run, they will be passed after the last specified argument in the value of an
+option of this format. If the specified program does not have path separators,
+Cargo will search `PATH` for its executable.
+
+### Credentials
+
+Configuration values with sensitive information are stored in the
+`$CARGO_HOME/credentials.toml` file. This file is automatically created and updated
+by [`cargo login`] and [`cargo logout`]. It follows the same format as Cargo config files.
+
+```toml
+[registry]
+token = "…" # Access token for crates.io
+
+[registries.<name>]
+token = "…" # Access token for the named registry
+```
+
+Tokens are used by some Cargo commands such as [`cargo publish`] for
+authenticating with remote registries. Care should be taken to protect the
+tokens and to keep them secret.
+
+As with most other config values, tokens may be specified with environment
+variables. The token for [crates.io] may be specified with the
+`CARGO_REGISTRY_TOKEN` environment variable. Tokens for other registries may
+be specified with environment variables of the form
+`CARGO_REGISTRIES_<name>_TOKEN` where `<name>` is the name of the registry in
+all capital letters.
+
+### Configuration keys
+
+This section documents all configuration keys. The description for keys with
+variable parts are annotated with angled brackets like `target.<triple>` where
+the `<triple>` part can be any [target triple] like
+`target.x86_64-pc-windows-msvc`.
+
+#### `paths`
+* Type: array of strings (paths)
+* Default: none
+* Environment: not supported
+
+An array of paths to local packages which are to be used as overrides for
+dependencies. For more information see the [Overriding Dependencies
+guide](overriding-dependencies.md#paths-overrides).
+
+#### `[alias]`
+* Type: string or array of strings
+* Default: see below
+* Environment: `CARGO_ALIAS_<name>`
+
+The `[alias]` table defines CLI command aliases. For example, running `cargo
+b` is an alias for running `cargo build`. Each key in the table is the
+subcommand, and the value is the actual command to run. The value may be an
+array of strings, where the first element is the command and the following are
+arguments. It may also be a string, which will be split on spaces into
+subcommand and arguments. The following aliases are built-in to Cargo:
+
+```toml
+[alias]
+b = "build"
+c = "check"
+d = "doc"
+t = "test"
+r = "run"
+rm = "remove"
+```
+
+Aliases are not allowed to redefine existing built-in commands.
+
+Aliases are recursive:
+
+```toml
+[alias]
+rr = "run --release"
+recursive_example = "rr --example recursions"
+```
+
+#### `[build]`
+
+The `[build]` table controls build-time operations and compiler settings.
+
+##### `build.jobs`
+* Type: integer
+* Default: number of logical CPUs
+* Environment: `CARGO_BUILD_JOBS`
+
+Sets the maximum number of compiler processes to run in parallel. If negative,
+it sets the maximum number of compiler processes to the number of logical CPUs
+plus the provided value. It should not be 0.
+
+Can be overridden with the `--jobs` CLI option.
+
+##### `build.rustc`
+* Type: string (program path)
+* Default: "rustc"
+* Environment: `CARGO_BUILD_RUSTC` or `RUSTC`
+
+Sets the executable to use for `rustc`.
+
+##### `build.rustc-wrapper`
+* Type: string (program path)
+* Default: none
+* Environment: `CARGO_BUILD_RUSTC_WRAPPER` or `RUSTC_WRAPPER`
+
+Sets a wrapper to execute instead of `rustc`. The first argument passed to the
+wrapper is the path to the actual executable to use
+(i.e., `build.rustc`, if that is set, or `"rustc"` otherwise).
+
+##### `build.rustc-workspace-wrapper`
+* Type: string (program path)
+* Default: none
+* Environment: `CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER` or `RUSTC_WORKSPACE_WRAPPER`
+
+Sets a wrapper to execute instead of `rustc`, for workspace members only.
+The first argument passed to the wrapper is the path to the actual
+executable to use (i.e., `build.rustc`, if that is set, or `"rustc"` otherwise).
+It affects the filename hash so that artifacts produced by the wrapper are cached separately.
+
+##### `build.rustdoc`
+* Type: string (program path)
+* Default: "rustdoc"
+* Environment: `CARGO_BUILD_RUSTDOC` or `RUSTDOC`
+
+Sets the executable to use for `rustdoc`.
+
+##### `build.target`
+* Type: string or array of strings
+* Default: host platform
+* Environment: `CARGO_BUILD_TARGET`
+
+The default [target platform triples][target triple] to compile to.
+
+This allows passing either a string or an array of strings. Each string value
+is a target platform triple. The selected build targets will be built for each
+of the selected architectures.
+
+The string value may also be a relative path to a `.json` target spec file.
+
+Can be overridden with the `--target` CLI option.
+
+```toml
+[build]
+target = ["x86_64-unknown-linux-gnu", "i686-unknown-linux-gnu"]
+```
+
+##### `build.target-dir`
+* Type: string (path)
+* Default: "target"
+* Environment: `CARGO_BUILD_TARGET_DIR` or `CARGO_TARGET_DIR`
+
+The path to where all compiler output is placed. The default if not specified
+is a directory named `target` located at the root of the workspace.
+
+Can be overridden with the `--target-dir` CLI option.
+
+##### `build.rustflags`
+* Type: string or array of strings
+* Default: none
+* Environment: `CARGO_BUILD_RUSTFLAGS` or `CARGO_ENCODED_RUSTFLAGS` or `RUSTFLAGS`
+
+Extra command-line flags to pass to `rustc`. The value may be an array of
+strings or a space-separated string.
+
+There are four mutually exclusive sources of extra flags. They are checked in
+order, with the first one being used:
+
+1. `CARGO_ENCODED_RUSTFLAGS` environment variable.
+2. `RUSTFLAGS` environment variable.
+3. All matching `target.<triple>.rustflags` and `target.<cfg>.rustflags`
+ config entries joined together.
+4. `build.rustflags` config value.
+
+Additional flags may also be passed with the [`cargo rustc`] command.
+
+If the `--target` flag (or [`build.target`](#buildtarget)) is used, then the
+flags will only be passed to the compiler for the target. Things being built
+for the host, such as build scripts or proc macros, will not receive the args.
+Without `--target`, the flags will be passed to all compiler invocations
+(including build scripts and proc macros) because dependencies are shared. If
+you have args that you do not want to pass to build scripts or proc macros and
+are building for the host, pass `--target` with the [host triple][target triple].
+
+It is not recommended to pass in flags that Cargo itself usually manages. For
+example, the flags driven by [profiles](profiles.md) are best handled by setting the
+appropriate profile setting.
+
+> **Caution**: Due to the low-level nature of passing flags directly to the
+> compiler, this may cause a conflict with future versions of Cargo which may
+> issue the same or similar flags on its own which may interfere with the
+> flags you specify. This is an area where Cargo may not always be backwards
+> compatible.
+
+##### `build.rustdocflags`
+* Type: string or array of strings
+* Default: none
+* Environment: `CARGO_BUILD_RUSTDOCFLAGS` or `CARGO_ENCODED_RUSTDOCFLAGS` or `RUSTDOCFLAGS`
+
+Extra command-line flags to pass to `rustdoc`. The value may be an array of
+strings or a space-separated string.
+
+There are three mutually exclusive sources of extra flags. They are checked in
+order, with the first one being used:
+
+1. `CARGO_ENCODED_RUSTDOCFLAGS` environment variable.
+2. `RUSTDOCFLAGS` environment variable.
+3. `build.rustdocflags` config value.
+
+Additional flags may also be passed with the [`cargo rustdoc`] command.
+
+##### `build.incremental`
+* Type: bool
+* Default: from profile
+* Environment: `CARGO_BUILD_INCREMENTAL` or `CARGO_INCREMENTAL`
+
+Whether or not to perform [incremental compilation]. The default if not set is
+to use the value from the [profile](profiles.md#incremental). Otherwise this overrides the setting of
+all profiles.
+
+The `CARGO_INCREMENTAL` environment variable can be set to `1` to force enable
+incremental compilation for all profiles, or `0` to disable it. This env var
+overrides the config setting.
+
+##### `build.dep-info-basedir`
+* Type: string (path)
+* Default: none
+* Environment: `CARGO_BUILD_DEP_INFO_BASEDIR`
+
+Strips the given path prefix from [dep
+info](../guide/build-cache.md#dep-info-files) file paths. This config setting
+is intended to convert absolute paths to relative paths for tools that require
+relative paths.
+
+The setting itself is a config-relative path. So, for example, a value of
+`"."` would strip all paths starting with the parent directory of the `.cargo`
+directory.
+
+##### `build.pipelining`
+
+This option is deprecated and unused. Cargo always has pipelining enabled.
+
+#### `[doc]`
+
+The `[doc]` table defines options for the [`cargo doc`] command.
+
+##### `doc.browser`
+
+* Type: string or array of strings ([program path with args])
+* Default: `BROWSER` environment variable, or, if that is missing,
+ opening the link in a system specific way
+
+This option sets the browser to be used by [`cargo doc`], overriding the
+`BROWSER` environment variable when opening documentation with the `--open`
+option.
+
+#### `[cargo-new]`
+
+The `[cargo-new]` table defines defaults for the [`cargo new`] command.
+
+##### `cargo-new.name`
+
+This option is deprecated and unused.
+
+##### `cargo-new.email`
+
+This option is deprecated and unused.
+
+##### `cargo-new.vcs`
+* Type: string
+* Default: "git" or "none"
+* Environment: `CARGO_CARGO_NEW_VCS`
+
+Specifies the source control system to use for initializing a new repository.
+Valid values are `git`, `hg` (for Mercurial), `pijul`, `fossil` or `none` to
+disable this behavior. Defaults to `git`, or `none` if already inside a VCS
+repository. Can be overridden with the `--vcs` CLI option.
+
+#### `[env]`
+
+The `[env]` section allows you to set additional environment variables for
+build scripts, rustc invocations, `cargo run` and `cargo build`.
+
+```toml
+[env]
+OPENSSL_DIR = "/opt/openssl"
+```
+
+By default, the variables specified will not override values that already exist
+in the environment. This behavior can be changed by setting the `force` flag.
+
+Setting the `relative` flag evaluates the value as a config-relative path that
+is relative to the parent directory of the `.cargo` directory that contains the
+`config.toml` file. The value of the environment variable will be the full
+absolute path.
+
+```toml
+[env]
+TMPDIR = { value = "/home/tmp", force = true }
+OPENSSL_DIR = { value = "vendor/openssl", relative = true }
+```
+
+#### `[future-incompat-report]`
+
+The `[future-incompat-report]` table controls settings for [future incompat reporting](future-incompat-report.md).
+
+##### `future-incompat-report.frequency`
+* Type: string
+* Default: "always"
+* Environment: `CARGO_FUTURE_INCOMPAT_REPORT_FREQUENCY`
+
+Controls how often we display a notification to the terminal when a future incompat report is available. Possible values:
+
+* `always` (default): Always display a notification when a command (e.g. `cargo build`) produces a future incompat report
+* `never`: Never display a notification
+
+#### `[http]`
+
+The `[http]` table defines settings for HTTP behavior. This includes fetching
+crate dependencies and accessing remote git repositories.
+
+##### `http.debug`
+* Type: boolean
+* Default: false
+* Environment: `CARGO_HTTP_DEBUG`
+
+If `true`, enables debugging of HTTP requests. The debug information can be
+seen by setting the `CARGO_LOG=cargo::ops::registry=debug` environment
+variable (or use `trace` for even more information).
+
+Be wary when posting logs from this output in a public location. The output
+may include headers with authentication tokens which you don't want to leak!
+Be sure to review logs before posting them.
+
+##### `http.proxy`
+* Type: string
+* Default: none
+* Environment: `CARGO_HTTP_PROXY` or `HTTPS_PROXY` or `https_proxy` or `http_proxy`
+
+Sets an HTTP and HTTPS proxy to use. The format is in [libcurl format] as in
+`[protocol://]host[:port]`. If not set, Cargo will also check the `http.proxy`
+setting in your global git configuration. If none of those are set, the
+`HTTPS_PROXY` or `https_proxy` environment variables set the proxy for HTTPS
+requests, and `http_proxy` sets it for HTTP requests.
+
+##### `http.timeout`
+* Type: integer
+* Default: 30
+* Environment: `CARGO_HTTP_TIMEOUT` or `HTTP_TIMEOUT`
+
+Sets the timeout for each HTTP request, in seconds.
+
+##### `http.cainfo`
+* Type: string (path)
+* Default: none
+* Environment: `CARGO_HTTP_CAINFO`
+
+Path to a Certificate Authority (CA) bundle file, used to verify TLS
+certificates. If not specified, Cargo attempts to use the system certificates.
+
+##### `http.check-revoke`
+* Type: boolean
+* Default: true (Windows), false (all others)
+* Environment: `CARGO_HTTP_CHECK_REVOKE`
+
+This determines whether or not TLS certificate revocation checks should be
+performed. This only works on Windows.
+
+##### `http.ssl-version`
+* Type: string or min/max table
+* Default: none
+* Environment: `CARGO_HTTP_SSL_VERSION`
+
+This sets the minimum TLS version to use. It takes a string, with one of the
+possible values of "default", "tlsv1", "tlsv1.0", "tlsv1.1", "tlsv1.2", or
+"tlsv1.3".
+
+This may alternatively take a table with two keys, `min` and `max`, which each
+take a string value of the same kind that specifies the minimum and maximum
+range of TLS versions to use.
+
+The default is a minimum version of "tlsv1.0" and a max of the newest version
+supported on your platform, typically "tlsv1.3".
+
+##### `http.low-speed-limit`
+* Type: integer
+* Default: 10
+* Environment: `CARGO_HTTP_LOW_SPEED_LIMIT`
+
+This setting controls timeout behavior for slow connections. If the average
+transfer speed in bytes per second is below the given value for
+[`http.timeout`](#httptimeout) seconds (default 30 seconds), then the
+connection is considered too slow and Cargo will abort and retry.
+
+##### `http.multiplexing`
+* Type: boolean
+* Default: true
+* Environment: `CARGO_HTTP_MULTIPLEXING`
+
+When `true`, Cargo will attempt to use the HTTP2 protocol with multiplexing.
+This allows multiple requests to use the same connection, usually improving
+performance when fetching multiple files. If `false`, Cargo will use HTTP 1.1
+without pipelining.
+
+##### `http.user-agent`
+* Type: string
+* Default: Cargo's version
+* Environment: `CARGO_HTTP_USER_AGENT`
+
+Specifies a custom user-agent header to use. The default if not specified is a
+string that includes Cargo's version.
+
+#### `[install]`
+
+The `[install]` table defines defaults for the [`cargo install`] command.
+
+##### `install.root`
+* Type: string (path)
+* Default: Cargo's home directory
+* Environment: `CARGO_INSTALL_ROOT`
+
+Sets the path to the root directory for installing executables for [`cargo
+install`]. Executables go into a `bin` directory underneath the root.
+
+To track information about installed executables, some extra files, such as
+`.crates.toml` and `.crates2.json`, are also created under this root.
+
+The default if not specified is Cargo's home directory (default `.cargo` in
+your home directory).
+
+Can be overridden with the `--root` command-line option.
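+
+For example, a config file could redirect installed executables to a custom
+prefix (the path shown is hypothetical); binaries would then land in
+`/opt/cargo-tools/bin`:
+
+```toml
+[install]
+root = "/opt/cargo-tools"
+```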
+
+#### `[net]`
+
+The `[net]` table controls networking configuration.
+
+##### `net.retry`
+* Type: integer
+* Default: 3
+* Environment: `CARGO_NET_RETRY`
+
+Number of times to retry possibly spurious network errors.
+
+##### `net.git-fetch-with-cli`
+* Type: boolean
+* Default: false
+* Environment: `CARGO_NET_GIT_FETCH_WITH_CLI`
+
+If this is `true`, then Cargo will use the `git` executable to fetch registry
+indexes and git dependencies. If `false`, then it uses a built-in `git`
+library.
+
+Setting this to `true` can be helpful if you have special authentication
+requirements that Cargo does not support. See [Git
+Authentication](../appendix/git-authentication.md) for more information about
+setting up git authentication.
+
+##### `net.offline`
+* Type: boolean
+* Default: false
+* Environment: `CARGO_NET_OFFLINE`
+
+If this is `true`, then Cargo will avoid accessing the network, and attempt to
+proceed with locally cached data. If `false`, Cargo will access the network as
+needed, and generate an error if it encounters a network error.
+
+Can be overridden with the `--offline` command-line option.
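+
+A sketch combining the `[net]` settings above (values are illustrative):
+
+```toml
+[net]
+retry = 3                 # retry count for spurious network errors
+git-fetch-with-cli = true # shell out to `git` for fetches
+offline = false           # allow network access
+```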
+
+##### `net.ssh`
+
+The `[net.ssh]` table contains settings for SSH connections.
+
+##### `net.ssh.known-hosts`
+* Type: array of strings
+* Default: see description
+* Environment: not supported
+
+The `known-hosts` array contains a list of SSH host keys that should be
+accepted as valid when connecting to an SSH server (such as for SSH git
+dependencies). Each entry should be a string in a format similar to OpenSSH
+`known_hosts` files. Each string should start with one or more hostnames
+separated by commas, a space, the key type name, a space, and the
+base64-encoded key. For example:
+
+```toml
+[net.ssh]
+known-hosts = [
+ "example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFO4Q5T0UV0SQevair9PFwoxY9dl4pQl3u5phoqJH3cF"
+]
+```
+
+Cargo will attempt to load known hosts keys from common locations supported in
+OpenSSH, and will join those with any listed in a Cargo configuration file.
+If any matching entry has the correct key, the connection will be allowed.
+
+Cargo comes with the host keys for [github.com][github-keys] built-in. If
+those ever change, you can add the new keys to the config or known_hosts file.
+
+See [Git Authentication](../appendix/git-authentication.md#ssh-known-hosts)
+for more details.
+
+[github-keys]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/githubs-ssh-key-fingerprints
+
+#### `[patch]`
+
+Just as you can override dependencies using [`[patch]` in
+`Cargo.toml`](overriding-dependencies.md#the-patch-section), you can
+override them in the cargo configuration file to apply those patches to
+any affected build. The format is identical to the one used in
+`Cargo.toml`.
+
+Since `.cargo/config.toml` files are not usually checked into source
+control, you should prefer patching using `Cargo.toml` where possible to
+ensure that other developers can compile your crate in their own
+environments. Patching through cargo configuration files is generally
+only appropriate when the patch section is automatically generated by an
+external build tool.
+
+If a given dependency is patched both in a cargo configuration file and
+a `Cargo.toml` file, the patch in the configuration file is used. If
+multiple configuration files patch the same dependency, standard cargo
+configuration merging is used, which prefers the value defined closest
+to the current directory, with `$HOME/.cargo/config.toml` taking the
+lowest precedence.
+
+Relative `path` dependencies in such a `[patch]` section are resolved
+relative to the configuration file they appear in.
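+
+As an illustrative sketch, a config file generated by an external build tool
+might patch a crates.io dependency with a local copy (the package name and
+path are hypothetical):
+
+```toml
+# Relative paths are resolved relative to this configuration file.
+[patch.crates-io]
+foo = { path = "../local-foo" }
+```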
+
+#### `[profile]`
+
+The `[profile]` table can be used to globally change profile settings, and
+override settings specified in `Cargo.toml`. It has the same syntax and
+options as profiles specified in `Cargo.toml`. See the [Profiles chapter] for
+details about the options.
+
+[Profiles chapter]: profiles.md
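+
+For instance, the release profile could be tweaked globally from a config file
+without editing any `Cargo.toml` (values are illustrative):
+
+```toml
+[profile.release]
+lto = "thin"
+codegen-units = 1
+```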
+
+##### `[profile.<name>.build-override]`
+* Environment: `CARGO_PROFILE_<name>_BUILD_OVERRIDE_<key>`
+
+The build-override table overrides settings for build scripts, proc macros,
+and their dependencies. It has the same keys as a normal profile. See the
+[overrides section](profiles.md#overrides) for more details.
+
+##### `[profile.<name>.package.<name>]`
+* Environment: not supported
+
+The package table overrides settings for specific packages. It has the same
+keys as a normal profile, minus the `panic`, `lto`, and `rpath` settings. See
+the [overrides section](profiles.md#overrides) for more details.
+
+##### `profile.<name>.codegen-units`
+* Type: integer
+* Default: See profile docs.
+* Environment: `CARGO_PROFILE_<name>_CODEGEN_UNITS`
+
+See [codegen-units](profiles.md#codegen-units).
+
+##### `profile.<name>.debug`
+* Type: integer or boolean
+* Default: See profile docs.
+* Environment: `CARGO_PROFILE_<name>_DEBUG`
+
+See [debug](profiles.md#debug).
+
+##### `profile.<name>.split-debuginfo`
+* Type: string
+* Default: See profile docs.
+* Environment: `CARGO_PROFILE_<name>_SPLIT_DEBUGINFO`
+
+See [split-debuginfo](profiles.md#split-debuginfo).
+
+##### `profile.<name>.debug-assertions`
+* Type: boolean
+* Default: See profile docs.
+* Environment: `CARGO_PROFILE_<name>_DEBUG_ASSERTIONS`
+
+See [debug-assertions](profiles.md#debug-assertions).
+
+##### `profile.<name>.incremental`
+* Type: boolean
+* Default: See profile docs.
+* Environment: `CARGO_PROFILE_<name>_INCREMENTAL`
+
+See [incremental](profiles.md#incremental).
+
+##### `profile.<name>.lto`
+* Type: string or boolean
+* Default: See profile docs.
+* Environment: `CARGO_PROFILE_<name>_LTO`
+
+See [lto](profiles.md#lto).
+
+##### `profile.<name>.overflow-checks`
+* Type: boolean
+* Default: See profile docs.
+* Environment: `CARGO_PROFILE_<name>_OVERFLOW_CHECKS`
+
+See [overflow-checks](profiles.md#overflow-checks).
+
+##### `profile.<name>.opt-level`
+* Type: integer or string
+* Default: See profile docs.
+* Environment: `CARGO_PROFILE_<name>_OPT_LEVEL`
+
+See [opt-level](profiles.md#opt-level).
+
+##### `profile.<name>.panic`
+* Type: string
+* Default: See profile docs.
+* Environment: `CARGO_PROFILE_<name>_PANIC`
+
+See [panic](profiles.md#panic).
+
+##### `profile.<name>.rpath`
+* Type: boolean
+* Default: See profile docs.
+* Environment: `CARGO_PROFILE_<name>_RPATH`
+
+See [rpath](profiles.md#rpath).
+
+
+#### `[registries]`
+
+The `[registries]` table is used for specifying additional [registries]. It
+consists of a sub-table for each named registry.
+
+##### `registries.<name>.index`
+* Type: string (url)
+* Default: none
+* Environment: `CARGO_REGISTRIES_<name>_INDEX`
+
+Specifies the URL of the index for the registry.
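+
+A named registry might be declared like this (the registry name and URL are
+hypothetical):
+
+```toml
+[registries.my-registry]
+index = "https://my-intranet.example.com/git/index"
+```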
+
+##### `registries.<name>.token`
+* Type: string
+* Default: none
+* Environment: `CARGO_REGISTRIES_<name>_TOKEN`
+
+Specifies the authentication token for the given registry. This value should
+only appear in the [credentials](#credentials) file. This is used for registry
+commands like [`cargo publish`] that require authentication.
+
+Can be overridden with the `--token` command-line option.
+
+##### `registries.crates-io.protocol`
+* Type: string
+* Default: `sparse`
+* Environment: `CARGO_REGISTRIES_CRATES_IO_PROTOCOL`
+
+Specifies the protocol used to access crates.io. Allowed values are `git` or `sparse`.
+
+`git` causes Cargo to clone the entire index of all packages ever published to [crates.io] from <https://github.com/rust-lang/crates.io-index/>.
+This can have performance implications due to the size of the index.
+`sparse` is a newer protocol which uses HTTPS to download only what is necessary from <https://index.crates.io/>.
+This can result in a significant performance improvement for resolving new dependencies in most situations.
+
+More information about registry protocols may be found in the [Registries chapter](registries.md).
+
+#### `[registry]`
+
+The `[registry]` table controls the default registry used when one is not
+specified.
+
+##### `registry.index`
+
+This value is no longer accepted and should not be used.
+
+##### `registry.default`
+* Type: string
+* Default: `"crates-io"`
+* Environment: `CARGO_REGISTRY_DEFAULT`
+
+The name of the registry (from the [`registries` table](#registries)) to use
+by default for registry commands like [`cargo publish`].
+
+Can be overridden with the `--registry` command-line option.
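+
+For example, to make registry commands default to a named registry from the
+`[registries]` table (the name is hypothetical):
+
+```toml
+[registry]
+default = "my-registry"
+```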
+
+##### `registry.token`
+* Type: string
+* Default: none
+* Environment: `CARGO_REGISTRY_TOKEN`
+
+Specifies the authentication token for [crates.io]. This value should only
+appear in the [credentials](#credentials) file. This is used for registry
+commands like [`cargo publish`] that require authentication.
+
+Can be overridden with the `--token` command-line option.
+
+#### `[source]`
+
+The `[source]` table defines the registry sources available. See [Source
+Replacement] for more information. It consists of a sub-table for each named
+source. A source should only define one kind (directory, registry,
+local-registry, or git).
+
+##### `source.<name>.replace-with`
+* Type: string
+* Default: none
+* Environment: not supported
+
+If set, replace this source with the given named source or named registry.
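+
+A sketch of source replacement using `replace-with` (the mirror name and URL
+are hypothetical):
+
+```toml
+[source.crates-io]
+replace-with = "my-mirror"
+
+[source.my-mirror]
+registry = "https://mirror.example.com/crates.io-index"
+```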
+
+##### `source.<name>.directory`
+* Type: string (path)
+* Default: none
+* Environment: not supported
+
+Sets the path to a directory to use as a directory source.
+
+##### `source.<name>.registry`
+* Type: string (url)
+* Default: none
+* Environment: not supported
+
+Sets the URL to use for a registry source.
+
+##### `source.<name>.local-registry`
+* Type: string (path)
+* Default: none
+* Environment: not supported
+
+Sets the path to a directory to use as a local registry source.
+
+##### `source.<name>.git`
+* Type: string (url)
+* Default: none
+* Environment: not supported
+
+Sets the URL to use for a git repository source.
+
+##### `source.<name>.branch`
+* Type: string
+* Default: none
+* Environment: not supported
+
+Sets the branch name to use for a git repository.
+
+If none of `branch`, `tag`, or `rev` is set, defaults to the `master` branch.
+
+##### `source.<name>.tag`
+* Type: string
+* Default: none
+* Environment: not supported
+
+Sets the tag name to use for a git repository.
+
+If none of `branch`, `tag`, or `rev` is set, defaults to the `master` branch.
+
+##### `source.<name>.rev`
+* Type: string
+* Default: none
+* Environment: not supported
+
+Sets the [revision] to use for a git repository.
+
+If none of `branch`, `tag`, or `rev` is set, defaults to the `master` branch.
+
+
+#### `[target]`
+
+The `[target]` table is used for specifying settings for specific platform
+targets. It consists of sub-tables whose keys are either a [platform triple][target triple]
+or a [`cfg()` expression]. The given values will be used if the target platform
+matches either the `<triple>` value or the `<cfg>` expression.
+
+```toml
+[target.thumbv7m-none-eabi]
+linker = "arm-none-eabi-gcc"
+runner = "my-emulator"
+rustflags = ["…", "…"]
+
+[target.'cfg(all(target_arch = "arm", target_os = "none"))']
+runner = "my-arm-wrapper"
+rustflags = ["…", "…"]
+```
+
+`cfg` values come from those built-in to the compiler (run `rustc --print=cfg`
+to view), values set by [build scripts], and extra `--cfg` flags passed to
+`rustc` (such as those defined in `RUSTFLAGS`). Do not try to match on
+`debug_assertions` or Cargo features like `feature="foo"`.
+
+If using a target spec JSON file, the [`<triple>`] value is the filename stem.
+For example `--target foo/bar.json` would match `[target.bar]`.
+
+##### `target.<triple>.ar`
+
+This option is deprecated and unused.
+
+##### `target.<triple>.linker`
+* Type: string (program path)
+* Default: none
+* Environment: `CARGO_TARGET_<triple>_LINKER`
+
+Specifies the linker which is passed to `rustc` (via [`-C linker`]) when the
+[`<triple>`] is being compiled for. By default, the linker is not overridden.
+
+##### `target.<triple>.runner`
+* Type: string or array of strings ([program path with args])
+* Default: none
+* Environment: `CARGO_TARGET_<triple>_RUNNER`
+
+If a runner is provided, executables for the target [`<triple>`] will be
+executed by invoking the specified runner with the actual executable passed as
+an argument. This applies to [`cargo run`], [`cargo test`] and [`cargo bench`]
+commands. By default, compiled executables are executed directly.
+
+##### `target.<cfg>.runner`
+
+This is similar to the [target runner](#targettriplerunner), but using
+a [`cfg()` expression]. If both a [`<triple>`] and `<cfg>` runner match,
+the `<triple>` will take precedence. It is an error if more than one
+`<cfg>` runner matches the current target.
+
+##### `target.<triple>.rustflags`
+* Type: string or array of strings
+* Default: none
+* Environment: `CARGO_TARGET_<triple>_RUSTFLAGS`
+
+Passes a set of custom flags to the compiler for this [`<triple>`].
+The value may be an array of strings or a space-separated string.
+
+See [`build.rustflags`](#buildrustflags) for more details on the different
+ways to specify extra flags.
+
+##### `target.<cfg>.rustflags`
+
+This is similar to the [target rustflags](#targettriplerustflags), but
+using a [`cfg()` expression]. If several `<cfg>` and [`<triple>`] entries
+match the current target, the flags are joined together.
+
+##### `target.<triple>.<links>`
+
+The links sub-table provides a way to [override a build script]. When
+specified, the build script for the given `links` library will not be
+run, and the given values will be used instead.
+
+```toml
+[target.x86_64-unknown-linux-gnu.foo]
+rustc-link-lib = ["foo"]
+rustc-link-search = ["/path/to/foo"]
+rustc-flags = "-L /some/path"
+rustc-cfg = ['key="value"']
+rustc-env = {key = "value"}
+rustc-cdylib-link-arg = ["…"]
+metadata_key1 = "value"
+metadata_key2 = "value"
+```
+
+#### `[term]`
+
+The `[term]` table controls terminal output and interaction.
+
+##### `term.quiet`
+* Type: boolean
+* Default: false
+* Environment: `CARGO_TERM_QUIET`
+
+Controls whether or not log messages are displayed by Cargo.
+
+Specifying the `--quiet` flag will override and force quiet output.
+Specifying the `--verbose` flag will override and disable quiet output.
+
+##### `term.verbose`
+* Type: boolean
+* Default: false
+* Environment: `CARGO_TERM_VERBOSE`
+
+Controls whether or not extra detailed messages are displayed by Cargo.
+
+Specifying the `--quiet` flag will override and disable verbose output.
+Specifying the `--verbose` flag will override and force verbose output.
+
+##### `term.color`
+* Type: string
+* Default: "auto"
+* Environment: `CARGO_TERM_COLOR`
+
+Controls whether or not colored output is used in the terminal. Possible values:
+
+* `auto` (default): Automatically detect if color support is available on the
+ terminal.
+* `always`: Always display colors.
+* `never`: Never display colors.
+
+Can be overridden with the `--color` command-line option.
+
+##### `term.progress.when`
+* Type: string
+* Default: "auto"
+* Environment: `CARGO_TERM_PROGRESS_WHEN`
+
+Controls whether or not the progress bar is shown in the terminal. Possible values:
+
+* `auto` (default): Intelligently guess whether to show the progress bar.
+* `always`: Always show the progress bar.
+* `never`: Never show the progress bar.
+
+##### `term.progress.width`
+* Type: integer
+* Default: none
+* Environment: `CARGO_TERM_PROGRESS_WIDTH`
+
+Sets the width of the progress bar.
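+
+Putting the `[term]` settings together, a config file might look like this
+(values are illustrative):
+
+```toml
+[term]
+quiet = false
+verbose = false
+color = "auto"
+progress.when = "auto"
+progress.width = 80
+```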
+
+[`cargo bench`]: ../commands/cargo-bench.md
+[`cargo login`]: ../commands/cargo-login.md
+[`cargo logout`]: ../commands/cargo-logout.md
+[`cargo doc`]: ../commands/cargo-doc.md
+[`cargo new`]: ../commands/cargo-new.md
+[`cargo publish`]: ../commands/cargo-publish.md
+[`cargo run`]: ../commands/cargo-run.md
+[`cargo rustc`]: ../commands/cargo-rustc.md
+[`cargo test`]: ../commands/cargo-test.md
+[`cargo rustdoc`]: ../commands/cargo-rustdoc.md
+[`cargo install`]: ../commands/cargo-install.md
+[env]: environment-variables.md
+[`cfg()` expression]: ../../reference/conditional-compilation.html
+[build scripts]: build-scripts.md
+[`-C linker`]: ../../rustc/codegen-options/index.md#linker
+[override a build script]: build-scripts.md#overriding-build-scripts
+[toml]: https://toml.io/
+[incremental compilation]: profiles.md#incremental
+[program path with args]: #executable-paths-with-arguments
+[libcurl format]: https://everything.curl.dev/libcurl/proxies#proxy-types
+[source replacement]: source-replacement.md
+[revision]: https://git-scm.com/docs/gitrevisions
+[registries]: registries.md
+[crates.io]: https://crates.io/
+[target triple]: ../appendix/glossary.md#target '"target" (glossary)'
+[`<triple>`]: ../appendix/glossary.md#target '"target" (glossary)'
diff --git a/src/tools/cargo/src/doc/src/reference/environment-variables.md b/src/tools/cargo/src/doc/src/reference/environment-variables.md
new file mode 100644
index 000000000..95c4c87fb
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/environment-variables.md
@@ -0,0 +1,418 @@
+## Environment Variables
+
+Cargo sets and reads a number of environment variables which your code can detect
+or override. Here is a list of the variables Cargo sets, organized by when it interacts
+with them:
+
+### Environment variables Cargo reads
+
+You can override these environment variables to change Cargo's behavior on your
+system:
+
+* `CARGO_LOG` --- Cargo uses the [`env_logger`] crate to display debug log messages.
+ The `CARGO_LOG` environment variable can be set to enable debug logging,
+ with a value such as `trace`, `debug`, or `warn`.
+  Usually it is only used during debugging. For more details, refer to
+  [Debug logging].
+* `CARGO_HOME` --- Cargo maintains a local cache of the registry index and of
+ git checkouts of crates. By default these are stored under `$HOME/.cargo`
+ (`%USERPROFILE%\.cargo` on Windows), but this variable overrides the
+ location of this directory. Once a crate is cached it is not removed by the
+ clean command.
+ For more details refer to the [guide](../guide/cargo-home.md).
+* `CARGO_TARGET_DIR` --- Location of where to place all generated artifacts,
+ relative to the current working directory. See [`build.target-dir`] to set
+ via config.
+* `CARGO` --- If set, Cargo will forward this value instead of setting it
+ to its own auto-detected path when it builds crates and when it
+ executes build scripts and external subcommands. This value is not
+ directly executed by Cargo, and should always point at a command that
+ behaves exactly like `cargo`, as that's what users of the variable
+ will be expecting.
+* `RUSTC` --- Instead of running `rustc`, Cargo will execute this specified
+ compiler instead. See [`build.rustc`] to set via config.
+* `RUSTC_WRAPPER` --- Instead of simply running `rustc`, Cargo will execute this
+ specified wrapper, passing as its command-line arguments the rustc
+ invocation, with the first argument being the path to the actual rustc.
+ Useful to set up a build cache tool such as `sccache`. See
+ [`build.rustc-wrapper`] to set via config. Setting this to the empty string
+ overwrites the config and resets cargo to not use a wrapper.
+* `RUSTC_WORKSPACE_WRAPPER` --- Instead of simply running `rustc`, for workspace
+ members Cargo will execute this specified wrapper, passing
+ as its command-line arguments the rustc invocation, with the first argument
+ being the path to the actual rustc. It affects the filename hash
+ so that artifacts produced by the wrapper are cached separately.
+ See [`build.rustc-workspace-wrapper`] to set via config. Setting this to the empty string
+ overwrites the config and resets cargo to not use a wrapper for workspace members.
+* `RUSTDOC` --- Instead of running `rustdoc`, Cargo will execute this specified
+ `rustdoc` instance instead. See [`build.rustdoc`] to set via config.
+* `RUSTDOCFLAGS` --- A space-separated list of custom flags to pass to all `rustdoc`
+ invocations that Cargo performs. In contrast with [`cargo rustdoc`], this is
+ useful for passing a flag to *all* `rustdoc` instances. See
+ [`build.rustdocflags`] for some more ways to set flags. This string is
+ split by whitespace; for a more robust encoding of multiple arguments,
+ see `CARGO_ENCODED_RUSTDOCFLAGS`.
+* `CARGO_ENCODED_RUSTDOCFLAGS` --- A list of custom flags separated by `0x1f`
+ (ASCII Unit Separator) to pass to all `rustdoc` invocations that Cargo performs.
+* `RUSTFLAGS` --- A space-separated list of custom flags to pass to all compiler
+ invocations that Cargo performs. In contrast with [`cargo rustc`], this is
+ useful for passing a flag to *all* compiler instances. See
+ [`build.rustflags`] for some more ways to set flags. This string is
+ split by whitespace; for a more robust encoding of multiple arguments,
+ see `CARGO_ENCODED_RUSTFLAGS`.
+* `CARGO_ENCODED_RUSTFLAGS` --- A list of custom flags separated by `0x1f`
+ (ASCII Unit Separator) to pass to all compiler invocations that Cargo performs.
+* `CARGO_INCREMENTAL` --- If this is set to 1 then Cargo will force [incremental
+ compilation] to be enabled for the current compilation, and when set to 0 it
+ will force disabling it. If this env var isn't present then cargo's defaults
+ will otherwise be used. See also [`build.incremental`] config value.
+* `CARGO_CACHE_RUSTC_INFO` --- If this is set to 0 then Cargo will not try to cache
+ compiler version information.
+* `HTTPS_PROXY` or `https_proxy` or `http_proxy` --- The HTTP proxy to use, see
+ [`http.proxy`] for more detail.
+* `HTTP_TIMEOUT` --- The HTTP timeout in seconds, see [`http.timeout`] for more
+ detail.
+* `TERM` --- If this is set to `dumb`, it disables the progress bar.
+* `BROWSER` --- The web browser to execute to open documentation with [`cargo
+  doc`]'s `--open` flag, see [`doc.browser`] for more details.
+* `RUSTFMT` --- Instead of running `rustfmt`,
+ [`cargo fmt`](https://github.com/rust-lang/rustfmt) will execute this specified
+ `rustfmt` instance instead.
+
+#### Configuration environment variables
+
+Cargo reads environment variables for some configuration values.
+See the [configuration chapter][config-env] for more details.
+In summary, the supported environment variables are:
+
+* `CARGO_ALIAS_<name>` --- Command aliases, see [`alias`].
+* `CARGO_BUILD_JOBS` --- Number of parallel jobs, see [`build.jobs`].
+* `CARGO_BUILD_RUSTC` --- The `rustc` executable, see [`build.rustc`].
+* `CARGO_BUILD_RUSTC_WRAPPER` --- The `rustc` wrapper, see [`build.rustc-wrapper`].
+* `CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER` --- The `rustc` wrapper for workspace members only, see [`build.rustc-workspace-wrapper`].
+* `CARGO_BUILD_RUSTDOC` --- The `rustdoc` executable, see [`build.rustdoc`].
+* `CARGO_BUILD_TARGET` --- The default target platform, see [`build.target`].
+* `CARGO_BUILD_TARGET_DIR` --- The default output directory, see [`build.target-dir`].
+* `CARGO_BUILD_RUSTFLAGS` --- Extra `rustc` flags, see [`build.rustflags`].
+* `CARGO_BUILD_RUSTDOCFLAGS` --- Extra `rustdoc` flags, see [`build.rustdocflags`].
+* `CARGO_BUILD_INCREMENTAL` --- Incremental compilation, see [`build.incremental`].
+* `CARGO_BUILD_DEP_INFO_BASEDIR` --- Dep-info relative directory, see [`build.dep-info-basedir`].
+* `CARGO_CARGO_NEW_VCS` --- The default source control system used by [`cargo new`], see [`cargo-new.vcs`].
+* `CARGO_FUTURE_INCOMPAT_REPORT_FREQUENCY` --- How often we should generate a future incompat report notification, see [`future-incompat-report.frequency`].
+* `CARGO_HTTP_DEBUG` --- Enables HTTP debugging, see [`http.debug`].
+* `CARGO_HTTP_PROXY` --- Enables HTTP proxy, see [`http.proxy`].
+* `CARGO_HTTP_TIMEOUT` --- The HTTP timeout, see [`http.timeout`].
+* `CARGO_HTTP_CAINFO` --- The TLS certificate Certificate Authority file, see [`http.cainfo`].
+* `CARGO_HTTP_CHECK_REVOKE` --- Disables TLS certificate revocation checks, see [`http.check-revoke`].
+* `CARGO_HTTP_SSL_VERSION` --- The TLS version to use, see [`http.ssl-version`].
+* `CARGO_HTTP_LOW_SPEED_LIMIT` --- The HTTP low-speed limit, see [`http.low-speed-limit`].
+* `CARGO_HTTP_MULTIPLEXING` --- Whether HTTP/2 multiplexing is used, see [`http.multiplexing`].
+* `CARGO_HTTP_USER_AGENT` --- The HTTP user-agent header, see [`http.user-agent`].
+* `CARGO_INSTALL_ROOT` --- The default directory for [`cargo install`], see [`install.root`].
+* `CARGO_NET_RETRY` --- Number of times to retry network errors, see [`net.retry`].
+* `CARGO_NET_GIT_FETCH_WITH_CLI` --- Enables the use of the `git` executable to fetch, see [`net.git-fetch-with-cli`].
+* `CARGO_NET_OFFLINE` --- Offline mode, see [`net.offline`].
+* `CARGO_PROFILE_<name>_BUILD_OVERRIDE_<key>` --- Override build script profile, see [`profile.<name>.build-override`].
+* `CARGO_PROFILE_<name>_CODEGEN_UNITS` --- Set code generation units, see [`profile.<name>.codegen-units`].
+* `CARGO_PROFILE_<name>_DEBUG` --- What kind of debug info to include, see [`profile.<name>.debug`].
+* `CARGO_PROFILE_<name>_DEBUG_ASSERTIONS` --- Enable/disable debug assertions, see [`profile.<name>.debug-assertions`].
+* `CARGO_PROFILE_<name>_INCREMENTAL` --- Enable/disable incremental compilation, see [`profile.<name>.incremental`].
+* `CARGO_PROFILE_<name>_LTO` --- Link-time optimization, see [`profile.<name>.lto`].
+* `CARGO_PROFILE_<name>_OVERFLOW_CHECKS` --- Enable/disable overflow checks, see [`profile.<name>.overflow-checks`].
+* `CARGO_PROFILE_<name>_OPT_LEVEL` --- Set the optimization level, see [`profile.<name>.opt-level`].
+* `CARGO_PROFILE_<name>_PANIC` --- The panic strategy to use, see [`profile.<name>.panic`].
+* `CARGO_PROFILE_<name>_RPATH` --- The rpath linking option, see [`profile.<name>.rpath`].
+* `CARGO_PROFILE_<name>_SPLIT_DEBUGINFO` --- Controls debug file output behavior, see [`profile.<name>.split-debuginfo`].
+* `CARGO_REGISTRIES_<name>_INDEX` --- URL of a registry index, see [`registries.<name>.index`].
+* `CARGO_REGISTRIES_<name>_TOKEN` --- Authentication token of a registry, see [`registries.<name>.token`].
+* `CARGO_REGISTRY_DEFAULT` --- Default registry for the `--registry` flag, see [`registry.default`].
+* `CARGO_REGISTRY_TOKEN` --- Authentication token for [crates.io], see [`registry.token`].
+* `CARGO_TARGET_<triple>_LINKER` --- The linker to use, see [`target.<triple>.linker`]. The triple must be [converted to uppercase and underscores](config.md#environment-variables).
+* `CARGO_TARGET_<triple>_RUNNER` --- The executable runner, see [`target.<triple>.runner`].
+* `CARGO_TARGET_<triple>_RUSTFLAGS` --- Extra `rustc` flags for a target, see [`target.<triple>.rustflags`].
+* `CARGO_TERM_QUIET` --- Quiet mode, see [`term.quiet`].
+* `CARGO_TERM_VERBOSE` --- The default terminal verbosity, see [`term.verbose`].
+* `CARGO_TERM_COLOR` --- The default color mode, see [`term.color`].
+* `CARGO_TERM_PROGRESS_WHEN` --- The default progress bar showing mode, see [`term.progress.when`].
+* `CARGO_TERM_PROGRESS_WIDTH` --- The default progress bar width, see [`term.progress.width`].
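+
+As a sketch of how these variables map onto the configuration file, the
+following two forms are equivalent (values are illustrative):
+
+```toml
+# .cargo/config.toml
+[build]
+jobs = 4
+
+[profile.release]
+lto = "thin"
+
+# Equivalent environment variables:
+#   CARGO_BUILD_JOBS=4
+#   CARGO_PROFILE_RELEASE_LTO=thin
+```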
+
+[`cargo doc`]: ../commands/cargo-doc.md
+[`cargo install`]: ../commands/cargo-install.md
+[`cargo new`]: ../commands/cargo-new.md
+[`cargo rustc`]: ../commands/cargo-rustc.md
+[`cargo rustdoc`]: ../commands/cargo-rustdoc.md
+[config-env]: config.md#environment-variables
+[crates.io]: https://crates.io/
+[incremental compilation]: profiles.md#incremental
+[`alias`]: config.md#alias
+[`build.jobs`]: config.md#buildjobs
+[`build.rustc`]: config.md#buildrustc
+[`build.rustc-wrapper`]: config.md#buildrustc-wrapper
+[`build.rustc-workspace-wrapper`]: config.md#buildrustc-workspace-wrapper
+[`build.rustdoc`]: config.md#buildrustdoc
+[`build.target`]: config.md#buildtarget
+[`build.target-dir`]: config.md#buildtarget-dir
+[`build.rustflags`]: config.md#buildrustflags
+[`build.rustdocflags`]: config.md#buildrustdocflags
+[`build.incremental`]: config.md#buildincremental
+[`build.dep-info-basedir`]: config.md#builddep-info-basedir
+[`doc.browser`]: config.md#docbrowser
+[`cargo-new.name`]: config.md#cargo-newname
+[`cargo-new.email`]: config.md#cargo-newemail
+[`cargo-new.vcs`]: config.md#cargo-newvcs
+[`future-incompat-report.frequency`]: config.md#future-incompat-reportfrequency
+[`http.debug`]: config.md#httpdebug
+[`http.proxy`]: config.md#httpproxy
+[`http.timeout`]: config.md#httptimeout
+[`http.cainfo`]: config.md#httpcainfo
+[`http.check-revoke`]: config.md#httpcheck-revoke
+[`http.ssl-version`]: config.md#httpssl-version
+[`http.low-speed-limit`]: config.md#httplow-speed-limit
+[`http.multiplexing`]: config.md#httpmultiplexing
+[`http.user-agent`]: config.md#httpuser-agent
+[`install.root`]: config.md#installroot
+[`net.retry`]: config.md#netretry
+[`net.git-fetch-with-cli`]: config.md#netgit-fetch-with-cli
+[`net.offline`]: config.md#netoffline
+[`profile.<name>.build-override`]: config.md#profilenamebuild-override
+[`profile.<name>.codegen-units`]: config.md#profilenamecodegen-units
+[`profile.<name>.debug`]: config.md#profilenamedebug
+[`profile.<name>.debug-assertions`]: config.md#profilenamedebug-assertions
+[`profile.<name>.incremental`]: config.md#profilenameincremental
+[`profile.<name>.lto`]: config.md#profilenamelto
+[`profile.<name>.overflow-checks`]: config.md#profilenameoverflow-checks
+[`profile.<name>.opt-level`]: config.md#profilenameopt-level
+[`profile.<name>.panic`]: config.md#profilenamepanic
+[`profile.<name>.rpath`]: config.md#profilenamerpath
+[`profile.<name>.split-debuginfo`]: config.md#profilenamesplit-debuginfo
+[`registries.<name>.index`]: config.md#registriesnameindex
+[`registries.<name>.token`]: config.md#registriesnametoken
+[`registry.default`]: config.md#registrydefault
+[`registry.token`]: config.md#registrytoken
+[`target.<triple>.linker`]: config.md#targettriplelinker
+[`target.<triple>.runner`]: config.md#targettriplerunner
+[`target.<triple>.rustflags`]: config.md#targettriplerustflags
+[`term.quiet`]: config.md#termquiet
+[`term.verbose`]: config.md#termverbose
+[`term.color`]: config.md#termcolor
+[`term.progress.when`]: config.md#termprogresswhen
+[`term.progress.width`]: config.md#termprogresswidth
+
+### Environment variables Cargo sets for crates
+
+Cargo exposes these environment variables to your crate when it is compiled.
+Note that this also applies when running binaries with `cargo run` and
+`cargo test`. To get the value of any of these variables in a Rust program, do
+this:
+
+```rust,ignore
+let version = env!("CARGO_PKG_VERSION");
+```
+
+`version` will now contain the value of `CARGO_PKG_VERSION`.
+
+Note that if one of these values is not provided in the manifest, the
+corresponding environment variable is set to the empty string, `""`.
+
+* `CARGO` --- Path to the `cargo` binary performing the build.
+* `CARGO_MANIFEST_DIR` --- The directory containing the manifest of your package.
+* `CARGO_PKG_VERSION` --- The full version of your package.
+* `CARGO_PKG_VERSION_MAJOR` --- The major version of your package.
+* `CARGO_PKG_VERSION_MINOR` --- The minor version of your package.
+* `CARGO_PKG_VERSION_PATCH` --- The patch version of your package.
+* `CARGO_PKG_VERSION_PRE` --- The pre-release version of your package.
+* `CARGO_PKG_AUTHORS` --- Colon separated list of authors from the manifest of your package.
+* `CARGO_PKG_NAME` --- The name of your package.
+* `CARGO_PKG_DESCRIPTION` --- The description from the manifest of your package.
+* `CARGO_PKG_HOMEPAGE` --- The home page from the manifest of your package.
+* `CARGO_PKG_REPOSITORY` --- The repository from the manifest of your package.
+* `CARGO_PKG_LICENSE` --- The license from the manifest of your package.
+* `CARGO_PKG_LICENSE_FILE` --- The license file from the manifest of your package.
+* `CARGO_PKG_RUST_VERSION` --- The Rust version from the manifest of your package.
+ Note that this is the minimum Rust version supported by the package, not the
+ current Rust version.
+* `CARGO_PKG_README` --- Path to the README file of your package.
+* `CARGO_CRATE_NAME` --- The name of the crate that is currently being compiled. It is the name of the [Cargo target] with `-` converted to `_`, such as the name of the library, binary, example, integration test, or benchmark.
+* `CARGO_BIN_NAME` --- The name of the binary that is currently being compiled.
+ Only set for [binaries] or binary [examples]. This name does not include any
+ file extension, such as `.exe`.
+* `OUT_DIR` --- If the package has a build script, this is set to the folder where the build
+ script should place its output. See below for more information.
+ (Only set during compilation.)
+* `CARGO_BIN_EXE_<name>` --- The absolute path to a binary target's executable.
+ This is only set when building an [integration test] or benchmark. This may
+ be used with the [`env` macro] to find the executable to run for testing
+ purposes. The `<name>` is the name of the binary target, exactly as-is. For
+ example, `CARGO_BIN_EXE_my-program` for a binary named `my-program`.
+ Binaries are automatically built when the test is built, unless the binary
+ has required features that are not enabled.
+* `CARGO_PRIMARY_PACKAGE` --- This environment variable will be set if the
+ package being built is primary. Primary packages are the ones the user
+ selected on the command-line, either with `-p` flags or the defaults based
+ on the current directory and the default workspace members. This environment
+ variable will not be set when building dependencies. This is only set when
+ compiling the package (not when running binaries or tests).
+* `CARGO_TARGET_TMPDIR` --- Only set when building [integration test] or benchmark code.
+ This is a path to a directory inside the target directory
+ where integration tests or benchmarks are free to put any data needed by
+ the tests/benches. Cargo initially creates this directory but doesn't
+  manage its content in any way; this is the responsibility of the test code.
+
+[Cargo target]: cargo-targets.md
+[binaries]: cargo-targets.md#binaries
+[examples]: cargo-targets.md#examples
+[integration test]: cargo-targets.md#integration-tests
+[`env` macro]: ../../std/macro.env.html
+
+#### Dynamic library paths
+
+Cargo also sets the dynamic library path when compiling and running binaries
+with commands like `cargo run` and `cargo test`. This helps with locating
+shared libraries that are part of the build process. The variable name depends
+on the platform:
+
+* Windows: `PATH`
+* macOS: `DYLD_FALLBACK_LIBRARY_PATH`
+* Unix: `LD_LIBRARY_PATH`
+
+The value is extended from the existing value when Cargo starts. On macOS, if
+`DYLD_FALLBACK_LIBRARY_PATH` is not already set, Cargo adds the default
+`$HOME/lib:/usr/local/lib:/usr/lib`.
+
+Cargo includes the following paths:
+
+* Search paths included from any build script with the [`rustc-link-search`
+ instruction](build-scripts.md#rustc-link-search). Paths outside of the
+ `target` directory are removed. It is the responsibility of the user running
+ Cargo to properly set the environment if additional libraries on the system
+ are needed in the search path.
+* The base output directory, such as `target/debug`, and the "deps" directory.
+ This is mostly for legacy support of `rustc` compiler plugins.
+* The rustc sysroot library path. This generally is not important to most
+ users.
+
+### Environment variables Cargo sets for build scripts
+
+Cargo sets several environment variables when build scripts are run. Because these variables
+are not yet set when the build script is compiled, the above example using `env!` won't work
+and instead you'll need to retrieve the values when the build script is run:
+
+```rust,ignore
+use std::env;
+let out_dir = env::var("OUT_DIR").unwrap();
+```
+
+`out_dir` will now contain the value of `OUT_DIR`.
+
+* `CARGO` --- Path to the `cargo` binary performing the build.
+* `CARGO_MANIFEST_DIR` --- The directory containing the manifest for the package
+ being built (the package containing the build
+ script). Also note that this is the value of the
+ current working directory of the build script when it
+ starts.
+* `CARGO_MANIFEST_LINKS` --- the manifest `links` value.
+* `CARGO_MAKEFLAGS` --- Contains parameters needed for Cargo's [jobserver]
+ implementation to parallelize subprocesses.
+ Rustc or cargo invocations from build.rs can already
+ read `CARGO_MAKEFLAGS`, but GNU Make requires the
+ flags to be specified either directly as arguments,
+ or through the `MAKEFLAGS` environment variable.
+ Currently Cargo doesn't set the `MAKEFLAGS` variable,
+ but it's free for build scripts invoking GNU Make
+ to set it to the contents of `CARGO_MAKEFLAGS`.
+* `CARGO_FEATURE_<name>` --- For each activated feature of the package being
+ built, this environment variable will be present
+ where `<name>` is the name of the feature uppercased
+ and having `-` translated to `_`.
+* `CARGO_CFG_<cfg>` --- For each [configuration option][configuration] of the
+ package being built, this environment variable will contain the value of the
+ configuration, where `<cfg>` is the name of the configuration uppercased and
+ having `-` translated to `_`. Boolean configurations are present if they are
+ set, and not present otherwise. Configurations with multiple values are
+ joined to a single variable with the values delimited by `,`. This includes
+ values built-in to the compiler (which can be seen with `rustc --print=cfg`)
+ and values set by build scripts and extra flags passed to `rustc` (such as
+ those defined in `RUSTFLAGS`). Some examples of what these variables are:
+ * `CARGO_CFG_UNIX` --- Set on [unix-like platforms].
+ * `CARGO_CFG_WINDOWS` --- Set on [windows-like platforms].
+ * `CARGO_CFG_TARGET_FAMILY=unix` --- The [target family].
+ * `CARGO_CFG_TARGET_OS=macos` --- The [target operating system].
+ * `CARGO_CFG_TARGET_ARCH=x86_64` --- The CPU [target architecture].
+ * `CARGO_CFG_TARGET_VENDOR=apple` --- The [target vendor].
+ * `CARGO_CFG_TARGET_ENV=gnu` --- The [target environment] ABI.
+ * `CARGO_CFG_TARGET_POINTER_WIDTH=64` --- The CPU [pointer width].
+ * `CARGO_CFG_TARGET_ENDIAN=little` --- The CPU [target endianness].
+ * `CARGO_CFG_TARGET_FEATURE=mmx,sse` --- List of CPU [target features] enabled.
+* `OUT_DIR` --- the folder in which all output and intermediate artifacts should
+ be placed. This folder is inside the build directory for the
+ package being built, and it is unique for the package in question.
+* `TARGET` --- the target triple that is being compiled for. Native code should be
+ compiled for this triple. See the [Target Triple] description
+ for more information.
+* `HOST` --- the host triple of the Rust compiler.
+* `NUM_JOBS` --- the parallelism specified as the top-level parallelism. This can
+ be useful to pass a `-j` parameter to a system like `make`. Note
+ that care should be taken when interpreting this environment
+ variable. For historical purposes this is still provided but
+ recent versions of Cargo, for example, do not need to run `make
+ -j`, and instead can set the `MAKEFLAGS` env var to the content
+ of `CARGO_MAKEFLAGS` to activate the use of Cargo's GNU Make
+ compatible [jobserver] for sub-make invocations.
+* `OPT_LEVEL`, `DEBUG` --- values of the corresponding variables for the
+ profile currently being built.
+* `PROFILE` --- `release` for release builds, `debug` for other builds. This is
+ determined based on if the [profile] inherits from the [`dev`] or
+  determined based on whether the [profile] inherits from the [`dev`] or
+  [`release`] profile. Using this environment variable is not recommended.
+  Using other environment variables like `OPT_LEVEL` provides a more correct
+* `DEP_<name>_<key>` --- For more information about this set of environment
+ variables, see build script documentation about [`links`][links].
+* `RUSTC`, `RUSTDOC` --- the compiler and documentation generator that Cargo has
+ resolved to use, passed to the build script so it might
+ use it as well.
+* `RUSTC_WRAPPER` --- the `rustc` wrapper, if any, that Cargo is using.
+ See [`build.rustc-wrapper`].
+* `RUSTC_WORKSPACE_WRAPPER` --- the `rustc` wrapper, if any, that Cargo is
+ using for workspace members. See
+ [`build.rustc-workspace-wrapper`].
+* `RUSTC_LINKER` --- The path to the linker binary that Cargo has resolved to use
+ for the current target, if specified. The linker can be
+ changed by editing `.cargo/config.toml`; see the documentation
+ about [cargo configuration][cargo-config] for more
+ information.
+* `CARGO_ENCODED_RUSTFLAGS` --- extra flags that Cargo invokes `rustc` with,
+ separated by a `0x1f` character (ASCII Unit Separator). See
+ [`build.rustflags`]. Note that since Rust 1.55, `RUSTFLAGS` is removed from
+ the environment; scripts should use `CARGO_ENCODED_RUSTFLAGS` instead.
+* `CARGO_PKG_<var>` --- The package information variables, with the same names and values as are [provided during crate building][variables set for crates].
+
+[`env_logger`]: https://docs.rs/env_logger
+[debug logging]: https://doc.crates.io/contrib/architecture/console.html#debug-logging
+[unix-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows
+[windows-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows
+[target family]: ../../reference/conditional-compilation.html#target_family
+[target operating system]: ../../reference/conditional-compilation.html#target_os
+[target architecture]: ../../reference/conditional-compilation.html#target_arch
+[target vendor]: ../../reference/conditional-compilation.html#target_vendor
+[target environment]: ../../reference/conditional-compilation.html#target_env
+[pointer width]: ../../reference/conditional-compilation.html#target_pointer_width
+[target endianness]: ../../reference/conditional-compilation.html#target_endian
+[target features]: ../../reference/conditional-compilation.html#target_feature
+[links]: build-scripts.md#the-links-manifest-key
+[configuration]: ../../reference/conditional-compilation.html
+[jobserver]: https://www.gnu.org/software/make/manual/html_node/Job-Slots.html
+[cargo-config]: config.md
+[Target Triple]: ../appendix/glossary.md#target
+[variables set for crates]: #environment-variables-cargo-sets-for-crates
+[profile]: profiles.md
+[`dev`]: profiles.md#dev
+[`release`]: profiles.md#release
+
+### Environment variables Cargo sets for 3rd party subcommands
+
+Cargo exposes this environment variable to 3rd party subcommands
+(i.e., programs named `cargo-foobar` placed in `$PATH`):
+
+* `CARGO` --- Path to the `cargo` binary performing the build.
+
+For extended information about your environment, you may run `cargo metadata`.
diff --git a/src/tools/cargo/src/doc/src/reference/external-tools.md b/src/tools/cargo/src/doc/src/reference/external-tools.md
new file mode 100644
index 000000000..7b5110cbe
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/external-tools.md
@@ -0,0 +1,290 @@
+## External tools
+
+One of the goals of Cargo is simple integration with third-party tools, like
+IDEs and other build systems. To make integration easier, Cargo has several
+facilities:
+
+* a [`cargo metadata`] command, which outputs package structure and dependencies
+ information in JSON,
+
+* a `--message-format` flag, which outputs information about a particular build,
+ and
+
+* support for custom subcommands.
+
+
+### Information about package structure
+
+You can use the [`cargo metadata`] command to get information about package
+structure and dependencies. See the [`cargo metadata`] documentation
+for details on the format of the output.
+
+The format is stable and versioned. When calling `cargo metadata`, you should
+pass the `--format-version` flag explicitly to avoid forward-incompatibility
+hazards.
+
+If you are using Rust, the [cargo_metadata] crate can be used to parse the
+output.
+
+[cargo_metadata]: https://crates.io/crates/cargo_metadata
+[`cargo metadata`]: ../commands/cargo-metadata.md
+
+### JSON messages
+
+When passing `--message-format=json`, Cargo will output the following
+information during the build:
+
+* compiler errors and warnings,
+
+* produced artifacts,
+
+* results of the build scripts (for example, native dependencies).
+
+The output goes to stdout, one JSON object per line. The `reason` field
+distinguishes different kinds of messages.
+
+The `--message-format` option can also take additional formatting values which
+alter the way the JSON messages are computed and rendered. See the description
+of the `--message-format` option in the [build command documentation] for more
+details.
+
+If you are using Rust, the [cargo_metadata] crate can be used to parse these
+messages.
+
+[build command documentation]: ../commands/cargo-build.md
+[cargo_metadata]: https://crates.io/crates/cargo_metadata
+
+#### Compiler messages
+
+The "compiler-message" message includes output from the compiler, such as
+warnings and errors. See the [rustc JSON chapter](../../rustc/json.md) for
+details on `rustc`'s message format, which is embedded in the following
+structure:
+
+```javascript
+{
+ /* The "reason" indicates the kind of message. */
+ "reason": "compiler-message",
+ /* The Package ID, a unique identifier for referring to the package. */
+ "package_id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+ /* Absolute path to the package manifest. */
+ "manifest_path": "/path/to/my-package/Cargo.toml",
+ /* The Cargo target (lib, bin, example, etc.) that generated the message. */
+ "target": {
+ /* Array of target kinds.
+ - lib targets list the `crate-type` values from the
+ manifest such as "lib", "rlib", "dylib",
+ "proc-macro", etc. (default ["lib"])
+ - binary is ["bin"]
+ - example is ["example"]
+ - integration test is ["test"]
+ - benchmark is ["bench"]
+ - build script is ["custom-build"]
+ */
+ "kind": [
+ "lib"
+ ],
+ /* Array of crate types.
+ - lib and example libraries list the `crate-type` values
+ from the manifest such as "lib", "rlib", "dylib",
+ "proc-macro", etc. (default ["lib"])
+ - all other target kinds are ["bin"]
+ */
+ "crate_types": [
+ "lib"
+ ],
+ /* The name of the target. */
+ "name": "my-package",
+ /* Absolute path to the root source file of the target. */
+ "src_path": "/path/to/my-package/src/lib.rs",
+ /* The Rust edition of the target.
+ Defaults to the package edition.
+ */
+ "edition": "2018",
+ /* Array of required features.
+ This property is not included if no required features are set.
+ */
+ "required-features": ["feat1"],
+ /* Whether the target should be documented by `cargo doc`. */
+ "doc": true,
+ /* Whether or not this target has doc tests enabled, and
+ the target is compatible with doc testing.
+ */
+    "doctest": true,
+ /* Whether or not this target should be built and run with `--test`
+ */
+ "test": true
+ },
+ /* The message emitted by the compiler.
+
+ See https://doc.rust-lang.org/rustc/json.html for details.
+ */
+ "message": {
+ /* ... */
+ }
+}
+```
+
+#### Artifact messages
+
+For every compilation step, a "compiler-artifact" message is emitted with the
+following structure:
+
+```javascript
+{
+ /* The "reason" indicates the kind of message. */
+ "reason": "compiler-artifact",
+ /* The Package ID, a unique identifier for referring to the package. */
+ "package_id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+ /* Absolute path to the package manifest. */
+ "manifest_path": "/path/to/my-package/Cargo.toml",
+ /* The Cargo target (lib, bin, example, etc.) that generated the artifacts.
+ See the definition above for `compiler-message` for details.
+ */
+ "target": {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "my-package",
+ "src_path": "/path/to/my-package/src/lib.rs",
+ "edition": "2018",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ },
+ /* The profile indicates which compiler settings were used. */
+ "profile": {
+ /* The optimization level. */
+ "opt_level": "0",
+ /* The debug level, an integer of 0, 1, or 2. If `null`, it implies
+ rustc's default of 0.
+ */
+ "debuginfo": 2,
+ /* Whether or not debug assertions are enabled. */
+ "debug_assertions": true,
+ /* Whether or not overflow checks are enabled. */
+ "overflow_checks": true,
+ /* Whether or not the `--test` flag is used. */
+ "test": false
+ },
+ /* Array of features enabled. */
+ "features": ["feat1", "feat2"],
+ /* Array of files generated by this step. */
+ "filenames": [
+ "/path/to/my-package/target/debug/libmy_package.rlib",
+ "/path/to/my-package/target/debug/deps/libmy_package-be9f3faac0a26ef0.rmeta"
+ ],
+ /* A string of the path to the executable that was created, or null if
+ this step did not generate an executable.
+ */
+ "executable": null,
+ /* Whether or not this step was actually executed.
+ When `true`, this means that the pre-existing artifacts were
+ up-to-date, and `rustc` was not executed. When `false`, this means that
+ `rustc` was run to generate the artifacts.
+ */
+ "fresh": true
+}
+```
+
+#### Build script output
+
+The "build-script-executed" message includes the parsed output of a build
+script. Note that this is emitted even if the build script is not run; it will
+display the previously cached value. More details about build script output
+may be found in [the chapter on build scripts](build-scripts.md).
+
+```javascript
+{
+ /* The "reason" indicates the kind of message. */
+ "reason": "build-script-executed",
+ /* The Package ID, a unique identifier for referring to the package. */
+ "package_id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+ /* Array of libraries to link, as indicated by the `cargo:rustc-link-lib`
+ instruction. Note that this may include a "KIND=" prefix in the string
+ where KIND is the library kind.
+ */
+ "linked_libs": ["foo", "static=bar"],
+ /* Array of paths to include in the library search path, as indicated by
+ the `cargo:rustc-link-search` instruction. Note that this may include a
+ "KIND=" prefix in the string where KIND is the library kind.
+ */
+ "linked_paths": ["/some/path", "native=/another/path"],
+ /* Array of cfg values to enable, as indicated by the `cargo:rustc-cfg`
+ instruction.
+ */
+ "cfgs": ["cfg1", "cfg2=\"string\""],
+ /* Array of [KEY, VALUE] arrays of environment variables to set, as
+ indicated by the `cargo:rustc-env` instruction.
+ */
+ "env": [
+ ["SOME_KEY", "some value"],
+ ["ANOTHER_KEY", "another value"]
+ ],
+  /* An absolute path which is used as the value of the `OUT_DIR` environment
+     variable when compiling the current package.
+ */
+ "out_dir": "/some/path/in/target/dir"
+}
+```
+
+#### Build finished
+
+The "build-finished" message is emitted at the end of the build.
+
+```javascript
+{
+ /* The "reason" indicates the kind of message. */
+ "reason": "build-finished",
+ /* Whether or not the build finished successfully. */
+ "success": true,
+}
+```
+
+This message can be helpful for tools to know when to stop reading JSON
+messages. Commands such as `cargo test` or `cargo run` can produce additional
+output after the build has finished. This message lets a tool know that Cargo
+will not produce additional JSON messages, but additional output may be
+generated afterwards (such as the output of the program executed by
+`cargo run`).
+
+> Note: There is experimental nightly-only support for JSON output for tests,
+> so additional test-specific JSON messages may begin arriving after the
+> "build-finished" message if that is enabled.
+
+### Custom subcommands
+
+Cargo is designed to be extensible with new subcommands without having to modify
+Cargo itself. This is achieved by translating a cargo invocation of the form
+`cargo (?<command>[^ ]+)` into an invocation of an external tool
+`cargo-${command}`. The external tool must be present in one of the user's
+`$PATH` directories.
+
+When Cargo invokes a custom subcommand, the first argument to the subcommand
+will be the filename of the custom subcommand, as usual. The second argument
+will be the subcommand name itself. For example, the second argument would be
+`${command}` when invoking `cargo-${command}`. Any additional arguments on the
+command line will be forwarded unchanged.
+
+Cargo can also display the help output of a custom subcommand with `cargo help
+${command}`. Cargo assumes that the subcommand will print a help message if its
+third argument is `--help`. So, `cargo help ${command}` would invoke
+`cargo-${command} ${command} --help`.
+
+Custom subcommands may use the `CARGO` environment variable to call back to
+Cargo. Alternatively, they can link to the `cargo` crate as a library, but this
+approach has drawbacks:
+
+* Cargo as a library is unstable: the API may change without deprecation
+* versions of the linked Cargo library may be different from the Cargo binary
+
+Instead, it is encouraged to use the CLI interface to drive Cargo. The [`cargo
+metadata`] command can be used to obtain information about the current project
+(the [`cargo_metadata`] crate provides a Rust interface to this command).
+
+[`cargo metadata`]: ../commands/cargo-metadata.md
+[`cargo_metadata`]: https://crates.io/crates/cargo_metadata
diff --git a/src/tools/cargo/src/doc/src/reference/features-examples.md b/src/tools/cargo/src/doc/src/reference/features-examples.md
new file mode 100644
index 000000000..ac9636fcb
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/features-examples.md
@@ -0,0 +1,187 @@
+## Features Examples
+
+The following illustrates some real-world examples of features in action.
+
+### Minimizing build times and file sizes
+
+Some packages use features so that if the features are not enabled, it reduces
+the size of the crate and reduces compile time. Some examples are:
+
+* [`syn`] is a popular crate for parsing Rust code. Since it is so popular, it
+ is helpful to reduce compile times since it affects so many projects. It has
+ a [clearly documented list][syn-features] of features which can be used to
+ minimize the amount of code it contains.
+* [`regex`] has [several features][regex-features] that are [well
+ documented][regex-docs]. Cutting out Unicode support can reduce the
+ resulting file size as it can remove some large tables.
+* [`winapi`] has [a large number][winapi-features] of features that
+ limit which Windows API bindings it supports.
+* [`web-sys`] is another example similar to `winapi` that provides a [huge
+ surface area][web-sys-features] of API bindings that are limited by using
+ features.
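+
+For example, a dependent crate might disable default features and opt back in
+to only what it needs (the feature selection is illustrative):
+
+```toml
+[dependencies]
+syn = { version = "1", default-features = false, features = ["parsing"] }
+```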
+
+[`winapi`]: https://crates.io/crates/winapi
+[winapi-features]: https://github.com/retep998/winapi-rs/blob/0.3.9/Cargo.toml#L25-L431
+[`regex`]: https://crates.io/crates/regex
+[`syn`]: https://crates.io/crates/syn
+[syn-features]: https://docs.rs/syn/1.0.54/syn/#optional-features
+[regex-features]: https://github.com/rust-lang/regex/blob/1.4.2/Cargo.toml#L33-L101
+[regex-docs]: https://docs.rs/regex/1.4.2/regex/#crate-features
+[`web-sys`]: https://crates.io/crates/web-sys
+[web-sys-features]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/crates/web-sys/Cargo.toml#L32-L1395
+
+### Extending behavior
+
+The [`serde_json`] package has a [`preserve_order` feature][serde_json-preserve_order]
+which [changes the behavior][serde_json-code] of JSON maps to preserve the
+order that keys are inserted. Notice that it enables an optional dependency
+[`indexmap`] to implement the new behavior.
+
+When changing behavior like this, be careful to make sure the changes are
+[SemVer compatible]. That is, enabling the feature should not break code that
+usually builds with the feature off.
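+
+A minimal sketch of this pattern in a `Cargo.toml`, where a feature turns on
+an optional dependency (names are illustrative, not `serde_json`'s actual
+manifest):
+
+```toml
+[dependencies]
+indexmap = { version = "1", optional = true }
+
+[features]
+preserve_order = ["dep:indexmap"]
+```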
+
+[`serde_json`]: https://crates.io/crates/serde_json
+[serde_json-preserve_order]: https://github.com/serde-rs/json/blob/v1.0.60/Cargo.toml#L53-L56
+[SemVer compatible]: features.md#semver-compatibility
+[serde_json-code]: https://github.com/serde-rs/json/blob/v1.0.60/src/map.rs#L23-L26
+[`indexmap`]: https://crates.io/crates/indexmap
+
+### `no_std` support
+
+Some packages want to support both [`no_std`] and `std` environments. This is
+useful for supporting embedded and resource-constrained platforms, but still
+allowing extended capabilities for platforms that support the full standard
+library.
+
+The [`wasm-bindgen`] package defines a [`std` feature][wasm-bindgen-std] that
+is [enabled by default][wasm-bindgen-default]. At the top of the library, it
+[unconditionally enables the `no_std` attribute][wasm-bindgen-no_std]. This
+ensures that `std` and the [`std` prelude] are not automatically in scope.
+Then, in various places in the code ([example1][wasm-bindgen-cfg1],
+[example2][wasm-bindgen-cfg2]), it uses `#[cfg(feature = "std")]` attributes
+to conditionally enable extra functionality that requires `std`.
+
+[`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute
+[`wasm-bindgen`]: https://crates.io/crates/wasm-bindgen
+[`std` prelude]: ../../std/prelude/index.html
+[wasm-bindgen-std]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/Cargo.toml#L25
+[wasm-bindgen-default]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/Cargo.toml#L23
+[wasm-bindgen-no_std]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L8
+[wasm-bindgen-cfg1]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L270-L273
+[wasm-bindgen-cfg2]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L67-L75
+
+### Re-exporting dependency features
+
+It can be convenient to re-export the features from a dependency. This allows
+the user depending on the crate to control those features without needing to
+specify those dependencies directly. For example, [`regex`] [re-exports the
+features][regex-re-export] from the [`regex_syntax`][regex_syntax-features]
+package. Users of `regex` don't need to know about the `regex_syntax` package,
+but they can still access the features it contains.
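+
+The general pattern looks something like the following sketch (the feature
+names and version numbers are illustrative):
+
+```toml
+[dependencies]
+regex-syntax = { version = "0.6", default-features = false }
+
+[features]
+# Re-export the `unicode` feature of regex-syntax under the same name so
+# users of this crate can enable it without depending on regex-syntax
+# directly.
+unicode = ["regex-syntax/unicode"]
+```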
+
+[regex-re-export]: https://github.com/rust-lang/regex/blob/1.4.2/Cargo.toml#L65-L89
+[regex_syntax-features]: https://github.com/rust-lang/regex/blob/1.4.2/regex-syntax/Cargo.toml#L17-L32
+
+### Vendoring of C libraries
+
+Some packages provide bindings to common C libraries (sometimes referred to as
+["sys" crates][sys]). Sometimes these packages give you the choice to use the
+C library installed on the system, or to build it from source. For example,
+the [`openssl`] package has a [`vendored` feature][openssl-vendored] which
+enables the corresponding `vendored` feature of [`openssl-sys`]. The
+`openssl-sys` build script has some [conditional logic][openssl-sys-cfg] which
+causes it to build from a local copy of the OpenSSL source code instead of
+using the version from the system.
+
+The [`curl-sys`] package is another example where the [`static-curl`
+feature][curl-sys-static] causes it to build libcurl from source. Notice that
+it also has a [`force-system-lib-on-osx`][curl-sys-macos] feature which forces
+it [to use the system libcurl][curl-sys-macos-code], overriding the
+`static-curl` setting.
+
+[`openssl`]: https://crates.io/crates/openssl
+[`openssl-sys`]: https://crates.io/crates/openssl-sys
+[sys]: build-scripts.md#-sys-packages
+[openssl-vendored]: https://github.com/sfackler/rust-openssl/blob/openssl-v0.10.31/openssl/Cargo.toml#L19
+[build script]: build-scripts.md
+[openssl-sys-cfg]: https://github.com/sfackler/rust-openssl/blob/openssl-v0.10.31/openssl-sys/build/main.rs#L47-L54
+[`curl-sys`]: https://crates.io/crates/curl-sys
+[curl-sys-static]: https://github.com/alexcrichton/curl-rust/blob/0.4.34/curl-sys/Cargo.toml#L49
+[curl-sys-macos]: https://github.com/alexcrichton/curl-rust/blob/0.4.34/curl-sys/Cargo.toml#L52
+[curl-sys-macos-code]: https://github.com/alexcrichton/curl-rust/blob/0.4.34/curl-sys/build.rs#L15-L20
+
+### Feature precedence
+
+Some packages may have mutually-exclusive features. One option to handle this
+is to prefer one feature over another. The [`log`] package is an example. It
+has [several features][log-features] for choosing the maximum logging level at
+compile time, described [here][log-docs]. It uses [`cfg-if`] to [choose a
+precedence][log-cfg-if]. If multiple features are enabled, the higher "max"
+levels will be preferred over the lower levels.
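+
+A sketch of this approach (loosely modeled on `log`; the feature names here
+are illustrative) might look like:
+
+```rust,ignore
+use cfg_if::cfg_if;
+
+// Pick the most verbose level that was requested. When several of these
+// features are enabled at once, the higher "max" level wins.
+cfg_if! {
+    if #[cfg(feature = "max_level_trace")] {
+        const MAX_LEVEL: &str = "trace";
+    } else if #[cfg(feature = "max_level_debug")] {
+        const MAX_LEVEL: &str = "debug";
+    } else {
+        const MAX_LEVEL: &str = "info";
+    }
+}
+```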
+
+[`log`]: https://crates.io/crates/log
+[log-features]: https://github.com/rust-lang/log/blob/0.4.11/Cargo.toml#L29-L42
+[log-docs]: https://docs.rs/log/0.4.11/log/#compile-time-filters
+[log-cfg-if]: https://github.com/rust-lang/log/blob/0.4.11/src/lib.rs#L1422-L1448
+[`cfg-if`]: https://crates.io/crates/cfg-if
+
+### Proc-macro companion package
+
+Some packages have a proc-macro that is intimately tied to the main package.
+However, not all users will need the proc-macro. Making the proc-macro an
+optional dependency lets users conveniently choose whether or not it is
+included. This is helpful because the proc-macro version often must stay in
+sync with the parent package, and you don't want to force users to specify
+both dependencies and keep them in sync.
+
+An example is [`serde`] which has a [`derive`][serde-derive] feature which
+enables the [`serde_derive`] proc-macro. The `serde_derive` crate is very
+tightly tied to `serde`, so it uses an [equals version
+requirement][serde-equals] to ensure they stay in sync.
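+
+A simplified sketch of this pattern (the version numbers are illustrative)
+might look like:
+
+```toml
+[dependencies]
+# The proc-macro is optional and pinned to the exact same version so the
+# two packages always stay in sync.
+serde_derive = { version = "=1.0.118", optional = true }
+
+[features]
+derive = ["serde_derive"]
+```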
+
+[`serde`]: https://crates.io/crates/serde
+[`serde_derive`]: https://crates.io/crates/serde_derive
+[serde-derive]: https://github.com/serde-rs/serde/blob/v1.0.118/serde/Cargo.toml#L34-L35
+[serde-equals]: https://github.com/serde-rs/serde/blob/v1.0.118/serde/Cargo.toml#L17
+
+### Nightly-only features
+
+Some packages want to experiment with APIs or language features that are only
+available on the Rust [nightly channel]. However, they may not want to require
+their users to also use the nightly channel. An example is [`wasm-bindgen`]
+which has a [`nightly` feature][wasm-bindgen-nightly] which enables an
+[extended API][wasm-bindgen-unsize] that uses the [`Unsize`] marker trait that
+is only available on the nightly channel at the time of this writing.
+
+Note that at the root of the crate it uses [`cfg_attr` to enable the nightly
+feature][wasm-bindgen-cfg_attr]. Keep in mind that the [`feature` attribute]
+is unrelated to Cargo features, and is used to opt in to experimental language
+features.
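+
+A sketch of that kind of attribute (the exact language feature enabled here
+is illustrative) might look like:
+
+```rust,ignore
+// At the crate root: turn on the unstable language feature only when the
+// Cargo `nightly` feature is enabled.
+#![cfg_attr(feature = "nightly", feature(unsize))]
+```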
+
+The [`simd_support` feature][rand-simd_support] of the [`rand`] package is another example,
+which relies on a dependency that only builds on the nightly channel.
+
+[`wasm-bindgen`]: https://crates.io/crates/wasm-bindgen
+[nightly channel]: ../../book/appendix-07-nightly-rust.html
+[wasm-bindgen-nightly]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/Cargo.toml#L27
+[wasm-bindgen-unsize]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/closure.rs#L257-L269
+[`Unsize`]: ../../std/marker/trait.Unsize.html
+[wasm-bindgen-cfg_attr]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L11
+[`feature` attribute]: ../../unstable-book/index.html
+[`rand`]: https://crates.io/crates/rand
+[rand-simd_support]: https://github.com/rust-random/rand/blob/0.7.3/Cargo.toml#L40
+
+### Experimental features
+
+Some packages have new functionality that they may want to experiment with,
+without having to commit to the stability of those APIs. Such features are
+usually documented as experimental, meaning they may change or break in
+the future, even during a minor release. An example is the [`async-std`]
+package, which has an [`unstable` feature][async-std-unstable], which [gates
+new APIs][async-std-gate] that people can opt in to using, but may not be
+completely ready to be relied upon.
+
+[`async-std`]: https://crates.io/crates/async-std
+[async-std-unstable]: https://github.com/async-rs/async-std/blob/v1.8.0/Cargo.toml#L38-L42
+[async-std-gate]: https://github.com/async-rs/async-std/blob/v1.8.0/src/macros.rs#L46
diff --git a/src/tools/cargo/src/doc/src/reference/features.md b/src/tools/cargo/src/doc/src/reference/features.md
new file mode 100644
index 000000000..40eb32c41
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/features.md
@@ -0,0 +1,521 @@
+## Features
+
+Cargo "features" provide a mechanism to express [conditional compilation] and
+[optional dependencies](#optional-dependencies). A package defines a set of
+named features in the `[features]` table of `Cargo.toml`, and each feature can
+either be enabled or disabled. Features for the package being built can be
+enabled on the command-line with flags such as `--features`. Features for
+dependencies can be enabled in the dependency declaration in `Cargo.toml`.
+
+See also the [Features Examples] chapter for some examples of how features can
+be used.
+
+[conditional compilation]: ../../reference/conditional-compilation.md
+[Features Examples]: features-examples.md
+
+### The `[features]` section
+
+Features are defined in the `[features]` table in `Cargo.toml`. Each feature
+specifies an array of other features or optional dependencies that it enables.
+The following examples illustrate how features could be used for a 2D image
+processing library where support for different image formats can be optionally
+included:
+
+```toml
+[features]
+# Defines a feature named `webp` that does not enable any other features.
+webp = []
+```
+
+With this feature defined, [`cfg` expressions] can be used to conditionally
+include code to support the requested feature at compile time. For example,
+the `lib.rs` of the package could include this:
+
+```rust
+// This conditionally includes a module which implements WEBP support.
+#[cfg(feature = "webp")]
+pub mod webp;
+```
+
+Cargo sets features in the package using the `rustc` [`--cfg` flag], and code
+can test for their presence with the [`cfg` attribute] or the [`cfg` macro].
+
+Features can list other features to enable. For example, the ICO image format
+can contain BMP and PNG images, so when it is enabled, it should make sure
+those other features are enabled, too:
+
+```toml
+[features]
+bmp = []
+png = []
+ico = ["bmp", "png"]
+webp = []
+```
+
+Feature names may include characters from the [Unicode XID standard] (which
+includes most letters). They may additionally start with `_` or a digit `0`
+through `9`, and after the first character may also contain `-`, `+`, or `.`.
+
+> **Note**: [crates.io] imposes additional constraints on feature name syntax
+> that they must only be [ASCII alphanumeric] characters or `_`, `-`, or `+`.
+
+[crates.io]: https://crates.io/
+[Unicode XID standard]: https://unicode.org/reports/tr31/
+[ASCII alphanumeric]: ../../std/primitive.char.html#method.is_ascii_alphanumeric
+[`--cfg` flag]: ../../rustc/command-line-arguments.md#option-cfg
+[`cfg` expressions]: ../../reference/conditional-compilation.md
+[`cfg` attribute]: ../../reference/conditional-compilation.md#the-cfg-attribute
+[`cfg` macro]: ../../std/macro.cfg.html
+
+### The `default` feature
+
+By default, all features are disabled unless explicitly enabled. This can be
+changed by specifying the `default` feature:
+
+```toml
+[features]
+default = ["ico", "webp"]
+bmp = []
+png = []
+ico = ["bmp", "png"]
+webp = []
+```
+
+When the package is built, the `default` feature is enabled which in turn
+enables the listed features. This behavior can be changed by:
+
+* The `--no-default-features` [command-line
+ flag](#command-line-feature-options) disables the default features of the
+ package.
+* The `default-features = false` option can be specified in a [dependency
+ declaration](#dependency-features).
+
+> **Note**: Be careful about choosing the default feature set. The default
+> features are a convenience that make it easier to use a package without
+> forcing the user to carefully select which features to enable for common
+> use, but there are some drawbacks. Dependencies automatically enable default
+> features unless `default-features = false` is specified. This can make it
+> difficult to ensure that the default features are not enabled, especially
+> for a dependency that appears multiple times in the dependency graph. Every
+> package must ensure that `default-features = false` is specified to avoid
+> enabling them.
+>
+> Another issue is that it can be a [SemVer incompatible
+> change](#semver-compatibility) to remove a feature from the default set, so
+> you should be confident that you will keep those features.
+
+### Optional dependencies
+
+Dependencies can be marked "optional", which means they will not be compiled
+by default. For example, let's say that our 2D image processing library uses
+an external package to handle GIF images. This can be expressed like this:
+
+```toml
+[dependencies]
+gif = { version = "0.11.1", optional = true }
+```
+
+By default, this optional dependency implicitly defines a feature that looks
+like this:
+
+```toml
+[features]
+gif = ["dep:gif"]
+```
+
+This means that this dependency will only be included if the `gif`
+feature is enabled.
+The same `cfg(feature = "gif")` syntax can be used in the code, and the
+dependency can be enabled just like any feature such as `--features gif` (see
+[Command-line feature options](#command-line-feature-options) below).
+
+In some cases, you may not want to expose a feature that has the same name
+as the optional dependency.
+For example, perhaps the optional dependency is an internal detail, or you
+want to group multiple optional dependencies together, or you just want to use
+a better name.
+If you specify the optional dependency with the `dep:` prefix anywhere
+in the `[features]` table, that disables the implicit feature.
+
+> **Note**: The `dep:` syntax is only available starting with Rust 1.60.
+> Previous versions can only use the implicit feature name.
+
+For example, let's say in order to support the AVIF image format, our library
+needs two other dependencies to be enabled:
+
+```toml
+[dependencies]
+ravif = { version = "0.6.3", optional = true }
+rgb = { version = "0.8.25", optional = true }
+
+[features]
+avif = ["dep:ravif", "dep:rgb"]
+```
+
+In this example, the `avif` feature will enable the two listed dependencies.
+This also avoids creating the implicit `ravif` and `rgb` features, since we
+don't want users to enable those individually as they are internal details to
+our crate.
+
+> **Note**: Another way to optionally include a dependency is to use
+> [platform-specific dependencies]. Instead of using features, these are
+> conditional based on the target platform.
+
+[platform-specific dependencies]: specifying-dependencies.md#platform-specific-dependencies
+
+### Dependency features
+
+Features of dependencies can be enabled within the dependency declaration. The
+`features` key indicates which features to enable:
+
+```toml
+[dependencies]
+# Enables the `derive` feature of serde.
+serde = { version = "1.0.118", features = ["derive"] }
+```
+
+The [`default` features](#the-default-feature) can be disabled using
+`default-features = false`:
+
+```toml
+[dependencies]
+flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] }
+```
+
+> **Note**: This may not ensure the default features are disabled. If another
+> dependency includes `flate2` without specifying `default-features = false`,
+> then the default features will be enabled. See [feature
+> unification](#feature-unification) below for more details.
+
+Features of dependencies can also be enabled in the `[features]` table. The
+syntax is `"package-name/feature-name"`. For example:
+
+```toml
+[dependencies]
+jpeg-decoder = { version = "0.1.20", default-features = false }
+
+[features]
+# Enables parallel processing support by enabling the "rayon" feature of jpeg-decoder.
+parallel = ["jpeg-decoder/rayon"]
+```
+
+The `"package-name/feature-name"` syntax will also enable `package-name`
+if it is an optional dependency. Often this is not what you want.
+You can add a `?` as in `"package-name?/feature-name"` which will only enable
+the given feature if something else enables the optional dependency.
+
+> **Note**: The `?` syntax is only available starting with Rust 1.60.
+
+For example, let's say we have added some serialization support to our
+library, and it requires enabling a corresponding feature in some optional
+dependencies.
+That can be done like this:
+
+```toml
+[dependencies]
+serde = { version = "1.0.133", optional = true }
+rgb = { version = "0.8.25", optional = true }
+
+[features]
+serde = ["dep:serde", "rgb?/serde"]
+```
+
+In this example, enabling the `serde` feature will enable the serde
+dependency.
+It will also enable the `serde` feature for the `rgb` dependency, but only if
+something else has enabled the `rgb` dependency.
+
+### Command-line feature options
+
+The following command-line flags can be used to control which features are
+enabled (a few illustrative invocations are shown after the list):
+
+* `--features` _FEATURES_: Enables the listed features. Multiple features may
+ be separated with commas or spaces. If using spaces, be sure to use quotes
+ around all the features if running Cargo from a shell (such as `--features
+ "foo bar"`). If building multiple packages in a [workspace], the
+ `package-name/feature-name` syntax can be used to specify features for
+ specific workspace members.
+
+* `--all-features`: Activates all features of all packages selected on the
+ command-line.
+
+* `--no-default-features`: Does not activate the [`default`
+ feature](#the-default-feature) of the selected packages.
+
+[workspace]: workspaces.md
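+
+For example, some illustrative invocations (the package and feature names are
+hypothetical):
+
+```sh
+# Enable the `webp` and `ico` features of the current package.
+cargo build --features "webp ico"
+
+# Enable a feature of a specific workspace member.
+cargo build -p image-lib --features image-lib/webp
+```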
+
+### Feature unification
+
+Features are unique to the package that defines them. Enabling a feature on a
+package does not enable a feature of the same name on other packages.
+
+When a dependency is used by multiple packages, Cargo will use the union of
+all features enabled on that dependency when building it. This helps ensure
+that only a single copy of the dependency is used. See the [features section]
+of the resolver documentation for more details.
+
+For example, let's look at the [`winapi`] package which uses a [large
+number][winapi-features] of features. If your package depends on a package
+`foo` which enables the "fileapi" and "handleapi" features of `winapi`, and
+another dependency `bar` which enables the "std" and "winnt" features of
+`winapi`, then `winapi` will be built with all four of those features enabled.
+
+![winapi features example](../images/winapi-features.svg)
+
+[`winapi`]: https://crates.io/crates/winapi
+[winapi-features]: https://github.com/retep998/winapi-rs/blob/0.3.9/Cargo.toml#L25-L431
+
+A consequence of this is that features should be *additive*. That is, enabling
+a feature should not disable functionality, and it should usually be safe to
+enable any combination of features. A feature should not introduce a
+[SemVer-incompatible change](#semver-compatibility).
+
+For example, if you want to optionally support [`no_std`] environments, **do
+not** use a `no_std` feature. Instead, use a `std` feature that *enables*
+`std`:
+
+```rust
+#![no_std]
+
+#[cfg(feature = "std")]
+extern crate std;
+
+#[cfg(feature = "std")]
+pub fn function_that_requires_std() {
+ // ...
+}
+```
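+
+The corresponding `Cargo.toml` for such a package might look like this sketch:
+
+```toml
+[features]
+# The `std` feature is additive: enabling it turns extra functionality on
+# rather than taking anything away.
+default = ["std"]
+std = []
+```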
+
+[`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute
+[features section]: resolver.md#features
+
+#### Mutually exclusive features
+
+There are rare cases where features may be mutually incompatible with one
+another. This should be avoided if at all possible, because it requires
+coordinating all uses of the package in the dependency graph to cooperate to
+avoid enabling them together. If it is not possible, consider adding a compile
+error to detect this scenario. For example:
+
+```rust,ignore
+#[cfg(all(feature = "foo", feature = "bar"))]
+compile_error!("feature \"foo\" and feature \"bar\" cannot be enabled at the same time");
+```
+
+Instead of using mutually exclusive features, consider some other options:
+
+* Split the functionality into separate packages.
+* When there is a conflict, [choose one feature over
+ another][feature-precedence]. The [`cfg-if`] package can help with writing
+ more complex `cfg` expressions.
+* Architect the code to allow the features to be enabled concurrently, and use
+ runtime options to control which is used. For example, use a config file,
+ command-line argument, or environment variable to choose which behavior to
+ enable.
+
+[`cfg-if`]: https://crates.io/crates/cfg-if
+[feature-precedence]: features-examples.md#feature-precedence
+
+#### Inspecting resolved features
+
+In complex dependency graphs, it can sometimes be difficult to understand how
+different features get enabled on various packages. The [`cargo tree`] command
+offers several options to help inspect and visualize which features are
+enabled. Some options to try:
+
+* `cargo tree -e features`: This will show features in the dependency graph.
+ Each feature will appear showing which package enabled it.
+* `cargo tree -f "{p} {f}"`: This is a more compact view that shows a
+ comma-separated list of features enabled on each package.
+* `cargo tree -e features -i foo`: This will invert the tree, showing how
+ features flow into the given package "foo". This can be useful because
+ viewing the entire graph can be quite large and overwhelming. Use this when
+ you are trying to figure out which features are enabled on a specific
+ package and why. See the example at the bottom of the [`cargo tree`] page on
+ how to read this.
+
+[`cargo tree`]: ../commands/cargo-tree.md
+
+### Feature resolver version 2
+
+A different feature resolver can be specified with the `resolver` field in
+`Cargo.toml`, like this:
+
+```toml
+[package]
+name = "my-package"
+version = "1.0.0"
+resolver = "2"
+```
+
+See the [resolver versions] section for more detail on specifying resolver
+versions.
+
+The version `"2"` resolver avoids unifying features in a few situations where
+that unification can be unwanted. The exact situations are described in the
+[resolver chapter][resolver-v2], but in short, it avoids unifying in these
+situations:
+
+* Features enabled on [platform-specific dependencies] for targets not
+ currently being built are ignored.
+* [Build-dependencies] and proc-macros do not share features with normal
+ dependencies.
+* [Dev-dependencies] do not activate features unless building a target that
+ needs them (like tests or examples).
+
+Avoiding the unification is necessary for some situations. For example, if a
+build-dependency enables a `std` feature, and the same dependency is used as a
+normal dependency for a `no_std` environment, enabling `std` would break the
+build.
+
+However, one drawback is that this can increase build times because the
+dependency is built multiple times (each with different features). When using
+the version `"2"` resolver, it is recommended to check for dependencies that
+are built multiple times to reduce overall build time. If it is not *required*
+to build those duplicated packages with separate features, consider adding
+features to the `features` list in the [dependency
+declaration](#dependency-features) so that the duplicates end up with the same
+features (and thus Cargo will build them only once).
+
+You can detect these duplicate dependencies with the
+[`cargo tree --duplicates`][`cargo tree`] command. It will show which packages
+are built multiple times; look for any entries listed with the same version.
+See [Inspecting resolved features](#inspecting-resolved-features) for more on
+inspecting the resolved features. For build dependencies, this is not
+necessary if you are cross-compiling with the `--target` flag, because build
+dependencies are always built separately from normal dependencies in that
+scenario.
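+
+As a sketch of the advice above about aligning features across duplicates
+(the package and feature names are hypothetical):
+
+```toml
+[dependencies]
+# Without the explicit feature, this copy might be built with a different
+# feature set than the build-dependency copy, causing two separate builds.
+foo = { version = "1.0", features = ["std"] }
+
+[build-dependencies]
+foo = { version = "1.0", features = ["std"] }
+```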
+
+#### Resolver version 2 command-line flags
+
+The `resolver = "2"` setting also changes the behavior of the `--features` and
+`--no-default-features` [command-line options](#command-line-feature-options).
+
+With version `"1"`, you can only enable features for the package in the
+current working directory. For example, in a workspace with packages `foo` and
+`bar`, if you are in the directory for package `foo` and run the command
+`cargo build -p bar --features bar-feat`, it will fail because the
+`--features` flag can only enable features on `foo`.
+
+With `resolver = "2"`, the features flags allow enabling features for any of
+the packages selected on the command-line with `-p` and `--workspace` flags.
+For example:
+
+```sh
+# This command is allowed with resolver = "2", regardless of which directory
+# you are in.
+cargo build -p foo -p bar --features foo-feat,bar-feat
+
+# This explicit equivalent works with any resolver version:
+cargo build -p foo -p bar --features foo/foo-feat,bar/bar-feat
+```
+
+Additionally, with `resolver = "1"`, the `--no-default-features` flag only
+disables the default feature for the package in the current directory. With
+version "2", it will disable the default features for all workspace members.
+
+[resolver versions]: resolver.md#resolver-versions
+[build-dependencies]: specifying-dependencies.md#build-dependencies
+[dev-dependencies]: specifying-dependencies.md#development-dependencies
+[resolver-v2]: resolver.md#feature-resolver-version-2
+
+### Build scripts
+
+[Build scripts] can detect which features are enabled on the package by
+inspecting the `CARGO_FEATURE_<name>` environment variable, where `<name>` is
+the feature name converted to uppercase and `-` converted to `_`.
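+
+For example, a build script might check for a feature named `webp` like this
+sketch (the feature name and emitted cfg value are hypothetical):
+
+```rust,ignore
+// build.rs
+fn main() {
+    // A feature named `webp` is exposed as the CARGO_FEATURE_WEBP variable.
+    if std::env::var_os("CARGO_FEATURE_WEBP").is_some() {
+        // Configure the native build for WEBP support here.
+        println!("cargo:rustc-cfg=has_webp_backend");
+    }
+}
+```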
+
+[build scripts]: build-scripts.md
+
+### Required features
+
+The [`required-features` field] can be used to disable specific [Cargo
+targets] if a feature is not enabled. See the linked documentation for more
+details.
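+
+For example, a sketch that only builds a binary when the `webp` feature is
+enabled (the target name and path are hypothetical):
+
+```toml
+[[bin]]
+name = "webp-tool"
+path = "src/bin/webp-tool.rs"
+required-features = ["webp"]
+```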
+
+[`required-features` field]: cargo-targets.md#the-required-features-field
+[Cargo targets]: cargo-targets.md
+
+### SemVer compatibility
+
+Enabling a feature should not introduce a SemVer-incompatible change. For
+example, the feature shouldn't change an existing API in a way that could
+break existing uses. More details about what changes are compatible can be
+found in the [SemVer Compatibility chapter](semver.md).
+
+Care should be taken when adding and removing feature definitions and optional
+dependencies, as these can sometimes be backwards-incompatible changes. More
+details can be found in the [Cargo section](semver.md#cargo) of the SemVer
+Compatibility chapter. In short, follow these rules:
+
+* The following is usually safe to do in a minor release:
+ * Add a [new feature][cargo-feature-add] or [optional dependency][cargo-dep-add].
+ * [Change the features used on a dependency][cargo-change-dep-feature].
+* The following should usually **not** be done in a minor release:
+ * [Remove a feature][cargo-feature-remove] or [optional dependency][cargo-remove-opt-dep].
+ * [Moving existing public code behind a feature][item-remove].
+ * [Remove a feature from a feature list][cargo-feature-remove-another].
+
+See the links for caveats and examples.
+
+[cargo-change-dep-feature]: semver.md#cargo-change-dep-feature
+[cargo-dep-add]: semver.md#cargo-dep-add
+[cargo-feature-add]: semver.md#cargo-feature-add
+[item-remove]: semver.md#item-remove
+[cargo-feature-remove]: semver.md#cargo-feature-remove
+[cargo-remove-opt-dep]: semver.md#cargo-remove-opt-dep
+[cargo-feature-remove-another]: semver.md#cargo-feature-remove-another
+
+### Feature documentation and discovery
+
+You are encouraged to document which features are available in your package.
+This can be done by adding [doc comments] at the top of `lib.rs`. As an
+example, see the [regex crate source], which when rendered can be viewed on
+[docs.rs][regex-docs-rs]. If you have other documentation, such as a user
+guide, consider adding the documentation there (for example, see [serde.rs]).
+If you have a binary project, consider documenting the features in the README
+or other documentation for the project (for example, see [sccache]).
+
+Clearly documenting the features can set expectations about features that are
+considered "unstable" or otherwise shouldn't be used. For example, if there is
+an optional dependency, but you don't want users to explicitly list that
+optional dependency as a feature, exclude it from the documented list.
+
+Documentation published on [docs.rs] can use metadata in `Cargo.toml` to
+control which features are enabled when the documentation is built. See
+[docs.rs metadata documentation] for more details.
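+
+For example, one commonly used option (see the [docs.rs metadata
+documentation] for the full list) asks [docs.rs] to build the documentation
+with every feature enabled:
+
+```toml
+[package.metadata.docs.rs]
+all-features = true
+```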
+
+> **Note**: Rustdoc has experimental support for annotating the documentation
+> to indicate which features are required to use certain APIs. See the
+> [`doc_cfg`] documentation for more details. An example is the [`syn`
+> documentation], where you can see colored boxes which note which features
+> are required to use it.
+
+[docs.rs metadata documentation]: https://docs.rs/about/metadata
+[docs.rs]: https://docs.rs/
+[serde.rs]: https://serde.rs/feature-flags.html
+[doc comments]: ../../rustdoc/how-to-write-documentation.html
+[regex crate source]: https://github.com/rust-lang/regex/blob/1.4.2/src/lib.rs#L488-L583
+[regex-docs-rs]: https://docs.rs/regex/1.4.2/regex/#crate-features
+[sccache]: https://github.com/mozilla/sccache/blob/0.2.13/README.md#build-requirements
+[`doc_cfg`]: ../../unstable-book/language-features/doc-cfg.html
+[`syn` documentation]: https://docs.rs/syn/1.0.54/syn/#modules
+
+#### Discovering features
+
+When features are documented in the library API, this can make it easier for
+your users to discover which features are available and what they do. If the
+feature documentation for a package isn't readily available, you can look at
+the `Cargo.toml` file, but sometimes it can be hard to track it down. The
+crate page on [crates.io] has a link to the source repository if available.
+Tools like [`cargo vendor`] or [cargo-clone-crate] can be used to download the
+source and inspect it.
+
+[`cargo vendor`]: ../commands/cargo-vendor.md
+[cargo-clone-crate]: https://crates.io/crates/cargo-clone-crate
+
+### Feature combinations
+
+Because features are a form of conditional compilation, they require an exponential number of configurations and test cases to be 100% covered. By default, tests, docs, and other tooling such as [Clippy](https://github.com/rust-lang/rust-clippy) will only run with the default set of features.
+
+We encourage you to consider your strategy and tooling with regard to different feature combinations. Every project will have different requirements balancing time, resources, and the cost-benefit of covering specific scenarios. Common configurations may be with/without default features, specific combinations of features, or all combinations of features.
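+
+For example, a CI script might check a few representative combinations
+(illustrative only):
+
+```sh
+cargo test --no-default-features
+cargo test                  # default features
+cargo test --all-features
+```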
diff --git a/src/tools/cargo/src/doc/src/reference/future-incompat-report.md b/src/tools/cargo/src/doc/src/reference/future-incompat-report.md
new file mode 100644
index 000000000..b72f11757
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/future-incompat-report.md
@@ -0,0 +1,37 @@
+## Future incompat report
+
+Cargo checks for future-incompatible warnings in all dependencies. These are warnings for
+changes that may become hard errors in the future, causing the dependency to
+stop building in a future version of rustc. If any warnings are found, a small
+notice is displayed indicating that the warnings were found, and provides
+instructions on how to display a full report.
+
+For example, you may see something like this at the end of a build:
+
+```text
+warning: the following packages contain code that will be rejected by a future
+ version of Rust: rental v0.5.5
+note: to see what the problems were, use the option `--future-incompat-report`,
+ or run `cargo report future-incompatibilities --id 1`
+```
+
+A full report can be displayed with the `cargo report future-incompatibilities
+--id ID` command, or by running the build again with
+the `--future-incompat-report` flag. The developer should then update their
+dependencies to a version where the issue is fixed, or work with the
+developers of the dependencies to help resolve the issue.
+
+### Configuration
+
+This feature can be configured through a [`[future-incompat-report]`][config]
+section in `.cargo/config.toml`. Currently, the supported options are:
+
+```toml
+[future-incompat-report]
+frequency = "always"
+```
+
+The supported values for the frequency are `"always"` and `"never"`, which control
+whether or not a message is printed out at the end of `cargo build` / `cargo check`.
+
+[config]: config.md#future-incompat-report
diff --git a/src/tools/cargo/src/doc/src/reference/index.md b/src/tools/cargo/src/doc/src/reference/index.md
new file mode 100644
index 000000000..b931306c2
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/index.md
@@ -0,0 +1,26 @@
+## Cargo Reference
+
+The reference covers the details of various areas of Cargo.
+
+* [Specifying Dependencies](specifying-dependencies.md)
+ * [Overriding Dependencies](overriding-dependencies.md)
+* [The Manifest Format](manifest.md)
+ * [Cargo Targets](cargo-targets.md)
+* [Workspaces](workspaces.md)
+* [Features](features.md)
+ * [Features Examples](features-examples.md)
+* [Profiles](profiles.md)
+* [Configuration](config.md)
+* [Environment Variables](environment-variables.md)
+* [Build Scripts](build-scripts.md)
+ * [Build Script Examples](build-script-examples.md)
+* [Publishing on crates.io](publishing.md)
+* [Package ID Specifications](pkgid-spec.md)
+* [Source Replacement](source-replacement.md)
+* [External Tools](external-tools.md)
+* [Registries](registries.md)
+* [Dependency Resolution](resolver.md)
+* [SemVer Compatibility](semver.md)
+* [Future incompat report](future-incompat-report.md)
+* [Reporting build timings](timings.md)
+* [Unstable Features](unstable.md)
diff --git a/src/tools/cargo/src/doc/src/reference/manifest.md b/src/tools/cargo/src/doc/src/reference/manifest.md
new file mode 100644
index 000000000..49d5b96c6
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/manifest.md
@@ -0,0 +1,632 @@
+## The Manifest Format
+
+The `Cargo.toml` file for each package is called its *manifest*. It is written
+in the [TOML] format. It contains metadata that is needed to compile the
+package. Check out the `cargo locate-project` section for more detail on how
+cargo finds the manifest file.
+
+Every manifest file consists of the following sections:
+
+* [`cargo-features`](unstable.md) --- Unstable, nightly-only features.
+* [`[package]`](#the-package-section) --- Defines a package.
+ * [`name`](#the-name-field) --- The name of the package.
+ * [`version`](#the-version-field) --- The version of the package.
+ * [`authors`](#the-authors-field) --- The authors of the package.
+ * [`edition`](#the-edition-field) --- The Rust edition.
+ * [`rust-version`](#the-rust-version-field) --- The minimal supported Rust version.
+ * [`description`](#the-description-field) --- A description of the package.
+ * [`documentation`](#the-documentation-field) --- URL of the package documentation.
+ * [`readme`](#the-readme-field) --- Path to the package's README file.
+ * [`homepage`](#the-homepage-field) --- URL of the package homepage.
+ * [`repository`](#the-repository-field) --- URL of the package source repository.
+ * [`license`](#the-license-and-license-file-fields) --- The package license.
+ * [`license-file`](#the-license-and-license-file-fields) --- Path to the text of the license.
+ * [`keywords`](#the-keywords-field) --- Keywords for the package.
+ * [`categories`](#the-categories-field) --- Categories of the package.
+ * [`workspace`](#the-workspace-field) --- Path to the workspace for the package.
+ * [`build`](#the-build-field) --- Path to the package build script.
+ * [`links`](#the-links-field) --- Name of the native library the package links with.
+ * [`exclude`](#the-exclude-and-include-fields) --- Files to exclude when publishing.
+ * [`include`](#the-exclude-and-include-fields) --- Files to include when publishing.
+ * [`publish`](#the-publish-field) --- Can be used to prevent publishing the package.
+ * [`metadata`](#the-metadata-table) --- Extra settings for external tools.
+ * [`default-run`](#the-default-run-field) --- The default binary to run by [`cargo run`].
+ * [`autobins`](cargo-targets.md#target-auto-discovery) --- Disables binary auto discovery.
+ * [`autoexamples`](cargo-targets.md#target-auto-discovery) --- Disables example auto discovery.
+ * [`autotests`](cargo-targets.md#target-auto-discovery) --- Disables test auto discovery.
+ * [`autobenches`](cargo-targets.md#target-auto-discovery) --- Disables bench auto discovery.
+ * [`resolver`](resolver.md#resolver-versions) --- Sets the dependency resolver to use.
+* Target tables: (see [configuration](cargo-targets.md#configuring-a-target) for settings)
+ * [`[lib]`](cargo-targets.md#library) --- Library target settings.
+ * [`[[bin]]`](cargo-targets.md#binaries) --- Binary target settings.
+ * [`[[example]]`](cargo-targets.md#examples) --- Example target settings.
+ * [`[[test]]`](cargo-targets.md#tests) --- Test target settings.
+ * [`[[bench]]`](cargo-targets.md#benchmarks) --- Benchmark target settings.
+* Dependency tables:
+ * [`[dependencies]`](specifying-dependencies.md) --- Package library dependencies.
+ * [`[dev-dependencies]`](specifying-dependencies.md#development-dependencies) --- Dependencies for examples, tests, and benchmarks.
+ * [`[build-dependencies]`](specifying-dependencies.md#build-dependencies) --- Dependencies for build scripts.
+ * [`[target]`](specifying-dependencies.md#platform-specific-dependencies) --- Platform-specific dependencies.
+* [`[badges]`](#the-badges-section) --- Badges to display on a registry.
+* [`[features]`](features.md) --- Conditional compilation features.
+* [`[patch]`](overriding-dependencies.md#the-patch-section) --- Override dependencies.
+* [`[replace]`](overriding-dependencies.md#the-replace-section) --- Override dependencies (deprecated).
+* [`[profile]`](profiles.md) --- Compiler settings and optimizations.
+* [`[workspace]`](workspaces.md) --- The workspace definition.
+
+<a id="package-metadata"></a>
+### The `[package]` section
+
+The first section in a `Cargo.toml` is `[package]`.
+
+```toml
+[package]
+name = "hello_world" # the name of the package
+version = "0.1.0" # the current version, obeying semver
+authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
+```
+
+The only fields required by Cargo are [`name`](#the-name-field) and
+[`version`](#the-version-field). If publishing to a registry, the registry may
+require additional fields. See the notes below and [the publishing
+chapter][publishing] for requirements for publishing to [crates.io].
+
+#### The `name` field
+
+The package name is an identifier used to refer to the package. It is used
+when listed as a dependency in another package, and as the default name of
+inferred lib and bin targets.
+
+The name must use only [alphanumeric] characters or `-` or `_`, and cannot be empty.
+
+Note that [`cargo new`] and [`cargo init`] impose some additional restrictions on
+the package name, such as enforcing that it is a valid Rust identifier and not
+a keyword. [crates.io] imposes even more restrictions, such as:
+
+- Only ASCII characters are allowed.
+- Do not use reserved names.
+- Do not use special Windows names such as "nul".
+- Use a maximum of 64 characters.
+
+[alphanumeric]: ../../std/primitive.char.html#method.is_alphanumeric
+
+#### The `version` field
+
+Cargo bakes in the concept of [Semantic
+Versioning](https://semver.org/), so make sure you follow some basic rules:
+
+* Before you reach 1.0.0, anything goes, but if you make breaking changes,
+ increment the minor version. In Rust, breaking changes include adding fields to
+ structs or variants to enums.
+* After 1.0.0, only make breaking changes when you increment the major version.
+ Don’t break the build.
+* After 1.0.0, don’t add any new public API (no new `pub` anything) in patch-level
+ versions. Always increment the minor version if you add any new `pub` structs,
+ traits, fields, types, functions, methods or anything else.
+* Use version numbers with three numeric parts such as 1.0.0 rather than 1.0.
+
+See the [Resolver] chapter for more information on how Cargo uses versions to
+resolve dependencies, and for guidelines on setting your own version. See the
+[SemVer compatibility] chapter for more details on exactly what constitutes a
+breaking change.
+
+[Resolver]: resolver.md
+[SemVer compatibility]: semver.md
+
+<a id="the-authors-field-optional"></a>
+#### The `authors` field
+
+The optional `authors` field lists in an array the people or organizations that are considered
+the "authors" of the package. The exact meaning is open to interpretation --- it
+may list the original or primary authors, current maintainers, or owners of the
+package. An optional email address may be included within angled brackets at
+the end of each author entry.
+
+```toml
+[package]
+# ...
+authors = ["Graydon Hoare", "Fnu Lnu <no-reply@rust-lang.org>"]
+```
+
+This field is only surfaced in package metadata and in the `CARGO_PKG_AUTHORS`
+environment variable within `build.rs`. It is not displayed in the [crates.io]
+user interface.
+
+> **Warning**: Package manifests cannot be changed once published, so this
+> field cannot be changed or removed in already-published versions of a
+> package.
+
+<a id="the-edition-field-optional"></a>
+#### The `edition` field
+
+The `edition` key is an optional key that affects which [Rust Edition] your package
+is compiled with. Setting the `edition` key in `[package]` will affect all
+targets/crates in the package, including test suites, benchmarks, binaries,
+examples, etc.
+
+```toml
+[package]
+# ...
+edition = '2021'
+```
+
+Most manifests have the `edition` field filled in automatically by [`cargo new`]
+with the latest stable edition; `cargo new` currently defaults to the 2021
+edition.
+
+If the `edition` field is not present in `Cargo.toml`, then the 2015 edition is
+assumed for backwards compatibility. Note that all manifests
+created with [`cargo new`] will not use this historical fallback because they
+will have `edition` explicitly specified to a newer value.
+
+#### The `rust-version` field
+
+The `rust-version` field is an optional key that tells cargo what version of the
+Rust language and compiler your package can be compiled with. If the currently
+selected version of the Rust compiler is older than the stated version, cargo
+will exit with an error, telling the user what version is required.
+
+The first version of Cargo that supports this field was released with Rust 1.56.0.
+In older releases, the field will be ignored, and Cargo will display a warning.
+
+```toml
+[package]
+# ...
+rust-version = "1.56"
+```
+
+The Rust version must be a bare version number with two or three components; it
+cannot include semver operators or pre-release identifiers. Compiler pre-release
+identifiers such as `-nightly` will be ignored while checking the Rust version.
+The `rust-version` must be equal to or newer than the version that first
+introduced the configured `edition`.
+
+The `rust-version` may be ignored using the `--ignore-rust-version` option.
+
+Setting the `rust-version` key in `[package]` will affect all targets/crates in
+the package, including test suites, benchmarks, binaries, examples, etc.
+
+#### The `description` field
+
+The description is a short blurb about the package. [crates.io] will display
+this with your package. This should be plain text (not Markdown).
+
+```toml
+[package]
+# ...
+description = "A short description of my package"
+```
+
+> **Note**: [crates.io] requires the `description` to be set.
+
+<a id="the-documentation-field-optional"></a>
+#### The `documentation` field
+
+The `documentation` field specifies a URL to a website hosting the crate's
+documentation. If no URL is specified in the manifest file, [crates.io] will
+automatically link your crate to the corresponding [docs.rs] page.
+
+```toml
+[package]
+# ...
+documentation = "https://docs.rs/bitflags"
+```
+
+#### The `readme` field
+
+The `readme` field should be the path to a file in the package root (relative
+to this `Cargo.toml`) that contains general information about the package.
+This file will be transferred to the registry when you publish. [crates.io]
+will interpret it as Markdown and render it on the crate's page.
+
+```toml
+[package]
+# ...
+readme = "README.md"
+```
+
+If no value is specified for this field, and a file named `README.md`,
+`README.txt` or `README` exists in the package root, then the name of that
+file will be used. You can suppress this behavior by setting this field to
+`false`. If the field is set to `true`, a default value of `README.md` will
+be assumed.
+
+#### The `homepage` field
+
+The `homepage` field should be a URL to a site that is the home page for your
+package.
+
+```toml
+[package]
+# ...
+homepage = "https://serde.rs/"
+```
+
+#### The `repository` field
+
+The `repository` field should be a URL to the source repository for your
+package.
+
+```toml
+[package]
+# ...
+repository = "https://github.com/rust-lang/cargo/"
+```
+
+#### The `license` and `license-file` fields
+
+The `license` field contains the name of the software license that the package
+is released under. The `license-file` field contains the path to a file
+containing the text of the license (relative to this `Cargo.toml`).
+
+[crates.io] interprets the `license` field as an [SPDX 2.1 license
+expression][spdx-2.1-license-expressions]. The name must be a known license
+from the [SPDX license list 3.11][spdx-license-list-3.11]. Parentheses are not
+currently supported. See the [SPDX site] for more information.
+
+SPDX license expressions support AND and OR operators to combine multiple
+licenses.[^slash]
+
+```toml
+[package]
+# ...
+license = "MIT OR Apache-2.0"
+```
+
+Using `OR` indicates the user may choose either license. Using `AND` indicates
+the user must comply with both licenses simultaneously. The `WITH` operator
+indicates a license with a special exception. Some examples:
+
+* `MIT OR Apache-2.0`
+* `LGPL-2.1-only AND MIT AND BSD-2-Clause`
+* `GPL-2.0-or-later WITH Bison-exception-2.2`
+
+If a package is using a nonstandard license, then the `license-file` field may
+be specified in lieu of the `license` field.
+
+```toml
+[package]
+# ...
+license-file = "LICENSE.txt"
+```
+
+> **Note**: [crates.io] requires either `license` or `license-file` to be set.
+
+[^slash]: Previously multiple licenses could be separated with a `/`, but that
+usage is deprecated.
+
+#### The `keywords` field
+
+The `keywords` field is an array of strings that describe this package. This
+can help when searching for the package on a registry, and you may choose any
+words that would help someone find this crate.
+
+```toml
+[package]
+# ...
+keywords = ["gamedev", "graphics"]
+```
+
+> **Note**: [crates.io] has a maximum of 5 keywords. Each keyword must be
+> ASCII text, start with a letter, and only contain letters, numbers, `_` or
+> `-`, and have at most 20 characters.
+
+#### The `categories` field
+
+The `categories` field is an array of strings of the categories this package
+belongs to.
+
+```toml
+categories = ["command-line-utilities", "development-tools::cargo-plugins"]
+```
+
+> **Note**: [crates.io] has a maximum of 5 categories. Each category should
+> match one of the strings available at <https://crates.io/category_slugs>, and
+> must match exactly.
+
+<a id="the-workspace--field-optional"></a>
+#### The `workspace` field
+
+The `workspace` field can be used to configure the workspace that this package
+will be a member of. If not specified, this will be inferred as the first
+`Cargo.toml` with `[workspace]` upwards in the filesystem. Setting this is
+useful if the member is not inside a subdirectory of the workspace root.
+
+```toml
+[package]
+# ...
+workspace = "path/to/workspace/root"
+```
+
+This field cannot be specified if the manifest already has a `[workspace]`
+table defined. That is, a crate cannot both be a root crate in a workspace
+(contain `[workspace]`) and also be a member crate of another workspace
+(contain `package.workspace`).
+
+For more information, see the [workspaces chapter](workspaces.md).
+
+<a id="package-build"></a>
+<a id="the-build-field-optional"></a>
+#### The `build` field
+
+The `build` field specifies a file in the package root which is a [build
+script] for building native code. More information can be found in the [build
+script guide][build script].
+
+[build script]: build-scripts.md
+
+```toml
+[package]
+# ...
+build = "build.rs"
+```
+
+The default is `"build.rs"`, which loads the script from a file named
+`build.rs` in the root of the package. Use `build = "custom_build_name.rs"` to
+specify a path to a different file or `build = false` to disable automatic
+detection of the build script.
+
+<a id="the-links-field-optional"></a>
+#### The `links` field
+
+The `links` field specifies the name of a native library that is being linked
+to. More information can be found in the [`links`][links] section of the build
+script guide.
+
+[links]: build-scripts.md#the-links-manifest-key
+
+For example, a crate that links a native library called "git2" (e.g. `libgit2.a`
+on Linux) may specify:
+
+```toml
+[package]
+# ...
+links = "git2"
+```
+
+<a id="the-exclude-and-include-fields-optional"></a>
+#### The `exclude` and `include` fields
+
+The `exclude` and `include` fields can be used to explicitly specify which
+files are included when packaging a project to be [published][publishing],
+and certain kinds of change tracking (described below).
+The patterns specified in the `exclude` field identify a set of files that are
+not included, and the patterns in `include` specify files that are explicitly
+included.
+You may run [`cargo package --list`][`cargo package`] to verify which files will
+be included in the package.
+
+```toml
+[package]
+# ...
+exclude = ["/ci", "images/", ".*"]
+```
+
+```toml
+[package]
+# ...
+include = ["/src", "COPYRIGHT", "/examples", "!/examples/big_example"]
+```
+
+The default if neither field is specified is to include all files from the
+root of the package, except for the exclusions listed below.
+
+If `include` is not specified, then the following files will be excluded:
+
+* If the package is not in a git repository, all "hidden" files starting with
+ a dot will be skipped.
+* If the package is in a git repository, any files that are ignored by the
+ [gitignore] rules of the repository and global git configuration will be
+ skipped.
+
+Regardless of whether `exclude` or `include` is specified, the following files
+are always excluded:
+
+* Any sub-packages will be skipped (any subdirectory that contains a
+ `Cargo.toml` file).
+* A directory named `target` in the root of the package will be skipped.
+
+The following files are always included:
+
+* The `Cargo.toml` file of the package itself is always included; it does not
+ need to be listed in `include`.
+* A minimized `Cargo.lock` is automatically included if the package contains a
+ binary or example target, see [`cargo package`] for more information.
+* If a [`license-file`](#the-license-and-license-file-fields) is specified, it
+ is always included.
+
+The options are mutually exclusive; setting `include` will override an
+`exclude`. If you need to have exclusions to a set of `include` files, use the
+`!` operator described below.
+
+The patterns should be [gitignore]-style patterns. Briefly:
+
+- `foo` matches any file or directory with the name `foo` anywhere in the
+ package. This is equivalent to the pattern `**/foo`.
+- `/foo` matches any file or directory with the name `foo` only in the root of
+ the package.
+- `foo/` matches any *directory* with the name `foo` anywhere in the package.
+- Common glob patterns like `*`, `?`, and `[]` are supported:
+ - `*` matches zero or more characters except `/`. For example, `*.html`
+ matches any file or directory with the `.html` extension anywhere in the
+ package.
+ - `?` matches any character except `/`. For example, `foo?` matches `food`,
+ but not `foo`.
+ - `[]` allows for matching a range of characters. For example, `[ab]`
+ matches either `a` or `b`. `[a-z]` matches letters a through z.
+- `**/` prefix matches in any directory. For example, `**/foo/bar` matches the
+ file or directory `bar` anywhere that is directly under directory `foo`.
+- `/**` suffix matches everything inside. For example, `foo/**` matches all
+ files inside directory `foo`, including all files in subdirectories below
+ `foo`.
+- `/**/` matches zero or more directories. For example, `a/**/b` matches
+ `a/b`, `a/x/b`, `a/x/y/b`, and so on.
+- `!` prefix negates a pattern. For example, a pattern of `src/*.rs` and
+ `!foo.rs` would match all files with the `.rs` extension inside the `src`
+ directory, except for any file named `foo.rs`.
+
+The include/exclude list is also used for change tracking in some situations.
+For targets built with `rustdoc`, it is used to determine the list of files to
+track when deciding whether the target should be rebuilt. If the package has a
+[build script] that does not emit any `rerun-if-*` directives, then the
+include/exclude list is used for tracking if the build script should be re-run
+if any of those files change.
+
+[gitignore]: https://git-scm.com/docs/gitignore
+
+<a id="the-publish--field-optional"></a>
+#### The `publish` field
+
+The `publish` field can be used to prevent a package from being published to a
+package registry (like *crates.io*) by mistake, for instance to keep a package
+private in a company.
+
+```toml
+[package]
+# ...
+publish = false
+```
+
+The value may also be an array of strings which are registry names that are
+allowed to be published to.
+
+```toml
+[package]
+# ...
+publish = ["some-registry-name"]
+```
+
+If the `publish` array contains a single registry, the `cargo publish` command
+will use it when the `--registry` flag is not specified.
+
+<a id="the-metadata-table-optional"></a>
+#### The `metadata` table
+
+Cargo by default will warn about unused keys in `Cargo.toml` to assist in
+detecting typos and such. The `package.metadata` table, however, is completely
+ignored by Cargo and will not be warned about. This section can be used for
+tools which would like to store package configuration in `Cargo.toml`. For
+example:
+
+```toml
+[package]
+name = "..."
+# ...
+
+# Metadata used when generating an Android APK, for example.
+[package.metadata.android]
+package-name = "my-awesome-android-app"
+assets = "path/to/static"
+```
+
+There is a similar table at the workspace level at
+[`workspace.metadata`][workspace-metadata]. While cargo does not specify a
+format for the content of either of these tables, it is suggested that
+external tools may wish to use them in a consistent fashion, such as referring
+to the data in `workspace.metadata` if data is missing from `package.metadata`,
+if that makes sense for the tool in question.
+
+[workspace-metadata]: workspaces.md#the-metadata-table
+
+#### The `default-run` field
+
+The `default-run` field in the `[package]` section of the manifest can be used
+to specify a default binary picked by [`cargo run`]. For example, when there is
+both `src/bin/a.rs` and `src/bin/b.rs`:
+
+```toml
+[package]
+default-run = "a"
+```
+
+### The `[badges]` section
+
+The `[badges]` section is for specifying status badges that can be displayed
+on a registry website when the package is published.
+
+> Note: [crates.io] previously displayed badges next to a crate on its
+> website, but that functionality has been removed. Packages should place
+> badges in their README file, which will be displayed on [crates.io] (see
+> [the `readme` field](#the-readme-field)).
+
+```toml
+[badges]
+# The `maintenance` table indicates the status of the maintenance of
+# the crate. This may be used by a registry, but is currently not
+# used by crates.io. See https://github.com/rust-lang/crates.io/issues/2437
+# and https://github.com/rust-lang/crates.io/issues/2438 for more details.
+#
+# The `status` field is required. Available options are:
+# - `actively-developed`: New features are being added and bugs are being fixed.
+# - `passively-maintained`: There are no plans for new features, but the maintainer intends to
+# respond to issues that get filed.
+# - `as-is`: The crate is feature complete, the maintainer does not intend to continue working on
+# it or providing support, but it works for the purposes it was designed for.
+# - `experimental`: The author wants to share it with the community but is not intending to meet
+# anyone's particular use case.
+# - `looking-for-maintainer`: The current maintainer would like to transfer the crate to someone
+# else.
+# - `deprecated`: The maintainer does not recommend using this crate (the description of the crate
+# can describe why, there could be a better solution available or there could be problems with
+# the crate that the author does not want to fix).
+# - `none`: Displays no badge on crates.io, since the maintainer has not chosen to specify
+# their intentions, potential crate users will need to investigate on their own.
+maintenance = { status = "..." }
+```
+
+### Dependency sections
+
+See the [specifying dependencies page](specifying-dependencies.md) for
+information on the `[dependencies]`, `[dev-dependencies]`,
+`[build-dependencies]`, and target-specific `[target.*.dependencies]` sections.
+
+### The `[profile.*]` sections
+
+The `[profile]` tables provide a way to customize compiler settings such as
+optimizations and debug settings. See [the Profiles chapter](profiles.md) for
+more detail.
+
+
+
+[`cargo init`]: ../commands/cargo-init.md
+[`cargo new`]: ../commands/cargo-new.md
+[`cargo package`]: ../commands/cargo-package.md
+[`cargo run`]: ../commands/cargo-run.md
+[crates.io]: https://crates.io/
+[docs.rs]: https://docs.rs/
+[publishing]: publishing.md
+[Rust Edition]: ../../edition-guide/index.html
+[spdx-2.1-license-expressions]: https://spdx.org/spdx-specification-21-web-version#h.jxpfx0ykyb60
+[spdx-license-list-3.11]: https://github.com/spdx/license-list-data/tree/v3.11
+[SPDX site]: https://spdx.org/license-list
+[TOML]: https://toml.io/
+
+<script>
+(function() {
+ var fragments = {
+ "#the-project-layout": "../guide/project-layout.html",
+ "#examples": "cargo-targets.html#examples",
+ "#tests": "cargo-targets.html#tests",
+ "#integration-tests": "cargo-targets.html#integration-tests",
+ "#configuring-a-target": "cargo-targets.html#configuring-a-target",
+ "#target-auto-discovery": "cargo-targets.html#target-auto-discovery",
+ "#the-required-features-field-optional": "cargo-targets.html#the-required-features-field",
+ "#building-dynamic-or-static-libraries": "cargo-targets.html#the-crate-type-field",
+ "#the-workspace-section": "workspaces.html#the-workspace-section",
+ "#virtual-workspace": "workspaces.html",
+ "#package-selection": "workspaces.html#package-selection",
+ "#the-features-section": "features.html#the-features-section",
+ "#rules": "features.html",
+ "#usage-in-end-products": "features.html",
+ "#usage-in-packages": "features.html",
+ "#the-patch-section": "overriding-dependencies.html#the-patch-section",
+ "#using-patch-with-multiple-versions": "overriding-dependencies.html#using-patch-with-multiple-versions",
+ "#the-replace-section": "overriding-dependencies.html#the-replace-section",
+ };
+ var target = fragments[window.location.hash];
+ if (target) {
+ var url = window.location.toString();
+ var base = url.substring(0, url.lastIndexOf('/'));
+ window.location.replace(base + "/" + target);
+ }
+})();
+</script>
diff --git a/src/tools/cargo/src/doc/src/reference/overriding-dependencies.md b/src/tools/cargo/src/doc/src/reference/overriding-dependencies.md
new file mode 100644
index 000000000..c8e8fbcb6
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/overriding-dependencies.md
@@ -0,0 +1,359 @@
+## Overriding Dependencies
+
+The desire to override a dependency can arise through a number of scenarios.
+Most of them, however, boil down to the ability to work with a crate before
+it's been published to [crates.io]. For example:
+
+* A crate you're working on is also used in a much larger application you're
+ working on, and you'd like to test a bug fix to the library inside of the
+ larger application.
+* An upstream crate you don't work on has a new feature or a bug fix on the
+ master branch of its git repository which you'd like to test out.
+* You're about to publish a new major version of your crate, but you'd like to
+ do integration testing across an entire package to ensure the new major
+ version works.
+* You've submitted a fix to an upstream crate for a bug you found, but you'd
+ like to immediately have your application start depending on the fixed
+ version of the crate to avoid blocking on the bug fix getting merged.
+
+These scenarios can be solved with the [`[patch]` manifest
+section](#the-patch-section).
+
+This chapter walks through a few different use cases, and includes details
+on the different ways to override a dependency.
+
+* Example use cases
+ * [Testing a bugfix](#testing-a-bugfix)
+ * [Working with an unpublished minor version](#working-with-an-unpublished-minor-version)
+ * [Overriding repository URL](#overriding-repository-url)
+ * [Prepublishing a breaking change](#prepublishing-a-breaking-change)
+ * [Using `[patch]` with multiple versions](#using-patch-with-multiple-versions)
+* Reference
+ * [The `[patch]` section](#the-patch-section)
+ * [The `[replace]` section](#the-replace-section)
+ * [`paths` overrides](#paths-overrides)
+
+> **Note**: See also specifying a dependency with [multiple locations], which
+> can be used to override the source for a single dependency declaration in a
+> local package.
+
+### Testing a bugfix
+
+Let's say you're working with the [`uuid` crate] but while you're working on it
+you discover a bug. You are, however, quite enterprising so you decide to also
+try to fix the bug! Originally your manifest will look like:
+
+[`uuid` crate]: https://crates.io/crates/uuid
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+
+[dependencies]
+uuid = "1.0"
+```
+
+First thing we'll do is to clone the [`uuid` repository][uuid-repository]
+locally via:
+
+```console
+$ git clone https://github.com/uuid-rs/uuid.git
+```
+
+Next we'll edit the manifest of `my-library` to contain:
+
+```toml
+[patch.crates-io]
+uuid = { path = "../path/to/uuid" }
+```
+
+Here we declare that we're *patching* the source `crates-io` with a new
+dependency. This will effectively add the locally checked-out version of `uuid` to
+the crates.io registry for our local package.
+
+Next up we need to ensure that our lock file is updated to use this new version
+of `uuid` so our package uses the locally checked out copy instead of one from
+crates.io. The way `[patch]` works is that it'll load the dependency at
+`../path/to/uuid` and then whenever crates.io is queried for versions of `uuid`
+it'll *also* return the local version.
+
+This means that the version number of the local checkout is significant and will
+affect whether the patch is used. Our manifest declared `uuid = "1.0"` which
+means we'll only resolve to `>= 1.0.0, < 2.0.0`, and Cargo's greedy resolution
+algorithm also means that we'll resolve to the maximum version within that
+range. Typically this doesn't matter as the version of the git repository will
+already be greater than or match the maximum version published on crates.io, but it's
+important to keep this in mind!
+
+In any case, typically all you need to do now is:
+
+```console
+$ cargo build
+ Compiling uuid v1.0.0 (.../uuid)
+ Compiling my-library v0.1.0 (.../my-library)
+ Finished dev [unoptimized + debuginfo] target(s) in 0.32 secs
+```
+
+And that's it! You're now building with the local version of `uuid` (note the
+path in parentheses in the build output). If you don't see the local path version getting
+built then you may need to run `cargo update -p uuid --precise $version` where
+`$version` is the version of the locally checked out copy of `uuid`.
+
+Once you've fixed the bug you originally found the next thing you'll want to do
+is to likely submit that as a pull request to the `uuid` crate itself. Once
+you've done this then you can also update the `[patch]` section. The listing
+inside of `[patch]` is just like the `[dependencies]` section, so once your pull
+request is merged you could change your `path` dependency to:
+
+```toml
+[patch.crates-io]
+uuid = { git = 'https://github.com/uuid-rs/uuid.git' }
+```
+
+[uuid-repository]: https://github.com/uuid-rs/uuid
+
+### Working with an unpublished minor version
+
+Let's now shift gears a bit from bug fixes to adding features. While working on
+`my-library` you discover that a whole new feature is needed in the `uuid`
+crate. You've implemented this feature, tested it locally above with `[patch]`,
+and submitted a pull request. Let's go over how you continue to use and test it
+before it's actually published.
+
+Let's also say that the current version of `uuid` on crates.io is `1.0.0`, but
+since then the master branch of the git repository has updated to `1.0.1`. This
+branch includes the new feature you submitted previously. To use this
+repository we'll edit our `Cargo.toml` to look like
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+
+[dependencies]
+uuid = "1.0.1"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/uuid-rs/uuid.git' }
+```
+
+Note that our local dependency on `uuid` has been updated to `1.0.1` as it's
+what we'll actually require once the crate is published. This version doesn't
+exist on crates.io, though, so we provide it with the `[patch]` section of the
+manifest.
+
+Now when our library is built it'll fetch `uuid` from the git repository and
+resolve to 1.0.1 inside the repository instead of trying to download a version
+from crates.io. Once 1.0.1 is published on crates.io the `[patch]` section can
+be deleted.
+
+It's also worth noting that `[patch]` applies *transitively*. Let's say you use
+`my-library` in a larger package, such as:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/uuid-rs/uuid.git' }
+```
+
+Remember that `[patch]` is applicable *transitively* but can only be defined at
+the *top level*, so we, as consumers of `my-library`, have to repeat the `[patch]`
+section if necessary. Here, though, the new `uuid` crate applies to *both* our dependency on
+`uuid` and the `my-library -> uuid` dependency. The `uuid` crate will be resolved to
+one version for this entire crate graph, 1.0.1, and it'll be pulled from the git
+repository.
+
+#### Overriding repository URL
+
+In case the dependency you want to override isn't loaded from `crates.io`,
+you'll have to change a bit how you use `[patch]`. For example, if the
+dependency is a git dependency, you can override it to a local path with:
+
+```toml
+[patch."https://github.com/your/repository"]
+my-library = { path = "../my-library/path" }
+```
+
+And that's it!
+
+### Prepublishing a breaking change
+
+Let's take a look at working with a new major version of a crate, typically
+accompanied with breaking changes. Sticking with our previous crates, this
+means that we're going to be creating version 2.0.0 of the `uuid` crate. After
+we've submitted all changes upstream we can update our manifest for
+`my-library` to look like:
+
+```toml
+[dependencies]
+uuid = "2.0"
+
+[patch.crates-io]
+uuid = { git = "https://github.com/uuid-rs/uuid.git", branch = "2.0.0" }
+```
+
+And that's it! Like with the previous example the 2.0.0 version doesn't actually
+exist on crates.io, but we can still pull it in through a git dependency by
+using the `[patch]` section. As a thought exercise, let's take another
+look at the `my-binary` manifest from above:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/uuid-rs/uuid.git', branch = '2.0.0' }
+```
+
+Note that this will actually resolve to two versions of the `uuid` crate. The
+`my-binary` crate will continue to use the 1.x.y series of the `uuid` crate but
+the `my-library` crate will use the `2.0.0` version of `uuid`. This will allow you
+to gradually roll out breaking changes to a crate through a dependency graph
+without being forced to update everything all at once.
+
+### Using `[patch]` with multiple versions
+
+You can patch in multiple versions of the same crate with the `package` key
+used to rename dependencies. For example, let's say that the `serde` crate has
+a bugfix on its `1.*` series that we'd like to use, but we'd also like to
+prototype using a `2.0.0` version of serde we have in our git repository. To
+configure this we'd do:
+
+```toml
+[patch.crates-io]
+serde = { git = 'https://github.com/serde-rs/serde.git' }
+serde2 = { git = 'https://github.com/example/serde.git', package = 'serde', branch = 'v2' }
+```
+
+The first `serde = ...` directive indicates that serde `1.*` should be used
+from the git repository (pulling in the bugfix we need) and the second `serde2
+= ...` directive indicates that the `serde` package should also be pulled from
+the `v2` branch of `https://github.com/example/serde`. We're assuming here
+that `Cargo.toml` on that branch mentions version `2.0.0`.
+
+Note that when using the `package` key the `serde2` identifier here is actually
+ignored. We simply need a unique name which doesn't conflict with other patched
+crates.
+
+### The `[patch]` section
+
+The `[patch]` section of `Cargo.toml` can be used to override dependencies
+with other copies. The syntax is similar to the
+[`[dependencies]`][dependencies] section:
+
+```toml
+[patch.crates-io]
+foo = { git = 'https://github.com/example/foo.git' }
+bar = { path = 'my/local/bar' }
+
+[dependencies.baz]
+git = 'https://github.com/example/baz.git'
+
+[patch.'https://github.com/example/baz']
+baz = { git = 'https://github.com/example/patched-baz.git', branch = 'my-branch' }
+```
+
+> **Note**: The `[patch]` table can also be specified as a [configuration
+> option](config.md), such as in a `.cargo/config.toml` file or a CLI option
+> like `--config 'patch.crates-io.rand.path="rand"'`. This can be useful for
+> local-only changes that you don't want to commit, or temporarily testing a
+> patch.
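+
+As a sketch of the config-file form of that note (the `rand` path below is
+only illustrative), a `.cargo/config.toml` could contain:
+
+```toml
+[patch.crates-io]
+rand = { path = "rand" }
+```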
+
+The `[patch]` table is made of dependency-like sub-tables. Each key after
+`[patch]` is a URL of the source that is being patched, or the name of a
+registry. The name `crates-io` may be used to override the default registry
+[crates.io]. The first `[patch]` in the example above demonstrates overriding
+[crates.io], and the second `[patch]` demonstrates overriding a git source.
+
+Each entry in these tables is a normal dependency specification, the same as
+found in the `[dependencies]` section of the manifest. The dependencies listed
+in the `[patch]` section are resolved and used to patch the source at the
+URL specified. The above manifest snippet patches the `crates-io` source (i.e.,
+crates.io itself) with the `foo` and `bar` crates. It also
+patches the `https://github.com/example/baz` source with the `my-branch` branch
+of a different repository.
+
+Sources can be patched with versions of crates that do not exist, and they can
+also be patched with versions of crates that already exist. If a source is
+patched with a crate version that already exists in the source, then the
+source's original crate is replaced.
+
+Cargo only looks at the patch settings in the `Cargo.toml` manifest at the
+root of the workspace. Patch settings defined in dependencies will be
+ignored.
+
+### The `[replace]` section
+
+> **Note**: `[replace]` is deprecated. You should use the
+> [`[patch]`](#the-patch-section) table instead.
+
+This section of Cargo.toml can be used to override dependencies with other
+copies. The syntax is similar to the `[dependencies]` section:
+
+```toml
+[replace]
+"foo:0.1.0" = { git = 'https://github.com/example/foo.git' }
+"bar:1.0.2" = { path = 'my/local/bar' }
+```
+
+Each key in the `[replace]` table is a [package ID
+specification](pkgid-spec.md), which allows arbitrarily choosing a node in the
+dependency graph to override (the 3-part version number is required). The
+value of each key is the same as the `[dependencies]` syntax for specifying
+dependencies, except that you can't specify features. Note that when a crate
+is overridden the copy it's overridden with must have both the same name and
+version, but it can come from a different source (e.g., git or a local path).
+
+Cargo only looks at the replace settings in the `Cargo.toml` manifest at the
+root of the workspace. Replace settings defined in dependencies will be
+ignored.
+
+### `paths` overrides
+
+Sometimes you're only temporarily working on a crate and you don't want to have
+to modify `Cargo.toml` like with the `[patch]` section above. For this use
+case Cargo offers a much more limited version of overrides called **path
+overrides**.
+
+Path overrides are specified through [`.cargo/config.toml`](config.md) instead of
+`Cargo.toml`. Inside of `.cargo/config.toml` you'll specify a key called `paths`:
+
+```toml
+paths = ["/path/to/uuid"]
+```
+
+This array should be filled with directories that contain a `Cargo.toml`. In
+this instance, we’re just adding `uuid`, so it will be the only one that’s
+overridden. This path can be either absolute or relative to the directory that
+contains the `.cargo` folder.
+
+Path overrides are more restricted than the `[patch]` section, however, in
+that they cannot change the structure of the dependency graph. When a
+path replacement is used then the previous set of dependencies
+must all match exactly to the new `Cargo.toml` specification. For example, this
+means that path overrides cannot be used to test out adding a dependency to a
+crate; instead, `[patch]` must be used in that situation. As a result, usage of a
+path override is typically isolated to quick bug fixes rather than larger
+changes.
+
+Note: using a local configuration to override paths will only work for crates
+that have been published to [crates.io]. You cannot use this feature to tell
+Cargo how to find local unpublished crates.
+
+
+[crates.io]: https://crates.io/
+[multiple locations]: specifying-dependencies.md#multiple-locations
+[dependencies]: specifying-dependencies.md
diff --git a/src/tools/cargo/src/doc/src/reference/pkgid-spec.md b/src/tools/cargo/src/doc/src/reference/pkgid-spec.md
new file mode 100644
index 000000000..6c11b4b3d
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/pkgid-spec.md
@@ -0,0 +1,67 @@
+## Package ID Specifications
+
+### Package ID specifications
+
+Subcommands of Cargo frequently need to refer to a particular package within a
+dependency graph for various operations like updating, cleaning, building, etc.
+To solve this problem, Cargo supports *Package ID Specifications*. A specification
+is a string which is used to uniquely refer to one package within a graph of
+packages.
+
+The specification may be fully qualified, such as
+`https://github.com/rust-lang/crates.io-index#regex@1.4.3` or it may be
+abbreviated, such as `regex`. The abbreviated form may be used as long as it
+uniquely identifies a single package in the dependency graph. If there is
+ambiguity, additional qualifiers can be added to make it unique. For example,
+if there are two versions of the `regex` package in the graph, then it can be
+qualified with a version to make it unique, such as `regex@1.4.3`.
+
+#### Specification grammar
+
+The formal grammar for a Package Id Specification is:
+
+```notrust
+spec := pkgname
+ | proto "://" hostname-and-path [ "#" ( pkgname | semver ) ]
+pkgname := name [ ("@" | ":" ) semver ]
+
+proto := "http" | "git" | ...
+```
+
+Here, brackets indicate that the contents are optional.
+
+The URL form can be used for git dependencies, or to differentiate packages
+that come from different sources such as different registries.
+
+#### Example specifications
+
+The following are references to the `regex` package on `crates.io`:
+
+| Spec | Name | Version |
+|:------------------------------------------------------------|:-------:|:-------:|
+| `regex` | `regex` | `*` |
+| `regex@1.4.3` | `regex` | `1.4.3` |
+| `https://github.com/rust-lang/crates.io-index#regex` | `regex` | `*` |
+| `https://github.com/rust-lang/crates.io-index#regex@1.4.3` | `regex` | `1.4.3` |
+
+The following are some examples of specs for several different git dependencies:
+
+| Spec | Name | Version |
+|:----------------------------------------------------------|:----------------:|:--------:|
+| `https://github.com/rust-lang/cargo#0.52.0` | `cargo` | `0.52.0` |
+| `https://github.com/rust-lang/cargo#cargo-platform@0.1.2` | <nobr>`cargo-platform`</nobr> | `0.1.2` |
+| `ssh://git@github.com/rust-lang/regex.git#regex@1.4.3` | `regex` | `1.4.3` |
+
+Local packages on the filesystem can use `file://` URLs to reference them:
+
+| Spec | Name | Version |
+|:---------------------------------------|:-----:|:-------:|
+| `file:///path/to/my/project/foo` | `foo` | `*` |
+| `file:///path/to/my/project/foo#1.1.8` | `foo` | `1.1.8` |
+
+#### Brevity of specifications
+
+The goal of this specification format is to enable both succinct and exhaustive syntaxes for
+referring to packages in a dependency graph. Ambiguous references may refer to
+one or more packages. Most commands generate an error if more than one package
+could be referred to with the same specification.
diff --git a/src/tools/cargo/src/doc/src/reference/profiles.md b/src/tools/cargo/src/doc/src/reference/profiles.md
new file mode 100644
index 000000000..56c8538f8
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/profiles.md
@@ -0,0 +1,469 @@
+## Profiles
+
+Profiles provide a way to alter the compiler settings, influencing things like
+optimizations and debugging symbols.
+
+Cargo has 4 built-in profiles: `dev`, `release`, `test`, and `bench`. The
+profile is automatically chosen based on which command is being run if a
+profile is not specified on the command-line. In addition to the built-in
+profiles, custom user-defined profiles can also be specified.
+
+Profile settings can be changed in [`Cargo.toml`](manifest.md) with the
+`[profile]` table. Within each named profile, individual settings can be changed
+with key/value pairs like this:
+
+```toml
+[profile.dev]
+opt-level = 1 # Use slightly better optimizations.
+overflow-checks = false # Disable integer overflow checks.
+```
+
+Cargo only looks at the profile settings in the `Cargo.toml` manifest at the
+root of the workspace. Profile settings defined in dependencies will be
+ignored.
+
+Additionally, profiles can be overridden from a [config] definition.
+Specifying a profile in a config file or environment variable will override
+the settings from `Cargo.toml`.
+
+[config]: config.md
+
+### Profile settings
+
+The following is a list of settings that can be controlled in a profile.
+
+#### opt-level
+
+The `opt-level` setting controls the [`-C opt-level` flag] which controls the level
+of optimization. Higher optimization levels may produce faster runtime code at
+the expense of longer compile times. Higher levels may also change and
+rearrange the compiled code which may make it harder to use with a debugger.
+
+The valid options are:
+
+* `0`: no optimizations
+* `1`: basic optimizations
+* `2`: some optimizations
+* `3`: all optimizations
+* `"s"`: optimize for binary size
+* `"z"`: optimize for binary size, but also turn off loop vectorization.
+
+It is recommended to experiment with different levels to find the right
+balance for your project. There may be surprising results, such as level `3`
+being slower than `2`, or the `"s"` and `"z"` levels not being necessarily
+smaller. You may also want to reevaluate your settings over time as newer
+versions of `rustc` change optimization behavior.
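+
+As a quick illustration, a profile can set a different level per build mode.
+The values below are only an example, not a recommendation:
+
+```toml
+[profile.dev]
+opt-level = 1    # a little optimization for day-to-day builds
+
+[profile.release]
+opt-level = "s"  # favor smaller binaries over maximum speed
+```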
+
+See also [Profile Guided Optimization] for more advanced optimization
+techniques.
+
+[`-C opt-level` flag]: ../../rustc/codegen-options/index.html#opt-level
+[Profile Guided Optimization]: ../../rustc/profile-guided-optimization.html
+
+#### debug
+
+The `debug` setting controls the [`-C debuginfo` flag] which controls the
+amount of debug information included in the compiled binary.
+
+The valid options are:
+
+* `0` or `false`: no debug info at all
+* `1`: line tables only
+* `2` or `true`: full debug info
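+
+For example, a minimal sketch that keeps only line tables in dev builds:
+
+```toml
+[profile.dev]
+debug = 1  # line tables only: smaller artifacts, still usable backtraces
+```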
+
+You may wish to also configure the [`split-debuginfo`](#split-debuginfo) option
+depending on your needs as well.
+
+[`-C debuginfo` flag]: ../../rustc/codegen-options/index.html#debuginfo
+
+#### split-debuginfo
+
+The `split-debuginfo` setting controls the [`-C split-debuginfo` flag] which
+controls whether debug information, if generated, is either placed in the
+executable itself or adjacent to it.
+
+This option is a string and acceptable values are the same as those the
+[compiler accepts][`-C split-debuginfo` flag]. The default value for this option
+is `unpacked` on macOS for profiles that have debug information otherwise
+enabled. Otherwise the default for this option is [documented with rustc][`-C
+split-debuginfo` flag] and is platform-specific. Some options are only
+available on the [nightly channel]. The Cargo default may change in the future
+once more testing has been performed and support for DWARF is stabilized.
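+
+A minimal sketch, assuming a platform where `"packed"` is supported:
+
+```toml
+[profile.release]
+split-debuginfo = "packed"  # keep debug info in a separate artifact
+```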
+
+[nightly channel]: ../../book/appendix-07-nightly-rust.html
+[`-C split-debuginfo` flag]: ../../rustc/codegen-options/index.html#split-debuginfo
+
+#### strip
+
+The `strip` option controls the [`-C strip` flag], which directs rustc to
+strip either symbols or debuginfo from a binary. This can be enabled like so:
+
+```toml
+[package]
+# ...
+
+[profile.release]
+strip = "debuginfo"
+```
+
+Possible string values of `strip` are `"none"`, `"debuginfo"`, and `"symbols"`.
+The default is `"none"`.
+
+You can also configure this option with the boolean values `true` or `false`.
+`strip = true` is equivalent to `strip = "symbols"`. `strip = false` is
+equivalent to `strip = "none"` and disables `strip` completely.
+
+[`-C strip` flag]: ../../rustc/codegen-options/index.html#strip
+
+#### debug-assertions
+
+The `debug-assertions` setting controls the [`-C debug-assertions` flag] which
+turns `cfg(debug_assertions)` [conditional compilation] on or off. Debug
+assertions are intended to include runtime validation which is only available
+in debug/development builds. These may be things that are too expensive or
+otherwise undesirable in a release build. Debug assertions enable the
+[`debug_assert!` macro] in the standard library.
+
+The valid options are:
+
+* `true`: enabled
+* `false`: disabled
+
+[`-C debug-assertions` flag]: ../../rustc/codegen-options/index.html#debug-assertions
+[conditional compilation]: ../../reference/conditional-compilation.md#debug_assertions
+[`debug_assert!` macro]: ../../std/macro.debug_assert.html
+
+#### overflow-checks
+
+The `overflow-checks` setting controls the [`-C overflow-checks` flag] which
+controls the behavior of [runtime integer overflow]. When overflow-checks are
+enabled, a panic will occur on overflow.
+
+The valid options are:
+
+* `true`: enabled
+* `false`: disabled
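+
+For instance, a sketch that keeps these runtime checks enabled even in release
+builds, trading some performance for extra safety:
+
+```toml
+[profile.release]
+debug-assertions = true
+overflow-checks = true
+```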
+
+[`-C overflow-checks` flag]: ../../rustc/codegen-options/index.html#overflow-checks
+[runtime integer overflow]: ../../reference/expressions/operator-expr.md#overflow
+
+#### lto
+
+The `lto` setting controls the [`-C lto` flag] which controls LLVM's [link
+time optimizations]. LTO can produce better optimized code, using
+whole-program analysis, at the cost of longer linking time.
+
+The valid options are:
+
+* `false`: Performs "thin local LTO" which performs "thin" LTO on the local
+ crate only across its [codegen units](#codegen-units). No LTO is performed
+ if codegen units is 1 or [opt-level](#opt-level) is 0.
+* `true` or `"fat"`: Performs "fat" LTO which attempts to perform
+ optimizations across all crates within the dependency graph.
+* `"thin"`: Performs ["thin" LTO]. This is similar to "fat", but takes
+ substantially less time to run while still achieving performance gains
+ similar to "fat".
+* `"off"`: Disables LTO.
+
+See also the [`-C linker-plugin-lto`] `rustc` flag for cross-language LTO.
+
+[`-C lto` flag]: ../../rustc/codegen-options/index.html#lto
+[link time optimizations]: https://llvm.org/docs/LinkTimeOptimization.html
+[`-C linker-plugin-lto`]: ../../rustc/codegen-options/index.html#linker-plugin-lto
+["thin" LTO]: http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html
+
+#### panic
+
+The `panic` setting controls the [`-C panic` flag] which controls which panic
+strategy to use.
+
+The valid options are:
+
+* `"unwind"`: Unwind the stack upon panic.
+* `"abort"`: Terminate the process upon panic.
+
+When set to `"unwind"`, the actual value depends on the default of the target
+platform. For example, the NVPTX platform does not support unwinding, so it
+always uses `"abort"`.
+
+Tests, benchmarks, build scripts, and proc macros ignore the `panic` setting.
+The `rustc` test harness currently requires `unwind` behavior. See the
+[`panic-abort-tests`] unstable flag which enables `abort` behavior.
+
+Additionally, when using the `abort` strategy and building a test, all of the
+dependencies will also be forced to build with the `unwind` strategy.
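+
+For example, a sketch that opts release builds into the abort strategy, often
+done to reduce binary size:
+
+```toml
+[profile.release]
+panic = "abort"
+```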
+
+[`-C panic` flag]: ../../rustc/codegen-options/index.html#panic
+[`panic-abort-tests`]: unstable.md#panic-abort-tests
+
+#### incremental
+
+The `incremental` setting controls the [`-C incremental` flag] which controls
+whether or not incremental compilation is enabled. Incremental compilation
+causes `rustc` to save additional information to disk which will be reused
+when recompiling the crate, improving re-compile times. The additional
+information is stored in the `target` directory.
+
+The valid options are:
+
+* `true`: enabled
+* `false`: disabled
+
+Incremental compilation is only used for workspace members and "path"
+dependencies.
+
+The incremental value can be overridden globally with the `CARGO_INCREMENTAL`
+[environment variable] or the [`build.incremental`] config variable.
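+
+A sketch of the config-file form, in `.cargo/config.toml`:
+
+```toml
+[build]
+incremental = false
+```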
+
+[`-C incremental` flag]: ../../rustc/codegen-options/index.html#incremental
+[environment variable]: environment-variables.md
+[`build.incremental`]: config.md#buildincremental
+
+#### codegen-units
+
+The `codegen-units` setting controls the [`-C codegen-units` flag] which
+controls how many "code generation units" a crate will be split into. More
+code generation units allow more of a crate to be processed in parallel,
+possibly reducing compile time, but may produce slower code.
+
+This option takes an integer greater than 0.
+
+The default is 256 for [incremental](#incremental) builds, and 16 for
+non-incremental builds.
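+
+For example, a sketch that trades compile time for potentially better
+optimization in release builds:
+
+```toml
+[profile.release]
+codegen-units = 1
+```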
+
+[`-C codegen-units` flag]: ../../rustc/codegen-options/index.html#codegen-units
+
+#### rpath
+
+The `rpath` setting controls the [`-C rpath` flag] which controls
+whether or not [`rpath`] is enabled.
+
+[`-C rpath` flag]: ../../rustc/codegen-options/index.html#rpath
+[`rpath`]: https://en.wikipedia.org/wiki/Rpath
+
+### Default profiles
+
+#### dev
+
+The `dev` profile is used for normal development and debugging. It is the
+default for build commands like [`cargo build`], and is used for `cargo install --debug`.
+
+The default settings for the `dev` profile are:
+
+```toml
+[profile.dev]
+opt-level = 0
+debug = true
+split-debuginfo = '...' # Platform-specific.
+debug-assertions = true
+overflow-checks = true
+lto = false
+panic = 'unwind'
+incremental = true
+codegen-units = 256
+rpath = false
+```
+
+#### release
+
+The `release` profile is intended for optimized artifacts used for releases
+and in production. This profile is used when the `--release` flag is used, and
+is the default for [`cargo install`].
+
+The default settings for the `release` profile are:
+
+```toml
+[profile.release]
+opt-level = 3
+debug = false
+split-debuginfo = '...' # Platform-specific.
+debug-assertions = false
+overflow-checks = false
+lto = false
+panic = 'unwind'
+incremental = false
+codegen-units = 16
+rpath = false
+```
+
+#### test
+
+The `test` profile is the default profile used by [`cargo test`].
+The `test` profile inherits the settings from the [`dev`](#dev) profile.
+
+#### bench
+
+The `bench` profile is the default profile used by [`cargo bench`].
+The `bench` profile inherits the settings from the [`release`](#release) profile.
+
+#### Build Dependencies
+
+To compile quickly, all profiles, by default, do not optimize build
+dependencies (build scripts, proc macros, and their dependencies), and avoid
+computing debug info when a build dependency is not used as a runtime
+dependency. The default settings for build overrides are:
+
+```toml
+[profile.dev.build-override]
+opt-level = 0
+codegen-units = 256
+debug = false # when possible
+
+[profile.release.build-override]
+opt-level = 0
+codegen-units = 256
+```
+
+However, if errors occur while running build dependencies, turning full debug
+info on will improve backtraces and debuggability when needed:
+
+```toml
+[profile.dev.build-override]
+debug = true
+```
+
+Build dependencies otherwise inherit settings from the active profile in use, as
+described in [Profile selection](#profile-selection).
+
+### Custom profiles
+
+In addition to the built-in profiles, additional custom profiles can be
+defined. These may be useful for setting up multiple workflows and build
+modes. When defining a custom profile, you must specify the `inherits` key to
+specify which profile the custom profile inherits settings from when the
+setting is not specified.
+
+For example, let's say you want to compare a normal release build with a
+release build with [LTO](#lto) optimizations, you can specify something like
+the following in `Cargo.toml`:
+
+```toml
+[profile.release-lto]
+inherits = "release"
+lto = true
+```
+
+The `--profile` flag can then be used to choose this custom profile:
+
+```console
+cargo build --profile release-lto
+```
+
+The output for each profile will be placed in a directory of the same name
+as the profile in the [`target` directory]. As in the example above, the
+output would go into the `target/release-lto` directory.
+
+[`target` directory]: ../guide/build-cache.md
+
+### Profile selection
+
+The profile used depends on the command, the command-line flags like
+`--release` or `--profile`, and the package (in the case of
+[overrides](#overrides)). The default profile if none is specified is:
+
+| Command | Default Profile |
+|---------|-----------------|
+| [`cargo run`], [`cargo build`],<br>[`cargo check`], [`cargo rustc`] | [`dev` profile](#dev) |
+| [`cargo test`] | [`test` profile](#test) |
+| [`cargo bench`] | [`bench` profile](#bench) |
+| [`cargo install`] | [`release` profile](#release) |
+
+You can switch to a different profile using the `--profile=NAME` option, which will use the given profile.
+The `--release` flag is equivalent to `--profile=release`.
+
+The selected profile applies to all Cargo targets,
+including [library](./cargo-targets.md#library),
+[binary](./cargo-targets.md#binaries),
+[example](./cargo-targets.md#examples),
+[test](./cargo-targets.md#tests),
+and [benchmark](./cargo-targets.md#benchmarks).
+
+The profile for specific packages can be specified with
+[overrides](#overrides), described below.
+
+[`cargo bench`]: ../commands/cargo-bench.md
+[`cargo build`]: ../commands/cargo-build.md
+[`cargo check`]: ../commands/cargo-check.md
+[`cargo install`]: ../commands/cargo-install.md
+[`cargo run`]: ../commands/cargo-run.md
+[`cargo rustc`]: ../commands/cargo-rustc.md
+[`cargo test`]: ../commands/cargo-test.md
+
+### Overrides
+
+Profile settings can be overridden for specific packages and build-time
+crates. To override the settings for a specific package, use the `package`
+table to change the settings for the named package:
+
+```toml
+# The `foo` package will use the -Copt-level=3 flag.
+[profile.dev.package.foo]
+opt-level = 3
+```
+
+The package name is actually a [Package ID Spec](pkgid-spec.md), so you can
+target individual versions of a package with syntax such as
+`[profile.dev.package."foo:2.1.0"]`.
+
+To override the settings for all dependencies (but not any workspace member),
+use the `"*"` package name:
+
+```toml
+# Set the default for dependencies.
+[profile.dev.package."*"]
+opt-level = 2
+```
+
+To override the settings for build scripts, proc macros, and their
+dependencies, use the `build-override` table:
+
+```toml
+# Set the settings for build scripts and proc-macros.
+[profile.dev.build-override]
+opt-level = 3
+```
+
+> Note: When a dependency is both a normal dependency and a build dependency,
+> Cargo will try to only build it once when `--target` is not specified. When
+> using `build-override`, the dependency may need to be built twice, once as a
+> normal dependency and once with the overridden build settings. This may
+> increase initial build times.
+
+The precedence for which value is used is done in the following order (first
+match wins):
+
+1. `[profile.dev.package.name]` --- A named package.
+2. `[profile.dev.package."*"]` --- For any non-workspace member.
+3. `[profile.dev.build-override]` --- Only for build scripts, proc macros, and
+ their dependencies.
+4. `[profile.dev]` --- Settings in `Cargo.toml`.
+5. Default values built-in to Cargo.
+
+Overrides cannot specify the `panic`, `lto`, or `rpath` settings.
+
+#### Overrides and generics
+
+The location where generic code is instantiated will influence the
+optimization settings used for that generic code. This can cause subtle
+interactions when using profile overrides to change the optimization level of
+a specific crate. If you attempt to raise the optimization level of a
+dependency which defines generic functions, those generic functions may not be
+optimized when used in your local crate. This is because the code may be
+generated in the crate where it is instantiated, and thus may use the
+optimization settings of that crate.
+
+For example, [nalgebra] is a library which defines vectors and matrices making
+heavy use of generic parameters. If your local code defines concrete nalgebra
+types like `Vector4<f64>` and uses their methods, the corresponding nalgebra
+code will be instantiated and built within your crate. Thus, if you attempt to
+increase the optimization level of `nalgebra` using a profile override, it may
+not result in faster performance.
+
+Further complicating the issue, `rustc` has some optimizations where it will
+attempt to share monomorphized generics between crates. If the opt-level is 2
+or 3, then a crate will not use monomorphized generics from other crates, nor
+will it export locally defined monomorphized items to be shared with other
+crates. When experimenting with optimizing dependencies for development,
+consider trying opt-level 1, which will apply some optimizations while still
+allowing monomorphized items to be shared.
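+
+A sketch of that suggestion, applying `opt-level` 1 to all dependencies in dev
+builds while leaving workspace members at the default:
+
+```toml
+[profile.dev.package."*"]
+opt-level = 1
+```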
+
+[nalgebra]: https://crates.io/crates/nalgebra
diff --git a/src/tools/cargo/src/doc/src/reference/publishing.md b/src/tools/cargo/src/doc/src/reference/publishing.md
new file mode 100644
index 000000000..98d572c34
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/publishing.md
@@ -0,0 +1,284 @@
+## Publishing on crates.io
+
+Once you've got a library that you'd like to share with the world, it's time to
+publish it on [crates.io]! Publishing a crate is when a specific
+version is uploaded to be hosted on [crates.io].
+
+Take care when publishing a crate, because a publish is **permanent**. The
+version can never be overwritten, and the code cannot be deleted. There is no
+limit to the number of versions which can be published, however.
+
+### Before your first publish
+
+First things first, you’ll need an account on [crates.io] to acquire
+an API token. To do so, [visit the home page][crates.io] and log in via a GitHub
+account (required for now). You will also need to verify your email address on the
+[Account Settings](https://crates.io/me) page. Once that is done, create an API
+token and make sure you copy it; once you leave the page you will not be able
+to see it again.
+
+Then run the [`cargo login`] command.
+
+```console
+$ cargo login
+```
+
+Then, at the prompt, paste the token you created:
+
+```console
+please paste the API Token found on https://crates.io/me below
+abcdefghijklmnopqrstuvwxyz012345
+```
+
+This command will inform Cargo of your API token and store it locally in your
+`~/.cargo/credentials.toml`. Note that this token is a **secret** and should not be
+shared with anyone else. If it leaks for any reason, you should revoke it
+immediately.
+
+> **Note**: The [`cargo logout`] command can be used to remove the token from
+> `credentials.toml`. This can be useful if you no longer need it stored on
+> the local machine.
+
+### Before publishing a new crate
+
+Keep in mind that crate names on [crates.io] are allocated on a first-come-first-serve
+basis. Once a crate name is taken, it cannot be used for another crate.
+
+Check out the [metadata you can specify](manifest.md) in `Cargo.toml` to
+ensure your crate can be discovered more easily! Before publishing, make sure
+you have filled out the following fields:
+
+- [`license` or `license-file`]
+- [`description`]
+- [`homepage`]
+- [`documentation`]
+- [`repository`]
+- [`readme`]
+
+It would also be a good idea to include some [`keywords`] and [`categories`],
+though they are not required.
+
+If you are publishing a library, you may also want to consult the [Rust API
+Guidelines].
+
+#### Packaging a crate
+
+The next step is to package up your crate and upload it to [crates.io]. For
+this we’ll use the [`cargo publish`] subcommand. This command performs the following
+steps:
+
+1. Perform some verification checks on your package.
+2. Compress your source code into a `.crate` file.
+3. Extract the `.crate` file into a temporary directory and verify that it
+ compiles.
+4. Upload the `.crate` file to [crates.io].
+5. The registry will perform some additional checks on the uploaded package
+ before adding it.
+
+It is recommended that you first run `cargo publish --dry-run` (or [`cargo
+package`] which is equivalent) to ensure there aren't any warnings or errors
+before publishing. This will perform the first three steps listed above.
+
+```console
+$ cargo publish --dry-run
+```
+
+You can inspect the generated `.crate` file in the `target/package` directory.
+[crates.io] currently has a 10MB size limit on the `.crate` file. You may want
+to check the size of the `.crate` file to ensure you didn't accidentally
+package up large assets that are not required to build your package, such as
+test data, website documentation, or code generation. You can check which
+files are included with the following command:
+
+```console
+$ cargo package --list
+```
+
+Cargo will automatically ignore files ignored by your version control system
+when packaging, but if you want to specify an extra set of files to ignore you
+can use the [`exclude` key](manifest.md#the-exclude-and-include-fields) in the
+manifest:
+
+```toml
+[package]
+# ...
+exclude = [
+ "public/assets/*",
+ "videos/*",
+]
+```
+
+If you’d rather explicitly list the files to include, Cargo also supports an
+`include` key, which if set, overrides the `exclude` key:
+
+```toml
+[package]
+# ...
+include = [
+ "**/*.rs",
+ "Cargo.toml",
+]
+```
+
+### Uploading the crate
+
+When you are ready to publish, use the [`cargo publish`] command
+to upload to [crates.io]:
+
+```console
+$ cargo publish
+```
+
+And that’s it, you’ve now published your first crate!
+
+### Publishing a new version of an existing crate
+
+In order to release a new version, change [the `version` value](manifest.md#the-version-field) specified in your `Cargo.toml` manifest.
+Keep in mind [the SemVer rules](semver.md) which provide guidelines on what is a compatible change.
+Then run [`cargo publish`] as described above to upload the new version.
+
+### Managing a crates.io-based crate
+
+Management of crates is primarily done through the command line `cargo` tool
+rather than the [crates.io] web interface. For this, there are a few subcommands
+to manage a crate.
+
+#### `cargo yank`
+
+Occasions may arise where you publish a version of a crate that actually ends up
+being broken for one reason or another (syntax error, forgot to include a file,
+etc.). For situations such as this, Cargo supports a “yank” of a version of a
+crate.
+
+```console
+$ cargo yank --version 1.0.1
+$ cargo yank --version 1.0.1 --undo
+```
+
+A yank **does not** delete any code. This feature is not intended for deleting
+accidentally uploaded secrets, for example. If that happens, you must reset
+those secrets immediately.
+
+The semantics of a yanked version are that no new dependencies can be created
+against that version, but all existing dependencies continue to work. One of the
+major goals of [crates.io] is to act as a permanent archive of crates that does
+not change over time, and allowing deletion of a version would go against this
+goal. Essentially a yank means that all packages with a `Cargo.lock` will not
+break, while any future `Cargo.lock` files generated will not list the yanked
+version.
+
+#### `cargo owner`
+
+A crate is often developed by more than one person, or the primary maintainer
+may change over time! The owner of a crate is the only person allowed to publish
+new versions of the crate, but an owner may designate additional owners.
+
+```console
+$ cargo owner --add github-handle
+$ cargo owner --remove github-handle
+$ cargo owner --add github:rust-lang:owners
+$ cargo owner --remove github:rust-lang:owners
+```
+
+The owner IDs given to these commands must be GitHub user names or GitHub teams.
+
+If a user name is given to `--add`, that user is invited as a “named” owner, with
+full rights to the crate. In addition to being able to publish or yank versions
+of the crate, they have the ability to add or remove owners, *including* the
+owner that made *them* an owner. Needless to say, you shouldn’t make people you
+don’t fully trust into a named owner. In order to become a named owner, a user
+must have logged into [crates.io] previously.
+
+If a team name is given to `--add`, that team is invited as a “team” owner, with
+restricted rights to the crate. While they have permission to publish or yank
+versions of the crate, they *do not* have the ability to add or remove owners.
+In addition to being more convenient for managing groups of owners, teams are
+just a bit more secure against owners becoming malicious.
+
+The syntax for teams is currently `github:org:team` (see examples above).
+In order to invite a team as an owner one must be a member of that team. No
+such restriction applies to removing a team as an owner.
+
+### GitHub permissions
+
+Team membership is not something GitHub provides simple public access to, and
+you are likely to encounter the following message when working with teams:
+
+> It looks like you don’t have permission to query a necessary property from
+GitHub to complete this request. You may need to re-authenticate on [crates.io]
+to grant permission to read GitHub org memberships.
+
+This is basically a catch-all for “you tried to query a team, and one of the
+five levels of membership access control denied this”. That is not an
+exaggeration. GitHub’s support for team access control is Enterprise Grade.
+
+The most likely cause of this is simply that you last logged in before this
+feature was added. We originally requested *no* permissions from GitHub when
+authenticating users, because we didn’t actually ever use the user’s token for
+anything other than logging them in. However to query team membership on your
+behalf, we now require [the `read:org` scope][oauth-scopes].
+
+You are free to deny us this scope, and everything that worked before teams
+were introduced will keep working. However you will never be able to add a team
+as an owner, or publish a crate as a team owner. If you ever attempt to do this,
+you will get the error above. You may also see this error if you ever try to
+publish a crate that you don’t own at all, but otherwise happens to have a team.
+
+If you ever change your mind, or just aren’t sure if [crates.io] has sufficient
+permission, you can always go to <https://crates.io/> and re-authenticate,
+which will prompt you for permission if [crates.io] doesn’t have all the scopes
+it would like to.
+
+An additional barrier to querying GitHub is that the organization may be
+actively denying third party access. To check this, you can go to:
+
+```text
+https://github.com/organizations/:org/settings/oauth_application_policy
+```
+
+where `:org` is the name of the organization (e.g., `rust-lang`). You may see
+something like:
+
+![Organization Access Control](../images/org-level-acl.png)
+
+Where you may choose to explicitly remove [crates.io] from your organization’s
+blacklist, or simply press the “Remove Restrictions” button to allow all third
+party applications to access this data.
+
+Alternatively, when [crates.io] requested the `read:org` scope, you could have
+explicitly whitelisted [crates.io] querying the org in question by pressing
+the “Grant Access” button next to its name:
+
+![Authentication Access Control](../images/auth-level-acl.png)
+
+#### Troubleshooting GitHub team access errors
+
+When trying to add a GitHub team as crate owner, you may see an error like:
+
+```text
+error: failed to invite owners to crate <crate_name>: api errors (status 200 OK): could not find the github team org/repo
+```
+In that case, you should go to [the GitHub Application settings page] and
+check if crates.io is listed in the `Authorized OAuth Apps` tab.
+If it isn't, you should go to <https://crates.io/> and authorize it.
+Then go back to the Application Settings page on GitHub, click on the
+crates.io application in the list, and make sure you or your organization is
+listed in the "Organization access" list with a green check mark. If there's
+a button labeled `Grant` or `Request`, you should grant the access or
+request the org owner to do so.
+
+[Rust API Guidelines]: https://rust-lang.github.io/api-guidelines/
+[`cargo login`]: ../commands/cargo-login.md
+[`cargo logout`]: ../commands/cargo-logout.md
+[`cargo package`]: ../commands/cargo-package.md
+[`cargo publish`]: ../commands/cargo-publish.md
+[`categories`]: manifest.md#the-categories-field
+[`description`]: manifest.md#the-description-field
+[`documentation`]: manifest.md#the-documentation-field
+[`homepage`]: manifest.md#the-homepage-field
+[`keywords`]: manifest.md#the-keywords-field
+[`license` or `license-file`]: manifest.md#the-license-and-license-file-fields
+[`readme`]: manifest.md#the-readme-field
+[`repository`]: manifest.md#the-repository-field
+[crates.io]: https://crates.io/
+[oauth-scopes]: https://developer.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/
+[the GitHub Application settings page]: https://github.com/settings/applications
diff --git a/src/tools/cargo/src/doc/src/reference/registries.md b/src/tools/cargo/src/doc/src/reference/registries.md
new file mode 100644
index 000000000..7714a36cd
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/registries.md
@@ -0,0 +1,151 @@
+## Registries
+
+Cargo installs crates and fetches dependencies from a "registry". The default
+registry is [crates.io]. A registry contains an "index" which contains a
+searchable list of available crates. A registry may also provide a web API to
+support publishing new crates directly from Cargo.
+
+> Note: If you are interested in mirroring or vendoring an existing registry,
+> take a look at [Source Replacement].
+
+If you are implementing a registry server, see [Running a Registry] for more
+details about the protocol between Cargo and a registry.
+
+### Using an Alternate Registry
+
+To use a registry other than [crates.io], the name and index URL of the
+registry must be added to a [`.cargo/config.toml` file][config]. The `registries`
+table has a key for each registry, for example:
+
+```toml
+[registries]
+my-registry = { index = "https://my-intranet:8080/git/index" }
+```
+
+The `index` key should be a URL to a git repository with the registry's index or a
+Cargo sparse registry URL with the `sparse+` prefix.
+
+A crate can then depend on a crate from another registry by specifying the
+`registry` key and a value of the registry's name in that dependency's entry
+in `Cargo.toml`:
+
+```toml
+# Sample Cargo.toml
+[package]
+name = "my-project"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+other-crate = { version = "1.0", registry = "my-registry" }
+```
+
+As with most config values, the index may be specified with an environment
+variable instead of a config file. For example, setting the following
+environment variable will accomplish the same thing as defining a config file:
+
+```ignore
+CARGO_REGISTRIES_MY_REGISTRY_INDEX=https://my-intranet:8080/git/index
+```
+
+> Note: [crates.io] does not accept packages that depend on crates from other
+> registries.
+
+### Publishing to an Alternate Registry
+
+If the registry supports web API access, then packages can be published
+directly to the registry from Cargo. Several of Cargo's commands such as
+[`cargo publish`] take a `--registry` command-line flag to indicate which
+registry to use. For example, to publish the package in the current directory:
+
+1. `cargo login --registry=my-registry`
+
+ This only needs to be done once. You must enter the secret API token
+ retrieved from the registry's website. Alternatively the token may be
+ passed directly to the `publish` command with the `--token` command-line
+ flag or an environment variable with the name of the registry such as
+ `CARGO_REGISTRIES_MY_REGISTRY_TOKEN`.
+
+2. `cargo publish --registry=my-registry`
+
+Instead of always passing the `--registry` command-line option, the default
+registry may be set in [`.cargo/config.toml`][config] with the `registry.default`
+key. For example:
+
+```toml
+[registry]
+default = "my-registry"
+```
+
+Setting the `package.publish` key in the `Cargo.toml` manifest restricts which
+registries the package is allowed to be published to. This is useful to
+prevent accidentally publishing a closed-source package to [crates.io]. The
+value may be a list of registry names, for example:
+
+```toml
+[package]
+# ...
+publish = ["my-registry"]
+```
+
+The `publish` value may also be `false` to restrict all publishing, which is
+the same as an empty list.
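+
+For example, to prevent the package from being published to any registry:
+
+```toml
+[package]
+# ...
+publish = false
+```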
+
+The authentication information saved by [`cargo login`] is stored in the
+`credentials.toml` file in the Cargo home directory (default `$HOME/.cargo`). It
+has a separate table for each registry, for example:
+
+```toml
+[registries.my-registry]
+token = "854DvwSlUwEHtIo3kWy6x7UCPKHfzCmy"
+```
+
+### Registry Protocols
+
+Cargo supports two remote registry protocols: `git` and `sparse`. If the registry
+index URL starts with `sparse+`, Cargo uses the sparse protocol. Otherwise
+Cargo uses the `git` protocol.
+
+The `git` protocol stores index metadata in a git repository and requires Cargo to clone
+the entire repo.
+
+The `sparse` protocol fetches individual metadata files using plain HTTP requests.
+Since Cargo only downloads the metadata for relevant crates, the `sparse` protocol can
+save significant time and bandwidth.
+
+The [crates.io] registry supports both protocols. The protocol for crates.io is
+controlled via the [`registries.crates-io.protocol`] config key.
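+
+A minimal sketch of selecting the protocol explicitly in `.cargo/config.toml`
+(the default depends on your Cargo version):
+
+```toml
+[registries.crates-io]
+protocol = "sparse"
+```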
+
+[Source Replacement]: source-replacement.md
+[Running a Registry]: running-a-registry.md
+[`cargo publish`]: ../commands/cargo-publish.md
+[`cargo package`]: ../commands/cargo-package.md
+[`cargo login`]: ../commands/cargo-login.md
+[config]: config.md
+[crates.io]: https://crates.io/
+[`registries.crates-io.protocol`]: config.md#registriescrates-ioprotocol
+
+
+<script>
+(function() {
+ var fragments = {
+ "#running-a-registry": "running-a-registry.html",
+ "#index-format": "registry-index.html",
+ "#web-api": "registry-web-api.html",
+ "#publish": "registry-web-api.html#publish",
+ "#yank": "registry-web-api.html#yank",
+ "#unyank": "registry-web-api.html#unyank",
+ "#owners": "registry-web-api.html#owners",
+ "#owners-list": "registry-web-api.html#owners-list",
+ "#owners-add": "registry-web-api.html#owners-add",
+ "#owners-remove": "registry-web-api.html#owners-remove",
+ "#search": "registry-web-api.html#search",
+ "#login": "registry-web-api.html#login",
+ };
+ var target = fragments[window.location.hash];
+ if (target) {
+ var url = window.location.toString();
+ var base = url.substring(0, url.lastIndexOf('/'));
+ window.location.replace(base + "/" + target);
+ }
+})();
+</script>
diff --git a/src/tools/cargo/src/doc/src/reference/registry-index.md b/src/tools/cargo/src/doc/src/reference/registry-index.md
new file mode 100644
index 000000000..38e3dd5fe
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/registry-index.md
@@ -0,0 +1,293 @@
+## Index Format
+
+The following defines the format of the index. New features are occasionally
+added, which are only understood starting with the version of Cargo that
+introduced them. Older versions of Cargo may not be able to use packages that
+make use of new features. However, the format for older packages should not
+change, so older versions of Cargo should be able to use them.
+
+### Index Configuration
+The root of the index contains a file named `config.json` which contains JSON
+information used by Cargo for accessing the registry. This is an example of
+what the [crates.io] config file looks like:
+
+```javascript
+{
+ "dl": "https://crates.io/api/v1/crates",
+ "api": "https://crates.io"
+}
+```
+
+The keys are:
+- `dl`: This is the URL for downloading crates listed in the index. The value
+ may have the following markers which will be replaced with their
+ corresponding value:
+
+  - `{crate}`: The name of the crate.
+ - `{version}`: The crate version.
+ - `{prefix}`: A directory prefix computed from the crate name. For example,
+ a crate named `cargo` has a prefix of `ca/rg`. See below for details.
+ - `{lowerprefix}`: Lowercase variant of `{prefix}`.
+ - `{sha256-checksum}`: The crate's sha256 checksum.
+
+  If none of the markers are present, then the value
+  `/{crate}/{version}/download` is appended to the end (a sketch of this
+  substitution follows this list).
+- `api`: This is the base URL for the web API. This key is optional, but if it
+ is not specified, commands such as [`cargo publish`] will not work. The web
+ API is described below.
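+
+The following is a minimal sketch of that substitution. The `download_url`
+function and its arguments are hypothetical, not Cargo's implementation;
+`prefix` and `lowerprefix` are computed from the crate name as described under
+"Index files" below.
+
+```rust
+/// Hypothetical sketch: expand a `dl` template into a download URL.
+fn download_url(dl: &str, name: &str, vers: &str, prefix: &str, lowerprefix: &str, cksum: &str) -> String {
+    let markers = ["{crate}", "{version}", "{prefix}", "{lowerprefix}", "{sha256-checksum}"];
+    // If the template contains no markers, `/{crate}/{version}/download` is appended.
+    if !markers.iter().any(|m| dl.contains(m)) {
+        return format!("{dl}/{name}/{vers}/download");
+    }
+    dl.replace("{crate}", name)
+        .replace("{version}", vers)
+        .replace("{prefix}", prefix)
+        .replace("{lowerprefix}", lowerprefix)
+        .replace("{sha256-checksum}", cksum)
+}
+
+fn main() {
+    // The crates.io-style config shown above has no markers in `dl`.
+    let url = download_url("https://crates.io/api/v1/crates", "foo", "0.1.0", "3/f", "3/f", "");
+    assert_eq!(url, "https://crates.io/api/v1/crates/foo/0.1.0/download");
+}
+```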
+
+
+### Download Endpoint
+The download endpoint should send the `.crate` file for the requested package.
+Cargo supports `https`, `http`, and `file` URLs, HTTP redirects, and both HTTP/1 and HTTP/2.
+The exact specifics of TLS support depend on the platform that Cargo is
+running on, the version of Cargo, and how it was compiled.
+
+
+### Index files
+The rest of the index repository contains one file for each package, where the
+filename is the name of the package in lowercase. Each version of the package
+has a separate line in the file. The files are organized in a tier of
+directories:
+
+- Packages with 1 character names are placed in a directory named `1`.
+- Packages with 2 character names are placed in a directory named `2`.
+- Packages with 3 character names are placed in the directory
+ `3/{first-character}` where `{first-character}` is the first character of
+ the package name.
+- All other packages are stored in directories named
+ `{first-two}/{second-two}` where the top directory is the first two
+ characters of the package name, and the next subdirectory is the third and
+ fourth characters of the package name. For example, `cargo` would be stored
+ in a file named `ca/rg/cargo`.
+
+> Note: Although the index filenames are in lowercase, the fields that contain
+> package names in `Cargo.toml` and the index JSON data are case-sensitive and
+> may contain upper and lower case characters.
+
+The directory name above is calculated based on the package name converted to
+lowercase; it is represented by the marker `{lowerprefix}`. When the original
+package name is used without case conversion, the resulting directory name is
+represented by the marker `{prefix}`. For example, the package `MyCrate` would
+have a `{prefix}` of `My/Cr` and a `{lowerprefix}` of `my/cr`. In general,
+using `{prefix}` is recommended over `{lowerprefix}`, but there are pros and
+cons to each choice. Using `{prefix}` on case-insensitive filesystems results
+in (harmless-but-inelegant) directory aliasing. For example, `crate` and
+`CrateTwo` have `{prefix}` values of `cr/at` and `Cr/at`; these are distinct on
+Unix machines but alias to the same directory on Windows. Using directories
+with normalized case avoids aliasing, but on case-sensitive filesystems it's
+harder to support older versions of Cargo that lack `{prefix}`/`{lowerprefix}`.
+For example, nginx rewrite rules can easily construct `{prefix}` but can't
+perform case-conversion to construct `{lowerprefix}`.
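+
+These layout rules translate directly into code. The following is a minimal
+sketch (hypothetical helper names, assuming ASCII package names; not Cargo's
+implementation) that computes the `{prefix}` marker and the index file path
+for a package:
+
+```rust
+/// Hypothetical sketch: compute the `{prefix}` directory marker for a package name.
+/// Assumes a non-empty, ASCII name.
+fn prefix(name: &str) -> String {
+    match name.len() {
+        1 => "1".to_string(),
+        2 => "2".to_string(),
+        3 => format!("3/{}", &name[..1]),
+        _ => format!("{}/{}", &name[0..2], &name[2..4]),
+    }
+}
+
+/// The index file path uses the lowercased name for both the directories and
+/// the filename; `{lowerprefix}` is `prefix` applied to the lowercased name.
+fn index_path(name: &str) -> String {
+    let lower = name.to_lowercase();
+    format!("{}/{}", prefix(&lower), lower)
+}
+
+fn main() {
+    assert_eq!(index_path("cargo"), "ca/rg/cargo");
+    assert_eq!(index_path("foo"), "3/f/foo");
+    assert_eq!(prefix("MyCrate"), "My/Cr");          // `{prefix}`
+    assert_eq!(index_path("MyCrate"), "my/cr/mycrate"); // uses `{lowerprefix}`
+}
+```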
+
+Registries should consider enforcing limitations on package names added to
+their index. Cargo itself allows names with any [alphanumeric], `-`, or `_`
+characters. [crates.io] imposes its own limitations, including the following:
+
+- Only allows ASCII characters.
+- Only allows alphanumeric, `-`, and `_` characters.
+- The first character must be alphabetic.
+- Performs case-insensitive collision detection.
+- Prevents names that differ only by `-` vs `_`.
+- Enforces a maximum length (64 characters).
+- Rejects reserved names, such as Windows special filenames like "nul".
+
+Registries should consider incorporating similar restrictions, and consider
+the security implications, such as [IDN homograph
+attacks](https://en.wikipedia.org/wiki/IDN_homograph_attack) and other
+concerns in [UTR36](https://www.unicode.org/reports/tr36/) and
+[UTS39](https://www.unicode.org/reports/tr39/).
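+
+For illustration only, a registry could enforce a crates.io-like subset of
+these restrictions with a check along the following lines. This is a
+hypothetical sketch, not crates.io's implementation, and the reserved-name
+list is intentionally incomplete.
+
+```rust
+/// Hypothetical sketch of crates.io-style name validation; not exhaustive.
+fn validate_name(name: &str) -> Result<(), &'static str> {
+    // Windows special filenames; the real list is longer.
+    const RESERVED: &[&str] = &["nul", "con", "prn", "aux"];
+    if !name.is_ascii() {
+        return Err("name must contain only ASCII characters");
+    }
+    if name.is_empty() || name.len() > 64 {
+        return Err("name must be between 1 and 64 characters long");
+    }
+    if !name.chars().next().unwrap().is_ascii_alphabetic() {
+        return Err("first character must be alphabetic");
+    }
+    if !name.chars().all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_') {
+        return Err("name may only contain alphanumeric, `-`, or `_` characters");
+    }
+    if RESERVED.contains(&name.to_ascii_lowercase().as_str()) {
+        return Err("name is a reserved Windows filename");
+    }
+    // Case-insensitive and `-` vs `_` collision detection would additionally
+    // compare a canonical form (lowercased, `_` folded to `-`) against the
+    // names already in the index.
+    Ok(())
+}
+```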
+
+Each line in a package file contains a JSON object that describes a published
+version of the package. The following is a pretty-printed example with comments
+explaining the format of the entry.
+
+```javascript
+{
+ // The name of the package.
+ // This must only contain alphanumeric, `-`, or `_` characters.
+ "name": "foo",
+ // The version of the package this row is describing.
+ // This must be a valid version number according to the Semantic
+ // Versioning 2.0.0 spec at https://semver.org/.
+ "vers": "0.1.0",
+ // Array of direct dependencies of the package.
+ "deps": [
+ {
+ // Name of the dependency.
+ // If the dependency is renamed from the original package name,
+ // this is the new name. The original package name is stored in
+ // the `package` field.
+ "name": "rand",
+ // The SemVer requirement for this dependency.
+ // This must be a valid version requirement defined at
+ // https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html.
+ "req": "^0.6",
+ // Array of features (as strings) enabled for this dependency.
+ "features": ["i128_support"],
+ // Boolean of whether or not this is an optional dependency.
+ "optional": false,
+ // Boolean of whether or not default features are enabled.
+ "default_features": true,
+ // The target platform for the dependency.
+ // null if not a target dependency.
+ // Otherwise, a string such as "cfg(windows)".
+ "target": null,
+ // The dependency kind.
+ // "dev", "build", or "normal".
+ // Note: this is a required field, but a small number of entries
+ // exist in the crates.io index with either a missing or null
+ // `kind` field due to implementation bugs.
+ "kind": "normal",
+ // The URL of the index of the registry where this dependency is
+ // from as a string. If not specified or null, it is assumed the
+ // dependency is in the current registry.
+ "registry": null,
+ // If the dependency is renamed, this is a string of the actual
+ // package name. If not specified or null, this dependency is not
+ // renamed.
+ "package": null,
+ }
+ ],
+ // A SHA256 checksum of the `.crate` file.
+ "cksum": "d867001db0e2b6e0496f9fac96930e2d42233ecd3ca0413e0753d4c7695d289c",
+ // Set of features defined for the package.
+ // Each feature maps to an array of features or dependencies it enables.
+ "features": {
+ "extras": ["rand/simd_support"]
+ },
+ // Boolean of whether or not this version has been yanked.
+ "yanked": false,
+ // The `links` string value from the package's manifest, or null if not
+ // specified. This field is optional and defaults to null.
+ "links": null,
+ // An unsigned 32-bit integer value indicating the schema version of this
+ // entry.
+ //
+  // If this is not specified, it should be interpreted as the default of 1.
+ //
+ // Cargo (starting with version 1.51) will ignore versions it does not
+ // recognize. This provides a method to safely introduce changes to index
+  // entries and allow older versions of cargo to ignore newer entries they
+  // don't understand. Versions older than 1.51 ignore this field, and
+ // thus may misinterpret the meaning of the index entry.
+ //
+ // The current values are:
+ //
+ // * 1: The schema as documented here, not including newer additions.
+ // This is honored in Rust version 1.51 and newer.
+ // * 2: The addition of the `features2` field.
+ // This is honored in Rust version 1.60 and newer.
+ "v": 2,
+ // This optional field contains features with new, extended syntax.
+ // Specifically, namespaced features (`dep:`) and weak dependencies
+ // (`pkg?/feat`).
+ //
+ // This is separated from `features` because versions older than 1.19
+ // will fail to load due to not being able to parse the new syntax, even
+ // with a `Cargo.lock` file.
+ //
+ // Cargo will merge any values listed here with the "features" field.
+ //
+ // If this field is included, the "v" field should be set to at least 2.
+ //
+ // Registries are not required to use this field for extended feature
+  // syntax; they are allowed to include those in the "features" field.
+ // Using this is only necessary if the registry wants to support cargo
+ // versions older than 1.19, which in practice is only crates.io since
+ // those older versions do not support other registries.
+ "features2": {
+ "serde": ["dep:serde", "chrono?/serde"]
+ }
+}
+```
+
+The JSON objects should not be modified after they are added except for the
+`yanked` field whose value may change at any time.
+
+> **Note**: The index JSON format has subtle differences from the JSON format of the [Publish API] and [`cargo metadata`].
+> If you are using one of those as a source to generate index entries, you are encouraged to carefully inspect the documentation differences between them.
+>
+> For the [Publish API], the differences are:
+>
+> * `deps`
+> * `name` --- When the dependency is [renamed] in `Cargo.toml`, the publish API puts the original package name in the `name` field and the aliased name in the `explicit_name_in_toml` field.
+> The index places the aliased name in the `name` field, and the original package name in the `package` field.
+> * `req` --- The Publish API field is called `version_req`.
+> * `cksum` --- The publish API does not specify the checksum, it must be computed by the registry before adding to the index.
+> * `features` --- Some features may be placed in the `features2` field.
+> Note: This is only a legacy requirement for [crates.io]; other registries should not need to bother with modifying the features map.
+> The `v` field indicates the presence of the `features2` field.
+> * The publish API includes several other fields, such as `description` and `readme`, which don't appear in the index.
+> These are intended to make it easier for a registry to obtain the metadata about the crate to display on a website without needing to extract and parse the `.crate` file.
+> This additional information is typically added to a database on the registry server.
+>
+> For [`cargo metadata`], the differences are:
+>
+> * `vers` --- The `cargo metadata` field is called `version`.
+> * `deps`
+> * `name` --- When the dependency is [renamed] in `Cargo.toml`, `cargo metadata` puts the original package name in the `name` field and the aliased name in the `rename` field.
+> The index places the aliased name in the `name` field, and the original package name in the `package` field.
+> * `default_features` --- The `cargo metadata` field is called `uses_default_features`.
+> * `registry` --- `cargo metadata` uses a value of `null` to indicate that the dependency comes from [crates.io].
+> The index uses a value of `null` to indicate that the dependency comes from the same registry as the index.
+> When creating an index entry, a registry other than [crates.io] should translate a value of `null` to be `https://github.com/rust-lang/crates.io-index` and translate a URL that matches the current index to be `null`.
+> * `cargo metadata` includes some extra fields, such as `source` and `path`.
+> * The index includes additional fields such as `yanked`, `cksum`, and `v`.
+
+[renamed]: specifying-dependencies.md#renaming-dependencies-in-cargotoml
+[Publish API]: registry-web-api.md#publish
+[`cargo metadata`]: ../commands/cargo-metadata.md
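+
+To make the dependency-field differences above concrete, here is a minimal
+sketch of translating one publish-API dependency into its index form. The
+types are hypothetical and cover only the fields whose names differ; a real
+entry has many more fields (`features`, `optional`, `kind`, and so on).
+
+```rust
+struct PublishDep {
+    name: String,                          // original package name
+    explicit_name_in_toml: Option<String>, // alias, if the dependency was renamed
+    version_req: String,
+}
+
+struct IndexDep {
+    name: String,            // alias if renamed, otherwise the package name
+    package: Option<String>, // original package name if renamed, otherwise null
+    req: String,             // the publish API calls this `version_req`
+}
+
+fn to_index_dep(d: PublishDep) -> IndexDep {
+    match d.explicit_name_in_toml {
+        // Renamed: the alias goes in `name`, the original name in `package`.
+        Some(alias) => IndexDep { name: alias, package: Some(d.name), req: d.version_req },
+        // Not renamed: `package` stays null.
+        None => IndexDep { name: d.name, package: None, req: d.version_req },
+    }
+}
+```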
+
+### Index Protocols
+Cargo supports two remote registry protocols: `git` and `sparse`. The `git` protocol
+stores index files in a git repository and the `sparse` protocol fetches individual
+files over HTTP.
+
+#### Git Protocol
+The git protocol has no protocol prefix in the index URL. For example, the git index URL
+for [crates.io] is `https://github.com/rust-lang/crates.io-index`.
+
+Cargo caches the git repository on disk so that it can efficiently incrementally fetch
+updates.
+
+#### Sparse Protocol
+The sparse protocol uses the `sparse+` protocol prefix in the registry URL. For example,
+the sparse index URL for [crates.io] is `sparse+https://index.crates.io/`.
+
+The sparse protocol downloads each index file using an individual HTTP request. Since
+this results in a large number of small HTTP requests, performance is significantly
+improved with a server that supports pipelining and HTTP/2.
+
+##### Caching
+Cargo caches the crate metadata files, and captures the `ETag` or `Last-Modified`
+HTTP header from the server for each entry. When refreshing crate metadata, Cargo
+sends the `If-None-Match` or `If-Modified-Since` header to allow the server to respond
+with HTTP 304 "Not Modified" if the local cache is valid, saving time and bandwidth.
+If both `ETag` and `Last-Modified` headers are present, Cargo uses the `ETag` only.
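+
+A minimal sketch of that client-side choice, using hypothetical types rather
+than Cargo's internal ones:
+
+```rust
+/// Hypothetical cached index entry with the validators captured from the server.
+struct CachedEntry {
+    etag: Option<String>,
+    last_modified: Option<String>,
+}
+
+/// Pick the conditional request header for revalidation.
+/// If both validators are present, `ETag` wins.
+fn revalidation_header(entry: &CachedEntry) -> Option<(&'static str, &str)> {
+    if let Some(etag) = &entry.etag {
+        Some(("If-None-Match", etag))
+    } else if let Some(lm) = &entry.last_modified {
+        Some(("If-Modified-Since", lm))
+    } else {
+        None // no validators cached; fetch unconditionally
+    }
+}
+```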
+
+##### Cache Invalidation
+If a registry is using some kind of CDN or proxy which caches access to the index files,
+then it is recommended that registries implement some form of cache invalidation when
+the files are updated. If these caches are not updated, then users may not be able to
+access new crates until the cache is cleared.
+
+##### Nonexistent Crates
+For crates that do not exist, the registry should respond with a 404 "Not Found", 410 "Gone"
+or 451 "Unavailable For Legal Reasons" code.
+
+##### Sparse Limitations
+Since the URL of the registry is stored in the lockfile, it's not recommended to offer
+a registry with both protocols. Discussion about a transition plan is ongoing in issue
+[#10964]. The [crates.io] registry is an exception, since Cargo internally substitutes
+the equivalent git URL when the sparse protocol is used.
+
+If a registry does offer both protocols, it's currently recommended to choose one protocol
+as the canonical protocol and use [source replacement] for the other protocol.
+
+
+[`cargo publish`]: ../commands/cargo-publish.md
+[alphanumeric]: ../../std/primitive.char.html#method.is_alphanumeric
+[crates.io]: https://crates.io/
+[source replacement]: ../reference/source-replacement.md
+[#10964]: https://github.com/rust-lang/cargo/issues/10964
diff --git a/src/tools/cargo/src/doc/src/reference/registry-web-api.md b/src/tools/cargo/src/doc/src/reference/registry-web-api.md
new file mode 100644
index 000000000..147ababd5
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/registry-web-api.md
@@ -0,0 +1,358 @@
+
+## Web API
+
+A registry may host a web API at the location defined in `config.json` to
+support any of the actions listed below.
+
+Cargo includes the `Authorization` header for requests that require
+authentication. The header value is the API token. The server should respond
+with a 403 response code if the token is not valid. Users are expected to
+visit the registry's website to obtain a token, and Cargo can store the token
+using the [`cargo login`] command, or by passing the token on the
+command-line.
+
+Responses use the 200 response code for success.
+Errors should use an appropriate response code, such as 404.
+Failure responses should have a JSON object with the following structure:
+
+```javascript
+{
+ // Array of errors to display to the user.
+ "errors": [
+ {
+ // The error message as a string.
+ "detail": "error message text"
+ }
+ ]
+}
+```
+
+If the response has this structure, Cargo will display the detailed message to the user, even if the response code is 200.
+If the response code indicates an error and the content does not have this structure, Cargo will display a message intended to help debug the server error.
+A server returning an `errors` object allows a registry to provide a more detailed or user-centric error message.
+
+For backwards compatibility, servers should ignore any unexpected query
+parameters or JSON fields. If a JSON field is missing, it should be assumed to
+be null. The endpoints are versioned with the `v1` component of the path, and
+Cargo is responsible for handling backwards compatibility fallbacks should any
+be required in the future.
+
+Cargo sets the following headers for all requests:
+
+- `Content-Type`: `application/json`
+- `Accept`: `application/json`
+- `User-Agent`: The Cargo version such as `cargo 1.32.0 (8610973aa
+ 2019-01-02)`. This may be modified by the user in a configuration value.
+ Added in 1.29.
+
+### Publish
+
+- Endpoint: `/api/v1/crates/new`
+- Method: PUT
+- Authorization: Included
+
+The publish endpoint is used to publish a new version of a crate. The server
+should validate the crate, make it available for download, and add it to the
+index.
+
+It is not required for the index to be updated before the successful response is sent.
+After a successful response, Cargo will poll the index for a short period of time to identify that the new crate has been added.
+If the crate does not appear in the index after a short period of time, then Cargo will display a warning letting the user know that the new crate is not yet available.
+
+The body of the data sent by Cargo is (see the sketch after this list):
+
+- 32-bit unsigned little-endian integer of the length of JSON data.
+- Metadata of the package as a JSON object.
+- 32-bit unsigned little-endian integer of the length of the `.crate` file.
+- The `.crate` file.
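+
+A minimal sketch of assembling that body with a hypothetical helper (plain
+`std`, not Cargo's implementation):
+
+```rust
+/// Hypothetical sketch: frame the publish request body as described above.
+/// `metadata` is the JSON object shown below, already serialized to bytes.
+fn publish_body(metadata: &[u8], crate_file: &[u8]) -> Vec<u8> {
+    let mut body = Vec::with_capacity(8 + metadata.len() + crate_file.len());
+    body.extend_from_slice(&(metadata.len() as u32).to_le_bytes());   // JSON length, u32 LE
+    body.extend_from_slice(metadata);                                 // JSON metadata
+    body.extend_from_slice(&(crate_file.len() as u32).to_le_bytes()); // `.crate` length, u32 LE
+    body.extend_from_slice(crate_file);                               // `.crate` file contents
+    body
+}
+```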
+
+The following is a commented example of the JSON object. Notes about some of
+the restrictions imposed by [crates.io] are included only to illustrate the
+kinds of validation a registry may want to perform, and should not be
+considered an exhaustive list of the restrictions [crates.io] imposes.
+
+```javascript
+{
+ // The name of the package.
+ "name": "foo",
+ // The version of the package being published.
+ "vers": "0.1.0",
+ // Array of direct dependencies of the package.
+ "deps": [
+ {
+ // Name of the dependency.
+ // If the dependency is renamed from the original package name,
+ // this is the original name. The new package name is stored in
+ // the `explicit_name_in_toml` field.
+ "name": "rand",
+ // The semver requirement for this dependency.
+ "version_req": "^0.6",
+ // Array of features (as strings) enabled for this dependency.
+ "features": ["i128_support"],
+ // Boolean of whether or not this is an optional dependency.
+ "optional": false,
+ // Boolean of whether or not default features are enabled.
+ "default_features": true,
+ // The target platform for the dependency.
+ // null if not a target dependency.
+ // Otherwise, a string such as "cfg(windows)".
+ "target": null,
+ // The dependency kind.
+ // "dev", "build", or "normal".
+ "kind": "normal",
+ // The URL of the index of the registry where this dependency is
+ // from as a string. If not specified or null, it is assumed the
+ // dependency is in the current registry.
+ "registry": null,
+ // If the dependency is renamed, this is a string of the new
+ // package name. If not specified or null, this dependency is not
+ // renamed.
+ "explicit_name_in_toml": null,
+ }
+ ],
+ // Set of features defined for the package.
+ // Each feature maps to an array of features or dependencies it enables.
+ // Cargo does not impose limitations on feature names, but crates.io
+ // requires alphanumeric ASCII, `_` or `-` characters.
+ "features": {
+ "extras": ["rand/simd_support"]
+ },
+ // List of strings of the authors.
+ // May be empty.
+ "authors": ["Alice <a@example.com>"],
+ // Description field from the manifest.
+ // May be null. crates.io requires at least some content.
+ "description": null,
+ // String of the URL to the website for this package's documentation.
+ // May be null.
+ "documentation": null,
+ // String of the URL to the website for this package's home page.
+ // May be null.
+ "homepage": null,
+ // String of the content of the README file.
+ // May be null.
+ "readme": null,
+ // String of a relative path to a README file in the crate.
+ // May be null.
+ "readme_file": null,
+ // Array of strings of keywords for the package.
+ "keywords": [],
+ // Array of strings of categories for the package.
+ "categories": [],
+ // String of the license for the package.
+ // May be null. crates.io requires either `license` or `license_file` to be set.
+ "license": null,
+ // String of a relative path to a license file in the crate.
+ // May be null.
+ "license_file": null,
+ // String of the URL to the website for the source repository of this package.
+ // May be null.
+ "repository": null,
+ // Optional object of "status" badges. Each value is an object of
+ // arbitrary string to string mappings.
+ // crates.io has special interpretation of the format of the badges.
+ "badges": {
+ "travis-ci": {
+ "branch": "master",
+ "repository": "rust-lang/cargo"
+ }
+ },
+ // The `links` string value from the package's manifest, or null if not
+ // specified. This field is optional and defaults to null.
+ "links": null
+}
+```
+
+A successful response includes the JSON object:
+
+```javascript
+{
+ // Optional object of warnings to display to the user.
+ "warnings": {
+ // Array of strings of categories that are invalid and ignored.
+ "invalid_categories": [],
+ // Array of strings of badge names that are invalid and ignored.
+ "invalid_badges": [],
+ // Array of strings of arbitrary warnings to display to the user.
+ "other": []
+ }
+}
+```
+
+### Yank
+
+- Endpoint: `/api/v1/crates/{crate_name}/{version}/yank`
+- Method: DELETE
+- Authorization: Included
+
+The yank endpoint will set the `yanked` field of the given version of a crate to
+`true` in the index.
+
+A successful response includes the JSON object:
+
+```javascript
+{
+  // Indicates the yank succeeded, always true.
+ "ok": true,
+}
+```
+
+### Unyank
+
+- Endpoint: `/api/v1/crates/{crate_name}/{version}/unyank`
+- Method: PUT
+- Authorization: Included
+
+The unyank endpoint will set the `yanked` field of the given version of a crate
+to `false` in the index.
+
+A successful response includes the JSON object:
+
+```javascript
+{
+  // Indicates the unyank succeeded, always true.
+ "ok": true,
+}
+```
+
+### Owners
+
+Cargo does not have an inherent notion of users and owners, but it does
+provide the `owner` command to assist in managing who has authorization to
+control a crate. It is up to the registry to decide exactly how users and
+owners are handled. See the [publishing documentation] for a description of
+how [crates.io] handles owners via GitHub users and teams.
+
+#### Owners: List
+
+- Endpoint: `/api/v1/crates/{crate_name}/owners`
+- Method: GET
+- Authorization: Included
+
+The owners endpoint returns a list of owners of the crate.
+
+A successful response includes the JSON object:
+
+```javascript
+{
+ // Array of owners of the crate.
+ "users": [
+ {
+      // Unique unsigned 32-bit integer ID of the owner.
+ "id": 70,
+ // The unique username of the owner.
+ "login": "github:rust-lang:core",
+ // Name of the owner.
+ // This is optional and may be null.
+ "name": "Core",
+ }
+ ]
+}
+```
+
+#### Owners: Add
+
+- Endpoint: `/api/v1/crates/{crate_name}/owners`
+- Method: PUT
+- Authorization: Included
+
+A PUT request will send a request to the registry to add a new owner to a
+crate. It is up to the registry how to handle the request. For example,
+[crates.io] sends an invite to the user that they must accept before being
+added.
+
+The request should include the following JSON object:
+
+```javascript
+{
+ // Array of `login` strings of owners to add.
+ "users": ["login_name"]
+}
+```
+
+A successful response includes the JSON object:
+
+```javascript
+{
+ // Indicates the add succeeded, always true.
+ "ok": true,
+ // A string to be displayed to the user.
+ "msg": "user ehuss has been invited to be an owner of crate cargo"
+}
+```
+
+#### Owners: Remove
+
+- Endpoint: `/api/v1/crates/{crate_name}/owners`
+- Method: DELETE
+- Authorization: Included
+
+A DELETE request will remove an owner from a crate. The request should include
+the following JSON object:
+
+```javascript
+{
+ // Array of `login` strings of owners to remove.
+ "users": ["login_name"]
+}
+```
+
+A successful response includes the JSON object:
+
+```javascript
+{
+ // Indicates the remove succeeded, always true.
+ "ok": true
+}
+```
+
+### Search
+
+- Endpoint: `/api/v1/crates`
+- Method: GET
+- Query Parameters:
+ - `q`: The search query string.
+ - `per_page`: Number of results, default 10, max 100.
+
+The search request will perform a search for crates, using criteria defined on
+the server.
+
+A successful response includes the JSON object:
+
+```javascript
+{
+ // Array of results.
+ "crates": [
+ {
+ // Name of the crate.
+ "name": "rand",
+ // The highest version available.
+ "max_version": "0.6.1",
+ // Textual description of the crate.
+ "description": "Random number generators and other randomness functionality.\n",
+ }
+ ],
+ "meta": {
+ // Total number of results available on the server.
+ "total": 119
+ }
+}
+```
+
+### Login
+
+- Endpoint: `/me`
+
+The "login" endpoint is not an actual API request. It exists solely for the
+[`cargo login`] command to display a URL to instruct a user to visit in a web
+browser to log in and retrieve an API token.
+
+[`cargo login`]: ../commands/cargo-login.md
+[`cargo package`]: ../commands/cargo-package.md
+[`cargo publish`]: ../commands/cargo-publish.md
+[alphanumeric]: ../../std/primitive.char.html#method.is_alphanumeric
+[config]: config.md
+[crates.io]: https://crates.io/
+[publishing documentation]: publishing.md#cargo-owner
diff --git a/src/tools/cargo/src/doc/src/reference/resolver.md b/src/tools/cargo/src/doc/src/reference/resolver.md
new file mode 100644
index 000000000..af02d6cf2
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/resolver.md
@@ -0,0 +1,558 @@
+# Dependency Resolution
+
+One of Cargo's primary tasks is to determine the versions of dependencies to
+use based on the version requirements specified in each package. This process
+is called "dependency resolution" and is performed by the "resolver". The
+result of the resolution is stored in the `Cargo.lock` file which "locks" the
+dependencies to specific versions, and keeps them fixed over time.
+
+The resolver attempts to unify common dependencies while considering possibly
+conflicting requirements. It turns out, however, that in many cases there is no
+single "best" dependency resolution, and so the resolver must use heuristics to
+choose a preferred solution. The sections below provide some details on how
+requirements are handled, and how to work with the resolver.
+
+See the chapter [Specifying Dependencies] for more details about how
+dependency requirements are specified.
+
+The [`cargo tree`] command can be used to visualize the result of the
+resolver.
+
+[Specifying Dependencies]: specifying-dependencies.md
+[`cargo tree`]: ../commands/cargo-tree.md
+
+## SemVer compatibility
+
+Cargo uses [SemVer] for specifying version numbers. This establishes a common
+convention for what is compatible between different versions of a package. See
+the [SemVer Compatibility] chapter for guidance on what is considered a
+"compatible" change. This notion of "compatibility" is important because Cargo
+assumes it should be safe to update a dependency within a compatibility range
+without breaking the build.
+
+Versions are considered compatible if their left-most non-zero
+major/minor/patch component is the same. For example, `1.0.3` and `1.1.0` are
+considered compatible, and thus it should be safe to update from the older
+release to the newer one. However, an update from `1.1.0` to `2.0.0` would not
+be allowed to be made automatically. This convention also applies to versions
+with leading zeros. For example, `0.1.0` and `0.1.2` are compatible, but
+`0.1.0` and `0.2.0` are not. Similarly, `0.0.1` and `0.0.2` are not
+compatible.
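+
+This convention can be expressed compactly in code. The following is a sketch
+with a hypothetical `compatible` function; Cargo itself uses the `semver`
+crate rather than this simplified form.
+
+```rust
+/// Hypothetical sketch of Cargo's compatibility convention: two versions are
+/// compatible when their left-most non-zero major/minor/patch component matches.
+fn compatible((maj_a, min_a, pat_a): (u64, u64, u64), (maj_b, min_b, pat_b): (u64, u64, u64)) -> bool {
+    if maj_a != 0 || maj_b != 0 {
+        maj_a == maj_b
+    } else if min_a != 0 || min_b != 0 {
+        min_a == min_b
+    } else {
+        pat_a == pat_b
+    }
+}
+
+fn main() {
+    assert!(compatible((1, 0, 3), (1, 1, 0)));  // 1.0.3 and 1.1.0 are compatible
+    assert!(!compatible((1, 1, 0), (2, 0, 0))); // a major bump is incompatible
+    assert!(compatible((0, 1, 0), (0, 1, 2)));  // 0.1.0 and 0.1.2 are compatible
+    assert!(!compatible((0, 1, 0), (0, 2, 0))); // 0.1.x and 0.2.x are not
+    assert!(!compatible((0, 0, 1), (0, 0, 2))); // 0.0.x versions are never compatible
+}
+```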
+
+As a quick refresher, the
+[*version requirement* syntax][Specifying Dependencies] Cargo uses for
+dependencies is:
+
+Requirement | Example | Equivalence | Description
+------------|---------|-------------|-------------
+Caret | `1.2.3` or `^1.2.3` | <code>>=1.2.3,&nbsp;<2.0.0</code> | Any SemVer-compatible version of at least the given value.
+Tilde | `~1.2` | <code>>=1.2.0,&nbsp;<1.3.0</code> | Minimum version, with restricted compatibility range.
+Wildcard | `1.*` | <code>>=1.0.0,&nbsp;<2.0.0</code> | Any version in the `*` position.
+Equals | `=1.2.3` | <code>=1.2.3</code> | Exactly the specified version only.
+Comparison | `>1.1` | <code>>=1.2.0</code> | Naive numeric comparison of specified digits.
+Compound | <code>>=1.2,&nbsp;<1.5</code> | <code>>=1.2.0,&nbsp;<1.5.0</code> | Multiple requirements that must be simultaneously satisfied.
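+
+For reference, the third-party `semver` crate (which Cargo uses internally)
+implements these requirement rules. A brief illustration, assuming that crate
+is available as a dependency:
+
+```rust
+use semver::{Version, VersionReq};
+
+fn main() {
+    // A bare requirement like "1.2.3" is a caret requirement: >=1.2.3, <2.0.0.
+    let caret = VersionReq::parse("1.2.3").unwrap();
+    assert!(caret.matches(&Version::parse("1.4.0").unwrap()));
+    assert!(!caret.matches(&Version::parse("2.0.0").unwrap()));
+
+    // An equals requirement matches exactly one version.
+    let exact = VersionReq::parse("=1.2.3").unwrap();
+    assert!(!exact.matches(&Version::parse("1.2.4").unwrap()));
+}
+```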
+
+When multiple packages specify a dependency for a common package, the resolver
+attempts to ensure that they use the same version of that common package, as
+long as they are within a SemVer compatibility range. It also attempts to use
+the greatest version currently available within that compatibility range. For
+example, if there are two packages in the resolve graph with the following
+requirements:
+
+```toml
+# Package A
+[dependencies]
+bitflags = "1.0"
+
+# Package B
+[dependencies]
+bitflags = "1.1"
+```
+
+If at the time the `Cargo.lock` file is generated, the greatest version of
+`bitflags` is `1.2.1`, then both packages will use `1.2.1` because it is the
+greatest within the compatibility range. If `2.0.0` is published, it will
+still use `1.2.1` because `2.0.0` is considered incompatible.
+
+If multiple packages have a common dependency with semver-incompatible
+versions, then Cargo will allow this, but will build two separate copies of
+the dependency. For example:
+
+```toml
+# Package A
+[dependencies]
+rand = "0.7"
+
+# Package B
+[dependencies]
+rand = "0.6"
+```
+
+The above will result in Package A using the greatest `0.7` release (`0.7.3`
+at the time of this writing) and Package B will use the greatest `0.6` release
+(`0.6.5` for example). This can lead to potential problems, see the
+[Version-incompatibility hazards] section for more details.
+
+Multiple versions within the same compatibility range are not allowed; the
+resolver will return an error if the graph is constrained to two different
+versions within one compatibility range. For example, if there are two packages in the
+resolve graph with the following requirements:
+
+```toml
+# Package A
+[dependencies]
+log = "=0.4.11"
+
+# Package B
+[dependencies]
+log = "=0.4.8"
+```
+
+The above will fail because it is not allowed to have two separate copies of
+the `0.4` release of the `log` package.
+
+[SemVer]: https://semver.org/
+[SemVer Compatibility]: semver.md
+[Version-incompatibility hazards]: #version-incompatibility-hazards
+
+### Version-incompatibility hazards
+
+When multiple versions of a crate appear in the resolve graph, this can cause
+problems when types from those crates are exposed by the crates using them.
+This is because the types and items are considered different by the Rust
+compiler, even if they have the same name. Libraries should take care when
+publishing a SemVer-incompatible version (for example, publishing `2.0.0`
+after `1.0.0` has been in use), particularly for libraries that are widely
+used.
+
+The "[semver trick]" is a workaround for this problem of publishing a breaking
+change while retaining compatibility with older versions. The linked page goes
+into detail about what the problem is and how to address it. In short, when a
+library wants to publish a SemVer-breaking release, publish the new release,
+and also publish a point release of the previous version that reexports the
+types from the newer version.
+
+These incompatibilities usually manifest as a compile-time error, but
+sometimes they will only appear as a runtime misbehavior. For example, let's
+say there is a common library named `foo` that ends up appearing with both
+version `1.0.0` and `2.0.0` in the resolve graph. If [`downcast_ref`] is used
+on an object created by a library using version `1.0.0`, and the code calling
+`downcast_ref` is downcasting to a type from version `2.0.0`, the downcast
+will fail at runtime.
+
+If you have multiple versions of a library, it is important to make sure you
+are using them correctly, especially if it is ever possible for the
+types from different versions to be used together. The [`cargo tree
+-d`][`cargo tree`] command can be used to identify duplicate versions and
+where they come from. Similarly, it is important to consider the impact on the
+ecosystem if you publish a SemVer-incompatible version of a popular library.
+
+[semver trick]: https://github.com/dtolnay/semver-trick
+[`downcast_ref`]: ../../std/any/trait.Any.html#method.downcast_ref
+
+### Pre-releases
+
+SemVer has the concept of "pre-releases" with a dash in the version, such as
+`1.0.0-alpha`, or `1.0.0-beta`. Cargo will avoid automatically using
+pre-releases unless explicitly asked. For example, if `1.0.0-alpha` of package
+`foo` is published, then a requirement of `foo = "1.0"` will *not* match, and
+will return an error. The pre-release must be specified, such as `foo =
+"1.0.0-alpha"`. Similarly [`cargo install`] will avoid pre-releases unless
+explicitly asked to install one.
+
+Cargo allows "newer" pre-releases to be used automatically. For example, if
+`1.0.0-beta` is published, then a requirement `foo = "1.0.0-alpha"` will allow
+updating to the `beta` version. Beware that pre-release versions can be
+unstable, and as such care should be taken when using them. Some projects may
+choose to publish breaking changes between pre-release versions. It is
+recommended to not use pre-release dependencies in a library if your library
+is not also a pre-release. Care should also be taken when updating your
+`Cargo.lock`, and be prepared if a pre-release update causes issues.
+
+The pre-release tag may be separated with periods to distinguish separate
+components. Numeric components will use numeric comparison. For example,
+`1.0.0-alpha.4` will use numeric comparison for the `4` component. That means
+that if `1.0.0-alpha.11` is published, that will be chosen as the greatest
+release. Non-numeric components are compared lexicographically.
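+
+The ordering rules can be checked with the third-party `semver` crate
+(assumed as a dependency here):
+
+```rust
+use semver::Version;
+
+fn main() {
+    // Numeric pre-release components compare numerically: 11 > 4.
+    assert!(Version::parse("1.0.0-alpha.11").unwrap() > Version::parse("1.0.0-alpha.4").unwrap());
+    // Non-numeric components compare lexicographically: "beta" > "alpha".
+    assert!(Version::parse("1.0.0-beta").unwrap() > Version::parse("1.0.0-alpha").unwrap());
+    // Any pre-release sorts below the corresponding release.
+    assert!(Version::parse("1.0.0-beta").unwrap() < Version::parse("1.0.0").unwrap());
+}
+```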
+
+[`cargo install`]: ../commands/cargo-install.md
+
+### Version metadata
+
+SemVer has the concept of "version metadata" with a plus in the version, such
+as `1.0.0+21AF26D3`. This metadata is usually ignored, and should not be used
+in a version requirement. You should never publish multiple versions that
+differ only in the metadata tag (note: due to a [known issue],
+[crates.io] currently permits this).
+
+[known issue]: https://github.com/rust-lang/crates.io/issues/1059
+[crates.io]: https://crates.io/
+
+## Other constraints
+
+Version requirements aren't the only constraint that the resolver considers
+when selecting and unifying dependencies. The following sections cover some of
+the other constraints that can affect resolution.
+
+### Features
+
+For the purpose of generating `Cargo.lock`, the resolver builds the dependency
+graph as if all [features] of all [workspace] members are enabled. This
+ensures that any optional dependencies are available and properly resolved
+with the rest of the graph when features are added or removed with the
+[`--features` command-line flag](features.md#command-line-feature-options).
+The resolver runs a second time to determine the actual features used when
+*compiling* a crate, based on the features selected on the command-line.
+
+Dependencies are resolved with the union of all features enabled on them. For
+example, if one package depends on the [`im`] package with the [`serde`
+dependency] enabled and another package depends on it with the [`rayon`
+dependency] enabled, then `im` will be built with both features enabled, and
+the `serde` and `rayon` crates will be included in the resolve graph. If no
+packages depend on `im` with those features, then those optional dependencies
+will be ignored, and they will not affect resolution.
+
+When building multiple packages in a workspace (such as with `--workspace` or
+multiple `-p` flags), the features of the dependencies of all of those
+packages are unified. If you have a circumstance where you want to avoid that
+unification for different workspace members, you will need to build them via
+separate `cargo` invocations.
+
+The resolver will skip over versions of packages that are missing required
+features. For example, if a package depends on version `^1` of [`regex`] with
+the [`perf` feature], then the oldest version it can select is `1.3.0`,
+because versions prior to that did not contain the `perf` feature. Similarly,
+if a feature is removed from a new release, then packages that require that
+feature will be stuck on the older releases that contain that feature. It is
+discouraged to remove features in a SemVer-compatible release. Beware that
+optional dependencies also define an implicit feature, so removing an optional
+dependency or making it non-optional can cause problems, see [removing an
+optional dependency].
+
+[`im`]: https://crates.io/crates/im
+[`perf` feature]: https://github.com/rust-lang/regex/blob/1.3.0/Cargo.toml#L56
+[`rayon` dependency]: https://github.com/bodil/im-rs/blob/v15.0.0/Cargo.toml#L47
+[`regex`]: https://crates.io/crates/regex
+[`serde` dependency]: https://github.com/bodil/im-rs/blob/v15.0.0/Cargo.toml#L46
+[features]: features.md
+[removing an optional dependency]: semver.md#cargo-remove-opt-dep
+[workspace]: workspaces.md
+
+#### Feature resolver version 2
+
+When `resolver = "2"` is specified in `Cargo.toml` (see [resolver
+versions](#resolver-versions) below), a different feature resolver is used
+which uses a different algorithm for unifying features. The version `"1"`
+resolver will unify features for a package no matter where it is specified.
+The version `"2"` resolver will avoid unifying features in the following
+situations:
+
+* Features for target-specific dependencies are not enabled if the target is
+ not currently being built. For example:
+
+ ```toml
+ [dependencies.common]
+ version = "1.0"
+ features = ["f1"]
+
+ [target.'cfg(windows)'.dependencies.common]
+ version = "1.0"
+ features = ["f2"]
+ ```
+
+ When building this example for a non-Windows platform, the `f2` feature will
+ *not* be enabled.
+
+* Features enabled on [build-dependencies] or proc-macros will not be unified
+ when those same dependencies are used as a normal dependency. For example:
+
+ ```toml
+ [dependencies]
+ log = "0.4"
+
+ [build-dependencies]
+ log = {version = "0.4", features=['std']}
+ ```
+
+ When building the build script, the `log` crate will be built with the `std`
+ feature. When building the library of your package, it will not enable the
+ feature.
+
+* Features enabled on [dev-dependencies] will not be unified when those same
+ dependencies are used as a normal dependency, unless those dev-dependencies
+ are currently being built. For example:
+
+ ```toml
+ [dependencies]
+ serde = {version = "1.0", default-features = false}
+
+ [dev-dependencies]
+ serde = {version = "1.0", features = ["std"]}
+ ```
+
+ In this example, the library will normally link against `serde` without the
+ `std` feature. However, when built as a test or example, it will include the
+ `std` feature. For example, `cargo test` or `cargo build --all-targets` will
+ unify these features. Note that dev-dependencies in dependencies are always
+  ignored; this is only relevant for the top-level package or workspace
+ members.
+
+[build-dependencies]: specifying-dependencies.md#build-dependencies
+[dev-dependencies]: specifying-dependencies.md#development-dependencies
+[resolver-field]: features.md#resolver-versions
+
+### `links`
+
+The [`links` field] is used to ensure only one copy of a native library is
+linked into a binary. The resolver will attempt to find a graph where there is
+only one instance of each `links` name. If it is unable to find a graph that
+satisfies that constraint, it will return an error.
+
+For example, it is an error if one package depends on [`libgit2-sys`] version
+`0.11` and another depends on `0.12`, because Cargo is unable to unify those
+versions, yet both link to the `git2` native library. Due to this requirement, it
+is encouraged to be very careful when making SemVer-incompatible releases with
+the `links` field if your library is in common use.
+
+[`links` field]: manifest.md#the-links-field
+[`libgit2-sys`]: https://crates.io/crates/libgit2-sys
+
+### Yanked versions
+
+[Yanked releases][yank] are those that have been marked as versions that should not be
+used. When the resolver is building the graph, it will ignore all yanked
+releases unless they already exist in the `Cargo.lock` file.
+
+[yank]: publishing.md#cargo-yank
+
+## Dependency updates
+
+Dependency resolution is automatically performed by all Cargo commands that
+need to know about the dependency graph. For example, [`cargo build`] will run
+the resolver to discover all the dependencies to build. After the first time
+it runs, the result is stored in the `Cargo.lock` file. Subsequent commands
+will run the resolver, keeping dependencies locked to the versions in
+`Cargo.lock` *if it can*.
+
+If the dependency list in `Cargo.toml` has been modified, for example changing
+the version of a dependency from `1.0` to `2.0`, then the resolver will select
+a new version for that dependency that matches the new requirements. If that
+new dependency introduces new requirements, those new requirements may also
+trigger additional updates. The `Cargo.lock` file will be updated with the new
+result. The `--locked` or `--frozen` flags can be used to change this behavior
+to prevent automatic updates when requirements change, and return an error
+instead.
+
+[`cargo update`] can be used to update the entries in `Cargo.lock` when new
+versions are published. Without any options, it will attempt to update all
+packages in the lock file. The `-p` flag can be used to target the update for
+a specific package, and other flags such as `--aggressive` or `--precise` can
+be used to control how versions are selected.
+
+[`cargo build`]: ../commands/cargo-build.md
+[`cargo update`]: ../commands/cargo-update.md
+
+## Overrides
+
+Cargo has several mechanisms to override dependencies within the graph. The
+[Overriding Dependencies] chapter goes into detail on how to use overrides.
+The overrides appear as an overlay to a registry, replacing the patched
+version with the new entry. Otherwise, resolution is performed like normal.
+
+[Overriding Dependencies]: overriding-dependencies.md
+
+## Dependency kinds
+
+There are three kinds of dependencies in a package: normal, [build], and
+[dev][dev-dependencies]. For the most part these are all treated the same from
+the perspective of the resolver. One difference is that dev-dependencies for
+non-workspace members are always ignored, and do not influence resolution.
+
+[Platform-specific dependencies] with the `[target]` table are resolved as if
+all platforms are enabled. In other words, the resolver ignores the platform
+or `cfg` expression.
+
+[build]: specifying-dependencies.md#build-dependencies
+[dev-dependencies]: specifying-dependencies.md#development-dependencies
+[Platform-specific dependencies]: specifying-dependencies.md#platform-specific-dependencies
+
+### dev-dependency cycles
+
+Usually the resolver does not allow cycles in the graph, but it does allow
+them for [dev-dependencies]. For example, project "foo" has a dev-dependency
+on "bar", which has a normal dependency on "foo" (usually as a "path"
+dependency). This is allowed because there isn't really a cycle from the
+perspective of the build artifacts. In this example, the "foo" library is
+built (which does not need "bar" because "bar" is only used for tests), and
+then "bar" can be built depending on "foo", then the "foo" tests can be built
+linking to "bar".
+
+Beware that this can lead to confusing errors. In the case of building library
+unit tests, there are actually two copies of the library linked into the final
+test binary: the copy that "bar" was linked against, and the copy being built that
+contains the unit tests. Similar to the issues highlighted in the
+[Version-incompatibility hazards] section, the types between the two are not
+compatible. Be careful when exposing types of "foo" from "bar" in this
+situation, since the "foo" unit tests won't treat them the same as the local
+types.
+
+If possible, try to split your package into multiple packages and restructure
+it so that it remains strictly acyclic.
+
+## Resolver versions
+
+A different feature resolver algorithm can be used by specifying the resolver
+version in `Cargo.toml` like this:
+
+```toml
+[package]
+name = "my-package"
+version = "1.0.0"
+resolver = "2"
+```
+
+The version `"1"` resolver is the original resolver that shipped with Cargo up to version 1.50.
+The default is `"2"` if the root package specifies [`edition = "2021"`](manifest.md#the-edition-field) or a newer edition.
+Otherwise the default is `"1"`.
+
+The version `"2"` resolver introduces changes in [feature
+unification](#features). See the [features chapter][features-2] for more
+details.
+
+The resolver is a global option that affects the entire workspace. The
+`resolver` version in dependencies is ignored, only the value in the top-level
+package will be used. If using a [virtual workspace], the version should be
+specified in the `[workspace]` table, for example:
+
+```toml
+[workspace]
+members = ["member1", "member2"]
+resolver = "2"
+```
+
+[virtual workspace]: workspaces.md#virtual-workspace
+[features-2]: features.md#feature-resolver-version-2
+
+## Recommendations
+
+The following are some recommendations for setting the version within your
+package, and for specifying dependency requirements. These are general
+guidelines that should apply to common situations, but of course some
+situations may require specifying unusual requirements.
+
+* Follow the [SemVer guidelines] when deciding how to update your version
+ number, and whether or not you will need to make a SemVer-incompatible
+ version change.
+* Use caret requirements for dependencies, such as `"1.2.3"`, for most
+ situations. This ensures that the resolver can be maximally flexible in
+ choosing a version while maintaining build compatibility.
+ * Specify all three components with the version you are currently using.
+ This helps set the minimum version that will be used, and ensures that
+ other users won't end up with an older version of the dependency that
+ might be missing something that your package requires.
+ * Avoid `*` requirements, as they are not allowed on [crates.io], and they
+ can pull in SemVer-breaking changes during a normal `cargo update`.
+ * Avoid overly broad version requirements. For example, `>=2.0.0` can pull
+ in any SemVer-incompatible version, like version `5.0.0`, which can result
+ in broken builds in the future.
+ * Avoid overly narrow version requirements if possible. For example, if you
+ specify a tilde requirement like `bar="~1.3"`, and another package
+ specifies a requirement of `bar="1.4"`, this will fail to resolve, even
+ though minor releases should be compatible.
+* Try to keep the dependency versions up-to-date with the actual minimum
+ versions that your library requires. For example, if you have a requirement
+ of `bar="1.0.12"`, and then in a future release you start using new features
+ added in the `1.1.0` release of "bar", update your dependency requirement to
+ `bar="1.1.0"`.
+
+ If you fail to do this, it may not be immediately obvious because Cargo can
+ opportunistically choose the newest version when you run a blanket `cargo
+ update`. However, if another user depends on your library, and runs `cargo
+ update -p your-library`, it will *not* automatically update "bar" if it is
+ locked in their `Cargo.lock`. It will only update "bar" in that situation if
+ the dependency declaration is also updated. Failure to do so can cause
+ confusing build errors for the user using `cargo update -p`.
+* If two packages are tightly coupled, then an `=` dependency requirement may
+ help ensure that they stay in sync. For example, a library with a companion
+ proc-macro library will sometimes make assumptions between the two libraries
+ that won't work well if the two are out of sync (and it is never expected to
+ use the two libraries independently). The parent library can use an `=`
+ requirement on the proc-macro, and re-export the macros for easy access.
+* `0.0.x` versions can be used for packages that are permanently unstable.
+
+In general, the stricter you make the dependency requirements, the more likely
+it will be for the resolver to fail. Conversely, if you use requirements that
+are too loose, it may be possible for new versions to be published that will
+break the build.
+
+[SemVer guidelines]: semver.md
+
+## Troubleshooting
+
+The following illustrates some problems you may experience, and some possible
+solutions.
+
+### Unexpected dependency duplication
+
+The resolver algorithm may converge on a solution that includes two copies of a
+dependency when one would suffice. For example:
+
+```toml
+# Package A
+[dependencies]
+rand = "0.7"
+
+# Package B
+[dependencies]
+rand = ">=0.6" # note: open requirements such as this are discouraged
+```
+
+In this example, Cargo may build two copies of the `rand` crate, even though a
+single copy at version `0.7.3` would meet all requirements. This is because the
+resolver's algorithm favors building the latest available version of `rand` for
+Package B, which is `0.8.5` at the time of this writing, and that is
+incompatible with Package A's specification. The resolver's algorithm does not
+currently attempt to "deduplicate" in this situation.
+
+The use of open-ended version requirements like `>=0.6` is discouraged in Cargo.
+However, if you run into this situation, the [`cargo update`] command with the
+`--precise` flag can be used to manually remove such duplications.
+
+[`cargo update`]: ../commands/cargo-update.md
+
+
+### SemVer-breaking patch release breaks the build
+
+Sometimes a project may inadvertently publish a point release with a
+SemVer-breaking change. When users update with `cargo update`, they will pick
+up this new release, and then their build may break. In this situation, it is
+recommended that the project should [yank] the release, and either remove the
+SemVer-breaking change, or publish it as a new SemVer-major version increase.
+
+If the change happened in a third-party project, try to (politely!) work with
+the project to resolve the issue, if possible.
+
+While waiting for the release to be yanked, some workarounds depend on the
+circumstances:
+
+* If your project is the end product (such as a binary executable), just avoid
+ updating the offending package in `Cargo.lock`. This can be done with the
+ `--precise` flag in [`cargo update`].
+* If you publish a binary on [crates.io], then you can temporarily add an `=`
+ requirement to force the dependency to a specific good version.
+ * Binary projects can alternatively recommend users to use the `--locked`
+ flag with [`cargo install`] to use the original `Cargo.lock` that contains
+ the known good version.
+* Libraries may also consider publishing a temporary new release with stricter
+ requirements that avoid the troublesome dependency. You may want to consider
+ using range requirements (instead of `=`) to avoid overly-strict
+ requirements that may conflict with other packages using the same
+ dependency. Once the problem has been resolved, you can publish another
+ point release that relaxes the dependency back to a caret requirement.
+* If it looks like the third-party project is unable or unwilling to yank the
+ release, then one option is to update your code to be compatible with the
+ changes, and update the dependency requirement to set the minimum version to
+ the new release. You will also need to consider if this is a SemVer-breaking
+ change of your own library, for example if it exposes types from the
+ dependency.
+
diff --git a/src/tools/cargo/src/doc/src/reference/running-a-registry.md b/src/tools/cargo/src/doc/src/reference/running-a-registry.md
new file mode 100644
index 000000000..144f1ee6a
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/running-a-registry.md
@@ -0,0 +1,20 @@
+## Running a Registry
+
+A minimal registry can be implemented by having a git repository that contains
+an index, and a server that contains the compressed `.crate` files created by
+[`cargo package`]. Users won't be able to use Cargo to publish to it, but this
+may be sufficient for closed environments. The index format is described in
+[Registry Index].
+
+A full-featured registry that supports publishing will additionally need to
+have a web API service that conforms to the API used by Cargo. The web API is
+described in [Registry Web API].
+
+Commercial and community projects are available for building and running a
+registry. See <https://github.com/rust-lang/cargo/wiki/Third-party-registries>
+for a list of what is available.
+
+[Registry Web API]: registry-web-api.md
+[Registry Index]: registry-index.md
+[`cargo publish`]: ../commands/cargo-publish.md
+[`cargo package`]: ../commands/cargo-package.md
diff --git a/src/tools/cargo/src/doc/src/reference/semver.md b/src/tools/cargo/src/doc/src/reference/semver.md
new file mode 100644
index 000000000..22c42610f
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/semver.md
@@ -0,0 +1,1404 @@
+# SemVer Compatibility
+
+This chapter provides details on what is conventionally considered a
+compatible or breaking SemVer change for new releases of a package. See the
+[SemVer compatibility] section for details on what SemVer is, and how Cargo
+uses it to ensure compatibility of libraries.
+
+These are only *guidelines*, and not necessarily hard-and-fast rules that all
+projects will obey. The [Change categories] section details how this guide
+classifies the level and severity of a change. Most of this guide focuses on
+changes that will cause `cargo` and `rustc` to fail to build something that
+previously worked. Almost every change carries some risk that it will
+negatively affect the runtime behavior, and for those cases it is usually a
+judgment call by the project maintainers whether or not it is a
+SemVer-incompatible change.
+
+See also [rust-semverver], which is an experimental tool that attempts to
+programmatically check compatibility rules.
+
+[Change categories]: #change-categories
+[rust-semverver]: https://github.com/rust-lang/rust-semverver
+[SemVer compatibility]: resolver.md#semver-compatibility
+
+## Change categories
+
+All of the policies listed below are categorized by the level of change:
+
+* **Major change**: a change that requires a major SemVer bump.
+* **Minor change**: a change that requires only a minor SemVer bump.
+* **Possibly-breaking change**: a change that some projects may consider major
+ and others consider minor.
+
+The "Possibly-breaking" category covers changes that have the *potential* to
+break during an update, but may not necessarily cause a breakage. The impact
+of these changes should be considered carefully. The exact nature will depend
+on the change and the principles of the project maintainers.
+
+Some projects may choose to only bump the patch number on a minor change. It
+is encouraged to follow the SemVer spec, and only apply bug fixes in patch
+releases. However, a bug fix may require an API change that is marked as a
+"minor change", and shouldn't affect compatibility. This guide does not take a
+stance on how each individual "minor change" should be treated, as the
+distinction between minor and patch changes is a convention that depends on the
+nature of the change.
+
+Some changes are marked as "minor", even though they carry the potential risk
+of breaking a build. This is for situations where the potential is extremely
+low, and the potentially breaking code is unlikely to be written in idiomatic
+Rust, or is specifically discouraged from use.
+
+This guide uses the terms "major" and "minor" assuming this relates to a
+"1.0.0" release or later. Initial development releases starting with "0.y.z"
+can treat changes in "y" as a major release, and "z" as a minor release.
+"0.0.z" releases are always major changes. This is because Cargo uses the
+convention that only changes in the left-most non-zero component are
+considered incompatible.
+
+* API compatibility
+ * Items
+ * [Major: renaming/moving/removing any public items](#item-remove)
+ * [Minor: adding new public items](#item-new)
+ * Structs
+ * [Major: adding a private struct field when all current fields are public](#struct-add-private-field-when-public)
+ * [Major: adding a public field when no private field exists](#struct-add-public-field-when-no-private)
+ * [Minor: adding or removing private fields when at least one already exists](#struct-private-fields-with-private)
+ * [Minor: going from a tuple struct with all private fields (with at least one field) to a normal struct, or vice versa](#struct-tuple-normal-with-private)
+ * Enums
+ * [Major: adding new enum variants (without `non_exhaustive`)](#enum-variant-new)
+ * [Major: adding new fields to an enum variant](#enum-fields-new)
+ * Traits
+ * [Major: adding a non-defaulted trait item](#trait-new-item-no-default)
+ * [Major: any change to trait item signatures](#trait-item-signature)
+ * [Possibly-breaking: adding a defaulted trait item](#trait-new-default-item)
+ * [Major: adding a trait item that makes the trait non-object safe](#trait-object-safety)
+ * [Major: adding a type parameter without a default](#trait-new-parameter-no-default)
+ * [Minor: adding a defaulted trait type parameter](#trait-new-parameter-default)
+ * Implementations
+ * [Possibly-breaking change: adding any inherent items](#impl-item-new)
+ * Generics
+ * [Major: tightening generic bounds](#generic-bounds-tighten)
+ * [Minor: loosening generic bounds](#generic-bounds-loosen)
+ * [Minor: adding defaulted type parameters](#generic-new-default)
+ * [Minor: generalizing a type to use generics (with identical types)](#generic-generalize-identical)
+ * [Major: generalizing a type to use generics (with possibly different types)](#generic-generalize-different)
+ * [Minor: changing a generic type to a more generic type](#generic-more-generic)
+ * Functions
+ * [Major: adding/removing function parameters](#fn-change-arity)
+ * [Possibly-breaking: introducing a new function type parameter](#fn-generic-new)
+ * [Minor: generalizing a function to use generics (supporting original type)](#fn-generalize-compatible)
+ * [Major: generalizing a function to use generics with type mismatch](#fn-generalize-mismatch)
+ * Attributes
+ * [Major: switching from `no_std` support to requiring `std`](#attr-no-std-to-std)
+* Tooling and environment compatibility
+ * [Possibly-breaking: changing the minimum version of Rust required](#env-new-rust)
+ * [Possibly-breaking: changing the platform and environment requirements](#env-change-requirements)
+ * [Minor: introducing new lints](#new-lints)
+ * Cargo
+ * [Minor: adding a new Cargo feature](#cargo-feature-add)
+ * [Major: removing a Cargo feature](#cargo-feature-remove)
+ * [Major: removing a feature from a feature list if that changes functionality or public items](#cargo-feature-remove-another)
+ * [Possibly-breaking: removing an optional dependency](#cargo-remove-opt-dep)
+ * [Minor: changing dependency features](#cargo-change-dep-feature)
+ * [Minor: adding dependencies](#cargo-dep-add)
+* [Application compatibility](#application-compatibility)
+
+## API compatibility
+
+All of the examples below contain three parts: the original code, the code
+after it has been modified, and an example usage of the code that could appear
+in another project. In a minor change, the example usage should successfully
+build with both the before and after versions.
+
+<a id="item-remove"></a>
+### Major: renaming/moving/removing any public items
+
+The absence of a publicly exposed [item][items] will cause any uses of that item to
+fail to compile.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub fn foo() {}
+
+///////////////////////////////////////////////////////////
+// After
+// ... item has been removed
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+fn main() {
+ updated_crate::foo(); // Error: cannot find function `foo`
+}
+```
+
+This includes adding any sort of [`cfg` attribute] which can change which
+items or behavior is available based on [conditional compilation].
+
+Mitigating strategies:
+* Mark items to be removed as [deprecated], and then remove them at a later
+ date in a SemVer-breaking release.
+* Mark renamed items as [deprecated], and use a [`pub use`] item to re-export
+ to the old name.
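+
+For example, a minimal sketch of the rename mitigation (the item names here
+are hypothetical): keep the old name around as a deprecated wrapper for the
+new item, or use a [`pub use`] re-export for a simple rename.
+
+```rust,ignore
+// Hypothetical sketch: `foo` is being renamed to `foo_with_options`.
+
+/// The new name of the item.
+pub fn foo_with_options() {}
+
+/// The old name, kept temporarily so existing callers keep compiling.
+#[deprecated = "renamed to `foo_with_options`"]
+pub fn foo() {
+    foo_with_options()
+}
+```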
+
+<a id="item-new"></a>
+### Minor: adding new public items
+
+Adding new, public [items] is a minor change.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+// ... absence of item
+
+///////////////////////////////////////////////////////////
+// After
+pub fn foo() {}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+// `foo` is not used since it didn't previously exist.
+```
+
+Note that in some rare cases this can be a **breaking change** due to glob
+imports. For example, if you add a new trait, and a project has used a glob
+import that brings that trait into scope, and the new trait introduces an
+associated item that conflicts with any types it is implemented on, this can
+cause a compile-time error due to the ambiguity. Example:
+
+```rust,ignore
+// Breaking change example
+
+///////////////////////////////////////////////////////////
+// Before
+// ... absence of trait
+
+///////////////////////////////////////////////////////////
+// After
+pub trait NewTrait {
+ fn foo(&self) {}
+}
+
+impl NewTrait for i32 {}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::*;
+
+pub trait LocalTrait {
+ fn foo(&self) {}
+}
+
+impl LocalTrait for i32 {}
+
+fn main() {
+ 123i32.foo(); // Error: multiple applicable items in scope
+}
+```
+
+This is not considered a major change because conventionally glob imports are
+a known forwards-compatibility hazard. Glob imports of items from external
+crates should be avoided.
+
+<a id="struct-add-private-field-when-public"></a>
+### Major: adding a private struct field when all current fields are public
+
+When a private field is added to a struct that previously had all public fields,
+this will break any code that attempts to construct it with a [struct literal].
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Foo {
+ pub f1: i32,
+}
+
+///////////////////////////////////////////////////////////
+// After
+pub struct Foo {
+ pub f1: i32,
+ f2: i32,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+fn main() {
+ let x = updated_crate::Foo { f1: 123 }; // Error: cannot construct `Foo`
+}
+```
+
+Mitigation strategies:
+* Do not add new fields to all-public field structs.
+* Mark structs as [`#[non_exhaustive]`][non_exhaustive] when first introducing
+ a struct to prevent users from using struct literal syntax, and instead
+ provide a constructor method and/or [Default] implementation.
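+
+A minimal sketch of the `#[non_exhaustive]` mitigation (names are
+hypothetical): downstream crates cannot use struct literal syntax on a
+non-exhaustive struct, so private fields can be added later without breaking
+them.
+
+```rust,ignore
+#[non_exhaustive]
+#[derive(Default)]
+pub struct Foo {
+    pub f1: i32,
+}
+
+impl Foo {
+    /// Constructor provided because downstream crates cannot use struct
+    /// literal syntax on a `#[non_exhaustive]` struct.
+    pub fn new(f1: i32) -> Foo {
+        Foo { f1 }
+    }
+}
+```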
+
+<a id="struct-add-public-field-when-no-private"></a>
+### Major: adding a public field when no private field exists
+
+When a public field is added to a struct that has all public fields, this will
+break any code that attempts to construct it with a [struct literal].
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Foo {
+ pub f1: i32,
+}
+
+///////////////////////////////////////////////////////////
+// After
+pub struct Foo {
+ pub f1: i32,
+ pub f2: i32,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+fn main() {
+ let x = updated_crate::Foo { f1: 123 }; // Error: missing field `f2`
+}
+```
+
+Mitigation strategies:
+* Do not add new fields to all-public field structs.
+* Mark structs as [`#[non_exhaustive]`][non_exhaustive] when first introducing
+ a struct to prevent users from using struct literal syntax, and instead
+ provide a constructor method and/or [Default] implementation.
+
+<a id="struct-private-fields-with-private"></a>
+### Minor: adding or removing private fields when at least one already exists
+
+It is safe to add or remove private fields from a struct when the struct
+already has at least one private field.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[derive(Default)]
+pub struct Foo {
+ f1: i32,
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[derive(Default)]
+pub struct Foo {
+ f2: f64,
+}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+fn main() {
+ // Cannot access private fields.
+ let x = updated_crate::Foo::default();
+}
+```
+
+This is safe because existing code cannot use a [struct literal] to construct
+it, nor exhaustively match its contents.
+
+Note that for tuple structs, this is a **major change** if the tuple contains
+public fields, and the addition or removal of a private field changes the
+index of any public field.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[derive(Default)]
+pub struct Foo(pub i32, i32);
+
+///////////////////////////////////////////////////////////
+// After
+#[derive(Default)]
+pub struct Foo(f64, pub i32, i32);
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+fn main() {
+ let x = updated_crate::Foo::default();
+ let y = x.0; // Error: is private
+}
+```
+
+<a id="struct-tuple-normal-with-private"></a>
+### Minor: going from a tuple struct with all private fields (with at least one field) to a normal struct, or vice versa
+
+Changing a tuple struct to a normal struct (or vice-versa) is safe if all
+fields are private.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[derive(Default)]
+pub struct Foo(i32);
+
+///////////////////////////////////////////////////////////
+// After
+#[derive(Default)]
+pub struct Foo {
+ f1: i32,
+}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+fn main() {
+ // Cannot access private fields.
+ let x = updated_crate::Foo::default();
+}
+```
+
+This is safe because existing code cannot use a [struct literal] to construct
+it, nor match its contents.
+
+<a id="enum-variant-new"></a>
+### Major: adding new enum variants (without `non_exhaustive`)
+
+It is a breaking change to add a new enum variant if the enum does not use the
+[`#[non_exhaustive]`][non_exhaustive] attribute.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub enum E {
+ Variant1,
+}
+
+///////////////////////////////////////////////////////////
+// After
+pub enum E {
+ Variant1,
+ Variant2,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+fn main() {
+ use updated_crate::E;
+ let x = E::Variant1;
+ match x { // Error: `E::Variant2` not covered
+ E::Variant1 => {}
+ }
+}
+```
+
+Mitigation strategies:
+* When introducing the enum, mark it as [`#[non_exhaustive]`][non_exhaustive]
+ to force users to use [wildcard patterns] to catch new variants.
+
+<a id="enum-fields-new"></a>
+### Major: adding new fields to an enum variant
+
+It is a breaking change to add new fields to an enum variant because all
+fields are public, and constructors and matching will fail to compile.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub enum E {
+ Variant1 { f1: i32 },
+}
+
+///////////////////////////////////////////////////////////
+// After
+pub enum E {
+ Variant1 { f1: i32, f2: i32 },
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+fn main() {
+ use updated_crate::E;
+ let x = E::Variant1 { f1: 1 }; // Error: missing f2
+ match x {
+ E::Variant1 { f1 } => {} // Error: missing f2
+ }
+}
+```
+
+Mitigation strategies:
+* When introducing the enum, mark the variant as [`non_exhaustive`][non_exhaustive]
+ so that it cannot be constructed or matched without wildcards.
+ ```rust,ignore,skip
+ pub enum E {
+ #[non_exhaustive]
+ Variant1{f1: i32}
+ }
+ ```
+* When introducing the enum, use an explicit struct as a value, where you can
+ have control over the field visibility.
+ ```rust,ignore,skip
+ pub struct Foo {
+ f1: i32,
+ f2: i32,
+ }
+ pub enum E {
+ Variant1(Foo)
+ }
+ ```
+
+<a id="trait-new-item-no-default"></a>
+### Major: adding a non-defaulted trait item
+
+It is a breaking change to add a non-defaulted item to a trait. This will
+break any implementors of the trait.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub trait Trait {}
+
+///////////////////////////////////////////////////////////
+// After
+pub trait Trait {
+ fn foo(&self);
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Trait;
+struct Foo;
+
+impl Trait for Foo {} // Error: not all trait items implemented
+```
+
+Mitigation strategies:
+* Always provide a default implementation or value for new associated trait
+ items.
+* When introducing the trait, use the [sealed trait] technique to prevent
+ users outside of the crate from implementing the trait.
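+
+A minimal sketch of the sealed trait technique (module and type names are
+illustrative): the public trait has a supertrait that lives in a private
+module, so only the defining crate can implement it, and new items can later
+be added without breaking downstream code.
+
+```rust,ignore
+mod private {
+    // Not nameable outside this crate, so no external impls are possible.
+    pub trait Sealed {}
+}
+
+/// Downstream crates can use this trait but cannot implement it,
+/// because they cannot name `private::Sealed`.
+pub trait Trait: private::Sealed {
+    fn foo(&self);
+}
+
+pub struct Foo;
+impl private::Sealed for Foo {}
+impl Trait for Foo {
+    fn foo(&self) {}
+}
+```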
+
+<a id="trait-item-signature"></a>
+### Major: any change to trait item signatures
+
+It is a breaking change to make any change to a trait item signature. This can
+break external implementors of the trait.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub trait Trait {
+ fn f(&self, x: i32) {}
+}
+
+///////////////////////////////////////////////////////////
+// After
+pub trait Trait {
+ // For sealed traits or normal functions, this would be a minor change
+ // because generalizing with generics strictly expands the possible uses.
+ // But in this case, trait implementations must use the same signature.
+ fn f<V>(&self, x: V) {}
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Trait;
+struct Foo;
+
+impl Trait for Foo {
+ fn f(&self, x: i32) {} // Error: trait declaration has 1 type parameter
+}
+```
+
+Mitigation strategies:
+* Introduce new items with default implementations to cover the new
+ functionality instead of modifying existing items.
+* When introducing the trait, use the [sealed trait] technique to prevent
+ users outside of the crate from implementing the trait.
+
+<a id="trait-new-default-item"></a>
+### Possibly-breaking: adding a defaulted trait item
+
+It is usually safe to add a defaulted trait item. However, this can sometimes
+cause a compile error. For example, this can introduce an ambiguity if a
+method of the same name exists in another trait.
+
+```rust,ignore
+// Breaking change example
+
+///////////////////////////////////////////////////////////
+// Before
+pub trait Trait {}
+
+///////////////////////////////////////////////////////////
+// After
+pub trait Trait {
+ fn foo(&self) {}
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Trait;
+struct Foo;
+
+trait LocalTrait {
+ fn foo(&self) {}
+}
+
+impl Trait for Foo {}
+impl LocalTrait for Foo {}
+
+fn main() {
+ let x = Foo;
+ x.foo(); // Error: multiple applicable items in scope
+}
+```
+
+Note that this ambiguity does *not* exist for name collisions on [inherent
+implementations], as they take priority over trait items.
+
+See [trait-object-safety](#trait-object-safety) for a special case to consider
+when adding trait items.
+
+Mitigation strategies:
+* Some projects may deem this acceptable breakage, particularly if the new
+ item name is unlikely to collide with any existing code. Choose names
+ carefully to help avoid these collisions. Additionally, it may be acceptable
+ to require downstream users to add [disambiguation syntax] to select the
+ correct function when updating the dependency.
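+
+For reference, a sketch of how the breaking example above could be resolved
+downstream with [disambiguation syntax] instead of method-call syntax:
+
+```rust,ignore
+use updated_crate::Trait;
+struct Foo;
+
+trait LocalTrait {
+    fn foo(&self) {}
+}
+
+impl Trait for Foo {}
+impl LocalTrait for Foo {}
+
+fn main() {
+    let x = Foo;
+    // Fully qualified calls select which trait's `foo` to use.
+    Trait::foo(&x);
+    <Foo as LocalTrait>::foo(&x);
+}
+```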
+
+<a id="trait-object-safety"></a>
+### Major: adding a trait item that makes the trait non-object safe
+
+It is a breaking change to add a trait item that changes the trait to not be
+[object safe].
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub trait Trait {}
+
+///////////////////////////////////////////////////////////
+// After
+pub trait Trait {
+ // An associated const makes the trait not object-safe.
+ const CONST: i32 = 123;
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Trait;
+struct Foo;
+
+impl Trait for Foo {}
+
+fn main() {
+ let obj: Box<dyn Trait> = Box::new(Foo); // Error: cannot be made into an object
+}
+```
+
+It is safe to do the converse (making a non-object safe trait into an
+object-safe one).
+
+<a id="trait-new-parameter-no-default"></a>
+### Major: adding a type parameter without a default
+
+It is a breaking change to add a type parameter without a default to a trait.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub trait Trait {}
+
+///////////////////////////////////////////////////////////
+// After
+pub trait Trait<T> {}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Trait;
+struct Foo;
+
+impl Trait for Foo {} // Error: missing generics
+```
+
+Mitigating strategies:
+* See [adding a defaulted trait type parameter](#trait-new-parameter-default).
+
+<a id="trait-new-parameter-default"></a>
+### Minor: adding a defaulted trait type parameter
+
+It is safe to add a type parameter to a trait as long as it has a default.
+External implementors will use the default without needing to specify the
+parameter.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub trait Trait {}
+
+///////////////////////////////////////////////////////////
+// After
+pub trait Trait<T = i32> {}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+use updated_crate::Trait;
+struct Foo;
+
+impl Trait for Foo {}
+```
+
+<a id="impl-item-new"></a>
+### Possibly-breaking change: adding any inherent items
+
+Usually adding inherent items to an implementation should be safe because
+inherent items take priority over trait items. However, in some cases the
+collision can cause problems if the name is the same as an implemented trait
+item with a different signature.
+
+```rust,ignore
+// Breaking change example
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Foo;
+
+///////////////////////////////////////////////////////////
+// After
+pub struct Foo;
+
+impl Foo {
+ pub fn foo(&self) {}
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Foo;
+
+trait Trait {
+ fn foo(&self, x: i32) {}
+}
+
+impl Trait for Foo {}
+
+fn main() {
+ let x = Foo;
+ x.foo(1); // Error: this method takes 0 arguments but 1 argument was supplied
+}
+```
+
+Note that if the signatures match, there would not be a compile-time error,
+but possibly a silent change in runtime behavior (because it is now executing
+a different function).
+
+Mitigation strategies:
+* Some projects may deem this acceptable breakage, particularly if the new
+ item name is unlikely to collide with any existing code. Choose names
+ carefully to help avoid these collisions. Additionally, it may be acceptable
+ to require downstream users to add [disambiguation syntax] to select the
+ correct function when updating the dependency.
+
+<a id="generic-bounds-tighten"></a>
+### Major: tightening generic bounds
+
+It is a breaking change to tighten generic bounds on a type since this can
+break users expecting the looser bounds.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Foo<A> {
+ pub f1: A,
+}
+
+///////////////////////////////////////////////////////////
+// After
+pub struct Foo<A: Eq> {
+ pub f1: A,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Foo;
+
+fn main() {
+ let s = Foo { f1: 1.23 }; // Error: the trait bound `{float}: Eq` is not satisfied
+}
+```
+
+<a id="generic-bounds-loosen"></a>
+### Minor: loosening generic bounds
+
+It is safe to loosen the generic bounds on a type, as it only expands what is
+allowed.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Foo<A: Clone> {
+ pub f1: A,
+}
+
+///////////////////////////////////////////////////////////
+// After
+pub struct Foo<A> {
+ pub f1: A,
+}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+use updated_crate::Foo;
+
+fn main() {
+ let s = Foo { f1: 123 };
+}
+```
+
+<a id="generic-new-default"></a>
+### Minor: adding defaulted type parameters
+
+It is safe to add a type parameter to a type as long as it has a default. All
+existing references will use the default without needing to specify the
+parameter.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[derive(Default)]
+pub struct Foo {}
+
+///////////////////////////////////////////////////////////
+// After
+#[derive(Default)]
+pub struct Foo<A = i32> {
+ f1: A,
+}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+use updated_crate::Foo;
+
+fn main() {
+ let s: Foo = Default::default();
+}
+```
+
+<a id="generic-generalize-identical"></a>
+### Minor: generalizing a type to use generics (with identical types)
+
+A struct or enum field can change from a concrete type to a generic type
+parameter, provided that the change results in an identical type for all
+existing use cases. For example, the following change is permitted:
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Foo(pub u8);
+
+///////////////////////////////////////////////////////////
+// After
+pub struct Foo<T = u8>(pub T);
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+use updated_crate::Foo;
+
+fn main() {
+ let s: Foo = Foo(123);
+}
+```
+
+because existing uses of `Foo` are shorthand for `Foo<u8>` which yields the
+identical field type.
+
+<a id="generic-generalize-different"></a>
+### Major: generalizing a type to use generics (with possibly different types)
+
+Changing a struct or enum field from a concrete type to a generic type
+parameter can break if the type can change.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Foo<T = u8>(pub T, pub u8);
+
+///////////////////////////////////////////////////////////
+// After
+pub struct Foo<T = u8>(pub T, pub T);
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Foo;
+
+fn main() {
+ let s: Foo<f32> = Foo(3.14, 123); // Error: mismatched types
+}
+```
+
+<a id="generic-more-generic"></a>
+### Minor: changing a generic type to a more generic type
+
+It is safe to change a generic type to a more generic one. For example, the
+following adds a generic parameter that defaults to the original type, which
+is safe because all existing users will be using the same type for both
+fields, and the defaulted parameter does not need to be specified.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Foo<T>(pub T, pub T);
+
+///////////////////////////////////////////////////////////
+// After
+pub struct Foo<T, U = T>(pub T, pub U);
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+use updated_crate::Foo;
+
+fn main() {
+ let s: Foo<f32> = Foo(1.0, 2.0);
+}
+```
+
+<a id="fn-change-arity"></a>
+### Major: adding/removing function parameters
+
+Changing the arity of a function is a breaking change.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub fn foo() {}
+
+///////////////////////////////////////////////////////////
+// After
+pub fn foo(x: i32) {}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+fn main() {
+ updated_crate::foo(); // Error: this function takes 1 argument
+}
+```
+
+Mitigating strategies:
+* Introduce a new function with the new signature and possibly
+ [deprecate][deprecated] the old one.
+* Introduce functions that take a struct argument, where the struct is built
+ with the builder pattern. This allows new fields to be added to the struct
+ in the future.
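+
+A minimal sketch of the struct-argument approach (names are hypothetical):
+the function takes an options struct with a builder-style API, so new options
+can be added later without changing the function's arity.
+
+```rust,ignore
+#[derive(Default)]
+pub struct FooOptions {
+    verbose: bool,
+}
+
+impl FooOptions {
+    pub fn new() -> FooOptions {
+        FooOptions::default()
+    }
+
+    /// Builder-style setter; more options can be added here later.
+    pub fn verbose(mut self, value: bool) -> FooOptions {
+        self.verbose = value;
+        self
+    }
+}
+
+pub fn foo(options: FooOptions) {
+    let _ = options.verbose;
+}
+
+// Example call: `foo(FooOptions::new().verbose(true));`
+```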
+
+<a id="fn-generic-new"></a>
+### Possibly-breaking: introducing a new function type parameter
+
+Usually, adding a non-defaulted type parameter is safe, but in some
+cases it can be a breaking change:
+
+```rust,ignore
+// Breaking change example
+
+///////////////////////////////////////////////////////////
+// Before
+pub fn foo<T>() {}
+
+///////////////////////////////////////////////////////////
+// After
+pub fn foo<T, U>() {}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::foo;
+
+fn main() {
+ foo::<u8>(); // Error: function takes 2 generic arguments but 1 generic argument was supplied
+}
+```
+
+However, such explicit calls are rare enough (and can usually be written in
+other ways) that this breakage is usually acceptable. One should take into
+account how likely it is that the function in question is being called with
+explicit type arguments.
+
+<a id="fn-generalize-compatible"></a>
+### Minor: generalizing a function to use generics (supporting original type)
+
+The type of a parameter to a function, or its return value, can be
+*generalized* to use generics, including by introducing a new type parameter,
+as long as it can be instantiated to the original type. For example, the
+following changes are allowed:
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub fn foo(x: u8) -> u8 {
+ x
+}
+pub fn bar<T: Iterator<Item = u8>>(t: T) {}
+
+///////////////////////////////////////////////////////////
+// After
+use std::ops::Add;
+pub fn foo<T: Add>(x: T) -> T {
+ x
+}
+pub fn bar<T: IntoIterator<Item = u8>>(t: T) {}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+use updated_crate::{bar, foo};
+
+fn main() {
+ foo(1);
+ bar(vec![1, 2, 3].into_iter());
+}
+```
+
+because all existing uses are instantiations of the new signature.
+
+Perhaps somewhat surprisingly, generalization applies to trait objects as
+well, given that every trait implements itself:
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub trait Trait {}
+pub fn foo(t: &dyn Trait) {}
+
+///////////////////////////////////////////////////////////
+// After
+pub trait Trait {}
+pub fn foo<T: Trait + ?Sized>(t: &T) {}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+use updated_crate::{foo, Trait};
+
+struct Foo;
+impl Trait for Foo {}
+
+fn main() {
+ let obj = Foo;
+ foo(&obj);
+}
+```
+
+(The use of `?Sized` is essential; otherwise you couldn't recover the original
+signature.)
+
+Introducing generics in this way can potentially create type inference
+failures. These are usually rare, and may be acceptable breakage for some
+projects, as this can be fixed with additional type annotations.
+
+```rust,ignore
+// Breaking change example
+
+///////////////////////////////////////////////////////////
+// Before
+pub fn foo() -> i32 {
+ 0
+}
+
+///////////////////////////////////////////////////////////
+// After
+pub fn foo<T: Default>() -> T {
+ Default::default()
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::foo;
+
+fn main() {
+ let x = foo(); // Error: type annotations needed
+}
+```
+
+<a id="fn-generalize-mismatch"></a>
+### Major: generalizing a function to use generics with type mismatch
+
+It is a breaking change to change a function parameter or return type if the
+generic type constrains or changes the types previously allowed. For example,
+the following adds a generic constraint that may not be satisfied by existing
+code:
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub fn foo(x: Vec<u8>) {}
+
+///////////////////////////////////////////////////////////
+// After
+pub fn foo<T: Copy + IntoIterator<Item = u8>>(x: T) {}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::foo;
+
+fn main() {
+ foo(vec![1, 2, 3]); // Error: `Copy` is not implemented for `Vec<u8>`
+}
+```
+
+<a id="attr-no-std-to-std"></a>
+### Major: switching from `no_std` support to requiring `std`
+
+If your library specifically supports a [`no_std`] environment, it is a
+breaking change to make a new release that requires `std`.
+
+```rust,ignore,skip
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#![no_std]
+pub fn foo() {}
+
+///////////////////////////////////////////////////////////
+// After
+pub fn foo() {
+ std::time::SystemTime::now();
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+// This will fail to link for no_std targets because they don't have a `std` crate.
+#![no_std]
+use updated_crate::foo;
+
+fn example() {
+ foo();
+}
+```
+
+Mitigation strategies:
+* A common idiom to avoid this is to include a `std` [Cargo feature] that
+ optionally enables `std` support, and when the feature is off, the library
+ can be used in a `no_std` environment.
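+
+A minimal sketch of that idiom, assuming a Cargo feature named `std` (for
+example declared as `std = []`, possibly included in `default`):
+
+```rust,ignore
+// In the crate root: opt out of the standard library only when the
+// `std` feature is disabled.
+#![cfg_attr(not(feature = "std"), no_std)]
+
+/// Functionality that needs the standard library is gated on the feature.
+#[cfg(feature = "std")]
+pub fn now() -> std::time::SystemTime {
+    std::time::SystemTime::now()
+}
+```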
+
+## Tooling and environment compatibility
+
+<a id="env-new-rust"></a>
+### Possibly-breaking: changing the minimum version of Rust required
+
+Introducing the use of new features in a new release of Rust can break
+projects that are using older versions of Rust. This also includes using new
+features in a new release of Cargo, and requiring the use of a nightly-only
+feature in a crate that previously worked on stable.
+
+Some projects choose to allow this in a minor release for various reasons. It
+is usually relatively easy to update to a newer version of Rust. Rust also has
+a rapid 6-week release cycle, and some projects will provide compatibility
+within a window of releases (such as the current stable release plus N
+previous releases). Just keep in mind that some large projects may not be able
+to update their Rust toolchain rapidly.
+
+Mitigation strategies:
+* Use [Cargo features] to make the new features opt-in.
+* Provide a large window of support for older releases.
+* Copy the source of new standard library items if possible so that you
+ can continue to use an older version but take advantage of the new feature.
+* Provide a separate branch of older minor releases that can receive backports
+ of important bugfixes.
+* Keep an eye out for the [`[cfg(version(..))]`][cfg-version] and
+ [`#[cfg(accessible(..))]`][cfg-accessible] features which provide an opt-in
+ mechanism for new features. These are currently unstable and only available
+ in the nightly channel.
+
+<a id="env-change-requirements"></a>
+### Possibly-breaking: changing the platform and environment requirements
+
+There is a very wide range of assumptions a library makes about the
+environment that it runs in, such as the host platform, operating system
+version, available services, filesystem support, etc. It can be a breaking
+change if you make a new release that restricts what was previously supported,
+for example requiring a newer version of an operating system. These changes
+can be difficult to track, since you may not always know if a change breaks in
+an environment that is not automatically tested.
+
+Some projects may deem this acceptable breakage, particularly if the breakage
+is unlikely for most users, or the project doesn't have the resources to
+support all environments. Another notable situation is when a vendor
+discontinues support for some hardware or OS; in that case, the project may
+deem it reasonable to discontinue its own support as well.
+
+Mitigation strategies:
+* Document the platforms and environments you specifically support.
+* Test your code on a wide range of environments in CI.
+
+<a id="new-lints"></a>
+### Minor: introducing new lints
+
+Some changes to a library may cause new lints to be triggered in users of that library.
+This should generally be considered a compatible change.
+
+```rust,ignore,dont-deny
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub fn foo() {}
+
+///////////////////////////////////////////////////////////
+// After
+#[deprecated]
+pub fn foo() {}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+
+fn main() {
+ updated_crate::foo(); // Warning: use of deprecated function
+}
+```
+
+Beware that this can technically cause a project to fail to build if it has explicitly denied the warning and the updated crate is a direct dependency.
+Denying warnings should be done with care and the understanding that new lints may be introduced over time.
+However, library authors should be cautious about introducing new warnings and may want to consider the potential impact on their users.
+
+The following lints are examples of those that may be introduced when updating a dependency:
+
+* [`deprecated`][deprecated-lint] --- Introduced when a dependency adds the [`#[deprecated]` attribute][deprecated] to an item you are using.
+* [`unused_must_use`] --- Introduced when a dependency adds the [`#[must_use]` attribute][must-use-attr] to an item where you are not consuming the result.
+* [`unused_unsafe`] --- Introduced when a dependency *removes* the `unsafe` qualifier from a function, and that is the only unsafe function called in an unsafe block.
+
+Additionally, updating `rustc` to a new version may introduce new lints.
+
+Transitive dependencies which introduce new lints should not usually cause a failure because Cargo uses [`--cap-lints`](../../rustc/lints/levels.html#capping-lints) to suppress all lints in dependencies.
+
+Mitigating strategies:
+* If you build with warnings denied, understand you may need to deal with resolving new warnings whenever you update your dependencies.
+ If using RUSTFLAGS to pass `-Dwarnings`, also add the `-A` flag to allow lints that are likely to cause issues, such as `-Adeprecated`.
+* Introduce deprecations behind a [feature][Cargo features].
+ For example `#[cfg_attr(feature = "deprecated", deprecated="use bar instead")]`.
+ Then, when you plan to remove an item in a future SemVer breaking change, you can communicate with your users that they should enable the `deprecated` feature *before* updating to remove the use of the deprecated items.
+ This allows users to choose when to respond to deprecations without needing to immediately respond to them.
+ A downside is that it can be difficult to communicate to users that they need to take these manual steps to prepare for a major update.
+
+[`unused_must_use`]: ../../rustc/lints/listing/warn-by-default.html#unused-must-use
+[deprecated-lint]: ../../rustc/lints/listing/warn-by-default.html#deprecated
+[must-use-attr]: ../../reference/attributes/diagnostics.html#the-must_use-attribute
+[`unused_unsafe`]: ../../rustc/lints/listing/warn-by-default.html#unused-unsafe
+
+### Cargo
+
+<a id="cargo-feature-add"></a>
+#### Minor: adding a new Cargo feature
+
+It is usually safe to add new [Cargo features]. If enabling the feature
+introduces changes that break existing behavior, this can cause difficulties
+for projects that have stricter backwards-compatibility needs. In that scenario, avoid
+adding the feature to the "default" list, and possibly document the
+consequences of enabling the feature.
+
+```toml
+# MINOR CHANGE
+
+###########################################################
+# Before
+[features]
+# ..empty
+
+###########################################################
+# After
+[features]
+std = []
+```
+
+<a id="cargo-feature-remove"></a>
+#### Major: removing a Cargo feature
+
+It is usually a breaking change to remove [Cargo features]. This will cause
+an error for any project that enabled the feature.
+
+```toml
+# MAJOR CHANGE
+
+###########################################################
+# Before
+[features]
+logging = []
+
+###########################################################
+# After
+[dependencies]
+# ..logging removed
+```
+
+Mitigation strategies:
+* Clearly document your features. If there is an internal or experimental
+ feature, mark it as such, so that users know the status of the feature.
+* Leave the old feature in `Cargo.toml`, but otherwise remove its
+ functionality. Document that the feature is deprecated, and remove it in a
+ future major SemVer release.
+
+<a id="cargo-feature-remove-another"></a>
+#### Major: removing a feature from a feature list if that changes functionality or public items
+
+Removing a feature from another feature's list can break existing users if
+they are expecting that functionality to be available through that feature.
+
+```toml
+# Breaking change example
+
+###########################################################
+# Before
+[features]
+default = ["std"]
+std = []
+
+###########################################################
+# After
+[features]
+default = [] # This may cause packages to fail if they are expecting std to be enabled.
+std = []
+```
+
+<a id="cargo-remove-opt-dep"></a>
+#### Possibly-breaking: removing an optional dependency
+
+Removing an optional dependency can break a project using your library because
+another project may be enabling that dependency via [Cargo features].
+
+```toml
+# Breaking change example
+
+###########################################################
+# Before
+[dependencies]
+curl = { version = "0.4.31", optional = true }
+
+###########################################################
+# After
+[dependencies]
+# ..curl removed
+```
+
+Mitigation strategies:
+* Clearly document your features. If the optional dependency is not included
+ in the documented list of features, then you may decide to consider it safe
+ to change undocumented entries.
+* Leave the optional dependency, and just don't use it within your library.
+* Replace the optional dependency with a [Cargo feature] that does nothing,
+ and document that it is deprecated.
+* Use high-level features which enable optional dependencies, and document
+ those as the preferred way to enable the extended functionality. For
+ example, if your library has optional support for something like
+ "networking", create a generic feature name "networking" that enables the
+ optional dependencies necessary to implement "networking". Then document the
+ "networking" feature.
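+
+A sketch of the last strategy, reusing the `curl` dependency from the example
+above with a hypothetical `networking` feature:
+
+```toml
+[dependencies]
+curl = { version = "0.4.31", optional = true }
+
+[features]
+# Users enable "networking"; the optional dependency stays an implementation
+# detail that can be replaced or removed later.
+networking = ["curl"]
+```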
+
+<a id="cargo-change-dep-feature"></a>
+#### Minor: changing dependency features
+
+It is usually safe to change the features on a dependency, as long as the
+feature does not introduce a breaking change.
+
+```toml
+# MINOR CHANGE
+
+###########################################################
+# Before
+[dependencies]
+rand = { version = "0.7.3", features = ["small_rng"] }
+
+
+###########################################################
+# After
+[dependencies]
+rand = "0.7.3"
+```
+
+<a id="cargo-dep-add"></a>
+#### Minor: adding dependencies
+
+It is usually safe to add new dependencies, as long as the new dependency
+does not introduce new requirements that result in a breaking change.
+For example, adding a new dependency that requires nightly in a project
+that previously worked on stable is a major change.
+
+```toml
+# MINOR CHANGE
+
+###########################################################
+# Before
+[dependencies]
+# ..empty
+
+###########################################################
+# After
+[dependencies]
+log = "0.4.11"
+```
+
+## Application compatibility
+
+Cargo projects may also include executable binaries which have their own
+interfaces (such as a CLI interface, OS-level interaction, etc.). Since these
+are part of the Cargo package, they often use and share the same version as
+the package. You will need to decide if and how you want to employ a SemVer
+contract with your users in the changes you make to your application. The
+potential breaking and compatible changes to an application are too numerous
+to list, so you are encouraged to use the spirit of the [SemVer] spec to guide
+your decisions on how to apply versioning to your application, or at least
+document what your commitments are.
+
+[`cfg` attribute]: ../../reference/conditional-compilation.md#the-cfg-attribute
+[`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute
+[`pub use`]: ../../reference/items/use-declarations.html
+[Cargo feature]: features.md
+[Cargo features]: features.md
+[cfg-accessible]: https://github.com/rust-lang/rust/issues/64797
+[cfg-version]: https://github.com/rust-lang/rust/issues/64796
+[conditional compilation]: ../../reference/conditional-compilation.md
+[Default]: ../../std/default/trait.Default.html
+[deprecated]: ../../reference/attributes/diagnostics.html#the-deprecated-attribute
+[disambiguation syntax]: ../../reference/expressions/call-expr.html#disambiguating-function-calls
+[inherent implementations]: ../../reference/items/implementations.html#inherent-implementations
+[items]: ../../reference/items.html
+[non_exhaustive]: ../../reference/attributes/type_system.html#the-non_exhaustive-attribute
+[object safe]: ../../reference/items/traits.html#object-safety
+[rust-feature]: https://doc.rust-lang.org/nightly/unstable-book/
+[sealed trait]: https://rust-lang.github.io/api-guidelines/future-proofing.html#sealed-traits-protect-against-downstream-implementations-c-sealed
+[SemVer]: https://semver.org/
+[struct literal]: ../../reference/expressions/struct-expr.html
+[wildcard patterns]: ../../reference/patterns.html#wildcard-pattern
diff --git a/src/tools/cargo/src/doc/src/reference/source-replacement.md b/src/tools/cargo/src/doc/src/reference/source-replacement.md
new file mode 100644
index 000000000..b8bcdc7ae
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/source-replacement.md
@@ -0,0 +1,130 @@
+## Source Replacement
+
+This document is about replacing the crate index. You can read about overriding
+dependencies in the [overriding dependencies] section of this
+documentation.
+
+A *source* is a provider that contains crates that may be included as
+dependencies for a package. Cargo supports the ability to **replace one source
+with another** to express strategies such as:
+
+* Vendoring --- custom sources can be defined which represent crates on the local
+ filesystem. These sources are subsets of the source that they're replacing and
+ can be checked into packages if necessary.
+
+* Mirroring --- sources can be replaced with an equivalent version which acts as a
+ cache for crates.io itself.
+
+A core assumption Cargo makes about source replacement is that the source code is
+exactly the same from both sources. Note that this also means that
+a replacement source is not allowed to have crates which are not present in the
+original source.
+
+As a consequence, source replacement is not appropriate for situations such as
+patching a dependency or a private registry. Cargo supports patching
+dependencies through the usage of [the `[patch]` key][overriding
+dependencies], and private registry support is described in [the Registries
+chapter][registries].
+
+When using source replacement, running commands like `cargo publish` that need to
+contact the registry requires passing the `--registry` option. This helps avoid
+any ambiguity about which registry to contact, and will use the authentication
+token for the specified registry.
+
+[overriding dependencies]: overriding-dependencies.md
+[registries]: registries.md
+
+### Configuration
+
+Configuration of replacement sources is done through [`.cargo/config.toml`][config]
+and the full set of available keys are:
+
+```toml
+# The `source` table is where all keys related to source-replacement
+# are stored.
+[source]
+
+# Under the `source` table are a number of other tables whose keys are a
+# name for the relevant source. For example this section defines a new
+# source, called `my-vendor-source`, which comes from a directory
+# located at `vendor` relative to the directory containing this `.cargo/config.toml`
+# file
+[source.my-vendor-source]
+directory = "vendor"
+
+# The crates.io default source for crates is available under the name
+# "crates-io", and here we use the `replace-with` key to indicate that it's
+# replaced with our source above.
+#
+# The `replace-with` key can also reference an alternative registry name
+# defined in the `[registries]` table.
+[source.crates-io]
+replace-with = "my-vendor-source"
+
+# Each source has its own table where the key is the name of the source
+[source.the-source-name]
+
+# Indicate that `the-source-name` will be replaced with `another-source`,
+# defined elsewhere
+replace-with = "another-source"
+
+# Several kinds of sources can be specified (described in more detail below):
+registry = "https://example.com/path/to/index"
+local-registry = "path/to/registry"
+directory = "path/to/vendor"
+
+# Git sources can optionally specify a branch/tag/rev as well
+git = "https://example.com/path/to/repo"
+# branch = "master"
+# tag = "v1.0.1"
+# rev = "313f44e8"
+```
+
+[config]: config.md
+
+### Registry Sources
+
+A "registry source" is one that is the same as crates.io itself. That is, it has
+an index served in a git repository which matches the format of the
+[crates.io index](https://github.com/rust-lang/crates.io-index). That repository
+then has configuration indicating where to download crates from.
+
+Currently there is not an already-available project for setting up a mirror of
+crates.io. Stay tuned though!
+
+### Local Registry Sources
+
+A "local registry source" is intended to be a subset of another registry
+source, but available on the local filesystem (aka vendoring). Local registries
+are downloaded ahead of time, typically sync'd with a `Cargo.lock`, and are
+made up of a set of `*.crate` files and an index like the normal registry is.
+
+The primary way to manage and create local registry sources is through the
+[`cargo-local-registry`][cargo-local-registry] subcommand, which is
+[available on crates.io][cargo-local-registry] and can be installed with
+`cargo install cargo-local-registry`.
+
+[cargo-local-registry]: https://crates.io/crates/cargo-local-registry
+
+Local registries are contained within one directory and contain a number of
+`*.crate` files downloaded from crates.io as well as an `index` directory with
+the same format as the crates.io-index project (populated with just entries for
+the crates that are present).
+
+### Directory Sources
+
+A "directory source" is similar to a local registry source where it contains a
+number of crates available on the local filesystem, suitable for vendoring
+dependencies. Directory sources are primarily managed by the `cargo vendor`
+subcommand.
+
+Directory sources are distinct from local registries, though, in that they contain
+the unpacked version of `*.crate` files, which makes them more suitable in some
+situations for checking everything into source control. A directory source is just a
+directory containing a number of other directories which contain the source code
+for crates (the unpacked version of `*.crate` files). Currently no restriction
+is placed on the name of each directory.
+
+Each crate in a directory source also has an associated metadata file indicating
+the checksum of each file in the crate to protect against accidental
+modifications.
diff --git a/src/tools/cargo/src/doc/src/reference/specifying-dependencies.md b/src/tools/cargo/src/doc/src/reference/specifying-dependencies.md
new file mode 100644
index 000000000..8d9eac308
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/specifying-dependencies.md
@@ -0,0 +1,513 @@
+## Specifying Dependencies
+
+Your crates can depend on other libraries from [crates.io] or other
+registries, `git` repositories, or subdirectories on your local file system.
+You can also temporarily override the location of a dependency --- for example,
+to be able to test out a bug fix in the dependency that you are working on
+locally. You can have different dependencies for different platforms, and
+dependencies that are only used during development. Let's take a look at how
+to do each of these.
+
+### Specifying dependencies from crates.io
+
+Cargo is configured to look for dependencies on [crates.io] by default. Only
+the name and a version string are required in this case. In [the cargo
+guide](../guide/index.md), we specified a dependency on the `time` crate:
+
+```toml
+[dependencies]
+time = "0.1.12"
+```
+
+The string `"0.1.12"` is a version requirement. Although it looks like a
+specific *version* of the `time` crate, it actually specifies a *range* of
+versions and allows [SemVer] compatible updates. An update is allowed if the new
+version number does not modify the left-most non-zero digit in the major, minor,
+patch grouping. In this case, if we ran `cargo update -p time`, cargo should
+update us to version `0.1.13` if it is the latest `0.1.z` release, but would not
+update us to `0.2.0`. If instead we had specified the version string as `1.0`,
+cargo should update to `1.1` if it is the latest `1.y` release, but not `2.0`.
+The version `0.0.x` is not considered compatible with any other version.
+
+[SemVer]: https://semver.org
+
+Here are some more examples of version requirements and the versions that would
+be allowed with them:
+
+```notrust
+1.2.3 := >=1.2.3, <2.0.0
+1.2 := >=1.2.0, <2.0.0
+1 := >=1.0.0, <2.0.0
+0.2.3 := >=0.2.3, <0.3.0
+0.2 := >=0.2.0, <0.3.0
+0.0.3 := >=0.0.3, <0.0.4
+0.0 := >=0.0.0, <0.1.0
+0 := >=0.0.0, <1.0.0
+```
+
+This compatibility convention is different from SemVer in the way it treats
+versions before 1.0.0. While SemVer says there is no compatibility before
+1.0.0, Cargo considers `0.x.y` to be compatible with `0.x.z`, where `y ≥ z`
+and `x > 0`.
+
+It is possible to further tweak the logic for selecting compatible versions
+using special operators, though it shouldn't be necessary most of the time.
+
+### Caret requirements
+
+**Caret requirements** are an alternative syntax for the default strategy;
+`^1.2.3` is exactly equivalent to `1.2.3`.
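+
+Since caret requirements follow the same rules as the default strategy, the
+allowed ranges mirror the table above:
+
+```notrust
+^1.2.3  :=  >=1.2.3, <2.0.0
+^1.2    :=  >=1.2.0, <2.0.0
+^1      :=  >=1.0.0, <2.0.0
+^0.2.3  :=  >=0.2.3, <0.3.0
+^0.2    :=  >=0.2.0, <0.3.0
+^0.0.3  :=  >=0.0.3, <0.0.4
+^0.0    :=  >=0.0.0, <0.1.0
+^0      :=  >=0.0.0, <1.0.0
+```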
+
+### Tilde requirements
+
+**Tilde requirements** specify a minimal version with some ability to update.
+If you specify a major, minor, and patch version or only a major and minor
+version, only patch-level changes are allowed. If you only specify a major
+version, then minor- and patch-level changes are allowed.
+
+`~1.2.3` is an example of a tilde requirement.
+
+```notrust
+~1.2.3 := >=1.2.3, <1.3.0
+~1.2 := >=1.2.0, <1.3.0
+~1 := >=1.0.0, <2.0.0
+```
+
+### Wildcard requirements
+
+**Wildcard requirements** allow for any version where the wildcard is
+positioned.
+
+`*`, `1.*` and `1.2.*` are examples of wildcard requirements.
+
+```notrust
+* := >=0.0.0
+1.* := >=1.0.0, <2.0.0
+1.2.* := >=1.2.0, <1.3.0
+```
+
+> **Note**: [crates.io] does not allow bare `*` versions.
+
+### Comparison requirements
+
+**Comparison requirements** allow manually specifying a version range or an
+exact version to depend on.
+
+Here are some examples of comparison requirements:
+
+```notrust
+>= 1.2.0
+> 1
+< 2
+= 1.2.3
+```
+
+### Multiple requirements
+
+As shown in the examples above, multiple version requirements can be
+separated with a comma, e.g., `>= 1.2, < 1.5`.
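+
+For example, in `Cargo.toml` (with a hypothetical crate name):
+
+```toml
+[dependencies]
+# Accepts any version from 1.2.0 up to, but not including, 1.5.0.
+some-crate = ">= 1.2, < 1.5"
+```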
+
+### Specifying dependencies from other registries
+
+To specify a dependency from a registry other than [crates.io], first the
+registry must be configured in a `.cargo/config.toml` file. See the [registries
+documentation] for more information. In the dependency, set the `registry` key
+to the name of the registry to use.
+
+```toml
+[dependencies]
+some-crate = { version = "1.0", registry = "my-registry" }
+```
+
+> **Note**: [crates.io] does not allow packages to be published with
+> dependencies on other registries.
+
+[registries documentation]: registries.md
+
+### Specifying dependencies from `git` repositories
+
+To depend on a library located in a `git` repository, the minimum information
+you need to specify is the location of the repository with the `git` key:
+
+```toml
+[dependencies]
+regex = { git = "https://github.com/rust-lang/regex.git" }
+```
+
+Cargo will fetch the `git` repository at this location and then look for a
+`Cargo.toml` for the requested crate anywhere inside the `git` repository
+(not necessarily at the root --- for example, specifying a member crate name
+of a workspace and setting `git` to the repository containing the workspace).
+
+Since we haven’t specified any other information, Cargo assumes that
+we intend to use the latest commit on the main branch to build our package.
+You can combine the `git` key with the `rev`, `tag`, or `branch` keys to
+specify something else. Here's an example of specifying that you want to use
+the latest commit on a branch named `next`:
+
+```toml
+[dependencies]
+regex = { git = "https://github.com/rust-lang/regex.git", branch = "next" }
+```
+
+Anything that is not a branch or tag falls under `rev`. This can be a commit
+hash like `rev = "4c59b707"`, or a named reference exposed by the remote
+repository such as `rev = "refs/pull/493/head"`. What references are available
+varies by where the repo is hosted; GitHub in particular exposes a reference to
+the most recent commit of every pull request as shown, but other git hosts often
+provide something equivalent, possibly under a different naming scheme.
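+
+For example, pinning to the commit hash mentioned above:
+
+```toml
+[dependencies]
+regex = { git = "https://github.com/rust-lang/regex.git", rev = "4c59b707" }
+```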
+
+Once a `git` dependency has been added, Cargo will lock that dependency to the
+latest commit at the time. New commits will not be pulled down automatically
+once the lock is in place. However, they can be pulled down manually with
+`cargo update`.
+
+See [Git Authentication] for help with git authentication for private repos.
+
+> **Note**: [crates.io] does not allow packages to be published with `git`
+> dependencies (`git` [dev-dependencies] are ignored). See the [Multiple
+> locations](#multiple-locations) section for a fallback alternative.
+
+[Git Authentication]: ../appendix/git-authentication.md
+
+### Specifying path dependencies
+
+Over time, our `hello_world` package from [the guide](../guide/index.md) has
+grown significantly in size! It’s gotten to the point that we probably want to
+split out a separate crate for others to use. To do this Cargo supports **path
+dependencies** which are typically sub-crates that live within one repository.
+Let’s start off by making a new crate inside of our `hello_world` package:
+
+```console
+# inside of hello_world/
+$ cargo new hello_utils
+```
+
+This will create a new folder `hello_utils` inside of which a `Cargo.toml` and
+`src` folder are ready to be configured. In order to tell Cargo about this, open
+up `hello_world/Cargo.toml` and add `hello_utils` to your dependencies:
+
+```toml
+[dependencies]
+hello_utils = { path = "hello_utils" }
+```
+
+This tells Cargo that we depend on a crate called `hello_utils` which is found
+in the `hello_utils` folder (relative to the `Cargo.toml` it’s written in).
+
+And that’s it! The next `cargo build` will automatically build `hello_utils` and
+all of its own dependencies, and others can also start using the crate as well.
+However, crates that use dependencies specified with only a path are not
+permitted on [crates.io]. If we wanted to publish our `hello_world` crate, we
+would need to publish a version of `hello_utils` to [crates.io]
+and specify its version in the dependencies line as well:
+
+```toml
+[dependencies]
+hello_utils = { path = "hello_utils", version = "0.1.0" }
+```
+
+> **Note**: [crates.io] does not allow packages to be published with `path`
+> dependencies (`path` [dev-dependencies] are ignored). See the [Multiple
+> locations](#multiple-locations) section for a fallback alternative.
+
+### Multiple locations
+
+It is possible to specify both a registry version and a `git` or `path`
+location. The `git` or `path` dependency will be used locally (in which case
+the `version` is checked against the local copy), and when published to a
+registry like [crates.io], it will use the registry version. Other
+combinations are not allowed. Examples:
+
+```toml
+[dependencies]
+# Uses `my-bitflags` when used locally, and uses
+# version 1.0 from crates.io when published.
+bitflags = { path = "my-bitflags", version = "1.0" }
+
+# Uses the given git repo when used locally, and uses
+# version 1.0 from crates.io when published.
+smallvec = { git = "https://github.com/servo/rust-smallvec.git", version = "1.0" }
+
+# N.B. that if a version doesn't match, Cargo will fail to compile!
+```
+
+One example where this can be useful is when you have split up a library into
+multiple packages within the same workspace. You can then use `path`
+dependencies to point to the local packages within the workspace to use the
+local version during development, and then use the [crates.io] version once it
+is published. This is similar to specifying an
+[override](overriding-dependencies.md), but only applies to this one
+dependency declaration.
+
+### Platform specific dependencies
+
+Platform-specific dependencies take the same format, but are listed under a
+`target` section. Normally Rust-like [`#[cfg]`
+syntax](../../reference/conditional-compilation.html) will be used to define
+these sections:
+
+```toml
+[target.'cfg(windows)'.dependencies]
+winhttp = "0.4.0"
+
+[target.'cfg(unix)'.dependencies]
+openssl = "1.0.1"
+
+[target.'cfg(target_arch = "x86")'.dependencies]
+native-i686 = { path = "native/i686" }
+
+[target.'cfg(target_arch = "x86_64")'.dependencies]
+native-x86_64 = { path = "native/x86_64" }
+```
+
+Like with Rust, the syntax here supports the `not`, `any`, and `all` operators
+to combine various cfg name/value pairs.
+
+If you want to know which cfg targets are available on your platform, run
+`rustc --print=cfg` from the command line. If you want to know which `cfg`
+targets are available for another platform, such as 64-bit Windows,
+run `rustc --print=cfg --target=x86_64-pc-windows-msvc`.
+
+Unlike in your Rust source code, you cannot use
+`[target.'cfg(feature = "fancy-feature")'.dependencies]` to add dependencies
+based on optional features. Use [the `[features]` section](features.md)
+instead:
+
+```toml
+[dependencies]
+foo = { version = "1.0", optional = true }
+bar = { version = "1.0", optional = true }
+
+[features]
+fancy-feature = ["foo", "bar"]
+```
+
+The same applies to `cfg(debug_assertions)`, `cfg(test)` and `cfg(proc_macro)`.
+These values will not work as expected and will always have the default value
+returned by `rustc --print=cfg`.
+There is currently no way to add dependencies based on these configuration values.
+
+In addition to `#[cfg]` syntax, Cargo also supports listing out the full target
+the dependencies would apply to:
+
+```toml
+[target.x86_64-pc-windows-gnu.dependencies]
+winhttp = "0.4.0"
+
+[target.i686-unknown-linux-gnu.dependencies]
+openssl = "1.0.1"
+```
+
+#### Custom target specifications
+
+If you’re using a custom target specification (such as `--target
+foo/bar.json`), use the base filename without the `.json` extension:
+
+```toml
+[target.bar.dependencies]
+winhttp = "0.4.0"
+
+[target.my-special-i686-platform.dependencies]
+openssl = "1.0.1"
+native = { path = "native/i686" }
+```
+
+> **Note**: Custom target specifications are not usable on the stable channel.
+
+### Development dependencies
+
+You can add a `[dev-dependencies]` section to your `Cargo.toml` whose format
+is equivalent to `[dependencies]`:
+
+```toml
+[dev-dependencies]
+tempdir = "0.3"
+```
+
+Dev-dependencies are not used when compiling the package itself for a normal
+build, but are used for compiling tests, examples, and benchmarks.
+
+These dependencies are *not* propagated to other packages which depend on this
+package.
+
+You can also have target-specific development dependencies by using
+`dev-dependencies` in the target section header instead of `dependencies`. For
+example:
+
+```toml
+[target.'cfg(unix)'.dev-dependencies]
+mio = "0.0.1"
+```
+
+> **Note**: When a package is published, only dev-dependencies that specify a
+> `version` will be included in the published crate. For most use cases,
+> dev-dependencies are not needed when published, though some users (like OS
+> packagers) may want to run tests within a crate, so providing a `version` if
+> possible can still be beneficial.
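+
+For instance, a path dev-dependency that also carries a `version` (the crate
+name and version here are hypothetical) will be retained when the package is
+published:
+
+```toml
+[dev-dependencies]
+# The `path` is used locally; the `version` is what appears in the published crate.
+test-helpers = { path = "../test-helpers", version = "0.1.0" }
+```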
+
+### Build dependencies
+
+You can depend on other Cargo-based crates for use in your build scripts.
+Dependencies are declared through the `build-dependencies` section of the
+manifest:
+
+```toml
+[build-dependencies]
+cc = "1.0.3"
+```
+
+You can also have target-specific build dependencies by using
+`build-dependencies` in the target section header instead of `dependencies`. For
+example:
+
+```toml
+[target.'cfg(unix)'.build-dependencies]
+cc = "1.0.3"
+```
+
+In this case, the dependency will only be built when the host platform matches the
+specified target.
+
+The build script **does not** have access to the dependencies listed
+in the `dependencies` or `dev-dependencies` section. Build
+dependencies will likewise not be available to the package itself
+unless listed under the `dependencies` section as well. A package
+itself and its build script are built separately, so their
+dependencies need not coincide. Cargo is kept simpler and cleaner by
+using independent dependencies for independent purposes.
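+
+As a sketch (the helper crate named here is hypothetical), a crate needed both
+by the build script and by the package's own code must be listed in both
+tables:
+
+```toml
+[build-dependencies]
+# Available to build.rs only.
+shared-helper = "0.1"
+
+[dependencies]
+# Listed again so the package's own code can use it too.
+shared-helper = "0.1"
+```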
+
+### Choosing features
+
+If a package you depend on offers conditional features, you can
+specify which to use:
+
+```toml
+[dependencies.awesome]
+version = "1.3.5"
+default-features = false # do not include the default features, and optionally
+ # cherry-pick individual features
+features = ["secure-password", "civet"]
+```
+
+More information about features can be found in the [features
+chapter](features.md#dependency-features).
+
+### Renaming dependencies in `Cargo.toml`
+
+When writing a `[dependencies]` section in `Cargo.toml` the key you write for a
+dependency typically matches up to the name of the crate you import from in the
+code. For some projects, though, you may wish to reference the crate with a
+different name in the code regardless of how it's published on crates.io. For
+example you may wish to:
+
+* Avoid the need to `use foo as bar` in Rust source.
+* Depend on multiple versions of a crate.
+* Depend on crates with the same name from different registries.
+
+To support this, Cargo allows you to specify a `package` key in the
+`[dependencies]` section to indicate which package should be depended on:
+
+```toml
+[package]
+name = "mypackage"
+version = "0.0.1"
+
+[dependencies]
+foo = "0.1"
+bar = { git = "https://github.com/example/project.git", package = "foo" }
+baz = { version = "0.1", registry = "custom", package = "foo" }
+```
+
+In this example, three crates are now available in your Rust code:
+
+```rust,ignore
+extern crate foo; // crates.io
+extern crate bar; // git repository
+extern crate baz; // registry `custom`
+```
+
+All three of these crates have the package name of `foo` in their own
+`Cargo.toml`, so we're explicitly using the `package` key to inform Cargo that
+we want the `foo` package even though we're calling it something else locally.
+The `package` key, if not specified, defaults to the name of the dependency
+being requested.
+
+Note that if you have an optional dependency like:
+
+```toml
+[dependencies]
+bar = { version = "0.1", package = 'foo', optional = true }
+```
+
+you're depending on the crate `foo` from crates.io, but your crate has a `bar`
+feature instead of a `foo` feature. That is, names of features take after the
+name of the dependency, not the package name, when renamed.
+
+Enabling features of a renamed dependency works similarly. For example, we
+could add the following to the above manifest:
+
+```toml
+[features]
+log-debug = ['bar/log-debug'] # using 'foo/log-debug' would be an error!
+```
+
+### Inheriting a dependency from a workspace
+
+Dependencies can be inherited from a workspace by specifying the
+dependency in the workspace's [`[workspace.dependencies]`][workspace.dependencies] table.
+After that, add it to the `[dependencies]` table with `workspace = true`.
+
+Along with the `workspace` key, dependencies can also include these keys:
+- [`optional`][optional]: Note that the `[workspace.dependencies]` table is not allowed to specify `optional`.
+- [`features`][features]: These are additive with the features declared in `[workspace.dependencies]`.
+
+Other than `optional` and `features`, inherited dependencies cannot use any other
+dependency key (such as `version` or `default-features`).
+
+Dependencies in the `[dependencies]`, `[dev-dependencies]`, `[build-dependencies]`, and
+`[target."...".dependencies]` sections support the ability to reference the
+`[workspace.dependencies]` definition of dependencies.
+
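+For illustration, a workspace root `Cargo.toml` defining these shared
+dependencies might look like the following (the versions shown are
+illustrative):
+
+```toml
+# Cargo.toml at the workspace root
+[workspace]
+members = ["bar"]
+
+[workspace.dependencies]
+cc = "1.0.73"
+rand = "0.8.5"
+regex = { version = "1.6.0", default-features = false, features = ["std"] }
+```
+
+A member package can then inherit them like this:
+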
+```toml
+[package]
+name = "bar"
+version = "0.2.0"
+
+[dependencies]
+regex = { workspace = true, features = ["unicode"] }
+
+[build-dependencies]
+cc.workspace = true
+
+[dev-dependencies]
+rand = { workspace = true, optional = true }
+```
+
+
+[crates.io]: https://crates.io/
+[dev-dependencies]: #development-dependencies
+[workspace.dependencies]: workspaces.md#the-dependencies-table
+[optional]: features.md#optional-dependencies
+[features]: features.md
+
+<script>
+(function() {
+ var fragments = {
+ "#overriding-dependencies": "overriding-dependencies.html",
+ "#testing-a-bugfix": "overriding-dependencies.html#testing-a-bugfix",
+ "#working-with-an-unpublished-minor-version": "overriding-dependencies.html#working-with-an-unpublished-minor-version",
+ "#overriding-repository-url": "overriding-dependencies.html#overriding-repository-url",
+ "#prepublishing-a-breaking-change": "overriding-dependencies.html#prepublishing-a-breaking-change",
+ "#overriding-with-local-dependencies": "overriding-dependencies.html#paths-overrides",
+ };
+ var target = fragments[window.location.hash];
+ if (target) {
+ var url = window.location.toString();
+ var base = url.substring(0, url.lastIndexOf('/'));
+ window.location.replace(base + "/" + target);
+ }
+})();
+</script>
diff --git a/src/tools/cargo/src/doc/src/reference/timings.md b/src/tools/cargo/src/doc/src/reference/timings.md
new file mode 100644
index 000000000..b978d52cd
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/timings.md
@@ -0,0 +1,66 @@
+# Reporting build timings
+The `--timings` option gives some information about how long each compilation
+takes, and tracks concurrency information over time.
+
+```sh
+cargo build --timings
+```
+
+This writes an HTML report in `target/cargo-timings/cargo-timing.html`. This
+also writes a copy of the report to the same directory with a timestamp in the
+filename, if you want to look at older runs.
+
+#### Reading the graphs
+
+There are two tables and two graphs in the output.
+
+The first table displays the build information of the project, including the
+number of units built, the maximum concurrency, the total build time, and the
+version of the compiler that was used.
+
+![build-info](../images/build-info.png)
+
+The "unit" graph shows the duration of each unit over time. A "unit" is a single
+compiler invocation. There are lines that show which additional units are
+"unlocked" when a unit finishes. That is, it shows the new units that are now
+allowed to run because their dependencies are all finished. Hover the mouse over
+a unit to highlight the lines. This can help visualize the critical path of
+dependencies. This may change between runs because the units may finish in
+different orders.
+
+The "codegen" times are highlighted in a lavender color. In some cases, build
+pipelining allows units to start when their dependencies are performing code
+generation. This information is not always displayed (for example, binary
+units do not show when code generation starts).
+
+The "custom build" units are `build.rs` scripts, which when run are
+highlighted in orange.
+
+![build-unit-time](../images/build-unit-time.png)
+
+The second graph shows Cargo's concurrency over time. The background
+indicates CPU usage. The three lines are:
+- "Waiting" (red) --- This is the number of units waiting for a CPU slot to
+ open.
+- "Inactive" (blue) --- This is the number of units that are waiting for their
+ dependencies to finish.
+- "Active" (green) --- This is the number of units currently running.
+
+![cargo-concurrency-over-time](../images/cargo-concurrency-over-time.png)
+
+Note: This does not show the concurrency in the compiler itself. `rustc`
+coordinates with Cargo via the "job server" to stay within the concurrency
+limit. This currently mostly applies to the code generation phase.
+
+Tips for addressing compile times:
+- Look for slow dependencies.
+ - Check if they have features that you may wish to consider disabling.
+ - Consider trying to remove the dependency completely.
+- Look for a crate being built multiple times with different versions. Try to
+ remove the older versions from the dependency graph.
+- Split large crates into smaller pieces.
+- If there are a large number of crates bottlenecked on a single crate, focus
+ your attention on improving that one crate to improve parallelism.
+
+The last table lists the total time and "codegen" time spent on each unit,
+as well as the features that were enabled during each unit's compilation.
diff --git a/src/tools/cargo/src/doc/src/reference/unstable.md b/src/tools/cargo/src/doc/src/reference/unstable.md
new file mode 100644
index 000000000..accd45d8e
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/unstable.md
@@ -0,0 +1,1473 @@
+## Unstable Features
+
+Experimental Cargo features are only available on the [nightly channel]. You
+are encouraged to experiment with these features to see if they meet your
+needs, and if there are any issues or problems. Check the linked tracking
+issues listed below for more information on the feature, and click the GitHub
+subscribe button if you want future updates.
+
+After some period of time, if the feature does not have any major concerns, it
+can be [stabilized], which will make it available on stable once the current
+nightly release reaches the stable channel (anywhere from 6 to 12 weeks).
+
+There are three different ways that unstable features can be enabled based on
+how the feature works:
+
+* New syntax in `Cargo.toml` requires a `cargo-features` key at the top of
+ `Cargo.toml`, before any tables. For example:
+
+ ```toml
+ # This specifies which new Cargo.toml features are enabled.
+ cargo-features = ["test-dummy-unstable"]
+
+ [package]
+ name = "my-package"
+ version = "0.1.0"
+ im-a-teapot = true # This is a new option enabled by test-dummy-unstable.
+ ```
+
+* New command-line flags, options, and subcommands require the `-Z
+ unstable-options` CLI option to also be included. For example, the new
+ `--out-dir` option is only available on nightly:
+
+ ```cargo +nightly build --out-dir=out -Z unstable-options```
+
+* `-Z` command-line flags are used to enable new functionality that may not
+ have an interface, or the interface has not yet been designed, or for more
+ complex features that affect multiple parts of Cargo. For example, the
+ [mtime-on-use](#mtime-on-use) feature can be enabled with:
+
+ ```cargo +nightly build -Z mtime-on-use```
+
+ Run `cargo -Z help` to see a list of flags available.
+
+ Anything which can be configured with a `-Z` flag can also be set in the
+ cargo [config file] (`.cargo/config.toml`) in the `unstable` table. For
+ example:
+
+ ```toml
+ [unstable]
+ mtime-on-use = true
+ build-std = ["core", "alloc"]
+ ```
+
+Each new feature described below should explain how to use it.
+
+[config file]: config.md
+[nightly channel]: ../../book/appendix-07-nightly-rust.html
+[stabilized]: https://doc.crates.io/contrib/process/unstable.html#stabilization
+
+### List of unstable features
+
+* Unstable-specific features
+ * [-Z allow-features](#allow-features) --- Provides a way to restrict which unstable features are used.
+* Build scripts and linking
+ * [Metabuild](#metabuild) --- Provides declarative build scripts.
+* Resolver and features
+ * [no-index-update](#no-index-update) --- Prevents cargo from updating the index cache.
+ * [avoid-dev-deps](#avoid-dev-deps) --- Prevents the resolver from including dev-dependencies during resolution.
+ * [minimal-versions](#minimal-versions) --- Forces the resolver to use the lowest compatible version instead of the highest.
+  * [direct-minimal-versions](#direct-minimal-versions) --- Forces the resolver to use the lowest compatible version of direct dependencies instead of the highest.
+ * [public-dependency](#public-dependency) --- Allows dependencies to be classified as either public or private.
+* Output behavior
+ * [out-dir](#out-dir) --- Adds a directory where artifacts are copied to.
+ * [Different binary name](#different-binary-name) --- Assign a name to the built binary that is separate from the crate name.
+* Compile behavior
+ * [mtime-on-use](#mtime-on-use) --- Updates the last-modified timestamp on every dependency every time it is used, to provide a mechanism to delete unused artifacts.
+ * [doctest-xcompile](#doctest-xcompile) --- Supports running doctests with the `--target` flag.
+ * [build-std](#build-std) --- Builds the standard library instead of using pre-built binaries.
+ * [build-std-features](#build-std-features) --- Sets features to use with the standard library.
+ * [binary-dep-depinfo](#binary-dep-depinfo) --- Causes the dep-info file to track binary dependencies.
+ * [panic-abort-tests](#panic-abort-tests) --- Allows running tests with the "abort" panic strategy.
+ * [keep-going](#keep-going) --- Build as much as possible rather than aborting on the first error.
+ * [check-cfg](#check-cfg) --- Compile-time validation of `cfg` expressions.
+ * [host-config](#host-config) --- Allows setting `[target]`-like configuration settings for host build targets.
+ * [target-applies-to-host](#target-applies-to-host) --- Alters whether certain flags will be passed to host build targets.
+* rustdoc
+ * [`doctest-in-workspace`](#doctest-in-workspace) --- Fixes workspace-relative paths when running doctests.
+ * [rustdoc-map](#rustdoc-map) --- Provides mappings for documentation to link to external sites like [docs.rs](https://docs.rs/).
+ * [scrape-examples](#scrape-examples) --- Shows examples within documentation.
+* `Cargo.toml` extensions
+ * [Profile `rustflags` option](#profile-rustflags-option) --- Passed directly to rustc.
+ * [codegen-backend](#codegen-backend) --- Select the codegen backend used by rustc.
+ * [per-package-target](#per-package-target) --- Sets the `--target` to use for each individual package.
+ * [artifact dependencies](#artifact-dependencies) --- Allow build artifacts to be included into other build artifacts and build them for different targets.
+* Information and metadata
+ * [Build-plan](#build-plan) --- Emits JSON information on which commands will be run.
+ * [unit-graph](#unit-graph) --- Emits JSON for Cargo's internal graph structure.
+ * [`cargo rustc --print`](#rustc---print) --- Calls rustc with `--print` to display information from rustc.
+* Configuration
+ * [config-include](#config-include) --- Adds the ability for config files to include other files.
+ * [`cargo config`](#cargo-config) --- Adds a new subcommand for viewing config files.
+* Registries
+ * [credential-process](#credential-process) --- Adds support for fetching registry tokens from an external authentication program.
+  * [publish-timeout](#publish-timeout) --- Controls the timeout between uploading the crate and it being available in the index.
+  * [registry-auth](#registry-auth) --- Adds support for authenticated registries, and generates registry authentication tokens using asymmetric cryptography.
+* Other
+ * [gitoxide](#gitoxide) --- Use `gitoxide` instead of `git2` for a set of operations.
+
+### allow-features
+
+This permanently-unstable flag makes it so that only a listed set of
+unstable features can be used. Specifically, if you pass
+`-Zallow-features=foo,bar`, you'll continue to be able to pass `-Zfoo`
+and `-Zbar` to `cargo`, but you will be unable to pass `-Zbaz`. You can
+pass an empty string (`-Zallow-features=`) to disallow all unstable
+features.
+
+`-Zallow-features` also restricts which unstable features can be passed
+to the `cargo-features` entry in `Cargo.toml`. If, for example, you want
+to allow
+
+```toml
+cargo-features = ["test-dummy-unstable"]
+```
+
+where `test-dummy-unstable` is unstable, that feature would also be
+disallowed by `-Zallow-features=`, and allowed with
+`-Zallow-features=test-dummy-unstable`.
+
+The list of features passed to cargo's `-Zallow-features` is also passed
+to any Rust tools that cargo ends up calling (like `rustc` or
+`rustdoc`). Thus, if you run `cargo -Zallow-features=`, no unstable
+Cargo _or_ Rust features can be used.
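+
+Assuming the general `[unstable]` table mechanism described above applies to
+this flag as well, it could also be set in `.cargo/config.toml` (the feature
+name listed is illustrative):
+
+```toml
+[unstable]
+allow-features = ["test-dummy-unstable"]
+```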
+
+### no-index-update
+* Original Issue: [#3479](https://github.com/rust-lang/cargo/issues/3479)
+* Tracking Issue: [#7404](https://github.com/rust-lang/cargo/issues/7404)
+
+The `-Z no-index-update` flag ensures that Cargo does not attempt to update
+the registry index. This is intended for tools such as Crater that issue many
+Cargo commands, where you want to avoid the network latency of updating the
+index each time.
+
+### mtime-on-use
+* Original Issue: [#6477](https://github.com/rust-lang/cargo/pull/6477)
+* Cache usage meta tracking issue: [#7150](https://github.com/rust-lang/cargo/issues/7150)
+
+The `-Z mtime-on-use` flag is an experiment to have Cargo update the mtime of
+used files to make it easier for tools like cargo-sweep to detect which files
+are stale. For many workflows this needs to be set on *all* invocations of cargo.
+To make this more practical, setting the `unstable.mtime-on-use` flag in
+`.cargo/config.toml`, or the corresponding environment variable, will apply
+`-Z mtime-on-use` to all invocations of nightly cargo (the config flag is
+ignored by stable cargo).
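+
+A minimal sketch of that configuration:
+
+```toml
+# .cargo/config.toml
+[unstable]
+mtime-on-use = true
+```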
+
+### avoid-dev-deps
+* Original Issue: [#4988](https://github.com/rust-lang/cargo/issues/4988)
+* Tracking Issue: [#5133](https://github.com/rust-lang/cargo/issues/5133)
+
+When running commands such as `cargo install` or `cargo build`, Cargo
+currently requires dev-dependencies to be downloaded, even if they are not
+used. The `-Z avoid-dev-deps` flag allows Cargo to avoid downloading
+dev-dependencies if they are not needed. The `Cargo.lock` file will not be
+generated if dev-dependencies are skipped.
+
+### minimal-versions
+* Original Issue: [#4100](https://github.com/rust-lang/cargo/issues/4100)
+* Tracking Issue: [#5657](https://github.com/rust-lang/cargo/issues/5657)
+
+> Note: It is not recommended to use this feature. Because it enforces minimal
+> versions for all transitive dependencies, its usefulness is limited since
+> not all external dependencies declare proper lower version bounds. It is
+> intended that it will be changed in the future to only enforce minimal
+> versions for direct dependencies.
+
+When a `Cargo.lock` file is generated, the `-Z minimal-versions` flag will
+resolve the dependencies to the minimum SemVer version that will satisfy the
+requirements (instead of the greatest version).
+
+The intended use-case of this flag is to check, during continuous integration,
+that the versions specified in Cargo.toml are a correct reflection of the
+minimum versions that you are actually using. That is, if Cargo.toml says
+`foo = "1.0.0"`, it checks that you don't accidentally depend on features added
+only in `foo 1.5.0`.
+
+### direct-minimal-versions
+* Original Issue: [#4100](https://github.com/rust-lang/cargo/issues/4100)
+* Tracking Issue: [#5657](https://github.com/rust-lang/cargo/issues/5657)
+
+When a `Cargo.lock` file is generated, the `-Z direct-minimal-versions` flag will
+resolve the dependencies to the minimum SemVer version that will satisfy the
+requirements (instead of the greatest version) for direct dependencies only.
+
+The intended use-case of this flag is to check, during continuous integration,
+that the versions specified in Cargo.toml are a correct reflection of the
+minimum versions that you are actually using. That is, if Cargo.toml says
+`foo = "1.0.0"`, it checks that you don't accidentally depend on features added
+only in `foo 1.5.0`.
+
+Indirect dependencies are resolved as normal so as not to be blocked on their
+minimal version validation.
+
+### out-dir
+* Original Issue: [#4875](https://github.com/rust-lang/cargo/issues/4875)
+* Tracking Issue: [#6790](https://github.com/rust-lang/cargo/issues/6790)
+
+This feature allows you to specify the directory where artifacts will be
+copied to after they are built. Typically artifacts are only written to the
+`target/release` or `target/debug` directories. However, determining the
+exact filename can be tricky since you need to parse JSON output. The
+`--out-dir` flag makes it easier to predictably access the artifacts. Note
+that the artifacts are copied, so the originals are still in the `target`
+directory. Example:
+
+```sh
+cargo +nightly build --out-dir=out -Z unstable-options
+```
+
+This can also be specified in `.cargo/config.toml` files.
+
+```toml
+[build]
+out-dir = "out"
+```
+
+### doctest-xcompile
+* Tracking Issue: [#7040](https://github.com/rust-lang/cargo/issues/7040)
+* Tracking Rustc Issue: [#64245](https://github.com/rust-lang/rust/issues/64245)
+
+This flag changes `cargo test`'s behavior when handling doctests when
+a target is passed. Currently, if a target is passed that is different
+from the host, cargo will simply skip testing doctests. If this flag is
+present, cargo will continue as normal, passing the tests to doctest,
+while also passing it a `--target` option, as well as enabling
+`-Zunstable-features --enable-per-target-ignores` and passing along
+information from `.cargo/config.toml`. See the rustc issue for more information.
+
+```sh
+cargo test --target foo -Zdoctest-xcompile
+```
+
+### Build-plan
+* Tracking Issue: [#5579](https://github.com/rust-lang/cargo/issues/5579)
+
+The `--build-plan` argument for the `build` command will output JSON with
+information about which commands would be run without actually executing
+anything. This can be useful when integrating with another build tool.
+Example:
+
+```sh
+cargo +nightly build --build-plan -Z unstable-options
+```
+
+### Metabuild
+* Tracking Issue: [rust-lang/rust#49803](https://github.com/rust-lang/rust/issues/49803)
+* RFC: [#2196](https://github.com/rust-lang/rfcs/blob/master/text/2196-metabuild.md)
+
+Metabuild is a feature to have declarative build scripts. Instead of writing
+a `build.rs` script, you specify a list of build dependencies in the
+`metabuild` key in `Cargo.toml`. A build script is automatically generated
+that runs each build dependency in order. Metabuild packages can then read
+metadata from `Cargo.toml` to specify their behavior.
+
+Include `cargo-features` at the top of `Cargo.toml`, a `metabuild` key in the
+`package`, list the dependencies in `build-dependencies`, and add any metadata
+that the metabuild packages require under `package.metadata`. Example:
+
+```toml
+cargo-features = ["metabuild"]
+
+[package]
+name = "mypackage"
+version = "0.0.1"
+metabuild = ["foo", "bar"]
+
+[build-dependencies]
+foo = "1.0"
+bar = "1.0"
+
+[package.metadata.foo]
+extra-info = "qwerty"
+```
+
+Metabuild packages should have a public function called `metabuild` that
+performs the same actions as a regular `build.rs` script would perform.
+
+### public-dependency
+* Tracking Issue: [#44663](https://github.com/rust-lang/rust/issues/44663)
+
+The 'public-dependency' feature allows marking dependencies as 'public'
+or 'private'. When this feature is enabled, additional information is passed to rustc to allow
+the 'exported_private_dependencies' lint to function properly.
+
+This requires the appropriate key to be set in `cargo-features`:
+
+```toml
+cargo-features = ["public-dependency"]
+
+[dependencies]
+my_dep = { version = "1.2.3", public = true }
+private_dep = "2.0.0" # Will be 'private' by default
+```
+
+### build-std
+* Tracking Repository: <https://github.com/rust-lang/wg-cargo-std-aware>
+
+The `build-std` feature enables Cargo to compile the standard library itself as
+part of a crate graph compilation. This feature has also historically been known
+as "std-aware Cargo". This feature is still in very early stages of development,
+and is also a possible massive feature addition to Cargo. This is a very large
+feature to document, even in the minimal form that it exists in today, so if
+you're curious to stay up to date you'll want to follow the [tracking
+repository](https://github.com/rust-lang/wg-cargo-std-aware) and its set of
+issues.
+
+The functionality implemented today is behind a flag called `-Z build-std`. This
+flag indicates that Cargo should compile the standard library from source code
+using the same profile as the main build itself. Note that for this to work you
+need to have the source code for the standard library available, and at this
+time the only supported method of doing so is to add the `rust-src` rustup
+component:
+
+```console
+$ rustup component add rust-src --toolchain nightly
+```
+
+It is also required today that the `-Z build-std` flag is combined with the
+`--target` flag. Note that you're not forced to do a cross compilation; you're
+just required to pass `--target` in one form or another.
+
+Usage looks like:
+
+```console
+$ cargo new foo
+$ cd foo
+$ cargo +nightly run -Z build-std --target x86_64-unknown-linux-gnu
+ Compiling core v0.0.0 (...)
+ ...
+ Compiling foo v0.1.0 (...)
+ Finished dev [unoptimized + debuginfo] target(s) in 21.00s
+ Running `target/x86_64-unknown-linux-gnu/debug/foo`
+Hello, world!
+```
+
+Here we recompiled the standard library in debug mode with debug assertions
+(like `src/main.rs` is compiled) and everything was linked together at the end.
+
+Using `-Z build-std` will implicitly compile the stable crates `core`, `std`,
+`alloc`, and `proc_macro`. If you're using `cargo test` it will also compile the
+`test` crate. If you're working with an environment which does not support some
+of these crates, then you can pass an argument to `-Zbuild-std` as well:
+
+```console
+$ cargo +nightly build -Z build-std=core,alloc
+```
+
+The value here is a comma-separated list of standard library crates to build.
+
+#### Requirements
+
+In summary, the requirements today to use `-Z build-std` are:
+
+* You must install libstd's source code through `rustup component add rust-src`
+* You must pass `--target`
+* You must use both a nightly Cargo and a nightly rustc
+* The `-Z build-std` flag must be passed to all `cargo` invocations.
+
+#### Reporting bugs and helping out
+
+The `-Z build-std` feature is in the very early stages of development! This
+feature for Cargo has an extremely long history and is very large in scope, and
+this is just the beginning. If you'd like to report bugs please either report
+them to:
+
+* Cargo --- <https://github.com/rust-lang/cargo/issues/new> --- for implementation bugs
+* The tracking repository ---
+ <https://github.com/rust-lang/wg-cargo-std-aware/issues/new> --- for larger design
+ questions.
+
+Also if you'd like to see a feature that's not yet implemented and/or if
+something doesn't quite work the way you'd like it to, feel free to check out
+the [issue tracker](https://github.com/rust-lang/wg-cargo-std-aware/issues) of
+the tracking repository, and if it's not there please file a new issue!
+
+### build-std-features
+* Tracking Repository: <https://github.com/rust-lang/wg-cargo-std-aware>
+
+This flag is a sibling to the `-Zbuild-std` feature flag. This will configure
+the features enabled for the standard library itself when building the standard
+library. The default enabled features, at this time, are `backtrace` and
+`panic_unwind`. This flag expects a comma-separated list and, if provided, will
+override the default list of features enabled.
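+
+As with other `-Z` flags, this can be given on the command line or, as a
+sketch, via the `[unstable]` table (the feature names shown are the defaults
+mentioned above, restated explicitly):
+
+```toml
+[unstable]
+build-std = ["std"]
+build-std-features = ["panic_unwind", "backtrace"]
+```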
+
+### binary-dep-depinfo
+* Tracking rustc issue: [#63012](https://github.com/rust-lang/rust/issues/63012)
+
+The `-Z binary-dep-depinfo` flag causes Cargo to forward the same flag to
+`rustc` which will then cause `rustc` to include the paths of all binary
+dependencies in the "dep info" file (with the `.d` extension). Cargo then uses
+that information for change-detection (if any binary dependency changes, then
+the crate will be rebuilt). The primary use case is for building the compiler
+itself, which has implicit dependencies on the standard library that would
+otherwise be untracked for change-detection.
+
+### panic-abort-tests
+* Tracking Issue: [#67650](https://github.com/rust-lang/rust/issues/67650)
+* Original Pull Request: [#7460](https://github.com/rust-lang/cargo/pull/7460)
+
+The `-Z panic-abort-tests` flag will enable nightly support to compile test
+harness crates with `-Cpanic=abort`. Without this flag Cargo will compile tests,
+and everything they depend on, with `-Cpanic=unwind` because it's the only way
+the `test` crate knows how to operate. As of [rust-lang/rust#64158], however,
+the `test` crate supports `-C panic=abort` with a test-per-process, and can help
+avoid compiling crate graphs multiple times.
+
+It's currently unclear how this feature will be stabilized in Cargo, but we'd
+like to stabilize it somehow!
+
+[rust-lang/rust#64158]: https://github.com/rust-lang/rust/pull/64158
+
+### keep-going
+* Tracking Issue: [#10496](https://github.com/rust-lang/cargo/issues/10496)
+
+`cargo build --keep-going` (and similarly for `check`, `test` etc) will build as
+many crates in the dependency graph as possible, rather than aborting the build
+at the first one that fails to build.
+
+For example if the current package depends on dependencies `fails` and `works`,
+one of which fails to build, `cargo check -j1` may or may not build the one that
+succeeds (depending on which one of the two builds Cargo picked to run first),
+whereas `cargo check -j1 --keep-going` would definitely run both builds, even if
+the one run first fails.
+
+The `-Z unstable-options` command-line option must be used in order to use
+`--keep-going` while it is not yet stable:
+
+```console
+cargo check --keep-going -Z unstable-options
+```
+
+### config-include
+* Tracking Issue: [#7723](https://github.com/rust-lang/cargo/issues/7723)
+
+The `include` key in a config file can be used to load another config file. It
+takes a string for a path to another file relative to the config file, or a
+list of strings. It requires the `-Zconfig-include` command-line option.
+
+```toml
+# .cargo/config
+include = '../../some-common-config.toml'
+```
+
+The config values are first loaded from the include path, and then the config
+file's own values are merged on top of it.
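+
+The key may also be given as a list, in which case each listed file is loaded
+(the file names here are illustrative):
+
+```toml
+# .cargo/config
+include = ['one-config.toml', 'another-config.toml']
+```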
+
+This can be paired with [config-cli](#config-cli) to specify a file to load
+from the command-line. Pass a path to a config file as the argument to
+`--config`:
+
+```console
+cargo +nightly -Zunstable-options -Zconfig-include --config somefile.toml build
+```
+
+CLI paths are relative to the current working directory.
+
+### target-applies-to-host
+* Original Pull Request: [#9322](https://github.com/rust-lang/cargo/pull/9322)
+* Tracking Issue: [#9453](https://github.com/rust-lang/cargo/issues/9453)
+
+Historically, Cargo's behavior for whether the `linker` and `rustflags`
+configuration options from environment variables and
+[`[target]`](config.md#target) are respected for build scripts, plugins,
+and other artifacts that are _always_ built for the host platform has
+been somewhat inconsistent.
+When `--target` is _not_ passed, Cargo respects the same `linker` and
+`rustflags` for build scripts as for all other compile artifacts. When
+`--target` _is_ passed, however, Cargo respects `linker` from
+[`[target.<host triple>]`](config.md#targettriplelinker), and does not
+pick up any `rustflags` configuration.
+This dual behavior is confusing, but also makes it difficult to correctly
+configure builds where the host triple and the [target triple] happen to
+be the same, but artifacts intended to run on the build host should still
+be configured differently.
+
+`-Ztarget-applies-to-host` enables the top-level
+`target-applies-to-host` setting in Cargo configuration files which
+allows users to opt into different (and more consistent) behavior for
+these properties. When `target-applies-to-host` is unset, or set to
+`true`, in the configuration file, the existing Cargo behavior is
+preserved (though see `-Zhost-config`, which changes that default). When
+it is set to `false`, no options from `[target.<host triple>]`,
+`RUSTFLAGS`, or `[build]` are respected for host artifacts regardless of
+whether `--target` is passed to Cargo. To customize artifacts intended
+to be run on the host, use `[host]` ([`host-config`](#host-config)).
+
+In the future, `target-applies-to-host` may end up defaulting to `false`
+to provide more sane and consistent default behavior.
+
+```toml
+# config.toml
+target-applies-to-host = false
+```
+
+```console
+cargo +nightly -Ztarget-applies-to-host build --target x86_64-unknown-linux-gnu
+```
+
+### host-config
+* Original Pull Request: [#9322](https://github.com/rust-lang/cargo/pull/9322)
+* Tracking Issue: [#9452](https://github.com/rust-lang/cargo/issues/9452)
+
+The `host` key in a config file can be used to pass flags to host build targets
+such as build scripts that must run on the host system instead of the target
+system when cross compiling. It supports both generic and host arch specific
+tables. Matching host arch tables take precedence over generic host tables.
+
+It requires the `-Zhost-config` and `-Ztarget-applies-to-host`
+command-line options to be set, and that `target-applies-to-host =
+false` is set in the Cargo configuration file.
+
+```toml
+# config.toml
+[host]
+linker = "/path/to/host/linker"
+[host.x86_64-unknown-linux-gnu]
+linker = "/path/to/host/arch/linker"
+rustflags = ["-Clink-arg=--verbose"]
+[target.x86_64-unknown-linux-gnu]
+linker = "/path/to/target/linker"
+```
+
+The generic `host` table above will be entirely ignored when building on a
+`x86_64-unknown-linux-gnu` host as the `host.x86_64-unknown-linux-gnu` table
+takes precedence.
+
+Setting `-Zhost-config` changes the default for `target-applies-to-host` to
+`false` from `true`.
+
+```console
+cargo +nightly -Ztarget-applies-to-host -Zhost-config build --target x86_64-unknown-linux-gnu
+```
+
+### unit-graph
+* Tracking Issue: [#8002](https://github.com/rust-lang/cargo/issues/8002)
+
+The `--unit-graph` flag can be passed to any build command (`build`, `check`,
+`run`, `test`, `bench`, `doc`, etc.) to emit a JSON object to stdout which
+represents Cargo's internal unit graph. Nothing is actually built, and the
+command returns immediately after printing. Each "unit" corresponds to an
+execution of the compiler. These objects also include which unit each unit
+depends on.
+
+```
+cargo +nightly build --unit-graph -Z unstable-options
+```
+
+This structure provides a more complete view of the dependency relationship as
+Cargo sees it. In particular, the "features" field supports the new feature
+resolver where a dependency can be built multiple times with different
+features. `cargo metadata` fundamentally cannot represent the relationship of
+features between different dependency kinds, and features now depend on which
+command is run and which packages and targets are selected. Additionally it
+can provide details about intra-package dependencies like build scripts or
+tests.
+
+The following is a description of the JSON structure:
+
+```javascript
+{
+ /* Version of the JSON output structure. If any backwards incompatible
+ changes are made, this value will be increased.
+ */
+ "version": 1,
+ /* Array of all build units. */
+ "units": [
+ {
+ /* An opaque string which indicates the package.
+ Information about the package can be obtained from `cargo metadata`.
+ */
+ "pkg_id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+ /* The Cargo target. See the `cargo metadata` documentation for more
+ information about these fields.
+ https://doc.rust-lang.org/cargo/commands/cargo-metadata.html
+ */
+ "target": {
+ "kind": ["lib"],
+ "crate_types": ["lib"],
+ "name": "my-package",
+ "src_path": "/path/to/my-package/src/lib.rs",
+ "edition": "2018",
+ "test": true,
+ "doctest": true
+ },
+ /* The profile settings for this unit.
+ These values may not match the profile defined in the manifest.
+ Units can use modified profile settings. For example, the "panic"
+ setting can be overridden for tests to force it to "unwind".
+ */
+ "profile": {
+ /* The profile name these settings are derived from. */
+ "name": "dev",
+ /* The optimization level as a string. */
+ "opt_level": "0",
+ /* The LTO setting as a string. */
+ "lto": "false",
+ /* The codegen units as an integer.
+ `null` if it should use the compiler's default.
+ */
+ "codegen_units": null,
+ /* The debug information level as an integer.
+ `null` if it should use the compiler's default (0).
+ */
+ "debuginfo": 2,
+ /* Whether or not debug-assertions are enabled. */
+ "debug_assertions": true,
+ /* Whether or not overflow-checks are enabled. */
+ "overflow_checks": true,
+ /* Whether or not rpath is enabled. */
+ "rpath": false,
+ /* Whether or not incremental is enabled. */
+ "incremental": true,
+ /* The panic strategy, "unwind" or "abort". */
+ "panic": "unwind"
+ },
+ /* Which platform this target is being built for.
+ A value of `null` indicates it is for the host.
+ Otherwise it is a string of the target triple (such as
+ "x86_64-unknown-linux-gnu").
+ */
+ "platform": null,
+ /* The "mode" for this unit. Valid values:
+
+ * "test" --- Build using `rustc` as a test.
+ * "build" --- Build using `rustc`.
+ * "check" --- Build using `rustc` in "check" mode.
+ * "doc" --- Build using `rustdoc`.
+ * "doctest" --- Test using `rustdoc`.
+ * "run-custom-build" --- Represents the execution of a build script.
+ */
+ "mode": "build",
+ /* Array of features enabled on this unit as strings. */
+ "features": ["somefeat"],
+ /* Whether or not this is a standard-library unit,
+ part of the unstable build-std feature.
+ If not set, treat as `false`.
+ */
+ "is_std": false,
+ /* Array of dependencies of this unit. */
+ "dependencies": [
+ {
+ /* Index in the "units" array for the dependency. */
+ "index": 1,
+ /* The name that this dependency will be referred as. */
+ "extern_crate_name": "unicode_xid",
+ /* Whether or not this dependency is "public",
+ part of the unstable public-dependency feature.
+ If not set, the public-dependency feature is not enabled.
+ */
+ "public": false,
+ /* Whether or not this dependency is injected into the prelude,
+ currently used by the build-std feature.
+ If not set, treat as `false`.
+ */
+ "noprelude": false
+ }
+ ]
+ },
+ // ...
+ ],
+ /* Array of indices in the "units" array that are the "roots" of the
+ dependency graph.
+ */
+ "roots": [0],
+}
+```
+
+### Profile `rustflags` option
+* Original Issue: [rust-lang/cargo#7878](https://github.com/rust-lang/cargo/issues/7878)
+* Tracking Issue: [rust-lang/cargo#10271](https://github.com/rust-lang/cargo/issues/10271)
+
+This feature provides a new option in the `[profile]` section to specify flags
+that are passed directly to rustc.
+This can be enabled like so:
+
+```toml
+cargo-features = ["profile-rustflags"]
+
+[package]
+# ...
+
+[profile.release]
+rustflags = [ "-C", "..." ]
+```
+
+To set this in a profile in Cargo configuration, you need to use either
+`-Z profile-rustflags` or the `[unstable]` table to enable it. For example,
+
+```toml
+# .cargo/config.toml
+[unstable]
+profile-rustflags = true
+
+[profile.release]
+rustflags = [ "-C", "..." ]
+```
+
+### rustdoc-map
+* Tracking Issue: [#8296](https://github.com/rust-lang/cargo/issues/8296)
+
+This feature adds configuration settings that are passed to `rustdoc` so that
+it can generate links to dependencies whose documentation is hosted elsewhere
+when the dependency is not documented. First, add this to `.cargo/config`:
+
+```toml
+[doc.extern-map.registries]
+crates-io = "https://docs.rs/"
+```
+
+Then, when building documentation, use the following flags to cause links
+to dependencies to link to [docs.rs](https://docs.rs/):
+
+```
+cargo +nightly doc --no-deps -Zrustdoc-map
+```
+
+The `registries` table contains a mapping of registry name to the URL to link
+to. The URL may have the markers `{pkg_name}` and `{version}` which will get
+replaced with the corresponding values. If neither is specified, then Cargo
+defaults to appending `{pkg_name}/{version}/` to the end of the URL.
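+
+For instance, a hypothetical private registry whose documentation host uses a
+different URL layout could be mapped like this:
+
+```toml
+[doc.extern-map.registries]
+my-registry = "https://docs.example.com/{pkg_name}/{version}/"
+```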
+
+Another config setting is available to redirect standard library links. By
+default, rustdoc creates links to <https://doc.rust-lang.org/nightly/>. To
+change this behavior, use the `doc.extern-map.std` setting:
+
+```toml
+[doc.extern-map]
+std = "local"
+```
+
+A value of `"local"` means to link to the documentation found in the `rustc`
+sysroot. If you are using rustup, this documentation can be installed with
+`rustup component add rust-docs`.
+
+The default value is `"remote"`.
+
+The value may also take a URL for a custom location.
+
+### per-package-target
+* Tracking Issue: [#9406](https://github.com/rust-lang/cargo/pull/9406)
+* Original Pull Request: [#9030](https://github.com/rust-lang/cargo/pull/9030)
+* Original Issue: [#7004](https://github.com/rust-lang/cargo/pull/7004)
+
+The `per-package-target` feature adds two keys to the manifest:
+`package.default-target` and `package.forced-target`. The first makes
+the package be compiled by default (i.e., when no `--target` argument is
+passed) for some target. The second one makes the package always be
+compiled for the target.
+
+Example:
+
+```toml
+[package]
+forced-target = "wasm32-unknown-unknown"
+```
+
+In this example, the crate is always built for
+`wasm32-unknown-unknown`, for instance because it is going to be used
+as a plugin for a main program that runs on the host (or provided on
+the command line) target.
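+
+A sketch of the `default-target` form (the package name and target triple are
+illustrative; per the introduction above, new manifest syntax is assumed to
+require the `cargo-features` gate):
+
+```toml
+cargo-features = ["per-package-target"]
+
+[package]
+name = "host-tooling"
+version = "0.1.0"
+default-target = "x86_64-unknown-linux-gnu"
+```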
+
+### artifact-dependencies
+
+* Tracking Issue: [#9096](https://github.com/rust-lang/cargo/pull/9096)
+* Original Pull Request: [#9992](https://github.com/rust-lang/cargo/pull/9992)
+
+Allow Cargo packages to depend on `bin`, `cdylib`, and `staticlib` crates,
+and use the artifacts built by those crates at compile time.
+
+Run `cargo` with `-Z bindeps` to enable this functionality.
+
+**Example:** use _cdylib_ artifact in build script
+
+The `Cargo.toml` in the consuming package, building the `bar` library as `cdylib`
+for a specific build target…
+
+```toml
+[build-dependencies]
+bar = { artifact = "cdylib", version = "1.0", target = "wasm32-unknown-unknown" }
+```
+
+…along with the build script in `build.rs`.
+
+```rust
+fn main() {
+ wasm::run_file(std::env::var("CARGO_CDYLIB_FILE_BAR").unwrap());
+}
+```
+
+**Example:** use _binary_ artifact and its library in a binary
+
+The `Cargo.toml` in the consuming package, building the `bar` binary for inclusion
+as artifact while making it available as library as well…
+
+```toml
+[dependencies]
+bar = { artifact = "bin", version = "1.0", lib = true }
+```
+
+…along with the executable using `main.rs`.
+
+```rust
+fn main() {
+ bar::init();
+ command::run(env!("CARGO_BIN_FILE_BAR"));
+}
+```
+
+### publish-timeout
+* Tracking Issue: [11222](https://github.com/rust-lang/cargo/issues/11222)
+
+The `publish.timeout` key in a config file can be used to control how long
+`cargo publish` waits between posting a package to the registry and it being
+available in the local index.
+
+A timeout of `0` prevents any checks from occurring. The current default is
+`60` seconds.
+
+It requires the `-Zpublish-timeout` command-line option to be set.
+
+```toml
+# config.toml
+[publish]
+timeout = 300 # in seconds
+```
+
+### registry-auth
+* Tracking Issue: [10474](https://github.com/rust-lang/cargo/issues/10474)
+* RFC: [#3139](https://github.com/rust-lang/rfcs/pull/3139)
+
+Enables Cargo to include the authorization token for API requests, crate downloads
+and sparse index updates by adding a configuration option to config.json
+in the registry index.
+
+To use this feature, the registry server must include `"auth-required": true` in
+`config.json`, and you must pass the `-Z registry-auth` flag on the Cargo command line.
+
+When using the sparse protocol, Cargo will attempt to fetch the `config.json` file before
+fetching any other files. If the server responds with an HTTP 401, then Cargo will assume
+that the registry requires authentication and re-attempt the request for `config.json`
+with the authentication token included.
+
+On authentication failure (or missing authentication token) the server MAY include a
+`WWW-Authenticate` header with a `Cargo login_url` challenge to indicate where the user
+can go to get a token.
+
+```
+WWW-Authenticate: Cargo login_url="https://test-registry-login/me"
+```
+
+This same flag is also used to enable asymmetric authentication tokens.
+* Tracking Issue: [10519](https://github.com/rust-lang/cargo/issues/10519)
+* RFC: [#3231](https://github.com/rust-lang/rfcs/pull/3231)
+
+Add support for Cargo to authenticate the user to registries without sending secrets over the network.
+
+In the [`config.toml`](config.md) and `credentials.toml` files there is a field called `private-key`, which is a private key formatted in the secret [subset of `PASERK`](https://github.com/paseto-standard/paserk/blob/master/types/secret.md) and is used to sign asymmetric tokens.
+
+A keypair can be generated with `cargo login --generate-keypair` which will:
+- generate a public/private keypair in the currently recommended fashion.
+- save the private key in `credentials.toml`.
+- print the public key in [PASERK public](https://github.com/paseto-standard/paserk/blob/master/types/public.md) format.
+
+It is recommended that the `private-key` be saved in `credentials.toml`. It is also supported in `config.toml`, primarily so that it can be set using the associated environment variable, which is the recommended way to provide it in CI contexts. This mirrors the existing setup for the `token` field used for setting a secret token.
+
+There is also an optional field called `private-key-subject` which is a string chosen by the registry.
+This string will be included as part of an asymmetric token and should not be secret.
+It is intended for the rare use cases like "cryptographic proof that the central CA server authorized this action". Cargo requires it to be non-whitespace printable ASCII. Registries that need non-ASCII data should base64 encode it.
+
+Both fields can be set with `cargo login --registry=name --private-key --private-key-subject="subject"` which will prompt you to put in the key value.
+
+A registry can have at most one of `private-key`, `token`, or `credential-process` set.
+
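+A sketch of what this might look like in `credentials.toml` (the registry name
+is illustrative and the key value is a placeholder, not a real key):
+
+```toml
+[registries.my-registry]
+private-key = "k3.secret.<PASERK-encoded-secret-key>"
+private-key-subject = "subject-string-chosen-by-the-registry"
+```
+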
+All PASETOs will include `iat`, the current time in ISO 8601 format. Cargo will include the following where appropriate:
+- `sub` an optional, non-secret string chosen by the registry that is expected to be claimed with every request. The value will be the `private-key-subject` from the `config.toml` file.
+- `mutation` if present, indicates that this request is a mutating operation (or a read-only operation if not present), must be one of the strings `publish`, `yank`, or `unyank`.
+ - `name` name of the crate related to this request.
+ - `vers` version string of the crate related to this request.
+ - `cksum` the SHA256 hash of the crate contents, as a string of 64 lowercase hexadecimal digits, must be present only when `mutation` is equal to `publish`
+- `challenge` the challenge string received from a 401/403 from this server this session. Registries that issue challenges must track which challenges have been issued/used and never accept a given challenge more than once within the same validity period (avoiding the need to track every challenge ever issued).
+
+The "footer" (which is part of the signature) will be a JSON string in UTF-8 and include:
+- `url` the RFC 3986 compliant URL where cargo got the config.json file,
+ - If this is a registry with an HTTP index, then this is the base URL that all index queries are relative to.
+ - If this is a registry with a GIT index, it is the URL Cargo used to clone the index.
+- `kid` the identifier of the private key used to sign the request, using the [PASERK IDs](https://github.com/paseto-standard/paserk/blob/master/operations/ID.md) standard.
+
+PASETO includes the message that was signed, so the server does not have to reconstruct the exact string from the request in order to check the signature. The server does need to check that the signature is valid for the string in the PASETO and that the contents of that string matches the request.
+If a claim should be expected for the request but is missing in the PASETO then the request must be rejected.
+
+### credential-process
+* Tracking Issue: [#8933](https://github.com/rust-lang/cargo/issues/8933)
+* RFC: [#2730](https://github.com/rust-lang/rfcs/pull/2730)
+
+The `credential-process` feature adds a config setting to fetch registry
+authentication tokens by calling an external process.
+
+Token authentication is used by the [`cargo login`], [`cargo publish`],
+[`cargo owner`], [`cargo yank`], and [`cargo logout`] commands.
+
+To use this feature, you must pass the `-Z credential-process` flag on the
+command-line. Additionally, you must remove any tokens currently saved
+in the [`credentials.toml` file] (which can be done with the [`cargo logout`] command).
+
+#### `credential-process` Configuration
+
+To configure which process to run to fetch the token, specify the process in
+the `registry` table in a [config file]:
+
+```toml
+[registry]
+credential-process = "/usr/bin/cargo-creds"
+```
+
+If you want to use a different process for a specific registry, it can be
+specified in the `registries` table:
+
+```toml
+[registries.my-registry]
+credential-process = "/usr/bin/cargo-creds"
+```
+
+The value can be a string with spaces separating arguments or it can be a TOML
+array of strings.
+
+Command-line arguments allow special placeholders which will be replaced with
+the corresponding value:
+
+* `{name}` --- The name of the registry.
+* `{api_url}` --- The base URL of the registry API endpoints.
+* `{action}` --- The authentication action (described below).
+
+Process names with the prefix `cargo:` are loaded from the `libexec` directory
+next to cargo. Several experimental credential wrappers are included with
+Cargo, and this provides convenient access to them:
+
+```toml
+[registry]
+credential-process = "cargo:macos-keychain"
+```
+
+The current wrappers are:
+
+* `cargo:macos-keychain`: Uses the macOS Keychain to store the token.
+* `cargo:wincred`: Uses the Windows Credential Manager to store the token.
+* `cargo:1password`: Uses the 1password `op` CLI to store the token. You must
+ install the `op` CLI from the [1password
+ website](https://1password.com/downloads/command-line/). You must run `op
+ signin` at least once with the appropriate arguments (such as `op signin
+ my.1password.com user@example.com`), unless you provide the sign-in-address
+ and email arguments. The master password will be required on each request
+ unless the appropriate `OP_SESSION` environment variable is set. It supports
+ the following command-line arguments:
+ * `--account`: The account shorthand name to use.
+ * `--vault`: The vault name to use.
+ * `--sign-in-address`: The sign-in-address, which is a web address such as `my.1password.com`.
+ * `--email`: The email address to sign in with.
+
+A wrapper is available for GNOME
+[libsecret](https://wiki.gnome.org/Projects/Libsecret) to store tokens on
+Linux systems. Due to build limitations, this wrapper is not available as a
+pre-compiled binary. This can be built and installed manually. First, install
+libsecret using your system package manager (for example, `sudo apt install
+libsecret-1-dev`). Then build and install the wrapper with `cargo install
+cargo-credential-gnome-secret`.
+In the config, use a path to the binary like this:
+
+```toml
+[registry]
+credential-process = "cargo-credential-gnome-secret {action}"
+```
+
+#### `credential-process` Interface
+
+There are two different kinds of token processes that Cargo supports. The
+simple "basic" kind will only be called by Cargo when it needs a token. This
+is intended for simple and easy integration with password managers that can
+often use pre-existing tooling. The more advanced "Cargo" kind supports
+different actions passed as a command-line argument. This is intended for a
+more pleasant integration experience, at the expense of requiring a Cargo-specific
+process to glue to the password manager. Cargo will determine which kind is
+supported by the `credential-process` definition. If it contains the
+`{action}` argument, then it uses the advanced style, otherwise it assumes it
+only supports the "basic" kind.
+
+##### Basic authenticator
+
+A basic authenticator is a process that returns a token on stdout. Newlines
+will be trimmed. The process inherits the user's stdin and stderr. It should
+exit 0 on success, and nonzero on error.
+
+With this form, [`cargo login`] and [`cargo logout`] are not supported and
+return an error if used.
+
+##### Cargo authenticator
+
+The protocol between Cargo and the process is very basic, intended to
+ensure the credential process is kept as simple as possible. Cargo will
+execute the process with the `{action}` argument indicating which action to
+perform:
+
+* `store` --- Store the given token in secure storage.
+* `get` --- Get a token from storage.
+* `erase` --- Remove a token from storage.
+
+The `cargo login` command uses `store` to save a token. Commands that require
+authentication, like `cargo publish`, use `get` to retrieve a token. `cargo
+logout` uses the `erase` command to remove a token.
+
+The process inherits the user's stderr, so the process can display messages.
+Some values are passed in via environment variables (see below). The expected
+interactions are as follows; a minimal example process is sketched after the list:
+
+* `store` --- The token is sent to the process's stdin, terminated by a newline.
+ The process should store the token keyed off the registry index URL. If the
+ process fails, it should exit with a nonzero exit status.
+
+* `get` --- The process should send the token to its stdout (trailing newline
+ will be trimmed). The process inherits the user's stdin, should it need to
+ receive input.
+
+ If the process is unable to fulfill the request, it should exit with a
+ nonzero exit code.
+
+* `erase` --- The process should remove the token associated with the registry
+ index URL. If the token is not found, the process should exit with a 0 exit
+ status.
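+
+Putting these actions together, a minimal "Cargo"-style authenticator might
+look like the sketch below. It keeps tokens in a plain-text file keyed off the
+registry index URL, which is **not** secure and is shown only to illustrate
+the protocol; a real process would use an OS keychain or another secure store.
+The file location and naming are arbitrary choices for this sketch.
+
+```rust,no_run
+// src/main.rs of a hypothetical credential process (illustration only).
+use std::env;
+use std::fs;
+use std::io::Read;
+use std::process::exit;
+
+fn main() {
+    // Cargo passes the action (`store`, `get`, or `erase`) via `{action}`.
+    let action = env::args().nth(1).unwrap_or_default();
+    // Key storage off the index URL; the registry name is not always available.
+    let index = env::var("CARGO_REGISTRY_INDEX_URL").unwrap_or_default();
+    let key: String = index
+        .chars()
+        .map(|c| if c.is_ascii_alphanumeric() { c } else { '_' })
+        .collect();
+    let path = env::temp_dir().join(format!("demo-cred-{key}"));
+
+    match action.as_str() {
+        "store" => {
+            // The token arrives on stdin, terminated by a newline.
+            let mut token = String::new();
+            std::io::stdin().read_to_string(&mut token).unwrap();
+            fs::write(&path, token.trim()).unwrap();
+        }
+        "get" => {
+            // Print the token to stdout; Cargo trims the trailing newline.
+            match fs::read_to_string(&path) {
+                Ok(token) => println!("{}", token.trim()),
+                Err(_) => exit(1),
+            }
+        }
+        "erase" => {
+            // A missing token is not an error for `erase`.
+            let _ = fs::remove_file(&path);
+        }
+        _ => exit(1),
+    }
+}
+```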
+
+##### Environment
+
+The following environment variables will be provided to the executed command:
+
+* `CARGO` --- Path to the `cargo` binary executing the command.
+* `CARGO_REGISTRY_INDEX_URL` --- The URL of the registry index.
+* `CARGO_REGISTRY_NAME_OPT` --- Optional name of the registry. Should not be used as a storage key. Not always available.
+
+[`cargo login`]: ../commands/cargo-login.md
+[`cargo logout`]: ../commands/cargo-logout.md
+[`cargo publish`]: ../commands/cargo-publish.md
+[`cargo owner`]: ../commands/cargo-owner.md
+[`cargo yank`]: ../commands/cargo-yank.md
+[`credentials.toml` file]: config.md#credentials
+[crates.io]: https://crates.io/
+[config file]: config.md
+
+### `cargo config`
+
+* Original Issue: [#2362](https://github.com/rust-lang/cargo/issues/2362)
+* Tracking Issue: [#9301](https://github.com/rust-lang/cargo/issues/9301)
+
+The `cargo config` subcommand provides a way to display the configuration
+files that cargo loads. It currently includes the `get` subcommand which
+can take an optional config value to display.
+
+```console
+cargo +nightly -Zunstable-options config get build.rustflags
+```
+
+If no config value is included, it will display all config values. See the
+`--help` output for more options available.
+
+### `doctest-in-workspace`
+
+* Tracking Issue: [#9427](https://github.com/rust-lang/cargo/issues/9427)
+
+The `-Z doctest-in-workspace` flag changes the behavior of the current working
+directory used when running doctests. Historically, Cargo has run `rustdoc
+--test` relative to the root of the package, with paths relative from that
+root. However, this is inconsistent with how `rustc` and `rustdoc` are
+normally run in a workspace, where they are run relative to the workspace
+root. This inconsistency causes problems in various ways, such as when passing
+RUSTDOCFLAGS with relative paths, or dealing with diagnostic output.
+
+The `-Z doctest-in-workspace` flag causes cargo to switch to running `rustdoc`
+from the root of the workspace. It also passes the `--test-run-directory` to
+`rustdoc` so that when *running* the tests, they are run from the root of the
+package. This preserves backwards compatibility and is consistent with how
+normal unit tests are run.
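+
+For example, a sketch of invoking it on a nightly toolchain together with a
+normal doctest run:
+
+```
+cargo +nightly test --doc -Z doctest-in-workspace
+```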
+
+### rustc `--print`
+
+* Tracking Issue: [#9357](https://github.com/rust-lang/cargo/issues/9357)
+
+`cargo rustc --print=VAL` forwards the `--print` flag to `rustc` in order to
+extract information from `rustc`. This runs `rustc` with the corresponding
+[`--print`](https://doc.rust-lang.org/rustc/command-line-arguments.html#--print-print-compiler-information)
+flag, and then immediately exits without compiling. Exposing this as a cargo
+flag allows cargo to inject the correct target and RUSTFLAGS based on the
+current configuration.
+
+The primary use case is to run `cargo rustc --print=cfg` to get config values
+for the appropriate target, as influenced by any other RUSTFLAGS.
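+
+For example, to print the `cfg` values for the current target (on a nightly
+toolchain; `-Z unstable-options` is assumed here since the flag is unstable):
+
+```
+cargo +nightly rustc -Z unstable-options --print=cfg
+```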
+
+
+### Different binary name
+
+* Tracking Issue: [#9778](https://github.com/rust-lang/cargo/issues/9778)
+* PR: [#9627](https://github.com/rust-lang/cargo/pull/9627)
+
+The `different-binary-name` feature allows setting the filename of the binary without having to obey the
+restrictions placed on crate names. For example, the crate name must use only alphanumeric characters
+or `-` or `_`, and cannot be empty.
+
+The `filename` parameter should **not** include the binary extension; `cargo` will figure out the appropriate
+extension and use that for the binary on its own.
+
+The `filename` parameter is only available in the `[[bin]]` section of the manifest.
+
+```toml
+cargo-features = ["different-binary-name"]
+
+[package]
+name = "foo"
+version = "0.0.1"
+
+[[bin]]
+name = "foo"
+filename = "007bar"
+path = "src/main.rs"
+```
+
+### scrape-examples
+
+* RFC: [#3123](https://github.com/rust-lang/rfcs/pull/3123)
+* Tracking Issue: [#9910](https://github.com/rust-lang/cargo/issues/9910)
+
+The `-Z rustdoc-scrape-examples` flag tells Rustdoc to search crates in the current workspace
+for calls to functions. Those call-sites are then included as documentation. You can use the flag
+like this:
+
+```
+cargo doc -Z unstable-options -Z rustdoc-scrape-examples
+```
+
+By default, Cargo will scrape examples from the example targets of packages being documented.
+You can individually enable or disable targets from being scraped with the `doc-scrape-examples` flag, such as:
+
+```toml
+# Enable scraping examples from a library
+[lib]
+doc-scrape-examples = true
+
+# Disable scraping examples from an example target
+[[example]]
+name = "my-example"
+doc-scrape-examples = false
+```
+
+**Note on tests:** enabling `doc-scrape-examples` on test targets will not currently have any effect. Scraping
+examples from tests is a work-in-progress.
+
+**Note on dev-dependencies:** documenting a library does not normally require the crate's dev-dependencies. However,
+example targets require dev-deps. For backwards compatibility, `-Z rustdoc-scrape-examples` will *not* introduce a
+dev-deps requirement for `cargo doc`. Therefore examples will *not* be scraped from example targets under the
+following conditions:
+
+1. No target being documented requires dev-deps, AND
+2. At least one crate with targets being documented has dev-deps, AND
+3. The `doc-scrape-examples` parameter is unset or false for all `[[example]]` targets.
+
+If you want examples to be scraped from example targets, then you must not satisfy one of the above conditions.
+For example, you can set `doc-scrape-examples` to true for one example target, and that signals to Cargo that
+you are ok with dev-deps being built for `cargo doc`.
+
+
+### check-cfg
+
+* RFC: [#3013](https://github.com/rust-lang/rfcs/pull/3013)
+* Tracking Issue: [#10554](https://github.com/rust-lang/cargo/issues/10554)
+
+The `-Z check-cfg` command-line option enables compile-time checking of names and values in `#[cfg]`, `cfg!`,
+`#[link]` and `#[cfg_attr]` with the unstable `rustc` and `rustdoc` `--check-cfg` command-line option.
+
+Its values are:
+ - `features`: enables features checking via `--check-cfg=values(feature, ...)`.
+   Note that this option will probably become the default when stabilizing.
+ - `names`: enables well-known names checking via `--check-cfg=names()`.
+ - `values`: enables well-known values checking via `--check-cfg=values()`.
+ - `output`: enables the use of `rustc-check-cfg` in build scripts.
+
+For instance:
+
+```
+cargo check -Z unstable-options -Z check-cfg=features
+cargo check -Z unstable-options -Z check-cfg=names
+cargo check -Z unstable-options -Z check-cfg=values
+cargo check -Z unstable-options -Z check-cfg=features,names,values
+```
+
+Or for `output`:
+
+```rust,no_run
+// build.rs
+println!("cargo:rustc-check-cfg=names(foo, bar)");
+```
+
+```
+cargo check -Z unstable-options -Z check-cfg=output
+```
+
+#### `cargo:rustc-check-cfg=CHECK_CFG`
+
+The `rustc-check-cfg` instruction tells Cargo to pass the given value to the
+`--check-cfg` flag to the compiler. This may be used for compile-time
+detection of unexpected conditional compilation names and/or values.
+
+This can only be used in combination with `-Zcheck-cfg=output`; otherwise it is ignored
+with a warning.
+
+If you want to integrate with Cargo features, use `-Zcheck-cfg=features` instead of
+trying to do it manually with this option.
+
+### codegen-backend
+
+The `codegen-backend` feature makes it possible to select the codegen backend used by rustc using a profile.
+
+Example:
+
+```toml
+[package]
+name = "foo"
+
+[dependencies]
+serde = "1.0.117"
+
+[profile.dev.package.foo]
+codegen-backend = "cranelift"
+```
+
+To set this in a profile in Cargo configuration, you need to use either the
+`-Z codegen-backend` command-line flag or the `[unstable]` table to enable it. For example,
+
+```toml
+# .cargo/config.toml
+[unstable]
+codegen-backend = true
+
+[profile.dev.package.foo]
+codegen-backend = "cranelift"
+```
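+
+The feature can also be enabled for a single invocation on the command line
+(assuming the `cranelift` backend is available to `rustc`):
+
+```
+cargo +nightly build -Z codegen-backend
+```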
+
+### gitoxide
+
+* Tracking Issue: [#11813](https://github.com/rust-lang/cargo/issues/11813)
+
+With the 'gitoxide' unstable feature, all or the specified git operations will be performed by
+the `gitoxide` crate instead of `git2`.
+
+While `-Zgitoxide` enables all currently implemented features, one can individually select git operations
+to run with `gitoxide` using the `-Zgitoxide=operation[,operationN]` syntax, as in the example after the list below.
+
+Valid operations are the following:
+
+* `fetch` - All fetches are done with `gitoxide`, which includes git dependencies as well as the crates index.
+* `shallow-index` *(planned)* - perform a shallow clone of the index.
+* `shallow-deps` *(planned)* - perform a shallow clone of git dependencies.
+* `checkout` *(planned)* - checkout the worktree, with support for filters and submodules.
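+
+For example, to perform all fetches with `gitoxide`:
+
+```
+cargo +nightly fetch -Zgitoxide=fetch
+```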
+
+## Stabilized and removed features
+
+### Compile progress
+
+The compile-progress feature has been stabilized in the 1.30 release.
+Progress bars are now enabled by default.
+See [`term.progress`](config.md#termprogresswhen) for more information about
+controlling this feature.
+
+### Edition
+
+Specifying the `edition` in `Cargo.toml` has been stabilized in the 1.31 release.
+See [the edition field](manifest.md#the-edition-field) for more information
+about specifying this field.
+
+### rename-dependency
+
+Specifying renamed dependencies in `Cargo.toml` has been stabilized in the 1.31 release.
+See [renaming dependencies](specifying-dependencies.md#renaming-dependencies-in-cargotoml)
+for more information about renaming dependencies.
+
+### Alternate Registries
+
+Support for alternate registries has been stabilized in the 1.34 release.
+See the [Registries chapter](registries.md) for more information about alternate registries.
+
+### Offline Mode
+
+The offline feature has been stabilized in the 1.36 release.
+See the [`--offline` flag](../commands/cargo.md#option-cargo---offline) for
+more information on using the offline mode.
+
+### publish-lockfile
+
+The `publish-lockfile` feature has been removed in the 1.37 release.
+The `Cargo.lock` file is always included when a package is published if the
+package contains a binary target. `cargo install` requires the `--locked` flag
+to use the `Cargo.lock` file.
+See [`cargo package`](../commands/cargo-package.md) and
+[`cargo install`](../commands/cargo-install.md) for more information.
+
+### default-run
+
+The `default-run` feature has been stabilized in the 1.37 release.
+See [the `default-run` field](manifest.md#the-default-run-field) for more
+information about specifying the default target to run.
+
+### cache-messages
+
+Compiler message caching has been stabilized in the 1.40 release.
+Compiler warnings are now cached by default and will be replayed automatically
+when re-running Cargo.
+
+### install-upgrade
+
+The `install-upgrade` feature has been stabilized in the 1.41 release.
+[`cargo install`] will now automatically upgrade packages if they appear to be
+out-of-date. See the [`cargo install`] documentation for more information.
+
+[`cargo install`]: ../commands/cargo-install.md
+
+### Profile Overrides
+
+Profile overrides have been stabilized in the 1.41 release.
+See [Profile Overrides](profiles.md#overrides) for more information on using
+overrides.
+
+### Config Profiles
+
+Specifying profiles in Cargo config files and environment variables has been
+stabilized in the 1.43 release.
+See the [config `[profile]` table](config.md#profile) for more information
+about specifying [profiles](profiles.md) in config files.
+
+### crate-versions
+
+The `-Z crate-versions` flag has been stabilized in the 1.47 release.
+The crate version is now automatically included in the
+[`cargo doc`](../commands/cargo-doc.md) documentation sidebar.
+
+### Features
+
+The `-Z features` flag has been stabilized in the 1.51 release.
+See [feature resolver version 2](features.md#feature-resolver-version-2)
+for more information on using the new feature resolver.
+
+### package-features
+
+The `-Z package-features` flag has been stabilized in the 1.51 release.
+See the [resolver version 2 command-line flags](features.md#resolver-version-2-command-line-flags)
+for more information on using the features CLI options.
+
+### Resolver
+
+The `resolver` feature in `Cargo.toml` has been stabilized in the 1.51 release.
+See the [resolver versions](resolver.md#resolver-versions) for more
+information about specifying resolvers.
+
+### extra-link-arg
+
+The `extra-link-arg` feature to specify additional linker arguments in build
+scripts has been stabilized in the 1.56 release. See the [build script
+documentation](build-scripts.md#outputs-of-the-build-script) for more
+information on specifying extra linker arguments.
+
+### configurable-env
+
+The `configurable-env` feature to specify environment variables in Cargo
+configuration has been stabilized in the 1.56 release. See the [config
+documentation](config.html#env) for more information about configuring
+environment variables.
+
+### rust-version
+
+The `rust-version` field in `Cargo.toml` has been stabilized in the 1.56 release.
+See the [rust-version field](manifest.html#the-rust-version-field) for more
+information on using the `rust-version` field and the `--ignore-rust-version` option.
+
+### patch-in-config
+
+The `-Z patch-in-config` flag, and the corresponding support for
+`[patch]` section in Cargo configuration files has been stabilized in
+the 1.56 release. See the [patch field](config.html#patch) for more
+information.
+
+### edition 2021
+
+The 2021 edition has been stabilized in the 1.56 release.
+See the [`edition` field](manifest.md#the-edition-field) for more information on setting the edition.
+See [`cargo fix --edition`](../commands/cargo-fix.md) and [The Edition Guide](../../edition-guide/index.html) for more information on migrating existing projects.
+
+
+### Custom named profiles
+
+Custom named profiles have been stabilized in the 1.57 release. See the
+[profiles chapter](profiles.md#custom-profiles) for more information.
+
+### Profile `strip` option
+
+The profile `strip` option has been stabilized in the 1.59 release. See the
+[profiles chapter](profiles.md#strip) for more information.
+
+### Future incompat report
+
+Support for generating a future-incompat report has been stabilized
+in the 1.59 release. See the [future incompat report chapter](future-incompat-report.md)
+for more information.
+
+### Namespaced features
+
+Namespaced features have been stabilized in the 1.60 release.
+See the [Features chapter](features.md#optional-dependencies) for more information.
+
+### Weak dependency features
+
+Weak dependency features have been stabilized in the 1.60 release.
+See the [Features chapter](features.md#dependency-features) for more information.
+
+### timings
+
+The `-Ztimings` option has been stabilized as `--timings` in the 1.60 release.
+(`--timings=html` and the machine-readable `--timings=json` output remain
+unstable and require `-Zunstable-options`.)
+
+### config-cli
+
+The `--config` CLI option has been stabilized in the 1.63 release. See
+the [config documentation](config.html#command-line-overrides) for more
+information.
+
+### multitarget
+
+The `-Z multitarget` option has been stabilized in the 1.64 release.
+See [`build.target`](config.md#buildtarget) for more information about
+setting the default [target platform triples][target triple].
+
+### crate-type
+
+The `--crate-type` flag for `cargo rustc` has been stabilized in the 1.64
+release. See the [`cargo rustc` documentation](../commands/cargo-rustc.md)
+for more information.
+
+
+### Workspace Inheritance
+
+Workspace Inheritance has been stabilized in the 1.64 release.
+See [workspace.package](workspaces.md#the-package-table),
+[workspace.dependencies](workspaces.md#the-dependencies-table),
+and [inheriting-a-dependency-from-a-workspace](specifying-dependencies.md#inheriting-a-dependency-from-a-workspace)
+for more information.
+
+### terminal-width
+
+The `-Z terminal-width` option has been stabilized in the 1.68 release.
+The terminal width is always passed to the compiler when running from a
+terminal where Cargo can automatically detect the width.
+
+### sparse-registry
+
+Sparse registry support has been stabilized in the 1.68 release.
+See [Registry Protocols](registries.md#registry-protocols) for more information.
+
+### `cargo logout`
+
+The [`cargo logout`] command has been stabilized in the 1.70 release.
+
+[target triple]: ../appendix/glossary.md#target '"target" (glossary)'
diff --git a/src/tools/cargo/src/doc/src/reference/workspaces.md b/src/tools/cargo/src/doc/src/reference/workspaces.md
new file mode 100644
index 000000000..21f8f08f8
--- /dev/null
+++ b/src/tools/cargo/src/doc/src/reference/workspaces.md
@@ -0,0 +1,255 @@
+## Workspaces
+
+A *workspace* is a collection of one or more packages, called *workspace
+members*, that are managed together.
+
+The key points of workspaces are:
+
+* Common commands can run across all workspace members, like `cargo check --workspace`.
+* All packages share a common [`Cargo.lock`] file which resides in the
+ *workspace root*.
+* All packages share a common [output directory], which defaults to a
+ directory named `target` in the *workspace root*.
+* Sharing package metadata, like with [`workspace.package`](#the-package-table).
+* The [`[patch]`][patch], [`[replace]`][replace] and [`[profile.*]`][profiles]
+ sections in `Cargo.toml` are only recognized in the *root* manifest, and
+ ignored in member crates' manifests.
+
+In the `Cargo.toml`, the `[workspace]` table supports the following sections:
+
+* [`[workspace]`](#the-workspace-section) --- Defines a workspace.
+ * [`resolver`](resolver.md#resolver-versions) --- Sets the dependency resolver to use.
+ * [`members`](#the-members-and-exclude-fields) --- Packages to include in the workspace.
+ * [`exclude`](#the-members-and-exclude-fields) --- Packages to exclude from the workspace.
+ * [`default-members`](#the-default-members-field) --- Packages to operate on when a specific package wasn't selected.
+ * [`package`](#the-package-table) --- Keys for inheriting in packages.
+ * [`dependencies`](#the-dependencies-table) --- Keys for inheriting in package dependencies.
+ * [`metadata`](#the-metadata-table) --- Extra settings for external tools.
+* [`[patch]`](overriding-dependencies.md#the-patch-section) --- Override dependencies.
+* [`[replace]`](overriding-dependencies.md#the-replace-section) --- Override dependencies (deprecated).
+* [`[profile]`](profiles.md) --- Compiler settings and optimizations.
+
+### The `[workspace]` section
+
+To create a workspace, you add the `[workspace]` table to a `Cargo.toml`:
+```toml
+[workspace]
+# ...
+```
+
+At minimum, a workspace has to have a member, either with a root package or as
+a virtual manifest.
+
+#### Root package
+
+If the [`[workspace]` section](#the-workspace-section) is added to a
+`Cargo.toml` that already defines a `[package]`, the package is
+the *root package* of the workspace. The *workspace root* is the directory
+where the workspace's `Cargo.toml` is located.
+
+```toml
+[workspace]
+
+[package]
+name = "hello_world" # the name of the package
+version = "0.1.0" # the current version, obeying semver
+authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
+```
+
+<a id="virtual-manifest"></a>
+#### Virtual workspace
+
+Alternatively, a `Cargo.toml` file can be created with a `[workspace]` section
+but without a [`[package]` section][package]. This is called a *virtual
+manifest*. This is typically useful when there isn't a "primary" package, or
+you want to keep all the packages organized in separate directories.
+
+```toml
+# [PROJECT_DIR]/Cargo.toml
+[workspace]
+members = ["hello_world"]
+```
+
+```toml
+# [PROJECT_DIR]/hello_world/Cargo.toml
+[package]
+name = "hello_world" # the name of the package
+version = "0.1.0" # the current version, obeying semver
+authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
+```
+
+### The `members` and `exclude` fields
+
+The `members` and `exclude` fields define which packages are members of
+the workspace:
+
+```toml
+[workspace]
+members = ["member1", "path/to/member2", "crates/*"]
+exclude = ["crates/foo", "path/to/other"]
+```
+
+All [`path` dependencies] residing in the workspace directory automatically
+become members. Additional members can be listed with the `members` key, which
+should be an array of strings containing directories with `Cargo.toml` files.
+
+The `members` list also supports [globs] to match multiple paths, using
+typical filename glob patterns like `*` and `?`.
+
+The `exclude` key can be used to prevent paths from being included in a
+workspace. This can be useful if some path dependencies aren't desired to be
+in the workspace at all, or when you are using a glob pattern and want to
+remove a directory.
+
+When inside a subdirectory within the workspace, Cargo will automatically
+search the parent directories for a `Cargo.toml` file with a `[workspace]`
+definition to determine which workspace to use. The [`package.workspace`]
+manifest key can be used in member crates to point at a workspace's root to
+override this automatic search. The manual setting can be useful if the member
+is not inside a subdirectory of the workspace root.
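+
+For example, a member that lives outside the workspace's directory tree could
+point back at the workspace explicitly (the paths here are illustrative, and
+the workspace side typically still needs to list the package in `members`):
+
+```toml
+# Cargo.toml of a package located outside the workspace directory
+[package]
+name = "detached-member"
+version = "0.1.0"
+workspace = "../path/to/workspace-root"
+```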
+
+#### Package selection
+
+In a workspace, package-related Cargo commands like [`cargo build`] can use
+the `-p` / `--package` or `--workspace` command-line flags to determine which
+packages to operate on. If neither of those flags are specified, Cargo will
+use the package in the current working directory. If the current directory is
+a [virtual workspace](#virtual-workspace), it will apply to all members (as if
+`--workspace` were specified on the command-line). See also
+[`default-members`](#the-default-members-field).
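+
+For example, assuming a workspace with a member named `member1`:
+
+```console
+cargo check -p member1    # only check `member1`
+cargo check --workspace   # check every workspace member
+```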
+
+### The `default-members` field
+
+The optional `default-members` key can be specified to set the members to
+operate on when in the workspace root and the package selection flags are not
+used:
+
+```toml
+[workspace]
+members = ["path/to/member1", "path/to/member2", "path/to/member3/*"]
+default-members = ["path/to/member2", "path/to/member3/foo"]
+```
+
+When specified, `default-members` must expand to a subset of `members`.
+
+### The `package` table
+
+The `workspace.package` table is where you define keys that can be
+inherited by members of a workspace. These keys can be inherited by
+defining them in the member package with `{key}.workspace = true`.
+
+Keys that are supported:
+
+| | |
+|----------------|-----------------|
+| `authors` | `categories` |
+| `description` | `documentation` |
+| `edition` | `exclude` |
+| `homepage` | `include` |
+| `keywords` | `license` |
+| `license-file` | `publish` |
+| `readme` | `repository` |
+| `rust-version` | `version` |
+
+- `license-file` and `readme` are relative to the workspace root
+- `include` and `exclude` are relative to your package root
+
+Example:
+```toml
+# [PROJECT_DIR]/Cargo.toml
+[workspace]
+members = ["bar"]
+
+[workspace.package]
+version = "1.2.3"
+authors = ["Nice Folks"]
+description = "A short description of my package"
+documentation = "https://example.com/bar"
+```
+
+```toml
+# [PROJECT_DIR]/bar/Cargo.toml
+[package]
+name = "bar"
+version.workspace = true
+authors.workspace = true
+description.workspace = true
+documentation.workspace = true
+```
+
+### The `dependencies` table
+
+The `workspace.dependencies` table is where you define dependencies to be
+inherited by members of a workspace.
+
+Specifying a workspace dependency is similar to [package dependencies][specifying-dependencies] except:
+- Dependencies from this table cannot be declared as `optional`
+- [`features`][features] declared in this table are additive with the `features` from `[dependencies]`
+
+You can then [inherit the workspace dependency as a package dependency][inheriting-a-dependency-from-a-workspace]
+
+Example:
+```toml
+# [PROJECT_DIR]/Cargo.toml
+[workspace]
+members = ["bar"]
+
+[workspace.dependencies]
+cc = "1.0.73"
+rand = "0.8.5"
+regex = { version = "1.6.0", default-features = false, features = ["std"] }
+```
+
+```toml
+# [PROJECT_DIR]/bar/Cargo.toml
+[package]
+name = "bar"
+version = "0.2.0"
+
+[dependencies]
+regex = { workspace = true, features = ["unicode"] }
+
+[build-dependencies]
+cc.workspace = true
+
+[dev-dependencies]
+rand.workspace = true
+```
+
+### The `metadata` table
+
+The `workspace.metadata` table is ignored by Cargo and will not be warned
+about. This section can be used for tools that would like to store workspace
+configuration in `Cargo.toml`. For example:
+
+```toml
+[workspace]
+members = ["member1", "member2"]
+
+[workspace.metadata.webcontents]
+root = "path/to/webproject"
+tool = ["npm", "run", "build"]
+# ...
+```
+
+There is a similar set of tables at the package level at
+[`package.metadata`][package-metadata]. While cargo does not specify a
+format for the content of either of these tables, it is suggested that
+external tools may wish to use them in a consistent fashion, such as referring
+to the data in `workspace.metadata` if data is missing from `package.metadata`,
+if that makes sense for the tool in question.
+
+[package]: manifest.md#the-package-section
+[`Cargo.lock`]: ../guide/cargo-toml-vs-cargo-lock.md
+[package-metadata]: manifest.md#the-metadata-table
+[output directory]: ../guide/build-cache.md
+[patch]: overriding-dependencies.md#the-patch-section
+[replace]: overriding-dependencies.md#the-replace-section
+[profiles]: profiles.md
+[`path` dependencies]: specifying-dependencies.md#specifying-path-dependencies
+[`package.workspace`]: manifest.md#the-workspace-field
+[globs]: https://docs.rs/glob/0.3.0/glob/struct.Pattern.html
+[`cargo build`]: ../commands/cargo-build.md
+[specifying-dependencies]: specifying-dependencies.md
+[features]: features.md
+[inheriting-a-dependency-from-a-workspace]: specifying-dependencies.md#inheriting-a-dependency-from-a-workspace
diff --git a/src/tools/cargo/src/doc/theme/favicon.png b/src/tools/cargo/src/doc/theme/favicon.png
new file mode 100644
index 000000000..47c8f628f
--- /dev/null
+++ b/src/tools/cargo/src/doc/theme/favicon.png
Binary files differ
diff --git a/src/tools/cargo/src/doc/theme/head.hbs b/src/tools/cargo/src/doc/theme/head.hbs
new file mode 100644
index 000000000..062417e11
--- /dev/null
+++ b/src/tools/cargo/src/doc/theme/head.hbs
@@ -0,0 +1,5 @@
+<style>
+ dd {
+ margin-bottom: 1em;
+ }
+</style>
diff --git a/src/tools/cargo/src/etc/_cargo b/src/tools/cargo/src/etc/_cargo
new file mode 100644
index 000000000..bdceb10c9
--- /dev/null
+++ b/src/tools/cargo/src/etc/_cargo
@@ -0,0 +1,468 @@
+#compdef cargo
+
+autoload -U regexp-replace
+
+_cargo() {
+ local curcontext="$curcontext" ret=1
+ local -a command_scope_spec common parallel features msgfmt triple target registry
+ local -a state line state_descr # These are set by _arguments
+ typeset -A opt_args
+
+ common=(
+ '(-q --quiet)*'{-v,--verbose}'[use verbose output]'
+ '(-q --quiet -v --verbose)'{-q,--quiet}'[no output printed to stdout]'
+ '-Z+[pass unstable (nightly-only) flags to cargo]: :_cargo_unstable_flags'
+ '--frozen[require that Cargo.lock and cache are up-to-date]'
+ '--locked[require that Cargo.lock is up-to-date]'
+ '--color=[specify colorization option]:coloring:(auto always never)'
+ '(- 1 *)'{-h,--help}'[show help message]'
+ )
+
+ # leading items in parentheses are an exclusion list for the arguments following that arg
+ # See: http://zsh.sourceforge.net/Doc/Release/Completion-System.html#Completion-Functions
+ # - => exclude all other options
+ # 1 => exclude positional arg 1
+ # * => exclude all other args
+ # +blah => exclude +blah
+ _arguments -s -S -C $common \
+ '(- 1 *)--list[list installed commands]' \
+ '(- 1 *)--explain=[provide a detailed explanation of an error message]:error code' \
+ '(- 1 *)'{-V,--version}'[show version information]' \
+ '(+beta +nightly)+stable[use the stable toolchain]' \
+ '(+stable +nightly)+beta[use the beta toolchain]' \
+ '(+stable +beta)+nightly[use the nightly toolchain]' \
+ '1: :_cargo_cmds' \
+ '*:: :->args'
+
+ # These flags are mutually exclusive specifiers for the scope of a command; as
+ # they are used in multiple places without change, they are expanded into the
+ # appropriate command's `_arguments` where appropriate.
+ command_scope_spec=(
+ '(--bin --example --test --lib)--bench=[specify benchmark name]: :_cargo_benchmark_names'
+ '(--bench --bin --test --lib)--example=[specify example name]:example name:_cargo_example_names'
+ '(--bench --example --test --lib)--bin=[specify binary name]:binary name'
+ '(--bench --bin --example --test)--lib=[specify library name]:library name'
+ '(--bench --bin --example --lib)--test=[specify test name]:test name'
+ )
+
+ parallel=(
+ '(-j --jobs)'{-j+,--jobs=}'[specify number of parallel jobs]:jobs [# of CPUs]'
+ '--keep-going[do not abort build on first error]'
+ )
+
+ features=(
+ '(--all-features)'{-F+,--features=}'[specify features to activate]:feature'
+ '(--features -F)--all-features[activate all available features]'
+ "--no-default-features[don't build the default features]"
+ )
+
+ msgfmt='--message-format=[specify error format]:error format [human]:(human json short)'
+ triple='--target=[specify target triple]:target triple:_cargo_target_triple'
+ target='--target-dir=[specify directory for all generated artifacts]:directory:_directories'
+ manifest='--manifest-path=[specify path to manifest]:path:_directories'
+ registry='--registry=[specify registry to use]:registry'
+
+ case $state in
+ args)
+ curcontext="${curcontext%:*}-${words[1]}:"
+ case ${words[1]} in
+ add)
+ _arguments -s -A "^--" $common $manifest $registry \
+ {-F+,--features=}'[specify features to activate]:feature' \
+ "--default-features[enable the default features]" \
+ "--no-default-features[don't enable the default features]" \
+ "--optional[mark the dependency as optional]" \
+ "--no-optional[mark the dependency as required]" \
+ "--dev[add as a dev dependency]" \
+ "--build[add as a build dependency]" \
+ "--target=[add as a dependency to the given target platform]" \
+ "--rename=[rename the dependency]" \
+ "--dry-run[don't actually write the manifest]" \
+ '--branch=[branch to use when adding from git]:branch' \
+ '--git=[specify URL from which to add the crate]:url:_urls' \
+ '--path=[local filesystem path to crate to add]: :_directories' \
+ '--rev=[specific commit to use when adding from git]:commit' \
+ '--tag=[tag to use when adding from git]:tag' \
+ '1: :_guard "^-*" "crate name"' \
+ '*:args:_default'
+ ;;
+ bench)
+ _arguments -s -A "^--" $common $parallel $features $msgfmt $triple $target $manifest \
+ "${command_scope_spec[@]}" \
+ '--all-targets[benchmark all targets]' \
+ "--no-run[compile but don't run]" \
+ '(-p --package)'{-p+,--package=}'[specify package to run benchmarks for]:package:_cargo_package_names' \
+ '--exclude=[exclude packages from the benchmark]:spec' \
+ '--no-fail-fast[run all benchmarks regardless of failure]' \
+ '--ignore-rust-version[Ignore rust-version specification in packages]' \
+ '1: :_guard "^-*" "bench name"' \
+ '*:args:_default'
+ ;;
+
+ build | b)
+ _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
+ '--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \
+ "${command_scope_spec[@]}" \
+ '(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \
+ '--release[build in release mode]' \
+ '--build-plan[output the build plan in JSON]' \
+ '--ignore-rust-version[Ignore rust-version specification in packages]'
+ ;;
+
+ check | c)
+ _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
+ '--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \
+ "${command_scope_spec[@]}" \
+ '(-p --package)'{-p+,--package=}'[specify package to check]:package:_cargo_package_names' \
+ '--release[check in release mode]' \
+ '--ignore-rust-version[Ignore rust-version specification in packages]'
+ ;;
+
+ clean)
+ _arguments -s -S $common $triple $target $manifest \
+ '(-p --package)'{-p+,--package=}'[specify package to clean]:package:_cargo_package_names' \
+ '--release[clean release artifacts]' \
+ '--doc[clean just the documentation directory]'
+ ;;
+
+ doc | d)
+ _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
+ '--no-deps[do not build docs for dependencies]' \
+ '--document-private-items[include non-public items in the documentation]' \
+ '--open[open docs in browser after the build]' \
+ '(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \
+ '--release[build artifacts in release mode, with optimizations]' \
+ '--ignore-rust-version[Ignore rust-version specification in packages]'
+ ;;
+
+ fetch)
+ _arguments -s -S $common $triple $manifest
+ ;;
+
+ fix)
+ _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
+ "${command_scope_spec[@]}" \
+ '--broken-code[fix code even if it already has compiler errors]' \
+ '--edition[fix in preparation for the next edition]' \
+ '--edition-idioms[fix warnings to migrate to the idioms of an edition]' \
+ '--allow-no-vcs[fix code even if a VCS was not detected]' \
+ '--allow-dirty[fix code even if the working directory is dirty]' \
+ '--allow-staged[fix code even if the working directory has staged changes]' \
+ '--ignore-rust-version[Ignore rust-version specification in packages]'
+ ;;
+
+ generate-lockfile)
+ _arguments -s -S $common $manifest
+ ;;
+
+ help)
+ _cargo_cmds
+ ;;
+
+ init)
+ _arguments -s -S $common $registry \
+ '--lib[use library template]' \
+ '--edition=[specify edition to set for the crate generated]:edition:(2015 2018 2021)' \
+ '--vcs=[initialize a new repo with a given VCS]:vcs:(git hg pijul fossil none)' \
+ '--name=[set the resulting package name]:name' \
+ '1:path:_directories'
+ ;;
+
+ install)
+ _arguments -s -S $common $parallel $features $triple $registry \
+ '(-f --force)'{-f,--force}'[force overwriting of existing crates or binaries]' \
+ '--bin=[only install the specified binary]:binary' \
+ '--branch=[branch to use when installing from git]:branch' \
+ '--debug[Build in debug mode (with the "dev" profile) instead of release mode]' \
+ '--example=[install the specified example instead of binaries]:example:_cargo_example_names' \
+ '--git=[specify URL from which to install the crate]:url:_urls' \
+ '--path=[local filesystem path to crate to install]: :_directories' \
+ '--rev=[specific commit to use when installing from git]:commit' \
+ '--root=[directory to install packages into]: :_directories' \
+ '--tag=[tag to use when installing from git]:tag' \
+ '--version=[version to install from crates.io]:version' \
+ '--list[list all installed packages and their versions]' \
+ '--ignore-rust-version[Ignore rust-version specification in packages]' \
+ '*: :_guard "^-*" "crate"'
+ ;;
+
+ locate-project)
+ _arguments -s -S $common $manifest \
+ '--message-format=[specify output representation]:output representation [json]:(json plain)' \
+ '--workspace[locate Cargo.toml of the workspace root]'
+ ;;
+
+ login)
+ _arguments -s -S $common $registry \
+ '*: :_guard "^-*" "token"'
+ ;;
+
+ metadata)
+ _arguments -s -S $common $features $manifest \
+ "--no-deps[output information only about the root package and don't fetch dependencies]" \
+ '--format-version=[specify format version]:version [1]:(1)'
+ ;;
+
+ new)
+ _arguments -s -S $common $registry \
+ '--lib[use library template]' \
+ '--vcs:initialize a new repo with a given VCS:(git hg none)' \
+ '--name=[set the resulting package name]'
+ ;;
+
+ owner)
+ _arguments -s -S $common $registry \
+ '(-a --add)'{-a,--add}'[specify name of a user or team to invite as an owner]:name' \
+ '--index=[specify registry index]:index' \
+ '(-l --list)'{-l,--list}'[list owners of a crate]' \
+ '(-r --remove)'{-r,--remove}'[specify name of a user or team to remove as an owner]:name' \
+ '--token=[specify API token to use when authenticating]:token' \
+ '*: :_guard "^-*" "crate"'
+ ;;
+
+ package)
+ _arguments -s -S $common $parallel $features $triple $target $manifest \
+ '(-l --list)'{-l,--list}'[print files included in a package without making one]' \
+ '--no-metadata[ignore warnings about a lack of human-usable metadata]' \
+ '--allow-dirty[allow dirty working directories to be packaged]' \
+ "--no-verify[don't build to verify contents]"
+ ;;
+
+ pkgid)
+ _arguments -s -S $common $manifest \
+ '(-p --package)'{-p+,--package=}'[specify package to get ID specifier for]:package:_cargo_package_names' \
+ '*: :_guard "^-*" "spec"'
+ ;;
+
+ publish)
+ _arguments -s -S $common $parallel $features $triple $target $manifest $registry \
+ '--index=[specify registry index]:index' \
+ '--allow-dirty[allow dirty working directories to be packaged]' \
+ "--no-verify[don't verify the contents by building them]" \
+ '--token=[specify token to use when uploading]:token' \
+ '--dry-run[perform all checks without uploading]'
+ ;;
+
+ read-manifest)
+ _arguments -s -S $common $manifest
+ ;;
+
+ remove | rm)
+ _arguments -s -A "^--" $common $manifest \
+ "--dev[remove as a dev dependency]" \
+ "--build[remove as a build dependency]" \
+ "--target=[remove as a dependency from the given target platform]" \
+ "--dry-run[don't actually write the manifest]" \
+ '(-p --package)'{-p+,--package=}'[package to remove from]:package:_cargo_package_names' \
+ '1: :_guard "^-*" "crate name"' \
+ '*:args:_default'
+ ;;
+
+ run | r)
+ _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
+ '--example=[name of the bin target]:name:_cargo_example_names' \
+ '--bin=[name of the bin target]:name' \
+ '(-p --package)'{-p+,--package=}'[specify package with the target to run]:package:_cargo_package_names' \
+ '--release[build in release mode]' \
+ '--ignore-rust-version[Ignore rust-version specification in packages]' \
+ '*: :_default'
+ ;;
+
+ rustc)
+ _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
+ '(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \
+ '--profile=[specify profile to build the selected target for]:profile' \
+ '--release[build artifacts in release mode, with optimizations]' \
+ "${command_scope_spec[@]}" \
+ '--ignore-rust-version[Ignore rust-version specification in packages]' \
+ '*: : _dispatch rustc rustc -default-'
+ ;;
+
+ rustdoc)
+ _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
+ '--document-private-items[include non-public items in the documentation]' \
+ '--open[open the docs in a browser after the operation]' \
+ '(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \
+ '--release[build artifacts in release mode, with optimizations]' \
+ "${command_scope_spec[@]}" \
+ '--ignore-rust-version[Ignore rust-version specification in packages]' \
+ '*: : _dispatch rustdoc rustdoc -default-'
+ ;;
+
+ search)
+ _arguments -s -S $common $registry \
+ '--index=[specify registry index]:index' \
+ '--limit=[limit the number of results]:results [10]' \
+ '*: :_guard "^-*" "query"'
+ ;;
+
+ test | t)
+ _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
+ '--test=[test name]: :_cargo_test_names' \
+ '--no-fail-fast[run all tests regardless of failure]' \
+ '--no-run[compile but do not run]' \
+ '(-p --package)'{-p+,--package=}'[package to run tests for]:package:_cargo_package_names' \
+ '--all[test all packages in the workspace]' \
+ '--release[build artifacts in release mode, with optimizations]' \
+ '1: :_cargo_test_names' \
+ '(--doc --bin --example --test --bench)--lib[only test library]' \
+ '(--lib --bin --example --test --bench)--doc[only test documentation]' \
+ '(--lib --doc --example --test --bench)--bin=[binary name]' \
+ '(--lib --doc --bin --test --bench)--example=[example name]:_cargo_example_names' \
+ '(--lib --doc --bin --example --bench)--test=[test name]' \
+ '(--lib --doc --bin --example --test)--bench=[benchmark name]' \
+ '--ignore-rust-version[Ignore rust-version specification in packages]' \
+ '*: :_default'
+ ;;
+
+ tree)
+ _arguments -s -S $common $features $triple $manifest \
+ '(-p --package)'{-p+,--package=}'[package to use as the root]:package:_cargo_package_names' \
+ '(-i --invert)'{-i+,--invert=}'[invert the tree for the given package]:package:_cargo_package_names' \
+ '--prefix=[line prefix]:prefix:(depth indent none)' \
+ '--no-dedupe[repeat shared dependencies]' \
+ '(-d --duplicates)'{-d,--duplicates}'[packages with multiple versions]' \
+ '--charset=[utf8 or ascii]:charset:(utf8 ascii)' \
+ '(-f --format)'{-f,--format=}'[format string]:format' \
+ '(-e --edges)'{-e,--edges=}'[edge kinds]:kind:(features normal build dev all no-dev no-build no-normal)' \
+ ;;
+
+ uninstall)
+ _arguments -s -S $common \
+ '(-p --package)'{-p+,--package=}'[specify package to uninstall]:package:_cargo_package_names' \
+ '--bin=[only uninstall the specified binary]:name' \
+ '--root=[directory to uninstall packages from]: :_files -/' \
+ '*:crate:_cargo_installed_crates -F line'
+ ;;
+
+ update)
+ _arguments -s -S $common $manifest \
+ '--aggressive=[force dependency update]' \
+ "--dry-run[don't actually write the lockfile]" \
+ '(-p --package)'{-p+,--package=}'[specify package to update]:package:_cargo_package_names' \
+ '--precise=[update single dependency to precise release]:release'
+ ;;
+
+ verify-project)
+ _arguments -s -S $common $manifest
+ ;;
+
+ version)
+ _arguments -s -S $common
+ ;;
+
+ yank)
+ _arguments -s -S $common $registry \
+ '--version=[specify yank version]:version' \
+ '--undo[undo a yank, putting a version back into the index]' \
+ '--index=[specify registry index to yank from]:registry index' \
+ '--token=[specify API token to use when authenticating]:token' \
+ '*: :_guard "^-*" "crate"'
+ ;;
+ *)
+ # allow plugins to define their own functions
+ if ! _call_function ret _cargo-${words[1]}; then
+ # fallback on default completion for unknown commands
+ _default && ret=0
+ fi
+ (( ! ret ))
+ ;;
+ esac
+ ;;
+ esac
+}
+
+_cargo_unstable_flags() {
+ local flags
+ flags=( help ${${${(M)${(f)"$(_call_program flags cargo -Z help)"}:#*--*}/ #-- #/:}##*-Z } )
+ _describe -t flags 'unstable flag' flags
+}
+
+_cargo_installed_crates() {
+ local expl
+ _description crates expl 'crate'
+ compadd "$@" "$expl[@]" - ${${${(f)"$(cargo install --list)"}:# *}%% *}
+}
+
+_cargo_cmds() {
+ local -a commands
+ # This uses Parameter Expansion Flags, which are a built-in Zsh feature.
+ # See more: http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags
+ # and http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion
+ #
+ # # How does this work?
+ #
+ # First it splits the result of `cargo --list` at newline, then it removes the first line.
+ # Then it removes indentation (4 whitespaces) before each item. (Note the x## pattern [1]).
+ # Then it replaces those spaces between item and description with a `:`
+ #
+ # [1]: https://github.com/zsh-users/zsh-completions/blob/master/zsh-completions-howto.org#patterns
+ commands=( ${${${(M)"${(f)$(_call_program commands cargo --list)}":# *}/ ##/}/ ##/:} )
+ _describe -t commands 'command' commands
+}
+
+_cargo_target_triple() {
+ local -a targets
+ targets=( ${(f)"$(rustc --print target-list)"} )
+ _describe 'target triple' targets
+}
+
+#FIXME: Disabled until fixed
+#gets package names from the manifest file
+_cargo_package_names() {
+ _message -e packages package
+}
+
+# Extracts the values of "name" from the array given in $1 and shows them as
+# command line options for completion
+_cargo_names_from_array() {
+ local manifest=$(cargo locate-project --message-format plain)
+ if [[ -z $manifest ]]; then
+ return 0
+ fi
+
+ local last_line
+ local -a names;
+ local in_block=false
+ local block_name=$1
+ names=()
+ while read -r line; do
+ if [[ $last_line == "[[$block_name]]" ]]; then
+ in_block=true
+ else
+ if [[ $last_line =~ '\s*\[\[.*' ]]; then
+ in_block=false
+ fi
+ fi
+
+ if [[ $in_block == true ]]; then
+ if [[ $line =~ '\s*name\s*=' ]]; then
+ regexp-replace line '^\s*name\s*=\s*|"' ''
+ names+=( "$line" )
+ fi
+ fi
+
+ last_line=$line
+ done < "$manifest"
+ _describe "$block_name" names
+
+}
+
+#Gets the test names from the manifest file
+_cargo_test_names() {
+ _cargo_names_from_array "test"
+}
+
+#Gets the bench names from the manifest file
+_cargo_benchmark_names() {
+ _cargo_names_from_array "bench"
+}
+
+_cargo_example_names() {
+ if [[ -d examples ]]; then
+ local -a files=(${(@f)$(echo examples/*.rs(:t:r))})
+ _values 'example' "${files[@]}"
+ fi
+}
+
+_cargo
diff --git a/src/tools/cargo/src/etc/cargo.bashcomp.sh b/src/tools/cargo/src/etc/cargo.bashcomp.sh
new file mode 100644
index 000000000..2867ec56d
--- /dev/null
+++ b/src/tools/cargo/src/etc/cargo.bashcomp.sh
@@ -0,0 +1,287 @@
+# Required for bash versions < 4.1
+# Default bash version is 3.2 on latest macOS. See #6874
+shopt -s extglob
+
+command -v cargo >/dev/null 2>&1 &&
+_cargo()
+{
+ local cur prev words cword
+ _get_comp_words_by_ref cur prev words cword
+
+ COMPREPLY=()
+
+ # Skip past - and + options to find the command.
+ local nwords=${#words[@]}
+ local cmd_i cmd dd_i
+ for (( cmd_i=1; cmd_i<$nwords; cmd_i++ ));
+ do
+ if [[ ! "${words[$cmd_i]}" =~ ^[+-] ]]; then
+ cmd="${words[$cmd_i]}"
+ break
+ fi
+ done
+ # Find the location of the -- separator.
+ for (( dd_i=1; dd_i<$nwords-1; dd_i++ ));
+ do
+ if [[ "${words[$dd_i]}" = "--" ]]; then
+ break
+ fi
+ done
+
+ local vcs='git hg none pijul fossil'
+ local color='auto always never'
+ local msg_format='human json short'
+
+ local opt_help='-h --help'
+ local opt_verbose='-v --verbose'
+ local opt_quiet='-q --quiet'
+ local opt_color='--color'
+ local opt_common="$opt_help $opt_verbose $opt_quiet $opt_color"
+ local opt_pkg_spec='-p --package --all --exclude --workspace'
+ local opt_pkg='-p --package'
+ local opt_feat='-F --features --all-features --no-default-features'
+ local opt_mani='--manifest-path'
+ local opt_parallel='-j --jobs --keep-going'
+ local opt_force='-f --force'
+ local opt_sync='-s --sync'
+ local opt_lock='--frozen --locked --offline'
+ local opt_targets="--lib --bin --bins --example --examples --test --tests --bench --benches --all-targets"
+
+ local opt___nocmd="$opt_common -V --version --list --explain"
+ local opt__add="$opt_common -p --package --features --default-features --no-default-features $opt_mani --optional --no-optional --rename --dry-run --path --git --branch --tag --rev --registry --dev --build --target"
+ local opt__bench="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --no-run --no-fail-fast --target-dir --ignore-rust-version"
+ local opt__build="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --release --profile --target-dir --ignore-rust-version"
+ local opt__b="$opt__build"
+ local opt__check="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --release --profile --target-dir --ignore-rust-version"
+ local opt__c="$opt__check"
+ local opt__clean="$opt_common $opt_pkg $opt_mani $opt_lock --target --release --doc --target-dir --profile"
+ local opt__clippy="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --release --profile --target-dir --no-deps --fix"
+ local opt__doc="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel --message-format --bin --bins --lib --target --open --no-deps --release --document-private-items --target-dir --profile --ignore-rust-version"
+ local opt__d="$opt__doc"
+ local opt__fetch="$opt_common $opt_mani $opt_lock --target"
+ local opt__fix="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_parallel $opt_targets $opt_lock --release --target --message-format --broken-code --edition --edition-idioms --allow-no-vcs --allow-dirty --allow-staged --profile --target-dir --ignore-rust-version"
+ local opt__generate_lockfile="$opt_common $opt_mani $opt_lock"
+ local opt__help="$opt_help"
+ local opt__init="$opt_common $opt_lock --bin --lib --name --vcs --edition --registry"
+ local opt__install="$opt_common $opt_feat $opt_parallel $opt_lock $opt_force --bin --bins --branch --debug --example --examples --git --list --path --rev --root --tag --version --registry --target --profile --no-track --ignore-rust-version"
+ local opt__locate_project="$opt_common $opt_mani $opt_lock --message-format --workspace"
+ local opt__login="$opt_common $opt_lock --registry"
+ local opt__metadata="$opt_common $opt_feat $opt_mani $opt_lock --format-version=1 --no-deps --filter-platform"
+ local opt__new="$opt_common $opt_lock --vcs --bin --lib --name --edition --registry"
+ local opt__owner="$opt_common $opt_lock -a --add -r --remove -l --list --index --token --registry"
+ local opt__package="$opt_common $opt_mani $opt_feat $opt_lock $opt_parallel --allow-dirty -l --list --no-verify --no-metadata --target --target-dir"
+ local opt__pkgid="$opt_common $opt_mani $opt_lock $opt_pkg"
+ local opt__publish="$opt_common $opt_mani $opt_feat $opt_lock $opt_parallel --allow-dirty --dry-run --token --no-verify --index --registry --target --target-dir"
+ local opt__read_manifest="$opt_help $opt_quiet $opt_verbose $opt_mani $opt_color $opt_lock --no-deps"
+ local opt__remove="$opt_common $opt_pkg $opt_lock $opt_mani --dry-run --dev --build --target"
+ local opt__rm="$opt__remove"
+ local opt__report="$opt_help $opt_verbose $opt_color future-incompat future-incompatibilities"
+ local opt__report__future_incompat="$opt_help $opt_verbose $opt_color $opt_pkg --id"
+ local opt__run="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_parallel --message-format --target --bin --example --release --target-dir --profile --ignore-rust-version"
+ local opt__r="$opt__run"
+ local opt__rustc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets -L --crate-type --extern --message-format --profile --target --release --target-dir --ignore-rust-version"
+ local opt__rustdoc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --release --open --target-dir --profile --ignore-rust-version"
+ local opt__search="$opt_common $opt_lock --limit --index --registry"
+ local opt__test="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --doc --target --no-run --release --no-fail-fast --target-dir --profile --ignore-rust-version"
+ local opt__t="$opt__test"
+ local opt__tree="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock --target -i --invert --prefix --no-dedupe --duplicates -d --charset -f --format -e --edges"
+ local opt__uninstall="$opt_common $opt_lock $opt_pkg --bin --root"
+ local opt__update="$opt_common $opt_mani $opt_lock $opt_pkg --aggressive --precise --dry-run"
+ local opt__vendor="$opt_common $opt_mani $opt_lock $opt_sync --no-delete --respect-source-config --versioned-dirs"
+ local opt__verify_project="$opt_common $opt_mani $opt_lock"
+ local opt__version="$opt_common $opt_lock"
+ local opt__yank="$opt_common $opt_lock --version --undo --index --token --registry"
+ local opt__libtest="--help --include-ignored --ignored --test --bench --list --logfile --nocapture --test-threads --skip -q --quiet --exact --color --format"
+
+ if [[ $cword -gt $dd_i ]]; then
+ # Completion after -- separator.
+ if [[ "${cmd}" = @(test|bench) ]]; then
+ COMPREPLY=( $( compgen -W "${opt__libtest}" -- "$cur" ) )
+ else
+ # Fallback to filename completion, useful with `cargo run`.
+ _filedir
+ fi
+ elif [[ $cword -le $cmd_i ]]; then
+ # Completion before or at the command.
+ if [[ "$cur" == -* ]]; then
+ COMPREPLY=( $( compgen -W "${opt___nocmd}" -- "$cur" ) )
+ elif [[ "$cur" == +* ]]; then
+ COMPREPLY=( $( compgen -W "$(_toolchains)" -- "$cur" ) )
+ else
+ _ensure_cargo_commands_cache_filled
+ COMPREPLY=( $( compgen -W "$__cargo_commands_cache" -- "$cur" ) )
+ fi
+ else
+ case "${prev}" in
+ --vcs)
+ COMPREPLY=( $( compgen -W "$vcs" -- "$cur" ) )
+ ;;
+ --color)
+ COMPREPLY=( $( compgen -W "$color" -- "$cur" ) )
+ ;;
+ --message-format)
+ COMPREPLY=( $( compgen -W "$msg_format" -- "$cur" ) )
+ ;;
+ --manifest-path)
+ _filedir toml
+ ;;
+ --bin)
+ COMPREPLY=( $( compgen -W "$(_bin_names)" -- "$cur" ) )
+ ;;
+ --test)
+ COMPREPLY=( $( compgen -W "$(_test_names)" -- "$cur" ) )
+ ;;
+ --bench)
+ COMPREPLY=( $( compgen -W "$(_benchmark_names)" -- "$cur" ) )
+ ;;
+ --example)
+ COMPREPLY=( $( compgen -W "$(_get_examples)" -- "$cur" ) )
+ ;;
+ --target)
+ COMPREPLY=( $( compgen -W "$(_get_targets)" -- "$cur" ) )
+ ;;
+ --target-dir|--path)
+ _filedir -d
+ ;;
+ help)
+ _ensure_cargo_commands_cache_filled
+ COMPREPLY=( $( compgen -W "$__cargo_commands_cache" -- "$cur" ) )
+ ;;
+ *)
+ if [[ "$cmd" == "report" && "$prev" == future-incompat* ]]; then
+ local opt_var=opt__${cmd//-/_}__${prev//-/_}
+ else
+ local opt_var=opt__${cmd//-/_}
+ fi
+ if [[ -z "${!opt_var}" ]]; then
+ # Fallback to filename completion.
+ _filedir
+ else
+ COMPREPLY=( $( compgen -W "${!opt_var}" -- "$cur" ) )
+ fi
+ ;;
+ esac
+ fi
+
+ # compopt does not work in bash version 3
+
+ return 0
+} &&
+complete -F _cargo cargo
+
+__cargo_commands_cache=
+_ensure_cargo_commands_cache_filled(){
+ if [[ -z $__cargo_commands_cache ]]; then
+ __cargo_commands_cache="$(cargo --list 2>/dev/null | awk 'NR>1 {print $1}')"
+ fi
+}
+
+_locate_manifest(){
+ cargo locate-project --message-format plain 2>/dev/null
+}
+
+# Extracts the values of "name" from the array given in $1 and shows them as
+# command line options for completion
+_get_names_from_array()
+{
+ local manifest=$(_locate_manifest)
+ if [[ -z $manifest ]]; then
+ return 0
+ fi
+
+ local last_line
+ local -a names
+ local in_block=false
+ local block_name=$1
+ while read line
+ do
+ if [[ $last_line == "[[$block_name]]" ]]; then
+ in_block=true
+ else
+ if [[ $last_line =~ .*\[\[.* ]]; then
+ in_block=false
+ fi
+ fi
+
+ if [[ $in_block == true ]]; then
+ if [[ $line =~ .*name.*\= ]]; then
+ line=${line##*=}
+ line=${line%%\"}
+ line=${line##*\"}
+ names+=($line)
+ fi
+ fi
+
+ last_line=$line
+ done < $manifest
+ echo "${names[@]}"
+}
+
+#Gets the bin names from the manifest file
+_bin_names()
+{
+ _get_names_from_array "bin"
+}
+
+#Gets the test names from the manifest file
+_test_names()
+{
+ _get_names_from_array "test"
+}
+
+#Gets the bench names from the manifest file
+_benchmark_names()
+{
+ _get_names_from_array "bench"
+}
+
+_get_examples(){
+ local manifest=$(_locate_manifest)
+ [ -z "$manifest" ] && return 0
+
+ local files=("${manifest%/*}"/examples/*.rs)
+ local names=("${files[@]##*/}")
+ local names=("${names[@]%.*}")
+ # "*" means no examples found
+ if [[ "${names[@]}" != "*" ]]; then
+ echo "${names[@]}"
+ fi
+}
+
+_get_targets(){
+ local result=()
+ local targets=$(rustup target list)
+ while read line
+ do
+ if [[ "$line" =~ default|installed ]]; then
+ result+=("${line%% *}")
+ fi
+ done <<< "$targets"
+ echo "${result[@]}"
+}
+
+_toolchains(){
+ local result=()
+ local toolchains=$(rustup toolchain list)
+ local channels="nightly|beta|stable|[0-9]\.[0-9]{1,2}\.[0-9]"
+ local date="[0-9]{4}-[0-9]{2}-[0-9]{2}"
+ while read line
+ do
+ # Strip " (default)"
+ line=${line%% *}
+ if [[ "$line" =~ ^($channels)(-($date))?(-.*) ]]; then
+ if [[ -z ${BASH_REMATCH[3]} ]]; then
+ result+=("+${BASH_REMATCH[1]}")
+ else
+ # channel-date
+ result+=("+${BASH_REMATCH[1]}-${BASH_REMATCH[3]}")
+ fi
+ result+=("+$line")
+ else
+ result+=("+$line")
+ fi
+ done <<< "$toolchains"
+ echo "${result[@]}"
+}
+
+# vim:ft=sh
diff --git a/src/tools/cargo/src/etc/man/cargo-add.1 b/src/tools/cargo/src/etc/man/cargo-add.1
new file mode 100644
index 000000000..086a561fd
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-add.1
@@ -0,0 +1,329 @@
+'\" t
+.TH "CARGO\-ADD" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-add \[em] Add dependencies to a Cargo.toml manifest file
+.SH "SYNOPSIS"
+\fBcargo add\fR [\fIoptions\fR] \fIcrate\fR\[u2026]
+.br
+\fBcargo add\fR [\fIoptions\fR] \fB\-\-path\fR \fIpath\fR
+.br
+\fBcargo add\fR [\fIoptions\fR] \fB\-\-git\fR \fIurl\fR [\fIcrate\fR\[u2026]]
+.SH "DESCRIPTION"
+This command can add or modify dependencies.
+.sp
+The source for the dependency can be specified with:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fIcrate\fR\fB@\fR\fIversion\fR: Fetch from a registry with a version constraint of \[lq]\fIversion\fR\[rq]
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB\-\-path\fR \fIpath\fR: Fetch from the specified \fIpath\fR
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB\-\-git\fR \fIurl\fR: Pull from a git repo at \fIurl\fR
+.RE
+.sp
+If no source is specified, then a best effort will be made to select one, including:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Existing dependencies in other tables (like \fBdev\-dependencies\fR)
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Workspace members
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Latest release in the registry
+.RE
+.sp
+When you add a package that is already present, the existing entry will be updated with the flags specified.
+.sp
+Upon successful invocation, the enabled (\fB+\fR) and disabled (\fB\-\fR) \fIfeatures\fR <https://doc.rust\-lang.org/cargo/reference/features.html> of the specified
+dependency will be listed in the command\[cq]s output.
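+.sp
+For example, the \fIcrate\fR\fB@\fR\fIversion\fR form adds a dependency from the registry with an explicit version constraint:
+.sp
+.RS 4
+.nf
+cargo add serde@1.0
+.fi
+.RE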
+.SH "OPTIONS"
+.SS "Source options"
+.sp
+\fB\-\-git\fR \fIurl\fR
+.RS 4
+\fIGit URL to add the specified crate from\fR <https://doc.rust\-lang.org/cargo/reference/specifying\-dependencies.html#specifying\-dependencies\-from\-git\-repositories>\&.
+.RE
+.sp
+\fB\-\-branch\fR \fIbranch\fR
+.RS 4
+Branch to use when adding from git.
+.RE
+.sp
+\fB\-\-tag\fR \fItag\fR
+.RS 4
+Tag to use when adding from git.
+.RE
+.sp
+\fB\-\-rev\fR \fIsha\fR
+.RS 4
+Specific commit to use when adding from git.
+.RE
+.sp
+\fB\-\-path\fR \fIpath\fR
+.RS 4
+\fIFilesystem path\fR <https://doc.rust\-lang.org/cargo/reference/specifying\-dependencies.html#specifying\-path\-dependencies> to local crate to add.
+.RE
+.sp
+\fB\-\-registry\fR \fIregistry\fR
+.RS 4
+Name of the registry to use. Registry names are defined in \fICargo config
+files\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. If not specified, the default registry is used,
+which is defined by the \fBregistry.default\fR config key which defaults to
+\fBcrates\-io\fR\&.
+.RE
+.SS "Section options"
+.sp
+\fB\-\-dev\fR
+.RS 4
+Add as a \fIdevelopment dependency\fR <https://doc.rust\-lang.org/cargo/reference/specifying\-dependencies.html#development\-dependencies>\&.
+.RE
+.sp
+\fB\-\-build\fR
+.RS 4
+Add as a \fIbuild dependency\fR <https://doc.rust\-lang.org/cargo/reference/specifying\-dependencies.html#build\-dependencies>\&.
+.RE
+.sp
+\fB\-\-target\fR \fItarget\fR
+.RS 4
+Add as a dependency to the \fIgiven target platform\fR <https://doc.rust\-lang.org/cargo/reference/specifying\-dependencies.html#platform\-specific\-dependencies>\&.
+.sp
+To avoid unexpected shell expansions, you may use quotes around each target, e.g., \fB\-\-target 'cfg(unix)'\fR\&.
+.RE
+.SS "Dependency options"
+.sp
+\fB\-\-dry\-run\fR
+.RS 4
+Don\[cq]t actually write the manifest.
+.RE
+.sp
+\fB\-\-rename\fR \fIname\fR
+.RS 4
+\fIRename\fR <https://doc.rust\-lang.org/cargo/reference/specifying\-dependencies.html#renaming\-dependencies\-in\-cargotoml> the dependency.
+.RE
+.sp
+\fB\-\-optional\fR
+.RS 4
+Mark the dependency as \fIoptional\fR <https://doc.rust\-lang.org/cargo/reference/features.html#optional\-dependencies>\&.
+.RE
+.sp
+\fB\-\-no\-optional\fR
+.RS 4
+Mark the dependency as \fIrequired\fR <https://doc.rust\-lang.org/cargo/reference/features.html#optional\-dependencies>\&.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Disable the \fIdefault features\fR <https://doc.rust\-lang.org/cargo/reference/features.html#dependency\-features>\&.
+.RE
+.sp
+\fB\-\-default\-features\fR
+.RS 4
+Re\-enable the \fIdefault features\fR <https://doc.rust\-lang.org/cargo/reference/features.html#dependency\-features>\&.
+.RE
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of \fIfeatures to
+activate\fR <https://doc.rust\-lang.org/cargo/reference/features.html#dependency\-features>\&. When adding multiple
+crates, the features for a specific crate may be enabled with
+\fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times,
+which enables all specified features.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-p\fR \fIspec\fR,
+\fB\-\-package\fR \fIspec\fR
+.RS 4
+Add dependencies to only the specified package.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Add \fBregex\fR as a dependency
+.sp
+.RS 4
+.nf
+cargo add regex
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Add \fBtrybuild\fR as a dev\-dependency
+.sp
+.RS 4
+.nf
+cargo add \-\-dev trybuild
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Add an older version of \fBnom\fR as a dependency
+.sp
+.RS 4
+.nf
+cargo add nom@5
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 4.\h'+01'Add support for serializing data structures to JSON with \fBderive\fRs
+.sp
+.RS 4
+.nf
+cargo add serde serde_json \-F serde/derive
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 5.\h'+01'Add \fBwindows\fR as a platform specific dependency on \fBcfg(windows)\fR
+.sp
+.RS 4
+.nf
+cargo add windows \-\-target 'cfg(windows)'
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-remove\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-bench.1 b/src/tools/cargo/src/etc/man/cargo-bench.1
new file mode 100644
index 000000000..b95902c4e
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-bench.1
@@ -0,0 +1,538 @@
+'\" t
+.TH "CARGO\-BENCH" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-bench \[em] Execute benchmarks of a package
+.SH "SYNOPSIS"
+\fBcargo bench\fR [\fIoptions\fR] [\fIbenchname\fR] [\fB\-\-\fR \fIbench\-options\fR]
+.SH "DESCRIPTION"
+Compile and execute benchmarks.
+.sp
+The benchmark filtering argument \fIbenchname\fR and all the arguments following
+the two dashes (\fB\-\-\fR) are passed to the benchmark binaries and thus to
+\fIlibtest\fR (rustc\[cq]s built\-in unit\-test and micro\-benchmarking framework). If
+you are passing arguments to both Cargo and the binary, the ones after \fB\-\-\fR go
+to the binary, the ones before go to Cargo. For details about libtest\[cq]s
+arguments see the output of \fBcargo bench \-\- \-\-help\fR and check out the rustc
+book\[cq]s chapter on how tests work at
+<https://doc.rust\-lang.org/rustc/tests/index.html>\&.
+.sp
+As an example, this will run only the benchmark named \fBfoo\fR (and skip other
+similarly named benchmarks like \fBfoobar\fR):
+.sp
+.RS 4
+.nf
+cargo bench \-\- foo \-\-exact
+.fi
+.RE
+.sp
+Benchmarks are built with the \fB\-\-test\fR option to \fBrustc\fR which creates a
+special executable by linking your code with libtest. The executable
+automatically runs all functions annotated with the \fB#[bench]\fR attribute.
+Cargo passes the \fB\-\-bench\fR flag to the test harness to tell it to run
+only benchmarks.
+.sp
+The libtest harness may be disabled by setting \fBharness = false\fR in the target
+manifest settings, in which case your code will need to provide its own \fBmain\fR
+function to handle running benchmarks.
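+.sp
+For example, a \fBCargo.toml\fR entry that opts a benchmark target out of the
+libtest harness might look like this (the target name is illustrative):
+.sp
+.RS 4
+.nf
+[[bench]]
+name = "my_bench"
+harness = false
+.fi
+.RE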
+.RS 3
+.ll -5
+.sp
+\fBNote\fR: The
+\fI\f(BI#[bench]\fI attribute\fR <https://doc.rust\-lang.org/nightly/unstable\-book/library\-features/test.html>
+is currently unstable and only available on the
+\fInightly channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html>\&.
+There are some packages available on
+\fIcrates.io\fR <https://crates.io/keywords/benchmark> that may help with
+running benchmarks on the stable channel, such as
+\fICriterion\fR <https://crates.io/crates/criterion>\&.
+.br
+.RE
+.ll
+.sp
+By default, \fBcargo bench\fR uses the \fI\f(BIbench\fI profile\fR <https://doc.rust\-lang.org/cargo/reference/profiles.html#bench>, which enables
+optimizations and disables debugging information. If you need to debug a
+benchmark, you can use the \fB\-\-profile=dev\fR command\-line option to switch to
+the dev profile. You can then run the debug\-enabled benchmark within a
+debugger.
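+.sp
+For example, to build the benchmarks with the dev profile without running them:
+.sp
+.RS 4
+.nf
+cargo bench \-\-profile=dev \-\-no\-run
+.fi
+.RE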
+.SS "Working directory of benchmarks"
+The working directory of every benchmark is set to the root directory of the
+package the benchmark belongs to.
+Setting the working directory of benchmarks to the package\[cq]s root directory
+makes it possible for benchmarks to reliably access the package\[cq]s files using
+relative paths, regardless of where \fBcargo bench\fR was executed from.
+.SH "OPTIONS"
+.SS "Benchmark Options"
+.sp
+\fB\-\-no\-run\fR
+.RS 4
+Compile, but don\[cq]t run benchmarks.
+.RE
+.sp
+\fB\-\-no\-fail\-fast\fR
+.RS 4
+Run all benchmarks regardless of failure. Without this flag, Cargo will exit
+after the first executable fails. The Rust test harness will run all benchmarks
+within the executable to completion; this flag only applies to the executable
+as a whole.
+.RE
+.SS "Package Selection"
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace, then
+the workspace\[cq]s default members are selected; otherwise only the package defined
+by the manifest will be selected.
+.sp
+The default members of a workspace can be set explicitly with the
+\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself.
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Benchmark only the specified packages. See \fBcargo\-pkgid\fR(1) for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.
+.RE
+.sp
+\fB\-\-workspace\fR
+.RS 4
+Benchmark all members in the workspace.
+.RE
+.sp
+\fB\-\-all\fR
+.RS 4
+Deprecated alias for \fB\-\-workspace\fR\&.
+.RE
+.sp
+\fB\-\-exclude\fR \fISPEC\fR\[u2026]
+.RS 4
+Exclude the specified packages. Must be used in conjunction with the
+\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports
+common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.
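+.sp
+For example, to benchmark all workspace members except one (the package name
+here is illustrative):
+.sp
+.RS 4
+.nf
+cargo bench \-\-workspace \-\-exclude my\-slow\-crate
+.fi
+.RE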
+.RE
+.SS "Target Selection"
+When no target selection options are given, \fBcargo bench\fR will build the
+following targets of the selected packages:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'lib \[em] used to link with binaries and benchmarks
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'bins (only if benchmark targets are built and required features are
+available)
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'lib as a benchmark
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'bins as benchmarks
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'benchmark targets
+.RE
+.sp
+The default behavior can be changed by setting the \fBbench\fR flag for the target
+in the manifest settings. Setting examples to \fBbench = true\fR will build and
+run the example as a benchmark. Setting targets to \fBbench = false\fR will stop
+them from being benchmarked by default. Target selection options that take a
+target by name ignore the \fBbench\fR flag and will always benchmark the given
+target.
+.sp
+Binary targets are automatically built if there is an integration test or
+benchmark being selected to benchmark. This allows an integration
+test to execute the binary to exercise and test its behavior.
+The \fBCARGO_BIN_EXE_<name>\fR
+\fIenvironment variable\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html#environment\-variables\-cargo\-sets\-for\-crates>
+is set when the integration test is built so that it can use the
+\fI\f(BIenv\fI macro\fR <https://doc.rust\-lang.org/std/macro.env.html> to locate the
+executable.
+.sp
+Passing target selection flags will benchmark only the specified
+targets.
+.sp
+Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also
+support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
+.sp
+\fB\-\-lib\fR
+.RS 4
+Benchmark the package\[cq]s library.
+.RE
+.sp
+\fB\-\-bin\fR \fIname\fR\[u2026]
+.RS 4
+Benchmark the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-bins\fR
+.RS 4
+Benchmark all binary targets.
+.RE
+.sp
+\fB\-\-example\fR \fIname\fR\[u2026]
+.RS 4
+Benchmark the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-examples\fR
+.RS 4
+Benchmark all example targets.
+.RE
+.sp
+\fB\-\-test\fR \fIname\fR\[u2026]
+.RS 4
+Benchmark the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-tests\fR
+.RS 4
+Benchmark all targets in test mode that have the \fBtest = true\fR manifest
+flag set. By default this includes the library and binaries built as
+unit tests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unit test, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the \fBtest\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-bench\fR \fIname\fR\[u2026]
+.RS 4
+Benchmark the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-benches\fR
+.RS 4
+Benchmark all targets in benchmark mode that have the \fBbench = true\fR
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the \fBbench\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-all\-targets\fR
+.RS 4
+Benchmark all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Benchmark for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
+.sp
+\fB\-\-profile\fR \fIname\fR
+.RS 4
+Benchmark with the given profile.
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/profiles.html> for more details on profiles.
+.RE
+.sp
+\fB\-\-ignore\-rust\-version\fR
+.RS 4
+Benchmark the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project\[cq]s \fBrust\-version\fR field.
+.RE
+.sp
+\fB\-\-timings=\fR\fIfmts\fR
+.RS 4
+Output information about how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma\-separated list of output
+formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&.
+Specifying an output format (rather than the default) is unstable and requires
+\fB\-Zunstable\-options\fR\&. Valid output formats:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the
+\fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine\-readable timing data.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON
+timing information.
+.RE
+.RE
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.SS "Display Options"
+By default the Rust test harness hides output from benchmark execution to keep
+results readable. Benchmark output can be recovered (e.g., for debugging) by
+passing \fB\-\-nocapture\fR to the benchmark binaries:
+.sp
+.RS 4
+.nf
+cargo bench \-\- \-\-nocapture
+.fi
+.RE
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-message\-format\fR \fIfmt\fR
+.RS 4
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma\-separated values. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with
+\fBshort\fR and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR
+and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See
+\fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/external\-tools.html#json\-messages>
+for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains
+the \[lq]short\[rq] rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages
+contains embedded ANSI color codes for respecting rustc\[cq]s default color
+scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics
+in the JSON messages it prints, but instead render them itself. Cargo\[cq]s own
+JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SS "Miscellaneous Options"
+The \fB\-\-jobs\fR argument affects the building of the benchmark executable but
+does not affect how many threads are used when running the benchmarks. The
+Rust test harness runs benchmarks serially in a single thread.
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+Should not be 0.
+.RE
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Build and execute all the benchmarks of the current package:
+.sp
+.RS 4
+.nf
+cargo bench
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Run only a specific benchmark within a specific benchmark target:
+.sp
+.RS 4
+.nf
+cargo bench \-\-bench bench_name \-\- modname::some_benchmark
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-test\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-build.1 b/src/tools/cargo/src/etc/man/cargo-build.1
new file mode 100644
index 000000000..80ae4ac90
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-build.1
@@ -0,0 +1,467 @@
+'\" t
+.TH "CARGO\-BUILD" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-build \[em] Compile the current package
+.SH "SYNOPSIS"
+\fBcargo build\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+Compile local packages and all of their dependencies.
+.SH "OPTIONS"
+.SS "Package Selection"
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace, then
+the workspace\[cq]s default members are selected; otherwise only the package defined
+by the manifest will be selected.
+.sp
+The default members of a workspace can be set explicitly with the
+\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself.
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Build only the specified packages. See \fBcargo\-pkgid\fR(1) for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.
+.RE
+.sp
+\fB\-\-workspace\fR
+.RS 4
+Build all members in the workspace.
+.RE
+.sp
+\fB\-\-all\fR
+.RS 4
+Deprecated alias for \fB\-\-workspace\fR\&.
+.RE
+.sp
+\fB\-\-exclude\fR \fISPEC\fR\[u2026]
+.RS 4
+Exclude the specified packages. Must be used in conjunction with the
+\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports
+common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.
+.RE
+.SS "Target Selection"
+When no target selection options are given, \fBcargo build\fR will build all
+binary and library targets of the selected packages. Binaries are skipped if
+they have \fBrequired\-features\fR that are missing.
+.sp
+Binary targets are automatically built if there is an integration test or
+benchmark being selected to build. This allows an integration
+test to execute the binary to exercise and test its behavior.
+The \fBCARGO_BIN_EXE_<name>\fR
+\fIenvironment variable\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html#environment\-variables\-cargo\-sets\-for\-crates>
+is set when the integration test is built so that it can use the
+\fI\f(BIenv\fI macro\fR <https://doc.rust\-lang.org/std/macro.env.html> to locate the
+executable.
+.sp
+Passing target selection flags will build only the specified
+targets.
+.sp
+Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also
+support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
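+.sp
+For example, to build only the binaries whose names match a glob pattern (the
+pattern is illustrative):
+.sp
+.RS 4
+.nf
+cargo build \-\-bin 'server\-*'
+.fi
+.RE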
+.sp
+\fB\-\-lib\fR
+.RS 4
+Build the package\[cq]s library.
+.RE
+.sp
+\fB\-\-bin\fR \fIname\fR\[u2026]
+.RS 4
+Build the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-bins\fR
+.RS 4
+Build all binary targets.
+.RE
+.sp
+\fB\-\-example\fR \fIname\fR\[u2026]
+.RS 4
+Build the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-examples\fR
+.RS 4
+Build all example targets.
+.RE
+.sp
+\fB\-\-test\fR \fIname\fR\[u2026]
+.RS 4
+Build the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-tests\fR
+.RS 4
+Build all targets in test mode that have the \fBtest = true\fR manifest
+flag set. By default this includes the library and binaries built as
+unit tests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unit test, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the \fBtest\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-bench\fR \fIname\fR\[u2026]
+.RS 4
+Build the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-benches\fR
+.RS 4
+Build all targets in benchmark mode that have the \fBbench = true\fR
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the \fBbench\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-all\-targets\fR
+.RS 4
+Build all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Build for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
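+.sp
+For example, to cross\-compile for a 64\-bit Linux target:
+.sp
+.RS 4
+.nf
+cargo build \-\-target x86_64\-unknown\-linux\-gnu
+.fi
+.RE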
+.RE
+.sp
+\fB\-r\fR,
+\fB\-\-release\fR
+.RS 4
+Build optimized artifacts with the \fBrelease\fR profile.
+See also the \fB\-\-profile\fR option for choosing a specific profile by name.
+.RE
+.sp
+\fB\-\-profile\fR \fIname\fR
+.RS 4
+Build with the given profile.
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/profiles.html> for more details on profiles.
+.RE
+.sp
+\fB\-\-ignore\-rust\-version\fR
+.RS 4
+Build the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project\[cq]s \fBrust\-version\fR field.
+.RE
+.sp
+\fB\-\-timings=\fR\fIfmts\fR
+.RS 4
+Output information about how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma\-separated list of output
+formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&.
+Specifying an output format (rather than the default) is unstable and requires
+\fB\-Zunstable\-options\fR\&. Valid output formats:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the
+\fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine\-readable timing data.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON
+timing information.
+.RE
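+.sp
+For example, the default HTML report under \fBtarget/cargo\-timings\fR can be
+generated with:
+.sp
+.RS 4
+.nf
+cargo build \-\-timings
+.fi
+.RE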
+.RE
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.sp
+\fB\-\-out\-dir\fR \fIdirectory\fR
+.RS 4
+Copy final artifacts to this directory.
+.sp
+This option is unstable and available only on the
+\fInightly channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html>
+and requires the \fB\-Z unstable\-options\fR flag to enable.
+See <https://github.com/rust\-lang/cargo/issues/6790> for more information.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-message\-format\fR \fIfmt\fR
+.RS 4
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma\-separated values. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with
+\fBshort\fR and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR
+and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See
+\fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/external\-tools.html#json\-messages>
+for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains
+the \[lq]short\[rq] rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages
+contains embedded ANSI color codes for respecting rustc\[cq]s default color
+scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics
+in the JSON messages it prints, but instead render them itself. Cargo\[cq]s own
+JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.RE
+.sp
+\fB\-\-build\-plan\fR
+.RS 4
+Outputs a series of JSON messages to stdout that indicate the commands to run
+the build.
+.sp
+This option is unstable and available only on the
+\fInightly channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html>
+and requires the \fB\-Z unstable\-options\fR flag to enable.
+See <https://github.com/rust\-lang/cargo/issues/5579> for more information.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+Should not be 0.
+.RE
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.sp
+\fB\-\-future\-incompat\-report\fR
+.RS 4
+Displays a future\-incompat report for any future\-incompatible warnings
+produced during execution of this command.
+.sp
+See \fBcargo\-report\fR(1).
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Build the local package and all of its dependencies:
+.sp
+.RS 4
+.nf
+cargo build
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Build with optimizations:
+.sp
+.RS 4
+.nf
+cargo build \-\-release
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-rustc\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-check.1 b/src/tools/cargo/src/etc/man/cargo-check.1
new file mode 100644
index 000000000..cf7a66d89
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-check.1
@@ -0,0 +1,448 @@
+'\" t
+.TH "CARGO\-CHECK" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-check \[em] Check the current package
+.SH "SYNOPSIS"
+\fBcargo check\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+Check a local package and all of its dependencies for errors. This will
+essentially compile the packages without performing the final step of code
+generation, which is faster than running \fBcargo build\fR\&. The compiler will save
+metadata files to disk so that future runs will reuse them if the source has
+not been modified. Some diagnostics and errors are only emitted during code
+generation, so they inherently won\[cq]t be reported with \fBcargo check\fR\&.
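+.sp
+For example, to check every target of the current package, including tests,
+benchmarks, and examples:
+.sp
+.RS 4
+.nf
+cargo check \-\-all\-targets
+.fi
+.RE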
+.SH "OPTIONS"
+.SS "Package Selection"
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace, then
+the workspace\[cq]s default members are selected; otherwise only the package defined
+by the manifest will be selected.
+.sp
+The default members of a workspace can be set explicitly with the
+\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself.
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Check only the specified packages. See \fBcargo\-pkgid\fR(1) for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.
+.RE
+.sp
+\fB\-\-workspace\fR
+.RS 4
+Check all members in the workspace.
+.RE
+.sp
+\fB\-\-all\fR
+.RS 4
+Deprecated alias for \fB\-\-workspace\fR\&.
+.RE
+.sp
+\fB\-\-exclude\fR \fISPEC\fR\[u2026]
+.RS 4
+Exclude the specified packages. Must be used in conjunction with the
+\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports
+common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.
+.RE
+.SS "Target Selection"
+When no target selection options are given, \fBcargo check\fR will check all
+binary and library targets of the selected packages. Binaries are skipped if
+they have \fBrequired\-features\fR that are missing.
+.sp
+Passing target selection flags will check only the specified
+targets.
+.sp
+Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also
+support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
+.sp
+\fB\-\-lib\fR
+.RS 4
+Check the package\[cq]s library.
+.RE
+.sp
+\fB\-\-bin\fR \fIname\fR\[u2026]
+.RS 4
+Check the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-bins\fR
+.RS 4
+Check all binary targets.
+.RE
+.sp
+\fB\-\-example\fR \fIname\fR\[u2026]
+.RS 4
+Check the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-examples\fR
+.RS 4
+Check all example targets.
+.RE
+.sp
+\fB\-\-test\fR \fIname\fR\[u2026]
+.RS 4
+Check the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-tests\fR
+.RS 4
+Check all targets in test mode that have the \fBtest = true\fR manifest
+flag set. By default this includes the library and binaries built as
+unit tests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unit test, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the \fBtest\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-bench\fR \fIname\fR\[u2026]
+.RS 4
+Check the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-benches\fR
+.RS 4
+Check all targets in benchmark mode that have the \fBbench = true\fR
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the \fBbench\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-all\-targets\fR
+.RS 4
+Check all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Check for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
+.sp
+\fB\-r\fR,
+\fB\-\-release\fR
+.RS 4
+Check optimized artifacts with the \fBrelease\fR profile.
+See also the \fB\-\-profile\fR option for choosing a specific profile by name.
+.RE
+.sp
+\fB\-\-profile\fR \fIname\fR
+.RS 4
+Check with the given profile.
+.sp
+As a special case, specifying the \fBtest\fR profile will also enable checking in
+test mode, which checks tests and enables the \fBtest\fR cfg option.
+See \fIrustc tests\fR <https://doc.rust\-lang.org/rustc/tests/index.html> for more
+detail.
+.sp
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/profiles.html> for more details on profiles.
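+.sp
+For example, to also type\-check the package\[cq]s tests:
+.sp
+.RS 4
+.nf
+cargo check \-\-profile=test
+.fi
+.RE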
+.RE
+.sp
+\fB\-\-ignore\-rust\-version\fR
+.RS 4
+Check the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project\[cq]s \fBrust\-version\fR field.
+.RE
+.sp
+\fB\-\-timings=\fR\fIfmts\fR
+.RS 4
+Output information about how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma\-separated list of output
+formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&.
+Specifying an output format (rather than the default) is unstable and requires
+\fB\-Zunstable\-options\fR\&. Valid output formats:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the
+\fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine\-readable timing data.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON
+timing information.
+.RE
+.RE
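+.sp
+For example, the following writes the default HTML report to the
+\fBtarget/cargo\-timings\fR directory while checking:
+.sp
+.RS 4
+.nf
+cargo check \-\-timings
+.fi
+.RE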
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-message\-format\fR \fIfmt\fR
+.RS 4
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma\-separated values. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with
+\fBshort\fR and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR
+and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See
+\fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/external\-tools.html#json\-messages>
+for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains
+the \[lq]short\[rq] rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages
+contains embedded ANSI color codes for respecting rustc\[cq]s default color
+scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo not to include rustc diagnostics
+in the JSON messages it prints; instead, Cargo itself renders the JSON
+diagnostics coming from rustc. Cargo\[cq]s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.RE
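+.sp
+For example, a tool that consumes Cargo\[cq]s diagnostic output might invoke:
+.sp
+.RS 4
+.nf
+cargo check \-\-message\-format=json
+.fi
+.RE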
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
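+.sp
+For example, to override the \fBbuild.jobs\fR config value for a single invocation:
+.sp
+.RS 4
+.nf
+cargo \-\-config build.jobs=2 check
+.fi
+.RE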
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+The value should not be 0.
+.RE
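+.sp
+For example, to limit the check to two parallel jobs:
+.sp
+.RS 4
+.nf
+cargo check \-\-jobs 2
+.fi
+.RE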
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.sp
+\fB\-\-future\-incompat\-report\fR
+.RS 4
+Displays a future\-incompat report for any future\-incompatible warnings
+produced during execution of this command.
+.sp
+See \fBcargo\-report\fR(1).
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Check the local package for errors:
+.sp
+.RS 4
+.nf
+cargo check
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Check all targets, including unit tests:
+.sp
+.RS 4
+.nf
+cargo check \-\-all\-targets \-\-profile=test
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-build\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-clean.1 b/src/tools/cargo/src/etc/man/cargo-clean.1
new file mode 100644
index 000000000..3cb321f05
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-clean.1
@@ -0,0 +1,215 @@
+'\" t
+.TH "CARGO\-CLEAN" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-clean \[em] Remove generated artifacts
+.SH "SYNOPSIS"
+\fBcargo clean\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+Remove artifacts from the target directory that Cargo has generated in the
+past.
+.sp
+With no options, \fBcargo clean\fR will delete the entire target directory.
+.SH "OPTIONS"
+.SS "Package Selection"
+When no packages are selected, all packages and all dependencies in the
+workspace are cleaned.
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Clean only the specified packages. This flag may be specified
+multiple times. See \fBcargo\-pkgid\fR(1) for the SPEC format.
+.RE
+.SS "Clean Options"
+.sp
+\fB\-\-doc\fR
+.RS 4
+This option will cause \fBcargo clean\fR to remove only the \fBdoc\fR directory in
+the target directory.
+.RE
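+.sp
+For example, to delete only the generated documentation:
+.sp
+.RS 4
+.nf
+cargo clean \-\-doc
+.fi
+.RE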
+.sp
+\fB\-\-release\fR
+.RS 4
+Remove all artifacts in the \fBrelease\fR directory.
+.RE
+.sp
+\fB\-\-profile\fR \fIname\fR
+.RS 4
+Remove all artifacts in the directory with the given profile name.
+.RE
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Clean for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Remove the entire target directory:
+.sp
+.RS 4
+.nf
+cargo clean
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Remove only the release artifacts:
+.sp
+.RS 4
+.nf
+cargo clean \-\-release
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-build\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-doc.1 b/src/tools/cargo/src/etc/man/cargo-doc.1
new file mode 100644
index 000000000..63ce2a050
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-doc.1
@@ -0,0 +1,398 @@
+'\" t
+.TH "CARGO\-DOC" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-doc \[em] Build a package\[cq]s documentation
+.SH "SYNOPSIS"
+\fBcargo doc\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+Build the documentation for the local package and all dependencies. The output
+is placed in \fBtarget/doc\fR in rustdoc\[cq]s usual format.
+.SH "OPTIONS"
+.SS "Documentation Options"
+.sp
+\fB\-\-open\fR
+.RS 4
+Open the docs in a browser after building them. This will use your default
+browser unless you define another one in the \fBBROWSER\fR environment variable
+or use the \fI\f(BIdoc.browser\fI\fR <https://doc.rust\-lang.org/cargo/reference/config.html#docbrowser> configuration
+option.
+.RE
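+.sp
+For example, to pick a browser for a single run (the browser name here is only
+illustrative):
+.sp
+.RS 4
+.nf
+BROWSER=firefox cargo doc \-\-open
+.fi
+.RE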
+.sp
+\fB\-\-no\-deps\fR
+.RS 4
+Do not build documentation for dependencies.
+.RE
+.sp
+\fB\-\-document\-private\-items\fR
+.RS 4
+Include non\-public items in the documentation. This will be enabled by default if documenting a binary target.
+.RE
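+.sp
+For example, to document only the local package, including its private items:
+.sp
+.RS 4
+.nf
+cargo doc \-\-no\-deps \-\-document\-private\-items
+.fi
+.RE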
+.SS "Package Selection"
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then
+the workspace\[cq]s default members are selected, otherwise only the package defined
+by the manifest will be selected.
+.sp
+The default members of a workspace can be set explicitly with the
+\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself.
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Document only the specified packages. See \fBcargo\-pkgid\fR(1) for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.
+.RE
+.sp
+\fB\-\-workspace\fR
+.RS 4
+Document all members in the workspace.
+.RE
+.sp
+\fB\-\-all\fR
+.RS 4
+Deprecated alias for \fB\-\-workspace\fR\&.
+.RE
+.sp
+\fB\-\-exclude\fR \fISPEC\fR\[u2026]
+.RS 4
+Exclude the specified packages. Must be used in conjunction with the
+\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports
+common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.
+.RE
+.SS "Target Selection"
+When no target selection options are given, \fBcargo doc\fR will document all
+binary and library targets of the selected package. The binary will be skipped
+if its name is the same as the lib target. Binaries are skipped if they have
+\fBrequired\-features\fR that are missing.
+.sp
+The default behavior can be changed by setting \fBdoc = false\fR for the target in
+the manifest settings. Using target selection options will ignore the \fBdoc\fR
+flag and will always document the given target.
+.sp
+\fB\-\-lib\fR
+.RS 4
+Document the package\[cq]s library.
+.RE
+.sp
+\fB\-\-bin\fR \fIname\fR\[u2026]
+.RS 4
+Document the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-bins\fR
+.RS 4
+Document all binary targets.
+.RE
+.sp
+\fB\-\-example\fR \fIname\fR\[u2026]
+.RS 4
+Document the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-examples\fR
+.RS 4
+Document all example targets.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Document for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
+.sp
+\fB\-r\fR,
+\fB\-\-release\fR
+.RS 4
+Document optimized artifacts with the \fBrelease\fR profile.
+See also the \fB\-\-profile\fR option for choosing a specific profile by name.
+.RE
+.sp
+\fB\-\-profile\fR \fIname\fR
+.RS 4
+Document with the given profile.
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/profiles.html> for more details on profiles.
+.RE
+.sp
+\fB\-\-ignore\-rust\-version\fR
+.RS 4
+Document the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project\[cq]s \fBrust\-version\fR field.
+.RE
+.sp
+\fB\-\-timings=\fR\fIfmts\fR
+.RS 4
+Output information about how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma\-separated list of output
+formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&.
+Specifying an output format (rather than the default) is unstable and requires
+\fB\-Zunstable\-options\fR\&. Valid output formats:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the
+\fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine\-readable timing data.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON
+timing information.
+.RE
+.RE
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-message\-format\fR \fIfmt\fR
+.RS 4
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma\-separated values. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with
+\fBshort\fR and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR
+and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See
+\fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/external\-tools.html#json\-messages>
+for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains
+the \[lq]short\[rq] rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages
+contains embedded ANSI color codes for respecting rustc\[cq]s default color
+scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo not to include rustc diagnostics
+in the JSON messages it prints; instead, Cargo itself renders the JSON
+diagnostics coming from rustc. Cargo\[cq]s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+The value should not be 0.
+.RE
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Build the local package documentation and its dependencies and output to
+\fBtarget/doc\fR\&.
+.sp
+.RS 4
+.nf
+cargo doc
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-rustdoc\fR(1), \fBrustdoc\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-fetch.1 b/src/tools/cargo/src/etc/man/cargo-fetch.1
new file mode 100644
index 000000000..3779b9c28
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-fetch.1
@@ -0,0 +1,179 @@
+'\" t
+.TH "CARGO\-FETCH" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-fetch \[em] Fetch dependencies of a package from the network
+.SH "SYNOPSIS"
+\fBcargo fetch\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+If a \fBCargo.lock\fR file is available, this command will ensure that all of the
+git dependencies and/or registry dependencies are downloaded and locally
+available. Subsequent Cargo commands will be able to run offline after a \fBcargo fetch\fR unless the lock file changes.
+.sp
+If the lock file is not available, then this command will generate the lock
+file before fetching the dependencies.
+.sp
+If \fB\-\-target\fR is not specified, then all target dependencies are fetched.
+.sp
+See also the \fIcargo\-prefetch\fR <https://crates.io/crates/cargo\-prefetch>
+plugin which adds a command to download popular crates. This may be useful if
+you plan to use Cargo without a network with the \fB\-\-offline\fR flag.
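+.sp
+For example, a typical offline workflow downloads everything first and then
+builds without touching the network:
+.sp
+.RS 4
+.nf
+cargo fetch
+cargo build \-\-offline
+.fi
+.RE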
+.SH "OPTIONS"
+.SS "Fetch options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Fetch for the given architecture. The default is all architectures. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
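+.sp
+For example, to fetch only the dependencies used when building for one target:
+.sp
+.RS 4
+.nf
+cargo fetch \-\-target x86_64\-unknown\-linux\-gnu
+.fi
+.RE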
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Fetch all dependencies:
+.sp
+.RS 4
+.nf
+cargo fetch
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-update\fR(1), \fBcargo\-generate\-lockfile\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-fix.1 b/src/tools/cargo/src/etc/man/cargo-fix.1
new file mode 100644
index 000000000..51b1e3fd6
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-fix.1
@@ -0,0 +1,545 @@
+'\" t
+.TH "CARGO\-FIX" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-fix \[em] Automatically fix lint warnings reported by rustc
+.SH "SYNOPSIS"
+\fBcargo fix\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+This Cargo subcommand will automatically take rustc\[cq]s suggestions from
+diagnostics like warnings and apply them to your source code. This is intended
+to help automate tasks that rustc itself already knows how to tell you to fix!
+.sp
+Executing \fBcargo fix\fR will under the hood execute \fBcargo\-check\fR(1). Any warnings
+applicable to your crate will be automatically fixed (if possible) and all
+remaining warnings will be displayed when the check process is finished. For
+example, if you\[cq]d like to apply all fixes to the current package, you can run:
+.sp
+.RS 4
+.nf
+cargo fix
+.fi
+.RE
+.sp
+which behaves the same as \fBcargo check \-\-all\-targets\fR\&.
+.sp
+\fBcargo fix\fR is only capable of fixing code that is normally compiled with
+\fBcargo check\fR\&. If code is conditionally enabled with optional features, you
+will need to enable those features for that code to be analyzed:
+.sp
+.RS 4
+.nf
+cargo fix \-\-features foo
+.fi
+.RE
+.sp
+Similarly, for other \fBcfg\fR expressions such as platform\-specific code, you will
+need to pass \fB\-\-target\fR to fix code for the given target.
+.sp
+.RS 4
+.nf
+cargo fix \-\-target x86_64\-pc\-windows\-gnu
+.fi
+.RE
+.sp
+If you encounter any problems with \fBcargo fix\fR or otherwise have any questions
+or feature requests please don\[cq]t hesitate to file an issue at
+<https://github.com/rust\-lang/cargo>\&.
+.SS "Edition migration"
+The \fBcargo fix\fR subcommand can also be used to migrate a package from one
+\fIedition\fR <https://doc.rust\-lang.org/edition\-guide/editions/transitioning\-an\-existing\-project\-to\-a\-new\-edition.html> to the next. The general procedure is:
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Run \fBcargo fix \-\-edition\fR\&. Consider also using the \fB\-\-all\-features\fR flag if
+your project has multiple features. You may also want to run \fBcargo fix \-\-edition\fR multiple times with different \fB\-\-target\fR flags if your project
+has platform\-specific code gated by \fBcfg\fR attributes.
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Modify \fBCargo.toml\fR to set the \fIedition field\fR <https://doc.rust\-lang.org/cargo/reference/manifest.html#the\-edition\-field> to the new edition.
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Run your project tests to verify that everything still works. If new
+warnings are issued, you may want to consider running \fBcargo fix\fR again
+(without the \fB\-\-edition\fR flag) to apply any suggestions given by the
+compiler.
+.RE
+.sp
+And hopefully that\[cq]s it! Just keep in mind the caveats mentioned above:
+\fBcargo fix\fR cannot update code for inactive features or \fBcfg\fR expressions.
+Also, in some rare cases the compiler is unable to automatically migrate all
+code to the new edition, and this may require manual changes after building
+with the new edition.
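+.sp
+A minimal sketch of that procedure (the edition value you set in \fBCargo.toml\fR
+depends on your project):
+.sp
+.RS 4
+.nf
+cargo fix \-\-edition
+# edit Cargo.toml and set the `edition` field to the new edition
+cargo test
+.fi
+.RE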
+.SH "OPTIONS"
+.SS "Fix options"
+.sp
+\fB\-\-broken\-code\fR
+.RS 4
+Fix code even if it already has compiler errors. This is useful if \fBcargo fix\fR
+fails to apply the changes. It will apply the changes and leave the broken
+code in the working directory for you to inspect and manually fix.
+.RE
+.sp
+\fB\-\-edition\fR
+.RS 4
+Apply changes that will update the code to the next edition. This will not
+update the edition in the \fBCargo.toml\fR manifest, which must be updated
+manually after \fBcargo fix \-\-edition\fR has finished.
+.RE
+.sp
+\fB\-\-edition\-idioms\fR
+.RS 4
+Apply suggestions that will update code to the preferred style for the current
+edition.
+.RE
+.sp
+\fB\-\-allow\-no\-vcs\fR
+.RS 4
+Fix code even if a VCS was not detected.
+.RE
+.sp
+\fB\-\-allow\-dirty\fR
+.RS 4
+Fix code even if the working directory has changes.
+.RE
+.sp
+\fB\-\-allow\-staged\fR
+.RS 4
+Fix code even if the working directory has staged changes.
+.RE
+.SS "Package Selection"
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then
+the workspace\[cq]s default members are selected, otherwise only the package defined
+by the manifest will be selected.
+.sp
+The default members of a workspace can be set explicitly with the
+\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself.
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Fix only the specified packages. See \fBcargo\-pkgid\fR(1) for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.
+.RE
+.sp
+\fB\-\-workspace\fR
+.RS 4
+Fix all members in the workspace.
+.RE
+.sp
+\fB\-\-all\fR
+.RS 4
+Deprecated alias for \fB\-\-workspace\fR\&.
+.RE
+.sp
+\fB\-\-exclude\fR \fISPEC\fR\[u2026]
+.RS 4
+Exclude the specified packages. Must be used in conjunction with the
+\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports
+common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.
+.RE
+.SS "Target Selection"
+When no target selection options are given, \fBcargo fix\fR will fix all targets
+(\fB\-\-all\-targets\fR implied). Binaries are skipped if they have
+\fBrequired\-features\fR that are missing.
+.sp
+Passing target selection flags will fix only the specified
+targets.
+.sp
+Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also
+support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
+.sp
+\fB\-\-lib\fR
+.RS 4
+Fix the package\[cq]s library.
+.RE
+.sp
+\fB\-\-bin\fR \fIname\fR\[u2026]
+.RS 4
+Fix the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-bins\fR
+.RS 4
+Fix all binary targets.
+.RE
+.sp
+\fB\-\-example\fR \fIname\fR\[u2026]
+.RS 4
+Fix the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-examples\fR
+.RS 4
+Fix all example targets.
+.RE
+.sp
+\fB\-\-test\fR \fIname\fR\[u2026]
+.RS 4
+Fix the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-tests\fR
+.RS 4
+Fix all targets in test mode that have the \fBtest = true\fR manifest
+flag set. By default this includes the library and binaries built as
+unit tests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unit test, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the \fBtest\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-bench\fR \fIname\fR\[u2026]
+.RS 4
+Fix the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-benches\fR
+.RS 4
+Fix all targets in benchmark mode that have the \fBbench = true\fR
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the \fBbench\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-all\-targets\fR
+.RS 4
+Fix all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Fix for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
+.sp
+\fB\-r\fR,
+\fB\-\-release\fR
+.RS 4
+Fix optimized artifacts with the \fBrelease\fR profile.
+See also the \fB\-\-profile\fR option for choosing a specific profile by name.
+.RE
+.sp
+\fB\-\-profile\fR \fIname\fR
+.RS 4
+Fix with the given profile.
+.sp
+As a special case, specifying the \fBtest\fR profile will also enable checking in
+test mode, which checks tests and enables the \fBtest\fR cfg option.
+See \fIrustc tests\fR <https://doc.rust\-lang.org/rustc/tests/index.html> for more
+detail.
+.sp
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/profiles.html> for more details on profiles.
+.RE
+.sp
+\fB\-\-ignore\-rust\-version\fR
+.RS 4
+Fix the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project\[cq]s \fBrust\-version\fR field.
+.RE
+.sp
+\fB\-\-timings=\fR\fIfmts\fR
+.RS 4
+Output information about how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma\-separated list of output
+formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&.
+Specifying an output format (rather than the default) is unstable and requires
+\fB\-Zunstable\-options\fR\&. Valid output formats:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the
+\fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine\-readable timing data.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON
+timing information.
+.RE
+.RE
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-message\-format\fR \fIfmt\fR
+.RS 4
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma\-separated values. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with
+\fBshort\fR and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR
+and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See
+\fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/external\-tools.html#json\-messages>
+for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains
+the \[lq]short\[rq] rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages
+contains embedded ANSI color codes for respecting rustc\[cq]s default color
+scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo not to include rustc diagnostics
+in the JSON messages it prints; instead, Cargo itself renders the JSON
+diagnostics coming from rustc. Cargo\[cq]s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+The value should not be 0.
+.RE
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Apply compiler suggestions to the local package:
+.sp
+.RS 4
+.nf
+cargo fix
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Update a package to prepare it for the next edition:
+.sp
+.RS 4
+.nf
+cargo fix \-\-edition
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Apply suggested idioms for the current edition:
+.sp
+.RS 4
+.nf
+cargo fix \-\-edition\-idioms
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-check\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-generate-lockfile.1 b/src/tools/cargo/src/etc/man/cargo-generate-lockfile.1
new file mode 100644
index 000000000..075f6324c
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-generate-lockfile.1
@@ -0,0 +1,158 @@
+'\" t
+.TH "CARGO\-GENERATE\-LOCKFILE" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-generate\-lockfile \[em] Generate the lockfile for a package
+.SH "SYNOPSIS"
+\fBcargo generate\-lockfile\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+This command will create the \fBCargo.lock\fR lockfile for the current package or
+workspace. If the lockfile already exists, it will be rebuilt with the latest
+available version of every package.
+.sp
+See also \fBcargo\-update\fR(1) which is also capable of creating a \fBCargo.lock\fR
+lockfile and has more options for controlling update behavior.
+.SH "OPTIONS"
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Create or update the lockfile for the current package or workspace:
+.sp
+.RS 4
+.nf
+cargo generate\-lockfile
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-update\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-help.1 b/src/tools/cargo/src/etc/man/cargo-help.1
new file mode 100644
index 000000000..655328550
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-help.1
@@ -0,0 +1,34 @@
+'\" t
+.TH "CARGO\-HELP" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-help \[em] Get help for a Cargo command
+.SH "SYNOPSIS"
+\fBcargo help\fR [\fIsubcommand\fR]
+.SH "DESCRIPTION"
+Prints a help message for the given command.
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Get help for a command:
+.sp
+.RS 4
+.nf
+cargo help build
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Help is also available with the \fB\-\-help\fR flag:
+.sp
+.RS 4
+.nf
+cargo build \-\-help
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-init.1 b/src/tools/cargo/src/etc/man/cargo-init.1
new file mode 100644
index 000000000..56d1aca9f
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-init.1
@@ -0,0 +1,171 @@
+'\" t
+.TH "CARGO\-INIT" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-init \[em] Create a new Cargo package in an existing directory
+.SH "SYNOPSIS"
+\fBcargo init\fR [\fIoptions\fR] [\fIpath\fR]
+.SH "DESCRIPTION"
+This command will create a new Cargo manifest in the current directory. Give a
+path as an argument to create in the given directory.
+.sp
+If there are typically\-named Rust source files already in the directory, those
+will be used. If not, then a sample \fBsrc/main.rs\fR file will be created, or
+\fBsrc/lib.rs\fR if \fB\-\-lib\fR is passed.
+.sp
+If the directory is not already in a VCS repository, then a new repository
+is created (see \fB\-\-vcs\fR below).
+.sp
+See \fBcargo\-new\fR(1) for a similar command which will create a new package in
+a new directory.
+.SH "OPTIONS"
+.SS "Init Options"
+.sp
+\fB\-\-bin\fR
+.RS 4
+Create a package with a binary target (\fBsrc/main.rs\fR).
+This is the default behavior.
+.RE
+.sp
+\fB\-\-lib\fR
+.RS 4
+Create a package with a library target (\fBsrc/lib.rs\fR).
+.RE
+.sp
+\fB\-\-edition\fR \fIedition\fR
+.RS 4
+Specify the Rust edition to use. Default is 2021.
+Possible values: 2015, 2018, 2021
+.RE
+.sp
+\fB\-\-name\fR \fIname\fR
+.RS 4
+Set the package name. Defaults to the directory name.
+.RE
+.sp
+\fB\-\-vcs\fR \fIvcs\fR
+.RS 4
+Initialize a new VCS repository for the given version control system (git,
+hg, pijul, or fossil) or do not initialize any version control at all
+(none). If not specified, defaults to \fBgit\fR or the configuration value
+\fBcargo\-new.vcs\fR, or \fBnone\fR if already inside a VCS repository.
+.RE
+.sp
+\fB\-\-registry\fR \fIregistry\fR
+.RS 4
+This sets the \fBpublish\fR field in \fBCargo.toml\fR to the given registry name
+which will restrict publishing only to that registry.
+.sp
+Registry names are defined in \fICargo config files\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+If not specified, the default registry defined by the \fBregistry.default\fR
+config key is used. If the default registry is not set and \fB\-\-registry\fR is not
+used, the \fBpublish\fR field will not be set which means that publishing will not
+be restricted.
+.RE
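+.sp
+Several of the options above can be combined. As an illustrative sketch (the package name and
+path here are hypothetical):
+.sp
+.RS 4
+.nf
+cargo init \-\-lib \-\-name my_library \-\-vcs none path/to/existing/dir
+.fi
+.RE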
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Create a binary Cargo package in the current directory:
+.sp
+.RS 4
+.nf
+cargo init
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-new\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-install.1 b/src/tools/cargo/src/etc/man/cargo-install.1
new file mode 100644
index 000000000..d0e5c5181
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-install.1
@@ -0,0 +1,515 @@
+'\" t
+.TH "CARGO\-INSTALL" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-install \[em] Build and install a Rust binary
+.SH "SYNOPSIS"
+\fBcargo install\fR [\fIoptions\fR] \fIcrate\fR[@\fIversion\fR]\[u2026]
+.br
+\fBcargo install\fR [\fIoptions\fR] \fB\-\-path\fR \fIpath\fR
+.br
+\fBcargo install\fR [\fIoptions\fR] \fB\-\-git\fR \fIurl\fR [\fIcrate\fR\[u2026]]
+.br
+\fBcargo install\fR [\fIoptions\fR] \fB\-\-list\fR
+.SH "DESCRIPTION"
+This command manages Cargo\[cq]s local set of installed binary crates. Only
+packages which have executable \fB[[bin]]\fR or \fB[[example]]\fR targets can be
+installed, and all executables are installed into the installation root\[cq]s
+\fBbin\fR folder.
+.sp
+The installation root is determined, in order of precedence:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB\-\-root\fR option
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBCARGO_INSTALL_ROOT\fR environment variable
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBinstall.root\fR Cargo \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBCARGO_HOME\fR environment variable
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB$HOME/.cargo\fR
+.RE
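+.sp
+For example, a hypothetical invocation that overrides the installation root on the command line
+(the executable would then be placed in \fB~/.local/bin\fR):
+.sp
+.RS 4
+.nf
+cargo install \-\-root ~/.local ripgrep
+.fi
+.RE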
+.sp
+There are multiple sources from which a crate can be installed. The default
+location is crates.io but the \fB\-\-git\fR, \fB\-\-path\fR, and \fB\-\-registry\fR flags can
+change this source. If the source contains more than one package (such as
+crates.io or a git repository with multiple crates) the \fIcrate\fR argument is
+required to indicate which crate should be installed.
+.sp
+Crates from crates.io can optionally specify the version they wish to install
+via the \fB\-\-version\fR flag, and similarly packages from git repositories can
+optionally specify the branch, tag, or revision that should be installed. If a
+crate has multiple binaries, the \fB\-\-bin\fR argument can selectively install only
+one of them, and if you\[cq]d rather install examples the \fB\-\-example\fR argument can
+be used as well.
+.sp
+If the package is already installed, Cargo will reinstall it if the installed
+version does not appear to be up\-to\-date. If any of the following values
+change, then Cargo will reinstall the package:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'The package version and source.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'The set of binary names installed.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'The chosen features.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'The profile (\fB\-\-profile\fR).
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'The target (\fB\-\-target\fR).
+.RE
+.sp
+Installing with \fB\-\-path\fR will always build and install, unless there are
+conflicting binaries from another package. The \fB\-\-force\fR flag may be used to
+force Cargo to always reinstall the package.
+.sp
+If the source is crates.io or \fB\-\-git\fR then by default the crate will be built
+in a temporary target directory. To avoid this, the target directory can be
+specified by setting the \fBCARGO_TARGET_DIR\fR environment variable to a relative
+path. In particular, this can be useful for caching build artifacts on
+continuous integration systems.
+.SS "Dealing with the Lockfile"
+By default, the \fBCargo.lock\fR file that is included with the package will be
+ignored. This means that Cargo will recompute which versions of dependencies
+to use, possibly using newer versions that have been released since the
+package was published. The \fB\-\-locked\fR flag can be used to force Cargo to use
+the packaged \fBCargo.lock\fR file if it is available. This may be useful for
+ensuring reproducible builds, to use the exact same set of dependencies that
+were available when the package was published. It may also be useful if a
+newer version of a dependency is published that no longer builds on your
+system, or has other problems. The downside to using \fB\-\-locked\fR is that you
+will not receive any fixes or updates to any dependency. Note that Cargo did
+not start publishing \fBCargo.lock\fR files until version 1.37, which means
+packages published with prior versions will not have a \fBCargo.lock\fR file
+available.
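+.sp
+For example, a hypothetical invocation that reuses the packaged lockfile for the \fBripgrep\fR
+crate shown in the EXAMPLES section:
+.sp
+.RS 4
+.nf
+cargo install \-\-locked ripgrep
+.fi
+.RE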
+.SS "Configuration Discovery"
+This command operates at the system or user level, not the project level.
+This means that the local \fIconfiguration discovery\fR <https://doc.rust\-lang.org/cargo/reference/config.html#hierarchical\-structure> is ignored.
+Instead, the configuration discovery begins at \fB$CARGO_HOME/config.toml\fR\&.
+If the package is installed with \fB\-\-path $PATH\fR, the local configuration
+will be used, beginning discovery at \fB$PATH/.cargo/config.toml\fR\&.
+.SH "OPTIONS"
+.SS "Install Options"
+.sp
+\fB\-\-vers\fR \fIversion\fR,
+\fB\-\-version\fR \fIversion\fR
+.RS 4
+Specify a version to install. This may be a \fIversion
+requirement\fR <https://doc.rust\-lang.org/cargo/reference/specifying\-dependencies.md>, like \fB~1.2\fR, to have Cargo
+select the newest version from the given requirement. If the version does not
+have a requirement operator (such as \fB^\fR or \fB~\fR), then it must be in the form
+\fIMAJOR.MINOR.PATCH\fR, and will install exactly that version; it is \fInot\fR
+treated as a caret requirement like Cargo dependencies are.
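+.sp
+For instance, these hypothetical invocations select the newest \fB13.0.x\fR release and exactly
+version \fB13.0.0\fR of \fBripgrep\fR, respectively:
+.sp
+.RS 4
+.nf
+cargo install ripgrep \-\-version '~13.0'
+cargo install ripgrep \-\-version 13.0.0
+.fi
+.RE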
+.RE
+.sp
+\fB\-\-git\fR \fIurl\fR
+.RS 4
+Git URL to install the specified crate from.
+.RE
+.sp
+\fB\-\-branch\fR \fIbranch\fR
+.RS 4
+Branch to use when installing from git.
+.RE
+.sp
+\fB\-\-tag\fR \fItag\fR
+.RS 4
+Tag to use when installing from git.
+.RE
+.sp
+\fB\-\-rev\fR \fIsha\fR
+.RS 4
+Specific commit to use when installing from git.
+.RE
+.sp
+\fB\-\-path\fR \fIpath\fR
+.RS 4
+Filesystem path to local crate to install.
+.RE
+.sp
+\fB\-\-list\fR
+.RS 4
+List all installed packages and their versions.
+.RE
+.sp
+\fB\-f\fR,
+\fB\-\-force\fR
+.RS 4
+Force overwriting existing crates or binaries. This can be used if a package
+has installed a binary with the same name as another package. This is also
+useful if something has changed on the system that you want to rebuild with,
+such as a newer version of \fBrustc\fR\&.
+.RE
+.sp
+\fB\-\-no\-track\fR
+.RS 4
+By default, Cargo keeps track of the installed packages with a metadata file
+stored in the installation root directory. This flag tells Cargo not to use or
+create that file. With this flag, Cargo will refuse to overwrite any existing
+files unless the \fB\-\-force\fR flag is used. This also disables Cargo\[cq]s ability to
+protect against multiple concurrent invocations of Cargo installing at the
+same time.
+.RE
+.sp
+\fB\-\-bin\fR \fIname\fR\[u2026]
+.RS 4
+Install only the specified binary.
+.RE
+.sp
+\fB\-\-bins\fR
+.RS 4
+Install all binaries.
+.RE
+.sp
+\fB\-\-example\fR \fIname\fR\[u2026]
+.RS 4
+Install only the specified example.
+.RE
+.sp
+\fB\-\-examples\fR
+.RS 4
+Install all examples.
+.RE
+.sp
+\fB\-\-root\fR \fIdir\fR
+.RS 4
+Directory to install packages into.
+.RE
+.sp
+\fB\-\-registry\fR \fIregistry\fR
+.RS 4
+Name of the registry to use. Registry names are defined in \fICargo config
+files\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. If not specified, the default registry is used,
+which is defined by the \fBregistry.default\fR config key which defaults to
+\fBcrates\-io\fR\&.
+.RE
+.sp
+\fB\-\-index\fR \fIindex\fR
+.RS 4
+The URL of the registry index to use.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Install for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to a new temporary folder located in the
+temporary directory of the platform.
+.sp
+When using \fB\-\-path\fR, by default it will use the \fBtarget\fR directory in the workspace
+of the local crate unless \fB\-\-target\-dir\fR
+is specified.
+.RE
+.sp
+\fB\-\-debug\fR
+.RS 4
+Build with the \fBdev\fR profile instead of the \fBrelease\fR profile.
+See also the \fB\-\-profile\fR option for choosing a specific profile by name.
+.RE
+.sp
+\fB\-\-profile\fR \fIname\fR
+.RS 4
+Install with the given profile.
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/profiles.html> for more details on profiles.
+.RE
+.sp
+\fB\-\-ignore\-rust\-version\fR
+.RS 4
+Install the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project\[cq]s \fBrust\-version\fR field.
+.RE
+.sp
+\fB\-\-timings=\fR\fIfmts\fR
+.RS 4
+Output information about how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma\-separated list of output
+formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&.
+Specifying an output format (rather than the default) is unstable and requires
+\fB\-Zunstable\-options\fR\&. Valid output formats:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the
+\fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine\-readable timing data.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON
+information about timing information.
+.RE
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+The value must not be 0.
+.RE
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-message\-format\fR \fIfmt\fR
+.RS 4
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma\-separated values. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with
+\fBshort\fR and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR
+and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See
+\fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/external\-tools.html#json\-messages>
+for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains
+the \[lq]short\[rq] rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages
+contains embedded ANSI color codes for respecting rustc\[cq]s default color
+scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics
+in JSON messages printed, but instead Cargo itself should render the
+JSON diagnostics coming from rustc. Cargo\[cq]s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Install or upgrade a package from crates.io:
+.sp
+.RS 4
+.nf
+cargo install ripgrep
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Install or reinstall the package in the current directory:
+.sp
+.RS 4
+.nf
+cargo install \-\-path .
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'View the list of installed packages:
+.sp
+.RS 4
+.nf
+cargo install \-\-list
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-uninstall\fR(1), \fBcargo\-search\fR(1), \fBcargo\-publish\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-locate-project.1 b/src/tools/cargo/src/etc/man/cargo-locate-project.1
new file mode 100644
index 000000000..2fbbe183b
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-locate-project.1
@@ -0,0 +1,150 @@
+'\" t
+.TH "CARGO\-LOCATE\-PROJECT" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-locate\-project \[em] Print a JSON representation of a Cargo.toml file\[cq]s location
+.SH "SYNOPSIS"
+\fBcargo locate\-project\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+This command will print a JSON object to stdout with the full path to the manifest. The
+manifest is found by searching upward for a file named \fBCargo.toml\fR starting from the current
+working directory.
+.sp
+If the project happens to be a part of a workspace, the manifest of the project, rather than
+the workspace root, is output. This can be overridden by the \fB\-\-workspace\fR flag. The
+workspace root is found by traversing further upward or by using the \fBpackage.workspace\fR field after
+locating the manifest of a workspace member.
+.SH "OPTIONS"
+.sp
+\fB\-\-workspace\fR
+.RS 4
+Locate the \fBCargo.toml\fR at the root of the workspace, as opposed to the current
+workspace member.
+.RE
+.SS "Display Options"
+.sp
+\fB\-\-message\-format\fR \fIfmt\fR
+.RS 4
+The representation in which to print the project location. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR (default): JSON object with the path under the key \[lq]root\[rq]\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBplain\fR: Just the path.
+.RE
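+.sp
+For example, a hypothetical invocation that prints just the path to the workspace root manifest:
+.sp
+.RS 4
+.nf
+cargo locate\-project \-\-workspace \-\-message\-format plain
+.fi
+.RE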
+.RE
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Display the path to the manifest based on the current directory:
+.sp
+.RS 4
+.nf
+cargo locate\-project
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-metadata\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-login.1 b/src/tools/cargo/src/etc/man/cargo-login.1
new file mode 100644
index 000000000..1ae1cc626
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-login.1
@@ -0,0 +1,135 @@
+'\" t
+.TH "CARGO\-LOGIN" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-login \[em] Save an API token from the registry locally
+.SH "SYNOPSIS"
+\fBcargo login\fR [\fIoptions\fR] [\fItoken\fR]
+.SH "DESCRIPTION"
+This command will save the API token to disk so that commands that require
+authentication, such as \fBcargo\-publish\fR(1), will be automatically
+authenticated. The token is saved in \fB$CARGO_HOME/credentials.toml\fR\&. \fBCARGO_HOME\fR
+defaults to \fB\&.cargo\fR in your home directory.
+.sp
+If the \fItoken\fR argument is not specified, it will be read from stdin.
+.sp
+The API token for crates.io may be retrieved from <https://crates.io/me>\&.
+.sp
+Take care to keep the token secret; it should not be shared with anyone else.
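+.sp
+Because the token is read from stdin when it is not passed as an argument, one hypothetical way
+to keep it out of shell history is to redirect it from a local file (the filename is only an
+assumption for this sketch):
+.sp
+.RS 4
+.nf
+cargo login < my\-token.txt
+.fi
+.RE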
+.SH "OPTIONS"
+.SS "Login Options"
+.sp
+\fB\-\-registry\fR \fIregistry\fR
+.RS 4
+Name of the registry to use. Registry names are defined in \fICargo config
+files\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. If not specified, the default registry is used,
+which is defined by the \fBregistry.default\fR config key which defaults to
+\fBcrates\-io\fR\&.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Save the API token to disk:
+.sp
+.RS 4
+.nf
+cargo login
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-logout\fR(1), \fBcargo\-publish\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-logout.1 b/src/tools/cargo/src/etc/man/cargo-logout.1
new file mode 100644
index 000000000..7333cc62c
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-logout.1
@@ -0,0 +1,147 @@
+'\" t
+.TH "CARGO\-LOGOUT" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-logout \[em] Remove an API token from the registry locally
+.SH "SYNOPSIS"
+\fBcargo logout\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+This command will remove the API token from the local credential storage.
+Credentials are stored in \fB$CARGO_HOME/credentials.toml\fR where \fB$CARGO_HOME\fR
+defaults to \fB\&.cargo\fR in your home directory.
+.sp
+If \fB\-\-registry\fR is not specified, then the credentials for the default
+registry will be removed (configured by
+\fI\f(BIregistry.default\fI\fR <https://doc.rust\-lang.org/cargo/reference/config.html#registrydefault>, which defaults
+to <https://crates.io/>).
+.sp
+This will not revoke the token on the server. If you need to revoke the token,
+visit the registry website and follow its instructions (see
+<https://crates.io/me> to revoke the token for <https://crates.io/>).
+.SH "OPTIONS"
+.SS "Logout Options"
+.sp
+\fB\-\-registry\fR \fIregistry\fR
+.RS 4
+Name of the registry to use. Registry names are defined in \fICargo config
+files\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. If not specified, the default registry is used,
+which is defined by the \fBregistry.default\fR config key which defaults to
+\fBcrates\-io\fR\&.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Remove the default registry token:
+.sp
+.RS 4
+.nf
+cargo logout
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Remove the token for a specific registry:
+.sp
+.RS 4
+.nf
+cargo logout \-\-registry my\-registry
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-login\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-metadata.1 b/src/tools/cargo/src/etc/man/cargo-metadata.1
new file mode 100644
index 000000000..8549290cd
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-metadata.1
@@ -0,0 +1,483 @@
+'\" t
+.TH "CARGO\-METADATA" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-metadata \[em] Machine\-readable metadata about the current package
+.SH "SYNOPSIS"
+\fBcargo metadata\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+Output JSON to stdout containing information about the workspace members and
+resolved dependencies of the current package.
+.sp
+It is recommended to include the \fB\-\-format\-version\fR flag to future\-proof
+your code and ensure the output is in the format you are expecting.
+.sp
+See the \fIcargo_metadata crate\fR <https://crates.io/crates/cargo_metadata>
+for a Rust API for reading the metadata.
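+.sp
+As a sketch of how the output might be consumed from a shell (assuming the external \fBjq\fR
+tool is available), the workspace root can be extracted like this:
+.sp
+.RS 4
+.nf
+cargo metadata \-\-format\-version=1 \-\-no\-deps | jq \-r .workspace_root
+.fi
+.RE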
+.SH "OUTPUT FORMAT"
+The output has the following format:
+.sp
+.RS 4
+.nf
+{
+ /* Array of all packages in the workspace.
+ It also includes all feature\-enabled dependencies unless \-\-no\-deps is used.
+ */
+ "packages": [
+ {
+ /* The name of the package. */
+ "name": "my\-package",
+ /* The version of the package. */
+ "version": "0.1.0",
+ /* The Package ID, a unique identifier for referring to the package. */
+ "id": "my\-package 0.1.0 (path+file:///path/to/my\-package)",
+ /* The license value from the manifest, or null. */
+ "license": "MIT/Apache\-2.0",
+ /* The license\-file value from the manifest, or null. */
+ "license_file": "LICENSE",
+ /* The description value from the manifest, or null. */
+ "description": "Package description.",
+ /* The source ID of the package. This represents where
+ a package is retrieved from.
+ This is null for path dependencies and workspace members.
+ For other dependencies, it is a string with the format:
+ \- "registry+URL" for registry\-based dependencies.
+ Example: "registry+https://github.com/rust\-lang/crates.io\-index"
+ \- "git+URL" for git\-based dependencies.
+ Example: "git+https://github.com/rust\-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c"
+ */
+ "source": null,
+ /* Array of dependencies declared in the package's manifest. */
+ "dependencies": [
+ {
+ /* The name of the dependency. */
+ "name": "bitflags",
+ /* The source ID of the dependency. May be null, see
+ description for the package source.
+ */
+ "source": "registry+https://github.com/rust\-lang/crates.io\-index",
+ /* The version requirement for the dependency.
+ Dependencies without a version requirement have a value of "*".
+ */
+ "req": "^1.0",
+ /* The dependency kind.
+ "dev", "build", or null for a normal dependency.
+ */
+ "kind": null,
+ /* If the dependency is renamed, this is the new name for
+ the dependency as a string. null if it is not renamed.
+ */
+ "rename": null,
+ /* Boolean of whether or not this is an optional dependency. */
+ "optional": false,
+ /* Boolean of whether or not default features are enabled. */
+ "uses_default_features": true,
+ /* Array of features enabled. */
+ "features": [],
+ /* The target platform for the dependency.
+ null if not a target dependency.
+ */
+ "target": "cfg(windows)",
+ /* The file system path for a local path dependency.
+ not present if not a path dependency.
+ */
+ "path": "/path/to/dep",
+ /* A string of the URL of the registry this dependency is from.
+ If not specified or null, the dependency is from the default
+ registry (crates.io).
+ */
+ "registry": null
+ }
+ ],
+ /* Array of Cargo targets. */
+ "targets": [
+ {
+ /* Array of target kinds.
+ \- lib targets list the `crate\-type` values from the
+ manifest such as "lib", "rlib", "dylib",
+ "proc\-macro", etc. (default ["lib"])
+ \- binary is ["bin"]
+ \- example is ["example"]
+ \- integration test is ["test"]
+ \- benchmark is ["bench"]
+ \- build script is ["custom\-build"]
+ */
+ "kind": [
+ "bin"
+ ],
+ /* Array of crate types.
+ \- lib and example libraries list the `crate\-type` values
+ from the manifest such as "lib", "rlib", "dylib",
+ "proc\-macro", etc. (default ["lib"])
+ \- all other target kinds are ["bin"]
+ */
+ "crate_types": [
+ "bin"
+ ],
+ /* The name of the target. */
+ "name": "my\-package",
+ /* Absolute path to the root source file of the target. */
+ "src_path": "/path/to/my\-package/src/main.rs",
+ /* The Rust edition of the target.
+ Defaults to the package edition.
+ */
+ "edition": "2018",
+ /* Array of required features.
+ This property is not included if no required features are set.
+ */
+ "required\-features": ["feat1"],
+ /* Whether the target should be documented by `cargo doc`. */
+ "doc": true,
+ /* Whether or not this target has doc tests enabled, and
+ the target is compatible with doc testing.
+ */
+ "doctest": false,
+ /* Whether or not this target should be built and run with `\-\-test`
+ */
+ "test": true
+ }
+ ],
+ /* Set of features defined for the package.
+ Each feature maps to an array of features or dependencies it
+ enables.
+ */
+ "features": {
+ "default": [
+ "feat1"
+ ],
+ "feat1": [],
+ "feat2": []
+ },
+ /* Absolute path to this package's manifest. */
+ "manifest_path": "/path/to/my\-package/Cargo.toml",
+ /* Package metadata.
+ This is null if no metadata is specified.
+ */
+ "metadata": {
+ "docs": {
+ "rs": {
+ "all\-features": true
+ }
+ }
+ },
+ /* List of registries to which this package may be published.
+ Publishing is unrestricted if null, and forbidden if an empty array. */
+ "publish": [
+ "crates\-io"
+ ],
+ /* Array of authors from the manifest.
+ Empty array if no authors specified.
+ */
+ "authors": [
+ "Jane Doe <user@example.com>"
+ ],
+ /* Array of categories from the manifest. */
+ "categories": [
+ "command\-line\-utilities"
+ ],
+ /* Optional string that is the default binary picked by cargo run. */
+ "default_run": null,
+ /* Optional string that is the minimum supported rust version */
+ "rust_version": "1.56",
+ /* Array of keywords from the manifest. */
+ "keywords": [
+ "cli"
+ ],
+ /* The readme value from the manifest or null if not specified. */
+ "readme": "README.md",
+ /* The repository value from the manifest or null if not specified. */
+ "repository": "https://github.com/rust\-lang/cargo",
+ /* The homepage value from the manifest or null if not specified. */
+ "homepage": "https://rust\-lang.org",
+ /* The documentation value from the manifest or null if not specified. */
+ "documentation": "https://doc.rust\-lang.org/stable/std",
+ /* The default edition of the package.
+ Note that individual targets may have different editions.
+ */
+ "edition": "2018",
+ /* Optional string that is the name of a native library the package
+ is linking to.
+ */
+ "links": null,
+ }
+ ],
+ /* Array of members of the workspace.
+ Each entry is the Package ID for the package.
+ */
+ "workspace_members": [
+ "my\-package 0.1.0 (path+file:///path/to/my\-package)",
+ ],
+ // The resolved dependency graph for the entire workspace. The enabled
+ // features are based on the enabled features for the "current" package.
+ // Inactivated optional dependencies are not listed.
+ //
+ // This is null if \-\-no\-deps is specified.
+ //
+ // By default, this includes all dependencies for all target platforms.
+ // The `\-\-filter\-platform` flag may be used to narrow to a specific
+ // target triple.
+ "resolve": {
+ /* Array of nodes within the dependency graph.
+ Each node is a package.
+ */
+ "nodes": [
+ {
+ /* The Package ID of this node. */
+ "id": "my\-package 0.1.0 (path+file:///path/to/my\-package)",
+ /* The dependencies of this package, an array of Package IDs. */
+ "dependencies": [
+ "bitflags 1.0.4 (registry+https://github.com/rust\-lang/crates.io\-index)"
+ ],
+ /* The dependencies of this package. This is an alternative to
+ "dependencies" which contains additional information. In
+ particular, this handles renamed dependencies.
+ */
+ "deps": [
+ {
+ /* The name of the dependency's library target.
+ If this is a renamed dependency, this is the new
+ name.
+ */
+ "name": "bitflags",
+ /* The Package ID of the dependency. */
+ "pkg": "bitflags 1.0.4 (registry+https://github.com/rust\-lang/crates.io\-index)",
+ /* Array of dependency kinds. Added in Cargo 1.40. */
+ "dep_kinds": [
+ {
+ /* The dependency kind.
+ "dev", "build", or null for a normal dependency.
+ */
+ "kind": null,
+ /* The target platform for the dependency.
+ null if not a target dependency.
+ */
+ "target": "cfg(windows)"
+ }
+ ]
+ }
+ ],
+ /* Array of features enabled on this package. */
+ "features": [
+ "default"
+ ]
+ }
+ ],
+ /* The root package of the workspace.
+ This is null if this is a virtual workspace. Otherwise it is
+ the Package ID of the root package.
+ */
+ "root": "my\-package 0.1.0 (path+file:///path/to/my\-package)"
+ },
+ /* The absolute path to the build directory where Cargo places its output. */
+ "target_directory": "/path/to/my\-package/target",
+ /* The version of the schema for this metadata structure.
+ This will be changed if incompatible changes are ever made.
+ */
+ "version": 1,
+ /* The absolute path to the root of the workspace. */
+ "workspace_root": "/path/to/my\-package"
+ /* Workspace metadata.
+ This is null if no metadata is specified. */
+ "metadata": {
+ "docs": {
+ "rs": {
+ "all\-features": true
+ }
+ }
+ }
+}
+.fi
+.RE
+.SH "OPTIONS"
+.SS "Output Options"
+.sp
+\fB\-\-no\-deps\fR
+.RS 4
+Output information only about the workspace members and don\[cq]t fetch
+dependencies.
+.RE
+.sp
+\fB\-\-format\-version\fR \fIversion\fR
+.RS 4
+Specify the version of the output format to use. Currently \fB1\fR is the only
+possible value.
+.RE
+.sp
+\fB\-\-filter\-platform\fR \fItriple\fR
+.RS 4
+This filters the \fBresolve\fR output to only include dependencies for the
+given \fItarget triple\fR <https://doc.rust\-lang.org/cargo/appendix/glossary.html#target>\&.
+Without this flag, the resolve includes all targets.
+.sp
+Note that the dependencies listed in the \[lq]packages\[rq] array still include all
+dependencies. Each package definition is intended to be an unaltered
+reproduction of the information within \fBCargo.toml\fR\&.
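+.sp
+For example, a hypothetical invocation that narrows the resolve graph to a common Linux target:
+.sp
+.RS 4
+.nf
+cargo metadata \-\-format\-version=1 \-\-filter\-platform x86_64\-unknown\-linux\-gnu
+.fi
+.RE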
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
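+.sp
+For example, a sketch that overrides a single configuration value for one
+invocation (\fBnet.offline\fR is the config key mentioned under \fB\-\-offline\fR
+above):
+.sp
+.RS 4
+.nf
+cargo \-\-config net.offline=true metadata
+.fi
+.RE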
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Output JSON about the current package:
+.sp
+.RS 4
+.nf
+cargo metadata \-\-format\-version=1
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-new.1 b/src/tools/cargo/src/etc/man/cargo-new.1
new file mode 100644
index 000000000..62e0eb157
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-new.1
@@ -0,0 +1,166 @@
+'\" t
+.TH "CARGO\-NEW" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-new \[em] Create a new Cargo package
+.SH "SYNOPSIS"
+\fBcargo new\fR [\fIoptions\fR] \fIpath\fR
+.SH "DESCRIPTION"
+This command will create a new Cargo package in the given directory. This
+includes a simple template with a \fBCargo.toml\fR manifest, sample source file,
+and a VCS ignore file. If the directory is not already in a VCS repository,
+then a new repository is created (see \fB\-\-vcs\fR below).
+.sp
+See \fBcargo\-init\fR(1) for a similar command which will create a new manifest
+in an existing directory.
+.SH "OPTIONS"
+.SS "New Options"
+.sp
+\fB\-\-bin\fR
+.RS 4
+Create a package with a binary target (\fBsrc/main.rs\fR).
+This is the default behavior.
+.RE
+.sp
+\fB\-\-lib\fR
+.RS 4
+Create a package with a library target (\fBsrc/lib.rs\fR).
+.RE
+.sp
+\fB\-\-edition\fR \fIedition\fR
+.RS 4
+Specify the Rust edition to use. Default is 2021.
+Possible values: 2015, 2018, 2021
+.RE
+.sp
+\fB\-\-name\fR \fIname\fR
+.RS 4
+Set the package name. Defaults to the directory name.
+.RE
+.sp
+\fB\-\-vcs\fR \fIvcs\fR
+.RS 4
+Initialize a new VCS repository for the given version control system (git,
+hg, pijul, or fossil) or do not initialize any version control at all
+(none). If not specified, defaults to \fBgit\fR or the configuration value
+\fBcargo\-new.vcs\fR, or \fBnone\fR if already inside a VCS repository.
+.RE
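+.sp
+For example, a minimal sketch that creates a binary package named \fBfoo\fR
+without initializing any version control:
+.sp
+.RS 4
+.nf
+cargo new \-\-vcs none foo
+.fi
+.RE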
+.sp
+\fB\-\-registry\fR \fIregistry\fR
+.RS 4
+This sets the \fBpublish\fR field in \fBCargo.toml\fR to the given registry name
+which will restrict publishing only to that registry.
+.sp
+Registry names are defined in \fICargo config files\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+If not specified, the default registry defined by the \fBregistry.default\fR
+config key is used. If the default registry is not set and \fB\-\-registry\fR is not
+used, the \fBpublish\fR field will not be set, which means that publishing will not
+be restricted.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Create a binary Cargo package in the given directory:
+.sp
+.RS 4
+.nf
+cargo new foo
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-init\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-owner.1 b/src/tools/cargo/src/etc/man/cargo-owner.1
new file mode 100644
index 000000000..82cac16aa
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-owner.1
@@ -0,0 +1,197 @@
+'\" t
+.TH "CARGO\-OWNER" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-owner \[em] Manage the owners of a crate on the registry
+.SH "SYNOPSIS"
+\fBcargo owner\fR [\fIoptions\fR] \fB\-\-add\fR \fIlogin\fR [\fIcrate\fR]
+.br
+\fBcargo owner\fR [\fIoptions\fR] \fB\-\-remove\fR \fIlogin\fR [\fIcrate\fR]
+.br
+\fBcargo owner\fR [\fIoptions\fR] \fB\-\-list\fR [\fIcrate\fR]
+.SH "DESCRIPTION"
+This command will modify the owners for a crate on the registry. Owners of a
+crate can upload new versions and yank old versions. Non\-team owners can also
+modify the set of owners, so take care!
+.sp
+This command requires you to be authenticated with either the \fB\-\-token\fR option
+or using \fBcargo\-login\fR(1).
+.sp
+If the crate name is not specified, it will use the package name from the
+current directory.
+.sp
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/publishing.html#cargo\-owner> for more
+information about owners and publishing.
+.SH "OPTIONS"
+.SS "Owner Options"
+.sp
+\fB\-a\fR,
+\fB\-\-add\fR \fIlogin\fR\[u2026]
+.RS 4
+Invite the given user or team as an owner.
+.RE
+.sp
+\fB\-r\fR,
+\fB\-\-remove\fR \fIlogin\fR\[u2026]
+.RS 4
+Remove the given user or team as an owner.
+.RE
+.sp
+\fB\-l\fR,
+\fB\-\-list\fR
+.RS 4
+List owners of a crate.
+.RE
+.sp
+\fB\-\-token\fR \fItoken\fR
+.RS 4
+API token to use when authenticating. This overrides the token stored in
+the credentials file (which is created by \fBcargo\-login\fR(1)).
+.sp
+\fICargo config\fR <https://doc.rust\-lang.org/cargo/reference/config.html> environment variables can be
+used to override the tokens stored in the credentials file. The token for
+crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fR environment
+variable. Tokens for other registries may be specified with environment
+variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fR where \fBNAME\fR is the name
+of the registry in all capital letters.
+.RE
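+.sp
+As a sketch, assuming a registry named \fBmyregistry\fR is configured in a Cargo
+config file, its token can be supplied through the matching environment variable
+instead of \fB\-\-token\fR (the token value is a placeholder):
+.sp
+.RS 4
+.nf
+CARGO_REGISTRIES_MYREGISTRY_TOKEN=abc123 cargo owner \-\-list \-\-registry myregistry foo
+.fi
+.RE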
+.sp
+\fB\-\-index\fR \fIindex\fR
+.RS 4
+The URL of the registry index to use.
+.RE
+.sp
+\fB\-\-registry\fR \fIregistry\fR
+.RS 4
+Name of the registry to use. Registry names are defined in \fICargo config
+files\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. If not specified, the default registry is used,
+which is defined by the \fBregistry.default\fR config key which defaults to
+\fBcrates\-io\fR\&.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'List owners of a package:
+.sp
+.RS 4
+.nf
+cargo owner \-\-list foo
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Invite an owner to a package:
+.sp
+.RS 4
+.nf
+cargo owner \-\-add username foo
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Remove an owner from a package:
+.sp
+.RS 4
+.nf
+cargo owner \-\-remove username foo
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-login\fR(1), \fBcargo\-publish\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-package.1 b/src/tools/cargo/src/etc/man/cargo-package.1
new file mode 100644
index 000000000..9f4847d7d
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-package.1
@@ -0,0 +1,351 @@
+'\" t
+.TH "CARGO\-PACKAGE" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-package \[em] Assemble the local package into a distributable tarball
+.SH "SYNOPSIS"
+\fBcargo package\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+This command will create a distributable, compressed \fB\&.crate\fR file with the
+source code of the package in the current directory. The resulting file will
+be stored in the \fBtarget/package\fR directory. This performs the following
+steps:
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Load and check the current workspace, performing some basic checks.
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Path dependencies are not allowed unless they have a version key. Cargo
+will ignore the path key for dependencies in published packages.
+\fBdev\-dependencies\fR do not have this restriction.
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Create the compressed \fB\&.crate\fR file.
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'The original \fBCargo.toml\fR file is rewritten and normalized.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB[patch]\fR, \fB[replace]\fR, and \fB[workspace]\fR sections are removed from the
+manifest.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBCargo.lock\fR is automatically included if the package contains an
+executable binary or example target. \fBcargo\-install\fR(1) will use the
+packaged lock file if the \fB\-\-locked\fR flag is used.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'A \fB\&.cargo_vcs_info.json\fR file is included that contains information
+about the current VCS checkout hash if available (not included with
+\fB\-\-allow\-dirty\fR).
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Extract the \fB\&.crate\fR file and build it to verify it can build.
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'This will rebuild your package from scratch to ensure that it can be
+built from a pristine state. The \fB\-\-no\-verify\fR flag can be used to skip
+this step.
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 4.\h'+01'Check that build scripts did not modify any source files.
+.RE
+.sp
+The list of files included can be controlled with the \fBinclude\fR and \fBexclude\fR
+fields in the manifest.
+.sp
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/publishing.html> for more details about
+packaging and publishing.
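+.sp
+For example, to preview which files would be included without actually creating
+the \fB\&.crate\fR file, the \fB\-\-list\fR flag described below can be used:
+.sp
+.RS 4
+.nf
+cargo package \-\-list
+.fi
+.RE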
+.SS ".cargo_vcs_info.json format"
+\fBcargo package\fR will generate a \fB\&.cargo_vcs_info.json\fR file in the following format:
+.sp
+.RS 4
+.nf
+{
+ "git": {
+ "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302"
+ },
+ "path_in_vcs": ""
+}
+.fi
+.RE
+.sp
+\fBpath_in_vcs\fR will be set to a repo\-relative path for packages
+in subdirectories of the version control repository.
+.SH "OPTIONS"
+.SS "Package Options"
+.sp
+\fB\-l\fR,
+\fB\-\-list\fR
+.RS 4
+Print files included in a package without making one.
+.RE
+.sp
+\fB\-\-no\-verify\fR
+.RS 4
+Don\[cq]t verify the contents by building them.
+.RE
+.sp
+\fB\-\-no\-metadata\fR
+.RS 4
+Ignore warnings about a lack of human\-usable metadata (such as the description
+or the license).
+.RE
+.sp
+\fB\-\-allow\-dirty\fR
+.RS 4
+Allow working directories with uncommitted VCS changes to be packaged.
+.RE
+.SS "Package Selection"
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace, then
+the workspace\[cq]s default members are selected; otherwise, only the package defined
+by the manifest will be selected.
+.sp
+The default members of a workspace can be set explicitly with the
+\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself.
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Package only the specified packages. See \fBcargo\-pkgid\fR(1) for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.
+.RE
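+.sp
+For example, a sketch that packages every workspace member whose name starts
+with \fBfoo\fR (the single quotes keep the shell from expanding the glob; the
+pattern is illustrative):
+.sp
+.RS 4
+.nf
+cargo package \-p 'foo*'
+.fi
+.RE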
+.sp
+\fB\-\-workspace\fR
+.RS 4
+Package all members in the workspace.
+.RE
+.sp
+\fB\-\-exclude\fR \fISPEC\fR\[u2026]
+.RS 4
+Exclude the specified packages. Must be used in conjunction with the
+\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports
+common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Package for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+Should not be 0.
+.RE
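+.sp
+For example, on a machine with 8 logical CPUs, the following sketch caps the
+build at 6 parallel jobs (8 plus \-2):
+.sp
+.RS 4
+.nf
+cargo package \-\-jobs=\-2
+.fi
+.RE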
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Create a compressed \fB\&.crate\fR file of the current package:
+.sp
+.RS 4
+.nf
+cargo package
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-publish\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-pkgid.1 b/src/tools/cargo/src/etc/man/cargo-pkgid.1
new file mode 100644
index 000000000..9ec1b8cb7
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-pkgid.1
@@ -0,0 +1,243 @@
+'\" t
+.TH "CARGO\-PKGID" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-pkgid \[em] Print a fully qualified package specification
+.SH "SYNOPSIS"
+\fBcargo pkgid\fR [\fIoptions\fR] [\fIspec\fR]
+.SH "DESCRIPTION"
+Given a \fIspec\fR argument, print out the fully qualified package ID specifier
+for a package or dependency in the current workspace. This command will
+generate an error if \fIspec\fR is ambiguous as to which package it refers to in
+the dependency graph. If no \fIspec\fR is given, then the specifier for the local
+package is printed.
+.sp
+This command requires that a lockfile is available and dependencies have been
+fetched.
+.sp
+A package specifier consists of a name, version, and source URL. You are
+allowed to use partial specifiers to succinctly match a specific package as
+long as it matches only one package. The format of a \fIspec\fR can be one of the
+following:
+
+.TS
+allbox tab(:);
+lt lt.
+T{
+SPEC Structure
+T}:T{
+Example SPEC
+T}
+T{
+\fIname\fR
+T}:T{
+\fBbitflags\fR
+T}
+T{
+\fIname\fR\fB@\fR\fIversion\fR
+T}:T{
+\fBbitflags@1.0.4\fR
+T}
+T{
+\fIurl\fR
+T}:T{
+\fBhttps://github.com/rust\-lang/cargo\fR
+T}
+T{
+\fIurl\fR\fB#\fR\fIversion\fR
+T}:T{
+\fBhttps://github.com/rust\-lang/cargo#0.33.0\fR
+T}
+T{
+\fIurl\fR\fB#\fR\fIname\fR
+T}:T{
+\fBhttps://github.com/rust\-lang/crates.io\-index#bitflags\fR
+T}
+T{
+\fIurl\fR\fB#\fR\fIname\fR\fB@\fR\fIversion\fR
+T}:T{
+\fBhttps://github.com/rust\-lang/cargo#crates\-io@0.21.0\fR
+T}
+.TE
+.sp
+.SH "OPTIONS"
+.SS "Package Selection"
+.sp
+\fB\-p\fR \fIspec\fR,
+\fB\-\-package\fR \fIspec\fR
+.RS 4
+Get the package ID for the given package instead of the current package.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Retrieve package specification for \fBfoo\fR package:
+.sp
+.RS 4
+.nf
+cargo pkgid foo
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Retrieve package specification for version 1.0.0 of \fBfoo\fR:
+.sp
+.RS 4
+.nf
+cargo pkgid foo@1.0.0
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Retrieve package specification for \fBfoo\fR from crates.io:
+.sp
+.RS 4
+.nf
+cargo pkgid https://github.com/rust\-lang/crates.io\-index#foo
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 4.\h'+01'Retrieve package specification for \fBfoo\fR from a local package:
+.sp
+.RS 4
+.nf
+cargo pkgid file:///path/to/local/package#foo
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-generate\-lockfile\fR(1), \fBcargo\-metadata\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-publish.1 b/src/tools/cargo/src/etc/man/cargo-publish.1
new file mode 100644
index 000000000..a54a7bcda
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-publish.1
@@ -0,0 +1,301 @@
+'\" t
+.TH "CARGO\-PUBLISH" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-publish \[em] Upload a package to the registry
+.SH "SYNOPSIS"
+\fBcargo publish\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+This command will create a distributable, compressed \fB\&.crate\fR file with the
+source code of the package in the current directory and upload it to a
+registry. The default registry is <https://crates.io>\&. This performs the
+following steps:
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Performs a few checks, including:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Checks the \fBpackage.publish\fR key in the manifest for restrictions on
+which registries you are allowed to publish to.
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Create a \fB\&.crate\fR file by following the steps in \fBcargo\-package\fR(1).
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Upload the crate to the registry. Note that the server will perform
+additional checks on the crate.
+.RE
+.sp
+This command requires you to be authenticated with either the \fB\-\-token\fR option
+or using \fBcargo\-login\fR(1).
+.sp
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/publishing.html> for more details about
+packaging and publishing.
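+.sp
+For example, a common sketch is to run every check without uploading anything,
+using the \fB\-\-dry\-run\fR flag described below:
+.sp
+.RS 4
+.nf
+cargo publish \-\-dry\-run
+.fi
+.RE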
+.SH "OPTIONS"
+.SS "Publish Options"
+.sp
+\fB\-\-dry\-run\fR
+.RS 4
+Perform all checks without uploading.
+.RE
+.sp
+\fB\-\-token\fR \fItoken\fR
+.RS 4
+API token to use when authenticating. This overrides the token stored in
+the credentials file (which is created by \fBcargo\-login\fR(1)).
+.sp
+\fICargo config\fR <https://doc.rust\-lang.org/cargo/reference/config.html> environment variables can be
+used to override the tokens stored in the credentials file. The token for
+crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fR environment
+variable. Tokens for other registries may be specified with environment
+variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fR where \fBNAME\fR is the name
+of the registry in all capital letters.
+.RE
+.sp
+\fB\-\-no\-verify\fR
+.RS 4
+Don\[cq]t verify the contents by building them.
+.RE
+.sp
+\fB\-\-allow\-dirty\fR
+.RS 4
+Allow working directories with uncommitted VCS changes to be packaged.
+.RE
+.sp
+\fB\-\-index\fR \fIindex\fR
+.RS 4
+The URL of the registry index to use.
+.RE
+.sp
+\fB\-\-registry\fR \fIregistry\fR
+.RS 4
+Name of the registry to publish to. Registry names are defined in \fICargo
+config files\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. If not specified, and there is a
+\fI\f(BIpackage.publish\fI\fR <https://doc.rust\-lang.org/cargo/reference/manifest.html#the\-publish\-field> field in
+\fBCargo.toml\fR with a single registry, then it will publish to that registry.
+Otherwise it will use the default registry, which is defined by the
+\fI\f(BIregistry.default\fI\fR <https://doc.rust\-lang.org/cargo/reference/config.html#registrydefault> config key
+which defaults to \fBcrates\-io\fR\&.
+.RE
+.SS "Package Selection"
+By default, the package in the current working directory is selected. The \fB\-p\fR
+flag can be used to choose a different package in a workspace.
+.sp
+\fB\-p\fR \fIspec\fR,
+\fB\-\-package\fR \fIspec\fR
+.RS 4
+The package to publish. See \fBcargo\-pkgid\fR(1) for the SPEC
+format.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Publish for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+Should not be 0.
+.RE
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Publish the current package:
+.sp
+.RS 4
+.nf
+cargo publish
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-package\fR(1), \fBcargo\-login\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-remove.1 b/src/tools/cargo/src/etc/man/cargo-remove.1
new file mode 100644
index 000000000..404981617
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-remove.1
@@ -0,0 +1,204 @@
+'\" t
+.TH "CARGO\-REMOVE" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-remove \[em] Remove dependencies from a Cargo.toml manifest file
+.SH "SYNOPSIS"
+\fBcargo remove\fR [\fIoptions\fR] \fIdependency\fR\[u2026]
+.SH "DESCRIPTION"
+Remove one or more dependencies from a \fBCargo.toml\fR manifest.
+.SH "OPTIONS"
+.SS "Section options"
+.sp
+\fB\-\-dev\fR
+.RS 4
+Remove as a \fIdevelopment dependency\fR <https://doc.rust\-lang.org/cargo/reference/specifying\-dependencies.html#development\-dependencies>\&.
+.RE
+.sp
+\fB\-\-build\fR
+.RS 4
+Remove as a \fIbuild dependency\fR <https://doc.rust\-lang.org/cargo/reference/specifying\-dependencies.html#build\-dependencies>\&.
+.RE
+.sp
+\fB\-\-target\fR \fItarget\fR
+.RS 4
+Remove as a dependency to the \fIgiven target platform\fR <https://doc.rust\-lang.org/cargo/reference/specifying\-dependencies.html#platform\-specific\-dependencies>\&.
+.sp
+To avoid unexpected shell expansions, you may use quotes around each target, e.g., \fB\-\-target 'cfg(unix)'\fR\&.
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-\-dry\-run\fR
+.RS 4
+Don\[cq]t actually write to the manifest.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Package Selection"
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Package to remove from.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Remove \fBregex\fR as a dependency:
+.sp
+.RS 4
+.nf
+cargo remove regex
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Remove \fBtrybuild\fR as a dev\-dependency:
+.sp
+.RS 4
+.nf
+cargo remove \-\-dev trybuild
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Remove \fBnom\fR from the \fBx86_64\-pc\-windows\-gnu\fR dependencies table:
+.sp
+.RS 4
+.nf
+cargo remove \-\-target x86_64\-pc\-windows\-gnu nom
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-add\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-report.1 b/src/tools/cargo/src/etc/man/cargo-report.1
new file mode 100644
index 000000000..24b630569
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-report.1
@@ -0,0 +1,48 @@
+'\" t
+.TH "CARGO\-REPORT" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-report \[em] Generate and display various kinds of reports
+.SH "SYNOPSIS"
+\fBcargo report\fR \fItype\fR [\fIoptions\fR]
+.SS "DESCRIPTION"
+Displays a report of the given \fItype\fR \[em] currently, only \fBfuture\-incompat\fR is supported.
+.SH "OPTIONS"
+.sp
+\fB\-\-id\fR \fIid\fR
+.RS 4
+Show the report with the specified Cargo\-generated id.
+.RE
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Only display a report for the specified package.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Display the latest future\-incompat report:
+.sp
+.RS 4
+.nf
+cargo report future\-incompat
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Display the latest future\-incompat report for a specific package:
+.sp
+.RS 4
+.nf
+cargo report future\-incompat \-\-package my\-dep:0.0.1
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fIFuture incompat report\fR <https://doc.rust\-lang.org/cargo/reference/future\-incompat\-report.html>
+.sp
+\fBcargo\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-run.1 b/src/tools/cargo/src/etc/man/cargo-run.1
new file mode 100644
index 000000000..7a85298cc
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-run.1
@@ -0,0 +1,344 @@
+'\" t
+.TH "CARGO\-RUN" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-run \[em] Run the current package
+.SH "SYNOPSIS"
+\fBcargo run\fR [\fIoptions\fR] [\fB\-\-\fR \fIargs\fR]
+.SH "DESCRIPTION"
+Run a binary or example of the local package.
+.sp
+All the arguments following the two dashes (\fB\-\-\fR) are passed to the binary to
+run. If you\[cq]re passing arguments to both Cargo and the binary, the ones after
+\fB\-\-\fR go to the binary, the ones before go to Cargo.
+.sp
+Unlike \fBcargo\-test\fR(1) and \fBcargo\-bench\fR(1), \fBcargo run\fR sets the
+working directory of the binary executed to the current working directory, same
+as if it was executed in the shell directly.
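+.sp
+For example, in the following sketch \fB\-\-release\fR is consumed by Cargo, while
+the \fB\-\-help\fR after the two dashes is forwarded unchanged to the compiled
+binary:
+.sp
+.RS 4
+.nf
+cargo run \-\-release \-\- \-\-help
+.fi
+.RE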
+.SH "OPTIONS"
+.SS "Package Selection"
+By default, the package in the current working directory is selected. The \fB\-p\fR
+flag can be used to choose a different package in a workspace.
+.sp
+\fB\-p\fR \fIspec\fR,
+\fB\-\-package\fR \fIspec\fR
+.RS 4
+The package to run. See \fBcargo\-pkgid\fR(1) for the SPEC
+format.
+.RE
+.SS "Target Selection"
+When no target selection options are given, \fBcargo run\fR will run the binary
+target. If there are multiple binary targets, you must pass a target flag to
+choose one. Or, the \fBdefault\-run\fR field may be specified in the \fB[package]\fR
+section of \fBCargo.toml\fR to choose the name of the binary to run by default.
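+.sp
+For example, a sketch that runs one of several binary targets explicitly (the
+target name \fBcli\fR is hypothetical); see also the \fB\-\-bin\fR flag below:
+.sp
+.RS 4
+.nf
+cargo run \-\-bin cli
+.fi
+.RE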
+.sp
+\fB\-\-bin\fR \fIname\fR
+.RS 4
+Run the specified binary.
+.RE
+.sp
+\fB\-\-example\fR \fIname\fR
+.RS 4
+Run the specified example.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Run for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
+.sp
+\fB\-r\fR,
+\fB\-\-release\fR
+.RS 4
+Run optimized artifacts with the \fBrelease\fR profile.
+See also the \fB\-\-profile\fR option for choosing a specific profile by name.
+.RE
+.sp
+\fB\-\-profile\fR \fIname\fR
+.RS 4
+Run with the given profile.
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/profiles.html> for more details on profiles.
+.RE
+.sp
+\fB\-\-ignore\-rust\-version\fR
+.RS 4
+Run the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project\[cq]s \fBrust\-version\fR field.
+.RE
+.sp
+\fB\-\-timings=\fR\fIfmts\fR
+.RS 4
+Output information about how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma\-separated list of output
+formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&.
+Specifying an output format (rather than the default) is unstable and requires
+\fB\-Zunstable\-options\fR\&. Valid output formats:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the
+\fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine\-readable timing data.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON
+information about timing.
+.RE
+.RE
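+.sp
+For example, the default HTML report described above can be requested without
+naming a format:
+.sp
+.RS 4
+.nf
+cargo run \-\-timings
+.fi
+.RE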
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-message\-format\fR \fIfmt\fR
+.RS 4
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma\-separated values. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with
+\fBshort\fR and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR
+and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See
+\fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/external\-tools.html#json\-messages>
+for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains
+the \[lq]short\[rq] rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages
+contains embedded ANSI color codes for respecting rustc\[cq]s default color
+scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics
+in JSON messages printed, but instead Cargo itself should render the
+JSON diagnostics coming from rustc. Cargo\[cq]s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
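+.sp
+For example, a rustup\-managed nightly toolchain could be used for a single
+invocation (assuming rustup and a nightly toolchain are installed):
+.sp
+.RS 4
+.nf
+cargo +nightly run
+.fi
+.RE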
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
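+.sp
+As an illustration, the \fBnet.offline\fR key mentioned above could be
+overridden for one invocation; any TOML \fBKEY=VALUE\fR pair works the same way:
+.sp
+.RS 4
+.nf
+cargo \-\-config net.offline=true run
+.fi
+.RE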
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+Should not be 0.
+.RE
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Build the local package and run its main target (assuming only one binary):
+.sp
+.RS 4
+.nf
+cargo run
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Run an example with extra arguments:
+.sp
+.RS 4
+.nf
+cargo run \-\-example exname \-\- \-\-exoption exarg1 exarg2
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-build\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-rustc.1 b/src/tools/cargo/src/etc/man/cargo-rustc.1
new file mode 100644
index 000000000..6e901d9ec
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-rustc.1
@@ -0,0 +1,477 @@
+'\" t
+.TH "CARGO\-RUSTC" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-rustc \[em] Compile the current package, and pass extra options to the compiler
+.SH "SYNOPSIS"
+\fBcargo rustc\fR [\fIoptions\fR] [\fB\-\-\fR \fIargs\fR]
+.SH "DESCRIPTION"
+The specified target for the current package (or package specified by \fB\-p\fR if
+provided) will be compiled along with all of its dependencies. The specified
+\fIargs\fR will all be passed to the final compiler invocation, not any of the
+dependencies. Note that the compiler will still unconditionally receive
+arguments such as \fB\-L\fR, \fB\-\-extern\fR, and \fB\-\-crate\-type\fR, and the specified
+\fIargs\fR will simply be added to the compiler invocation.
+.sp
+See <https://doc.rust\-lang.org/rustc/index.html> for documentation on rustc
+flags.
+.sp
+This command requires that only one target is being compiled when additional
+arguments are provided. If more than one target is available for the current
+package the filters of \fB\-\-lib\fR, \fB\-\-bin\fR, etc, must be used to select which
+target is compiled.
+.sp
+To pass flags to all compiler processes spawned by Cargo, use the \fBRUSTFLAGS\fR
+\fIenvironment variable\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> or the
+\fBbuild.rustflags\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
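+.sp
+As an illustration of the difference, the first sketch below passes a lint flag
+only to the final compiler invocation for the selected target, while the second
+applies it to every compiler process Cargo spawns:
+.sp
+.RS 4
+.nf
+cargo rustc \-\-lib \-\- \-D warnings
+RUSTFLAGS="\-D warnings" cargo build
+.fi
+.RE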
+.SH "OPTIONS"
+.SS "Package Selection"
+By default, the package in the current working directory is selected. The \fB\-p\fR
+flag can be used to choose a different package in a workspace.
+.sp
+\fB\-p\fR \fIspec\fR,
+\fB\-\-package\fR \fIspec\fR
+.RS 4
+The package to build. See \fBcargo\-pkgid\fR(1) for the SPEC
+format.
+.RE
+.SS "Target Selection"
+When no target selection options are given, \fBcargo rustc\fR will build all
+binary and library targets of the selected package.
+.sp
+Binary targets are automatically built if there is an integration test or
+benchmark being selected to build. This allows an integration
+test to execute the binary to exercise and test its behavior.
+The \fBCARGO_BIN_EXE_<name>\fR
+\fIenvironment variable\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html#environment\-variables\-cargo\-sets\-for\-crates>
+is set when the integration test is built so that it can use the
+\fI\f(BIenv\fI macro\fR <https://doc.rust\-lang.org/std/macro.env.html> to locate the
+executable.
+.sp
+Passing target selection flags will build only the specified
+targets.
+.sp
+Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also
+support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
+.sp
+\fB\-\-lib\fR
+.RS 4
+Build the package\[cq]s library.
+.RE
+.sp
+\fB\-\-bin\fR \fIname\fR\[u2026]
+.RS 4
+Build the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-bins\fR
+.RS 4
+Build all binary targets.
+.RE
+.sp
+\fB\-\-example\fR \fIname\fR\[u2026]
+.RS 4
+Build the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-examples\fR
+.RS 4
+Build all example targets.
+.RE
+.sp
+\fB\-\-test\fR \fIname\fR\[u2026]
+.RS 4
+Build the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-tests\fR
+.RS 4
+Build all targets in test mode that have the \fBtest = true\fR manifest
+flag set. By default this includes the library and binaries built as
+unittests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unittest, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the \fBtest\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-bench\fR \fIname\fR\[u2026]
+.RS 4
+Build the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-benches\fR
+.RS 4
+Build all targets in benchmark mode that have the \fBbench = true\fR
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the \fBbench\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-all\-targets\fR
+.RS 4
+Build all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Build for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
+.sp
+\fB\-r\fR,
+\fB\-\-release\fR
+.RS 4
+Build optimized artifacts with the \fBrelease\fR profile.
+See also the \fB\-\-profile\fR option for choosing a specific profile by name.
+.RE
+.sp
+\fB\-\-profile\fR \fIname\fR
+.RS 4
+Build with the given profile.
+.sp
+The \fBrustc\fR subcommand will treat the following named profiles with special behaviors:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBcheck\fR \[em] Builds in the same way as the \fBcargo\-check\fR(1) command with
+the \fBdev\fR profile.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBtest\fR \[em] Builds in the same way as the \fBcargo\-test\fR(1) command,
+enabling building in test mode which will enable tests and enable the \fBtest\fR
+cfg option. See \fIrustc
+tests\fR <https://doc.rust\-lang.org/rustc/tests/index.html> for more detail.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBbench\fR \[em] Builds in the same way as the \fBcargo\-bench\fR(1) command,
+similar to the \fBtest\fR profile.
+.RE
+.sp
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/profiles.html> for more details on profiles.
+.RE
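+.sp
+For example, the special \fBcheck\fR profile described above builds the selected
+target the way \fBcargo\-check\fR(1) would:
+.sp
+.RS 4
+.nf
+cargo rustc \-\-profile check
+.fi
+.RE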
+.sp
+\fB\-\-ignore\-rust\-version\fR
+.RS 4
+Build the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project\[cq]s \fBrust\-version\fR field.
+.RE
+.sp
+\fB\-\-timings=\fR\fIfmts\fR
+.RS 4
+Output information about how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma\-separated list of output
+formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&.
+Specifying an output format (rather than the default) is unstable and requires
+\fB\-Zunstable\-options\fR\&. Valid output formats:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the
+\fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine\-readable timing data.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON
+information about timing.
+.RE
+.RE
+.sp
+\fB\-\-crate\-type\fR \fIcrate\-type\fR
+.RS 4
+Build for the given crate type. This flag accepts a comma\-separated list of
+one or more crate types, whose allowed values are the same as the \fBcrate\-type\fR
+field in the manifest for configuring a Cargo target. See
+\fI\f(BIcrate\-type\fI field\fR <https://doc.rust\-lang.org/cargo/reference/cargo\-targets.html#the\-crate\-type\-field>
+for possible values.
+.sp
+If the manifest contains a list, and \fB\-\-crate\-type\fR is provided,
+the command\-line argument value will override what is in the manifest.
+.sp
+This flag only works when building a \fBlib\fR or \fBexample\fR library target.
+.RE
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-message\-format\fR \fIfmt\fR
+.RS 4
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma\-separated values. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with
+\fBshort\fR and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR
+and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See
+\fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/external\-tools.html#json\-messages>
+for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains
+the \[lq]short\[rq] rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages
+contains embedded ANSI color codes for respecting rustc\[cq]s default color
+scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics
+in JSON messages printed, but instead Cargo itself should render the
+JSON diagnostics coming from rustc. Cargo\[cq]s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+Should not be 0.
+.RE
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.sp
+\fB\-\-future\-incompat\-report\fR
+.RS 4
+Displays a future\-incompat report for any future\-incompatible warnings
+produced during execution of this command.
+.sp
+See \fBcargo\-report\fR(1).
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Check if your package (not including dependencies) uses unsafe code:
+.sp
+.RS 4
+.nf
+cargo rustc \-\-lib \-\- \-D unsafe\-code
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Try an experimental flag on the nightly compiler, such as this which prints
+the size of every type:
+.sp
+.RS 4
+.nf
+cargo rustc \-\-lib \-\- \-Z print\-type\-sizes
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Override \fBcrate\-type\fR field in Cargo.toml with command\-line option:
+.sp
+.RS 4
+.nf
+cargo rustc \-\-lib \-\-crate\-type lib,cdylib
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-build\fR(1), \fBrustc\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-rustdoc.1 b/src/tools/cargo/src/etc/man/cargo-rustdoc.1
new file mode 100644
index 000000000..0c9a0e74a
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-rustdoc.1
@@ -0,0 +1,416 @@
+'\" t
+.TH "CARGO\-RUSTDOC" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-rustdoc \[em] Build a package\[cq]s documentation, using specified custom flags
+.SH "SYNOPSIS"
+\fBcargo rustdoc\fR [\fIoptions\fR] [\fB\-\-\fR \fIargs\fR]
+.SH "DESCRIPTION"
+The specified target for the current package (or package specified by \fB\-p\fR if
+provided) will be documented with the specified \fIargs\fR being passed to the
+final rustdoc invocation. Dependencies will not be documented as part of this
+command. Note that rustdoc will still unconditionally receive arguments such
+as \fB\-L\fR, \fB\-\-extern\fR, and \fB\-\-crate\-type\fR, and the specified \fIargs\fR will simply
+be added to the rustdoc invocation.
+.sp
+See <https://doc.rust\-lang.org/rustdoc/index.html> for documentation on rustdoc
+flags.
+.sp
+This command requires that only one target is being compiled when additional
+arguments are provided. If more than one target is available for the current
+package the filters of \fB\-\-lib\fR, \fB\-\-bin\fR, etc, must be used to select which
+target is compiled.
+.sp
+To pass flags to all rustdoc processes spawned by Cargo, use the
+\fBRUSTDOCFLAGS\fR \fIenvironment variable\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html>
+or the \fBbuild.rustdocflags\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
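+.sp
+As a sketch of the \fBRUSTDOCFLAGS\fR alternative mentioned above, the flag used
+in the EXAMPLES section below could instead be applied to every rustdoc process
+spawned by Cargo:
+.sp
+.RS 4
+.nf
+RUSTDOCFLAGS="\-\-extend\-css extra.css" cargo rustdoc
+.fi
+.RE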
+.SH "OPTIONS"
+.SS "Documentation Options"
+.sp
+\fB\-\-open\fR
+.RS 4
+Open the docs in a browser after building them. This will use your default
+browser unless you define another one in the \fBBROWSER\fR environment variable
+or use the \fI\f(BIdoc.browser\fI\fR <https://doc.rust\-lang.org/cargo/reference/config.html#docbrowser> configuration
+option.
+.RE
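+.sp
+For example, a particular browser could be requested through the \fBBROWSER\fR
+environment variable (\fBfirefox\fR is a placeholder for whatever is installed):
+.sp
+.RS 4
+.nf
+BROWSER=firefox cargo rustdoc \-\-open
+.fi
+.RE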
+.SS "Package Selection"
+By default, the package in the current working directory is selected. The \fB\-p\fR
+flag can be used to choose a different package in a workspace.
+.sp
+\fB\-p\fR \fIspec\fR,
+\fB\-\-package\fR \fIspec\fR
+.RS 4
+The package to document. See \fBcargo\-pkgid\fR(1) for the SPEC
+format.
+.RE
+.SS "Target Selection"
+When no target selection options are given, \fBcargo rustdoc\fR will document all
+binary and library targets of the selected package. The binary will be skipped
+if its name is the same as the lib target. Binaries are skipped if they have
+\fBrequired\-features\fR that are missing.
+.sp
+Passing target selection flags will document only the specified
+targets.
+.sp
+Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also
+support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
+.sp
+\fB\-\-lib\fR
+.RS 4
+Document the package\[cq]s library.
+.RE
+.sp
+\fB\-\-bin\fR \fIname\fR\[u2026]
+.RS 4
+Document the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-bins\fR
+.RS 4
+Document all binary targets.
+.RE
+.sp
+\fB\-\-example\fR \fIname\fR\[u2026]
+.RS 4
+Document the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-examples\fR
+.RS 4
+Document all example targets.
+.RE
+.sp
+\fB\-\-test\fR \fIname\fR\[u2026]
+.RS 4
+Document the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-tests\fR
+.RS 4
+Document all targets in test mode that have the \fBtest = true\fR manifest
+flag set. By default this includes the library and binaries built as
+unittests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unittest, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the \fBtest\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-bench\fR \fIname\fR\[u2026]
+.RS 4
+Document the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-benches\fR
+.RS 4
+Document all targets in benchmark mode that have the \fBbench = true\fR
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the \fBbench\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-all\-targets\fR
+.RS 4
+Document all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Document for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
+.RE
+.sp
+\fB\-r\fR,
+\fB\-\-release\fR
+.RS 4
+Document optimized artifacts with the \fBrelease\fR profile.
+See also the \fB\-\-profile\fR option for choosing a specific profile by name.
+.RE
+.sp
+\fB\-\-profile\fR \fIname\fR
+.RS 4
+Document with the given profile.
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/profiles.html> for more details on profiles.
+.RE
+.sp
+\fB\-\-ignore\-rust\-version\fR
+.RS 4
+Document the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project\[cq]s \fBrust\-version\fR field.
+.RE
+.sp
+\fB\-\-timings=\fR\fIfmts\fR
+.RS 4
+Output information about how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma\-separated list of output
+formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&.
+Specifying an output format (rather than the default) is unstable and requires
+\fB\-Zunstable\-options\fR\&. Valid output formats:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the
+\fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename if you want
+to look at older runs. HTML output is suitable for human consumption only,
+and does not provide machine\-readable timing data.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON
+information about timing.
+.RE
+.RE
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-message\-format\fR \fIfmt\fR
+.RS 4
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma\-separated values. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with
+\fBshort\fR and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR
+and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See
+\fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/external\-tools.html#json\-messages>
+for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains
+the \[lq]short\[rq] rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages
+contains embedded ANSI color codes for respecting rustc\[cq]s default color
+scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics
+in JSON messages printed, but instead Cargo itself should render the
+JSON diagnostics coming from rustc. Cargo\[cq]s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+Should not be 0.
+.RE
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Build documentation with custom CSS included from a given file:
+.sp
+.RS 4
+.nf
+cargo rustdoc \-\-lib \-\- \-\-extend\-css extra.css
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-doc\fR(1), \fBrustdoc\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-search.1 b/src/tools/cargo/src/etc/man/cargo-search.1
new file mode 100644
index 000000000..245d4e65d
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-search.1
@@ -0,0 +1,138 @@
+'\" t
+.TH "CARGO\-SEARCH" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-search \[em] Search packages in crates.io
+.SH "SYNOPSIS"
+\fBcargo search\fR [\fIoptions\fR] [\fIquery\fR\[u2026]]
+.SH "DESCRIPTION"
+This performs a textual search for crates on <https://crates.io>\&. The matching
+crates will be displayed along with their description in TOML format suitable
+for copying into a \fBCargo.toml\fR manifest.
+.SH "OPTIONS"
+.SS "Search Options"
+.sp
+\fB\-\-limit\fR \fIlimit\fR
+.RS 4
+Limit the number of results (default: 10, max: 100).
+.RE
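+.sp
+For instance, the query shown in the EXAMPLES section below could be widened to
+return more than the default ten results:
+.sp
+.RS 4
+.nf
+cargo search serde \-\-limit 20
+.fi
+.RE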
+.sp
+\fB\-\-index\fR \fIindex\fR
+.RS 4
+The URL of the registry index to use.
+.RE
+.sp
+\fB\-\-registry\fR \fIregistry\fR
+.RS 4
+Name of the registry to use. Registry names are defined in \fICargo config
+files\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. If not specified, the default registry is used,
+which is defined by the \fBregistry.default\fR config key which defaults to
+\fBcrates\-io\fR\&.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Search for a package from crates.io:
+.sp
+.RS 4
+.nf
+cargo search serde
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-install\fR(1), \fBcargo\-publish\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-test.1 b/src/tools/cargo/src/etc/man/cargo-test.1
new file mode 100644
index 000000000..85260b067
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-test.1
@@ -0,0 +1,581 @@
+'\" t
+.TH "CARGO\-TEST" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-test \[em] Execute unit and integration tests of a package
+.SH "SYNOPSIS"
+\fBcargo test\fR [\fIoptions\fR] [\fItestname\fR] [\fB\-\-\fR \fItest\-options\fR]
+.SH "DESCRIPTION"
+Compile and execute unit, integration, and documentation tests.
+.sp
+The test filtering argument \fBTESTNAME\fR and all the arguments following the two
+dashes (\fB\-\-\fR) are passed to the test binaries and thus to \fIlibtest\fR (rustc\[cq]s
+built in unit\-test and micro\-benchmarking framework). If you\[cq]re passing
+arguments to both Cargo and the binary, the ones after \fB\-\-\fR go to the binary,
+the ones before go to Cargo. For details about libtest\[cq]s arguments see the
+output of \fBcargo test \-\- \-\-help\fR and check out the rustc book\[cq]s chapter on
+how tests work at <https://doc.rust\-lang.org/rustc/tests/index.html>\&.
+.sp
+As an example, this will filter for tests with \fBfoo\fR in their name and run them
+on 3 threads in parallel:
+.sp
+.RS 4
+.nf
+cargo test foo \-\- \-\-test\-threads 3
+.fi
+.RE
+.sp
+Tests are built with the \fB\-\-test\fR option to \fBrustc\fR which creates a special
+executable by linking your code with libtest. The executable automatically
+runs all functions annotated with the \fB#[test]\fR attribute in multiple threads.
+\fB#[bench]\fR annotated functions will also be run with one iteration to verify
+that they are functional.
+.sp
+If the package contains multiple test targets, each target compiles to a
+special executable as described above, and the executables are then run serially.
+.sp
+The libtest harness may be disabled by setting \fBharness = false\fR in the target
+manifest settings, in which case your code will need to provide its own \fBmain\fR
+function to handle running tests.
+.SS "Documentation tests"
+Documentation tests are also run by default, which is handled by \fBrustdoc\fR\&. It
+extracts code samples from documentation comments of the library target, and
+then executes them.
+.sp
+Different from normal test targets, each code block compiles to a doctest
+executable on the fly with \fBrustc\fR\&. These executables run in parallel in
+separate processes. The compilation of a code block is in fact part of the test
+function controlled by libtest, so some options such as \fB\-\-jobs\fR might not
+take effect. Note that this execution model of doctests is not guaranteed
+and may change in the future; beware of depending on it.
+.sp
+See the \fIrustdoc book\fR <https://doc.rust\-lang.org/rustdoc/> for more information
+on writing doc tests.
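+.sp
+For example, only the documentation tests could be run by themselves (see the
+\fB\-\-doc\fR flag under Target Selection below):
+.sp
+.RS 4
+.nf
+cargo test \-\-doc
+.fi
+.RE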
+.SS "Working directory of tests"
+The working directory of every test is set to the root directory of the package
+the test belongs to.
+Setting the working directory of tests to the package\[cq]s root directory makes it
+possible for tests to reliably access the package\[cq]s files using relative paths,
+regardless of where \fBcargo test\fR was executed.
+.SH "OPTIONS"
+.SS "Test Options"
+.sp
+\fB\-\-no\-run\fR
+.RS 4
+Compile, but don\[cq]t run tests.
+.RE
+.sp
+\fB\-\-no\-fail\-fast\fR
+.RS 4
+Run all tests regardless of failure. Without this flag, Cargo will exit
+after the first executable fails. The Rust test harness will run all tests
+within the executable to completion, this flag only applies to the executable
+as a whole.
+.RE
+.SS "Package Selection"
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then
+the workspace\[cq]s default members are selected; otherwise only the package defined
+by the manifest will be selected.
+.sp
+The default members of a workspace can be set explicitly with the
+\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself.
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Test only the specified packages. See \fBcargo\-pkgid\fR(1) for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.
+.RE
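+.sp
+For example, every workspace member whose name starts with a hypothetical
+\fBfoo\fR prefix could be selected, with quotes keeping the glob away from the
+shell:
+.sp
+.RS 4
+.nf
+cargo test \-p 'foo*'
+.fi
+.RE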
+.sp
+\fB\-\-workspace\fR
+.RS 4
+Test all members in the workspace.
+.RE
+.sp
+\fB\-\-all\fR
+.RS 4
+Deprecated alias for \fB\-\-workspace\fR\&.
+.RE
+.sp
+\fB\-\-exclude\fR \fISPEC\fR\[u2026]
+.RS 4
+Exclude the specified packages. Must be used in conjunction with the
+\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports
+common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.
+.RE
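+.sp
+As an illustration, every workspace member except those matching a hypothetical
+\fBinternal\-*\fR pattern could be tested:
+.sp
+.RS 4
+.nf
+cargo test \-\-workspace \-\-exclude 'internal\-*'
+.fi
+.RE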
+.SS "Target Selection"
+When no target selection options are given, \fBcargo test\fR will build the
+following targets of the selected packages:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'lib \[em] used to link with binaries, examples, integration tests, and doc tests
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'bins (only if integration tests are built and required features are
+available)
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'examples \[em] to ensure they compile
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'lib as a unit test
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'bins as unit tests
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'integration tests
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'doc tests for the lib target
+.RE
+.sp
+The default behavior can be changed by setting the \fBtest\fR flag for the target
+in the manifest settings. Setting examples to \fBtest = true\fR will build and run
+the example as a test. Setting targets to \fBtest = false\fR will stop them from
+being tested by default. Target selection options that take a target by name
+ignore the \fBtest\fR flag and will always test the given target.
+.sp
+Doc tests for libraries may be disabled by setting \fBdoctest = false\fR for the
+library in the manifest.
+.sp
+Binary targets are automatically built if there is an integration test or
+benchmark being selected to test. This allows an integration
+test to execute the binary to exercise and test its behavior.
+The \fBCARGO_BIN_EXE_<name>\fR
+\fIenvironment variable\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html#environment\-variables\-cargo\-sets\-for\-crates>
+is set when the integration test is built so that it can use the
+\fI\f(BIenv\fI macro\fR <https://doc.rust\-lang.org/std/macro.env.html> to locate the
+executable.
+.sp
+Passing target selection flags will test only the specified
+targets.
+.sp
+Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also
+support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your
+shell accidentally expanding glob patterns before Cargo handles them, you must
+use single quotes or double quotes around each glob pattern.
+.sp
+\fB\-\-lib\fR
+.RS 4
+Test the package\[cq]s library.
+.RE
+.sp
+\fB\-\-bin\fR \fIname\fR\[u2026]
+.RS 4
+Test the specified binary. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-bins\fR
+.RS 4
+Test all binary targets.
+.RE
+.sp
+\fB\-\-example\fR \fIname\fR\[u2026]
+.RS 4
+Test the specified example. This flag may be specified multiple times
+and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-examples\fR
+.RS 4
+Test all example targets.
+.RE
+.sp
+\fB\-\-test\fR \fIname\fR\[u2026]
+.RS 4
+Test the specified integration test. This flag may be specified
+multiple times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-tests\fR
+.RS 4
+Test all targets in test mode that have the \fBtest = true\fR manifest
+flag set. By default this includes the library and binaries built as
+unittests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unittest, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the \fBtest\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-bench\fR \fIname\fR\[u2026]
+.RS 4
+Test the specified benchmark. This flag may be specified multiple
+times and supports common Unix glob patterns.
+.RE
+.sp
+\fB\-\-benches\fR
+.RS 4
+Test all targets in benchmark mode that have the \fBbench = true\fR
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the \fBbench\fR flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-all\-targets\fR
+.RS 4
+Test all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&.
+.RE
+.sp
+\fB\-\-doc\fR
+.RS 4
+Test only the library\[cq]s documentation. This cannot be mixed with other
+target options.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
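+.sp
+For example (the feature and package names below are placeholders, not taken
+from any real project):
+.sp
+.RS 4
+.nf
+cargo test \-\-features 'serde foo/json'
+.fi
+.RE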
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Test for the given architecture. The default is the host architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fR\&. Run \fBrustc \-\-print target\-list\fR for a
+list of supported targets. This flag may be specified multiple times.
+.sp
+This may also be specified with the \fBbuild.target\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.sp
+Note that specifying this flag makes Cargo run in a different mode where the
+target artifacts are placed in a separate directory. See the
+\fIbuild cache\fR <https://doc.rust\-lang.org/cargo/guide/build\-cache.html> documentation for more details.
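+.sp
+For example, to run the tests for a particular target (the triple shown is
+just one of the many listed by \fBrustc \-\-print target\-list\fR):
+.sp
+.RS 4
+.nf
+cargo test \-\-target x86_64\-unknown\-linux\-gnu
+.fi
+.RE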
+.RE
+.sp
+\fB\-r\fR,
+\fB\-\-release\fR
+.RS 4
+Test optimized artifacts with the \fBrelease\fR profile.
+See also the \fB\-\-profile\fR option for choosing a specific profile by name.
+.RE
+.sp
+\fB\-\-profile\fR \fIname\fR
+.RS 4
+Test with the given profile.
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/profiles.html> for more details on profiles.
+.RE
+.sp
+\fB\-\-ignore\-rust\-version\fR
+.RS 4
+Test the target even if the selected Rust compiler is older than the
+required Rust version as configured in the project\[cq]s \fBrust\-version\fR field.
+.RE
+.sp
+\fB\-\-timings=\fR\fIfmts\fR
+.RS 4
+Output information about how long each compilation takes, and track concurrency
+information over time. Accepts an optional comma\-separated list of output
+formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&.
+Specifying an output format (rather than the default) is unstable and requires
+\fB\-Zunstable\-options\fR\&. Valid output formats:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the
+\fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write
+a report to the same directory with a timestamp in the filename so that older
+runs can be reviewed. HTML output is suitable for human consumption only,
+and does not provide machine\-readable timing data.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON
+timing information.
+.RE
+.RE
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fR \fIdirectory\fR
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the
+\fBbuild.target\-dir\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+Defaults to \fBtarget\fR in the root of the workspace.
+.RE
+.SS "Display Options"
+By default the Rust test harness hides output from test execution to keep
+results readable. Test output can be recovered (e.g., for debugging) by passing
+\fB\-\-nocapture\fR to the test binaries:
+.sp
+.RS 4
+.nf
+cargo test \-\- \-\-nocapture
+.fi
+.RE
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-message\-format\fR \fIfmt\fR
+.RS 4
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma\-separated values. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with
+\fBshort\fR and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR
+and \fBjson\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See
+\fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/external\-tools.html#json\-messages>
+for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains
+the \[lq]short\[rq] rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages
+contains embedded ANSI color codes for respecting rustc\[cq]s default color
+scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics
+in the JSON messages it prints; instead, Cargo itself renders the JSON
+diagnostics coming from rustc. Cargo\[cq]s own JSON diagnostics and others
+coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&.
+.RE
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
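+.sp
+For example, a configuration value can be overridden for a single invocation
+(the key and value shown are only a sketch):
+.sp
+.RS 4
+.nf
+cargo test \-\-config build.jobs=2
+.fi
+.RE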
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SS "Miscellaneous Options"
+The \fB\-\-jobs\fR argument affects the building of the test executable but does not
+affect how many threads are used when running the tests. The Rust test harness
+includes an option to control the number of threads used:
+.sp
+.RS 4
+.nf
+cargo test \-j 2 \-\- \-\-test\-threads=2
+.fi
+.RE
+.sp
+\fB\-j\fR \fIN\fR,
+\fB\-\-jobs\fR \fIN\fR
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. Defaults to
+the number of logical CPUs. If negative, it sets the maximum number of
+parallel jobs to the number of logical CPUs plus the provided value.
+Should not be 0.
+.RE
+.sp
+\fB\-\-keep\-going\fR
+.RS 4
+Build as many crates in the dependency graph as possible, rather than aborting
+the build on the first one that fails to build. Unstable, requires
+\fB\-Zunstable\-options\fR\&.
+.RE
+.sp
+\fB\-\-future\-incompat\-report\fR
+.RS 4
+Displays a future\-incompat report for any future\-incompatible warnings
+produced during execution of this command.
+.sp
+See \fBcargo\-report\fR(1) for more details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Execute all the unit and integration tests of the current package:
+.sp
+.RS 4
+.nf
+cargo test
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Run only tests whose names match against a filter string:
+.sp
+.RS 4
+.nf
+cargo test name_filter
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Run only a specific test within a specific integration test:
+.sp
+.RS 4
+.nf
+cargo test \-\-test int_test_name \-\- modname::test_name
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-bench\fR(1), \fItypes of tests\fR <https://doc.rust\-lang.org/cargo/reference/cargo\-targets.html#tests>, \fIhow to write tests\fR <https://doc.rust\-lang.org/rustc/tests/index.html>
diff --git a/src/tools/cargo/src/etc/man/cargo-tree.1 b/src/tools/cargo/src/etc/man/cargo-tree.1
new file mode 100644
index 000000000..2abad9732
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-tree.1
@@ -0,0 +1,507 @@
+'\" t
+.TH "CARGO\-TREE" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-tree \[em] Display a tree visualization of a dependency graph
+.SH "SYNOPSIS"
+\fBcargo tree\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+This command will display a tree of dependencies to the terminal. An example
+of a simple project that depends on the \[lq]rand\[rq] package:
+.sp
+.RS 4
+.nf
+myproject v0.1.0 (/myproject)
+`\-\- rand v0.7.3
+ |\-\- getrandom v0.1.14
+ | |\-\- cfg\-if v0.1.10
+ | `\-\- libc v0.2.68
+ |\-\- libc v0.2.68 (*)
+ |\-\- rand_chacha v0.2.2
+ | |\-\- ppv\-lite86 v0.2.6
+ | `\-\- rand_core v0.5.1
+ | `\-\- getrandom v0.1.14 (*)
+ `\-\- rand_core v0.5.1 (*)
+[build\-dependencies]
+`\-\- cc v1.0.50
+.fi
+.RE
+.sp
+Packages marked with \fB(*)\fR have been \[lq]de\-duplicated\[rq]\&. The dependencies for the
+package have already been shown elsewhere in the graph, and so are not
+repeated. Use the \fB\-\-no\-dedupe\fR option to repeat the duplicates.
+.sp
+The \fB\-e\fR flag can be used to select the dependency kinds to display. The
+\[lq]features\[rq] kind changes the output to display the features enabled by
+each dependency. For example, \fBcargo tree \-e features\fR:
+.sp
+.RS 4
+.nf
+myproject v0.1.0 (/myproject)
+`\-\- log feature "serde"
+ `\-\- log v0.4.8
+ |\-\- serde v1.0.106
+ `\-\- cfg\-if feature "default"
+ `\-\- cfg\-if v0.1.10
+.fi
+.RE
+.sp
+In this tree, \fBmyproject\fR depends on \fBlog\fR with the \fBserde\fR feature. \fBlog\fR in
+turn depends on \fBcfg\-if\fR with \[lq]default\[rq] features. When using \fB\-e features\fR it
+can be helpful to use the \fB\-i\fR flag to show how the features flow into a package.
+See the examples below for more detail.
+.SS "Feature Unification"
+This command shows a graph much closer to the feature\-unified graph Cargo
+will actually build, rather than what you list in \fBCargo.toml\fR\&. For instance,
+if you specify the same dependency in both \fB[dependencies]\fR and
+\fB[dev\-dependencies]\fR but with different features enabled, this command may
+merge all of those features and show a \fB(*)\fR on one of the dependencies to
+indicate the duplicate.
+.sp
+As a result, for a mostly equivalent overview of what \fBcargo build\fR does,
+\fBcargo tree \-e normal,build\fR is pretty close; for a mostly equivalent overview
+of what \fBcargo test\fR does, \fBcargo tree\fR is pretty close. However, it doesn\[cq]t
+guarantee exact equivalence to what Cargo will build, since compilation is
+complex and depends on many different factors.
+.sp
+To learn more about feature unification, check out this
+\fIdedicated section\fR <https://doc.rust\-lang.org/cargo/reference/features.html#feature\-unification>\&.
+.SH "OPTIONS"
+.SS "Tree Options"
+.sp
+\fB\-i\fR \fIspec\fR,
+\fB\-\-invert\fR \fIspec\fR
+.RS 4
+Show the reverse dependencies for the given package. This flag will invert
+the tree and display the packages that depend on the given package.
+.sp
+Note that in a workspace, by default it will only display the package\[cq]s
+reverse dependencies inside the tree of the workspace member in the current
+directory. The \fB\-\-workspace\fR flag can be used to extend it so that it will
+show the package\[cq]s reverse dependencies across the entire workspace. The \fB\-p\fR
+flag can be used to display the package\[cq]s reverse dependencies only with the
+subtree of the package given to \fB\-p\fR\&.
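+.sp
+For example, to show what depends on a package across the entire workspace
+(\fBsyn\fR is used here purely as an illustrative package name):
+.sp
+.RS 4
+.nf
+cargo tree \-i syn \-\-workspace
+.fi
+.RE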
+.RE
+.sp
+\fB\-\-prune\fR \fIspec\fR
+.RS 4
+Prune the given package from the display of the dependency tree.
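+.sp
+For example (\fBsyn\fR is only an illustrative package name):
+.sp
+.RS 4
+.nf
+cargo tree \-\-prune syn
+.fi
+.RE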
+.RE
+.sp
+\fB\-\-depth\fR \fIdepth\fR
+.RS 4
+Maximum display depth of the dependency tree. A depth of 1 displays the direct
+dependencies, for example.
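+.sp
+A minimal invocation that lists only the direct dependencies:
+.sp
+.RS 4
+.nf
+cargo tree \-\-depth 1
+.fi
+.RE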
+.RE
+.sp
+\fB\-\-no\-dedupe\fR
+.RS 4
+Do not de\-duplicate repeated dependencies. Usually, when a package has already
+displayed its dependencies, further occurrences will not re\-display its
+dependencies, and will include a \fB(*)\fR to indicate it has already been shown.
+This flag will cause those duplicates to be repeated.
+.RE
+.sp
+\fB\-d\fR,
+\fB\-\-duplicates\fR
+.RS 4
+Show only dependencies which come in multiple versions (implies \fB\-\-invert\fR).
+When used with the \fB\-p\fR flag, only shows duplicates within the subtree of the
+given package.
+.sp
+It can be beneficial for build times and executable sizes to avoid building
+the same package multiple times. This flag can help identify the offending
+packages. You can then investigate if the package that depends on the
+duplicate with the older version can be updated to the newer version so that
+only one instance is built.
+.RE
+.sp
+\fB\-e\fR \fIkinds\fR,
+\fB\-\-edges\fR \fIkinds\fR
+.RS 4
+The dependency kinds to display. Takes a comma separated list of values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBall\fR \[em] Show all edge kinds.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnormal\fR \[em] Show normal dependencies.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBbuild\fR \[em] Show build dependencies.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBdev\fR \[em] Show development dependencies.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBfeatures\fR \[em] Show features enabled by each dependency. If this is the only
+kind given, then it will automatically include the other dependency kinds.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBno\-normal\fR \[em] Do not include normal dependencies.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBno\-build\fR \[em] Do not include build dependencies.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBno\-dev\fR \[em] Do not include development dependencies.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBno\-proc\-macro\fR \[em] Do not include procedural macro dependencies.
+.RE
+.sp
+The \fBnormal\fR, \fBbuild\fR, \fBdev\fR, and \fBall\fR dependency kinds cannot be mixed with
+\fBno\-normal\fR, \fBno\-build\fR, or \fBno\-dev\fR dependency kinds.
+.sp
+The default is \fBnormal,build,dev\fR\&.
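+.sp
+For example, to hide development dependencies and show roughly what a normal
+build would pull in:
+.sp
+.RS 4
+.nf
+cargo tree \-e no\-dev
+.fi
+.RE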
+.RE
+.sp
+\fB\-\-target\fR \fItriple\fR
+.RS 4
+Filter dependencies matching the given \fItarget triple\fR <https://doc.rust\-lang.org/cargo/appendix/glossary.html#target>\&.
+The default is the host platform. Use the value \fBall\fR to include \fIall\fR targets.
+.RE
+.SS "Tree Formatting Options"
+.sp
+\fB\-\-charset\fR \fIcharset\fR
+.RS 4
+Chooses the character set to use for the tree. Valid values are \[lq]utf8\[rq] or
+\[lq]ascii\[rq]\&. Default is \[lq]utf8\[rq]\&.
+.RE
+.sp
+\fB\-f\fR \fIformat\fR,
+\fB\-\-format\fR \fIformat\fR
+.RS 4
+Set the format string for each package. The default is \[lq]{p}\[rq]\&.
+.sp
+This is an arbitrary string which will be used to display each package. The following
+strings will be replaced with the corresponding value:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB{p}\fR \[em] The package name.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB{l}\fR \[em] The package license.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB{r}\fR \[em] The package repository URL.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB{f}\fR \[em] Comma\-separated list of package features that are enabled.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB{lib}\fR \[em] The name, as used in a \fBuse\fR statement, of the package\[cq]s library.
+.RE
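+.sp
+For example, a format string that also prints each package\[cq]s license field:
+.sp
+.RS 4
+.nf
+cargo tree \-\-format "{p} {l}"
+.fi
+.RE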
+.RE
+.sp
+\fB\-\-prefix\fR \fIprefix\fR
+.RS 4
+Sets how each line is displayed. The \fIprefix\fR value can be one of:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBindent\fR (default) \[em] Shows each line indented as a tree.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBdepth\fR \[em] Show as a list, with the numeric depth printed before each entry.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnone\fR \[em] Show as a flat list.
+.RE
+.RE
+.SS "Package Selection"
+By default, when no package selection options are given, the packages selected
+depend on the selected manifest file (based on the current working directory if
+\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then
+the workspace\[cq]s default members are selected, otherwise only the package defined
+by the manifest will be selected.
+.sp
+The default members of a workspace can be set explicitly with the
+\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a
+virtual workspace will include all workspace members (equivalent to passing
+\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself.
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Display only the specified packages. See \fBcargo\-pkgid\fR(1) for the
+SPEC format. This flag may be specified multiple times and supports common Unix
+glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally
+expanding glob patterns before Cargo handles them, you must use single quotes or
+double quotes around each pattern.
+.RE
+.sp
+\fB\-\-workspace\fR
+.RS 4
+Display all members in the workspace.
+.RE
+.sp
+\fB\-\-exclude\fR \fISPEC\fR\[u2026]
+.RS 4
+Exclude the specified packages. Must be used in conjunction with the
+\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports
+common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell
+accidentally expanding glob patterns before Cargo handles them, you must use
+single quotes or double quotes around each pattern.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Feature Selection"
+The feature flags allow you to control which features are enabled. When no
+feature options are given, the \fBdefault\fR feature is activated for every
+selected package.
+.sp
+See \fIthe features documentation\fR <https://doc.rust\-lang.org/cargo/reference/features.html#command\-line\-feature\-options>
+for more details.
+.sp
+\fB\-F\fR \fIfeatures\fR,
+\fB\-\-features\fR \fIfeatures\fR
+.RS 4
+Space or comma separated list of features to activate. Features of workspace
+members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may
+be specified multiple times, which enables all specified features.
+.RE
+.sp
+\fB\-\-all\-features\fR
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fR
+.RS 4
+Do not activate the \fBdefault\fR feature of the selected packages.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Display the tree for the package in the current directory:
+.sp
+.RS 4
+.nf
+cargo tree
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Display all the packages that depend on the \fBsyn\fR package:
+.sp
+.RS 4
+.nf
+cargo tree \-i syn
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Show the features enabled on each package:
+.sp
+.RS 4
+.nf
+cargo tree \-\-format "{p} {f}"
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 4.\h'+01'Show all packages that are built multiple times. This can happen if multiple
+semver\-incompatible versions appear in the tree (like 1.0.0 and 2.0.0).
+.sp
+.RS 4
+.nf
+cargo tree \-d
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 5.\h'+01'Explain why features are enabled for the \fBsyn\fR package:
+.sp
+.RS 4
+.nf
+cargo tree \-e features \-i syn
+.fi
+.RE
+.sp
+The \fB\-e features\fR flag is used to show features. The \fB\-i\fR flag is used to
+invert the graph so that it displays the packages that depend on \fBsyn\fR\&. An
+example of what this would display:
+.sp
+.RS 4
+.nf
+syn v1.0.17
+|\-\- syn feature "clone\-impls"
+| `\-\- syn feature "default"
+| `\-\- rustversion v1.0.2
+| `\-\- rustversion feature "default"
+| `\-\- myproject v0.1.0 (/myproject)
+| `\-\- myproject feature "default" (command\-line)
+|\-\- syn feature "default" (*)
+|\-\- syn feature "derive"
+| `\-\- syn feature "default" (*)
+|\-\- syn feature "full"
+| `\-\- rustversion v1.0.2 (*)
+|\-\- syn feature "parsing"
+| `\-\- syn feature "default" (*)
+|\-\- syn feature "printing"
+| `\-\- syn feature "default" (*)
+|\-\- syn feature "proc\-macro"
+| `\-\- syn feature "default" (*)
+`\-\- syn feature "quote"
+ |\-\- syn feature "printing" (*)
+ `\-\- syn feature "proc\-macro" (*)
+.fi
+.RE
+.sp
+To read this graph, you can follow the chain for each feature from the root
+to see why it is included. For example, the \[lq]full\[rq] feature is added by the
+\fBrustversion\fR crate which is included from \fBmyproject\fR (with the default
+features), and \fBmyproject\fR is the package selected on the command\-line. All
+of the other \fBsyn\fR features are added by the \[lq]default\[rq] feature (\[lq]quote\[rq] is
+added by \[lq]printing\[rq] and \[lq]proc\-macro\[rq], both of which are default features).
+.sp
+If you\[cq]re having difficulty cross\-referencing the de\-duplicated \fB(*)\fR
+entries, try with the \fB\-\-no\-dedupe\fR flag to get the full output.
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-metadata\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-uninstall.1 b/src/tools/cargo/src/etc/man/cargo-uninstall.1
new file mode 100644
index 000000000..304d5788a
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-uninstall.1
@@ -0,0 +1,161 @@
+'\" t
+.TH "CARGO\-UNINSTALL" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-uninstall \[em] Remove a Rust binary
+.SH "SYNOPSIS"
+\fBcargo uninstall\fR [\fIoptions\fR] [\fIspec\fR\[u2026]]
+.SH "DESCRIPTION"
+This command removes a package installed with \fBcargo\-install\fR(1). The \fIspec\fR
+argument is a package ID specification of the package to remove (see
+\fBcargo\-pkgid\fR(1)).
+.sp
+By default all binaries are removed for a crate but the \fB\-\-bin\fR and
+\fB\-\-example\fR flags can be used to only remove particular binaries.
+.sp
+The installation root is determined, in order of precedence:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB\-\-root\fR option
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBCARGO_INSTALL_ROOT\fR environment variable
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBinstall.root\fR Cargo \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBCARGO_HOME\fR environment variable
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB$HOME/.cargo\fR
+.RE
+.SH "OPTIONS"
+.SS "Install Options"
+.sp
+\fB\-p\fR,
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Package to uninstall.
+.RE
+.sp
+\fB\-\-bin\fR \fIname\fR\[u2026]
+.RS 4
+Only uninstall the binary \fIname\fR\&.
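+.sp
+For example, to remove just one binary from a package that installs several
+(the package and binary names here are only illustrative):
+.sp
+.RS 4
+.nf
+cargo uninstall foo \-\-bin foo\-helper
+.fi
+.RE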
+.RE
+.sp
+\fB\-\-root\fR \fIdir\fR
+.RS 4
+Directory to uninstall packages from.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Uninstall a previously installed package.
+.sp
+.RS 4
+.nf
+cargo uninstall ripgrep
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-install\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-update.1 b/src/tools/cargo/src/etc/man/cargo-update.1
new file mode 100644
index 000000000..6f697b3ab
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-update.1
@@ -0,0 +1,218 @@
+'\" t
+.TH "CARGO\-UPDATE" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-update \[em] Update dependencies as recorded in the local lock file
+.SH "SYNOPSIS"
+\fBcargo update\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+This command will update dependencies in the \fBCargo.lock\fR file to the latest
+version. If the \fBCargo.lock\fR file does not exist, it will be created with the
+latest available versions.
+.SH "OPTIONS"
+.SS "Update Options"
+.sp
+\fB\-p\fR \fIspec\fR\[u2026],
+\fB\-\-package\fR \fIspec\fR\[u2026]
+.RS 4
+Update only the specified packages. This flag may be specified
+multiple times. See \fBcargo\-pkgid\fR(1) for the SPEC format.
+.sp
+If packages are specified with the \fB\-p\fR flag, then a conservative update of
+the lockfile will be performed. This means that only the dependency specified
+by SPEC will be updated. Its transitive dependencies will be updated only if
+SPEC cannot be updated without updating dependencies. All other dependencies
+will remain locked at their currently recorded versions.
+.sp
+If \fB\-p\fR is not specified, all dependencies are updated.
+.RE
+.sp
+\fB\-\-aggressive\fR
+.RS 4
+When used with \fB\-p\fR, dependencies of \fIspec\fR are forced to update as well.
+Cannot be used with \fB\-\-precise\fR\&.
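+.sp
+For example (\fBfoo\fR is a placeholder package name):
+.sp
+.RS 4
+.nf
+cargo update \-p foo \-\-aggressive
+.fi
+.RE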
+.RE
+.sp
+\fB\-\-precise\fR \fIprecise\fR
+.RS 4
+When used with \fB\-p\fR, allows you to specify a specific version number to set
+the package to. If the package comes from a git repository, this can be a git
+revision (such as a SHA hash or tag).
+.RE
+.sp
+\fB\-w\fR,
+\fB\-\-workspace\fR
+.RS 4
+Attempt to update only packages defined in the workspace. Other packages
+are updated only if they don\[cq]t already exist in the lockfile. This
+option is useful for updating \fBCargo.lock\fR after you\[cq]ve changed version
+numbers in \fBCargo.toml\fR\&.
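+.sp
+A typical invocation after editing version numbers in the workspace manifests:
+.sp
+.RS 4
+.nf
+cargo update \-\-workspace
+.fi
+.RE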
+.RE
+.sp
+\fB\-\-dry\-run\fR
+.RS 4
+Displays what would be updated, but doesn\[cq]t actually write the lockfile.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Update all dependencies in the lockfile:
+.sp
+.RS 4
+.nf
+cargo update
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Update only specific dependencies:
+.sp
+.RS 4
+.nf
+cargo update \-p foo \-p bar
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Set a specific dependency to a specific version:
+.sp
+.RS 4
+.nf
+cargo update \-p foo \-\-precise 1.2.3
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-generate\-lockfile\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-vendor.1 b/src/tools/cargo/src/etc/man/cargo-vendor.1
new file mode 100644
index 000000000..cb46f67cd
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-vendor.1
@@ -0,0 +1,209 @@
+'\" t
+.TH "CARGO\-VENDOR" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-vendor \[em] Vendor all dependencies locally
+.SH "SYNOPSIS"
+\fBcargo vendor\fR [\fIoptions\fR] [\fIpath\fR]
+.SH "DESCRIPTION"
+This cargo subcommand will vendor all crates.io and git dependencies for a
+project into the specified directory at \fB<path>\fR\&. After this command completes
+the vendor directory specified by \fB<path>\fR will contain all remote sources from
+dependencies specified. Additional manifests beyond the default one can be
+specified with the \fB\-s\fR option.
+.sp
+The \fBcargo vendor\fR command will also print out the configuration necessary
+to use the vendored sources, which you will need to add to \fB\&.cargo/config.toml\fR\&.
+.SH "OPTIONS"
+.SS "Vendor Options"
+.sp
+\fB\-s\fR \fImanifest\fR,
+\fB\-\-sync\fR \fImanifest\fR
+.RS 4
+Specify an extra \fBCargo.toml\fR manifest for an additional workspace or package
+whose dependencies should also be vendored and synced to the output. May be
+specified multiple times.
+.RE
+.sp
+\fB\-\-no\-delete\fR
+.RS 4
+Don\[cq]t delete the \[lq]vendor\[rq] directory when vendoring, but rather keep all
+existing contents of the vendor directory
+.RE
+.sp
+\fB\-\-respect\-source\-config\fR
+.RS 4
+Instead of ignoring \fB[source]\fR configuration by default in
+\fB\&.cargo/config.toml\fR, read it and use it when downloading crates from
+crates.io, for example.
+.RE
+.sp
+\fB\-\-versioned\-dirs\fR
+.RS 4
+Normally versions are only added to disambiguate multiple versions of the
+same package. This option causes all directories in the \[lq]vendor\[rq] directory
+to be versioned, which makes it easier to track the history of vendored
+packages over time, and can help with the performance of re\-vendoring when
+only a subset of the packages have changed.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Vendor all dependencies into a local \[lq]vendor\[rq] folder
+.sp
+.RS 4
+.nf
+cargo vendor
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Vendor all dependencies into a local \[lq]third\-party/vendor\[rq] folder
+.sp
+.RS 4
+.nf
+cargo vendor third\-party/vendor
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Vendor the current workspace as well as another to \[lq]vendor\[rq]
+.sp
+.RS 4
+.nf
+cargo vendor \-s ../path/to/Cargo.toml
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-verify-project.1 b/src/tools/cargo/src/etc/man/cargo-verify-project.1
new file mode 100644
index 000000000..d067dd665
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-verify-project.1
@@ -0,0 +1,168 @@
+'\" t
+.TH "CARGO\-VERIFY\-PROJECT" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-verify\-project \[em] Check correctness of crate manifest
+.SH "SYNOPSIS"
+\fBcargo verify\-project\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+This command will parse the local manifest and check its validity. It emits a
+JSON object with the result. A successful validation will display:
+.sp
+.RS 4
+.nf
+{"success":"true"}
+.fi
+.RE
+.sp
+An invalid workspace will display:
+.sp
+.RS 4
+.nf
+{"invalid":"human\-readable error message"}
+.fi
+.RE
+.SH "OPTIONS"
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fR \fIpath\fR
+.RS 4
+Path to the \fBCargo.toml\fR file. By default, Cargo searches for the
+\fBCargo.toml\fR file in the current directory or any parent directory.
+.RE
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: The workspace is OK.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB1\fR: The workspace is invalid.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Check the current workspace for errors:
+.sp
+.RS 4
+.nf
+cargo verify\-project
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-package\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-version.1 b/src/tools/cargo/src/etc/man/cargo-version.1
new file mode 100644
index 000000000..6f1f46303
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-version.1
@@ -0,0 +1,52 @@
+'\" t
+.TH "CARGO\-VERSION" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-version \[em] Show version information
+.SH "SYNOPSIS"
+\fBcargo version\fR [\fIoptions\fR]
+.SH "DESCRIPTION"
+Displays the version of Cargo.
+.SH "OPTIONS"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Display additional version information.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Display the version:
+.sp
+.RS 4
+.nf
+cargo version
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'The version is also available via flags:
+.sp
+.RS 4
+.nf
+cargo \-\-version
+cargo \-V
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Display extra version information:
+.sp
+.RS 4
+.nf
+cargo \-Vv
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo-yank.1 b/src/tools/cargo/src/etc/man/cargo-yank.1
new file mode 100644
index 000000000..423c7bc99
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo-yank.1
@@ -0,0 +1,169 @@
+'\" t
+.TH "CARGO\-YANK" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo\-yank \[em] Remove a pushed crate from the index
+.SH "SYNOPSIS"
+\fBcargo yank\fR [\fIoptions\fR] \fIcrate\fR@\fIversion\fR
+.br
+\fBcargo yank\fR [\fIoptions\fR] \fB\-\-version\fR \fIversion\fR [\fIcrate\fR]
+.SH "DESCRIPTION"
+The yank command removes a previously published crate\[cq]s version from the
+server\[cq]s index. This command does not delete any data, and the crate will
+still be available for download via the registry\[cq]s download link.
+.sp
+Note that existing crates locked to a yanked version will still be able to
+download the yanked version to use it. Cargo will, however, not allow any new
+crates to be locked to any yanked version.
+.sp
+This command requires you to be authenticated with either the \fB\-\-token\fR option
+or using \fBcargo\-login\fR(1).
+.sp
+If the crate name is not specified, it will use the package name from the
+current directory.
+.SH "OPTIONS"
+.SS "Yank Options"
+.sp
+\fB\-\-vers\fR \fIversion\fR,
+\fB\-\-version\fR \fIversion\fR
+.RS 4
+The version to yank or un\-yank.
+.RE
+.sp
+\fB\-\-undo\fR
+.RS 4
+Undo a yank, putting a version back into the index.
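+.sp
+For example, to restore a previously yanked version (the crate and version
+shown are only illustrative):
+.sp
+.RS 4
+.nf
+cargo yank \-\-undo foo@1.0.7
+.fi
+.RE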
+.RE
+.sp
+\fB\-\-token\fR \fItoken\fR
+.RS 4
+API token to use when authenticating. This overrides the token stored in
+the credentials file (which is created by \fBcargo\-login\fR(1)).
+.sp
+\fICargo config\fR <https://doc.rust\-lang.org/cargo/reference/config.html> environment variables can be
+used to override the tokens stored in the credentials file. The token for
+crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fR environment
+variable. Tokens for other registries may be specified with environment
+variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fR where \fBNAME\fR is the name
+of the registry in all capital letters.
+.RE
+.sp
+\fB\-\-index\fR \fIindex\fR
+.RS 4
+The URL of the registry index to use.
+.RE
+.sp
+\fB\-\-registry\fR \fIregistry\fR
+.RS 4
+Name of the registry to use. Registry names are defined in \fICargo config
+files\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&. If not specified, the default registry is used,
+which is defined by the \fBregistry.default\fR config key which defaults to
+\fBcrates\-io\fR\&.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Yank a crate from the index:
+.sp
+.RS 4
+.nf
+cargo yank foo@1.0.7
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBcargo\fR(1), \fBcargo\-login\fR(1), \fBcargo\-publish\fR(1)
diff --git a/src/tools/cargo/src/etc/man/cargo.1 b/src/tools/cargo/src/etc/man/cargo.1
new file mode 100644
index 000000000..8f61e0699
--- /dev/null
+++ b/src/tools/cargo/src/etc/man/cargo.1
@@ -0,0 +1,396 @@
+'\" t
+.TH "CARGO" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+cargo \[em] The Rust package manager
+.SH "SYNOPSIS"
+\fBcargo\fR [\fIoptions\fR] \fIcommand\fR [\fIargs\fR]
+.br
+\fBcargo\fR [\fIoptions\fR] \fB\-\-version\fR
+.br
+\fBcargo\fR [\fIoptions\fR] \fB\-\-list\fR
+.br
+\fBcargo\fR [\fIoptions\fR] \fB\-\-help\fR
+.br
+\fBcargo\fR [\fIoptions\fR] \fB\-\-explain\fR \fIcode\fR
+.SH "DESCRIPTION"
+This program is a package manager and build tool for the Rust language,
+available at <https://rust\-lang.org>\&.
+.SH "COMMANDS"
+.SS "Build Commands"
+\fBcargo\-bench\fR(1)
+.br
+\ \ \ \ Execute benchmarks of a package.
+.sp
+\fBcargo\-build\fR(1)
+.br
+\ \ \ \ Compile a package.
+.sp
+\fBcargo\-check\fR(1)
+.br
+\ \ \ \ Check a local package and all of its dependencies for errors.
+.sp
+\fBcargo\-clean\fR(1)
+.br
+\ \ \ \ Remove artifacts that Cargo has generated in the past.
+.sp
+\fBcargo\-doc\fR(1)
+.br
+\ \ \ \ Build a package\[cq]s documentation.
+.sp
+\fBcargo\-fetch\fR(1)
+.br
+\ \ \ \ Fetch dependencies of a package from the network.
+.sp
+\fBcargo\-fix\fR(1)
+.br
+\ \ \ \ Automatically fix lint warnings reported by rustc.
+.sp
+\fBcargo\-run\fR(1)
+.br
+\ \ \ \ Run a binary or example of the local package.
+.sp
+\fBcargo\-rustc\fR(1)
+.br
+\ \ \ \ Compile a package, and pass extra options to the compiler.
+.sp
+\fBcargo\-rustdoc\fR(1)
+.br
+\ \ \ \ Build a package\[cq]s documentation, using specified custom flags.
+.sp
+\fBcargo\-test\fR(1)
+.br
+\ \ \ \ Execute unit and integration tests of a package.
+.SS "Manifest Commands"
+\fBcargo\-generate\-lockfile\fR(1)
+.br
+\ \ \ \ Generate \fBCargo.lock\fR for a project.
+.sp
+\fBcargo\-locate\-project\fR(1)
+.br
+\ \ \ \ Print a JSON representation of a \fBCargo.toml\fR file\[cq]s location.
+.sp
+\fBcargo\-metadata\fR(1)
+.br
+\ \ \ \ Output the resolved dependencies of a package in machine\-readable format.
+.sp
+\fBcargo\-pkgid\fR(1)
+.br
+\ \ \ \ Print a fully qualified package specification.
+.sp
+\fBcargo\-tree\fR(1)
+.br
+\ \ \ \ Display a tree visualization of a dependency graph.
+.sp
+\fBcargo\-update\fR(1)
+.br
+\ \ \ \ Update dependencies as recorded in the local lock file.
+.sp
+\fBcargo\-vendor\fR(1)
+.br
+\ \ \ \ Vendor all dependencies locally.
+.sp
+\fBcargo\-verify\-project\fR(1)
+.br
+\ \ \ \ Check correctness of the crate manifest.
+.SS "Package Commands"
+\fBcargo\-init\fR(1)
+.br
+\ \ \ \ Create a new Cargo package in an existing directory.
+.sp
+\fBcargo\-install\fR(1)
+.br
+\ \ \ \ Build and install a Rust binary.
+.sp
+\fBcargo\-new\fR(1)
+.br
+\ \ \ \ Create a new Cargo package.
+.sp
+\fBcargo\-search\fR(1)
+.br
+\ \ \ \ Search for packages on crates.io.
+.sp
+\fBcargo\-uninstall\fR(1)
+.br
+\ \ \ \ Remove a Rust binary.
+.SS "Publishing Commands"
+\fBcargo\-login\fR(1)
+.br
+\ \ \ \ Save an API token from the registry locally.
+.sp
+\fBcargo\-logout\fR(1)
+.br
+\ \ \ \ Remove an API token from the registry locally.
+.sp
+\fBcargo\-owner\fR(1)
+.br
+\ \ \ \ Manage the owners of a crate on the registry.
+.sp
+\fBcargo\-package\fR(1)
+.br
+\ \ \ \ Assemble the local package into a distributable tarball.
+.sp
+\fBcargo\-publish\fR(1)
+.br
+\ \ \ \ Upload a package to the registry.
+.sp
+\fBcargo\-yank\fR(1)
+.br
+\ \ \ \ Remove a pushed crate from the index.
+.SS "General Commands"
+\fBcargo\-help\fR(1)
+.br
+\ \ \ \ Display help information about Cargo.
+.sp
+\fBcargo\-version\fR(1)
+.br
+\ \ \ \ Show version information.
+.SH "OPTIONS"
+.SS "Special Options"
+.sp
+\fB\-V\fR,
+\fB\-\-version\fR
+.RS 4
+Print version info and exit. If used with \fB\-\-verbose\fR, prints extra
+information.
+.RE
+.sp
+\fB\-\-list\fR
+.RS 4
+List all installed Cargo subcommands. If used with \fB\-\-verbose\fR, prints extra
+information.
+.RE
+.sp
+\fB\-\-explain\fR \fIcode\fR
+.RS 4
+Run \fBrustc \-\-explain CODE\fR which will print out a detailed explanation of an
+error message (for example, \fBE0004\fR).
+.RE
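+.\" Illustrative sketch (roff comment, not rendered): this is equivalent to
+.\" invoking the compiler's explanation directly:
+.\"   cargo --explain E0004    # same text as `rustc --explain E0004`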
+.SS "Display Options"
+.sp
+\fB\-v\fR,
+\fB\-\-verbose\fR
+.RS 4
+Use verbose output. May be specified twice for \[lq]very verbose\[rq] output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-q\fR,
+\fB\-\-quiet\fR
+.RS 4
+Do not print cargo log messages.
+May also be specified with the \fBterm.quiet\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.sp
+\fB\-\-color\fR \fIwhen\fR
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fR
+\fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-frozen\fR,
+\fB\-\-locked\fR
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fR file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
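+.\" Illustrative sketch (roff comment, not rendered): a CI invocation that fails
+.\" instead of silently updating Cargo.lock:
+.\"   cargo build --locked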
+.sp
+\fB\-\-offline\fR
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fR(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fR \fIconfig value\fR <https://doc.rust\-lang.org/cargo/reference/config.html>\&.
+.RE
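+.\" Illustrative sketch (roff comment, not rendered): a typical offline workflow
+.\" downloads dependencies first, then builds without touching the network:
+.\"   cargo fetch
+.\"   cargo build --offline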
+.SS "Common Options"
+.sp
+\fB+\fR\fItoolchain\fR
+.RS 4
+If Cargo has been installed with rustup, and the first argument to \fBcargo\fR
+begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such
+as \fB+stable\fR or \fB+nightly\fR).
+See the \fIrustup documentation\fR <https://rust\-lang.github.io/rustup/overrides.html>
+for more information about how toolchain overrides work.
+.RE
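+.\" Illustrative sketch (roff comment, not rendered): selecting a toolchain for a
+.\" single invocation when Cargo is managed by rustup:
+.\"   cargo +nightly build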
+.sp
+\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR
+.RS 4
+Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR,
+or provided as a path to an extra configuration file. This flag may be specified multiple times.
+See the \fIcommand\-line overrides section\fR <https://doc.rust\-lang.org/cargo/reference/config.html#command\-line\-overrides> for more information.
+.RE
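+.\" Illustrative sketch (roff comment, not rendered): a TOML KEY=VALUE override
+.\" supplied on the command line, here enabling offline mode via the net table:
+.\"   cargo --config net.offline=true build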
+.sp
+\fB\-C\fR \fIPATH\fR
+.RS 4
+Changes the current working directory before executing any specified operations. This affects
+things like where cargo looks by default for the project manifest (\fBCargo.toml\fR), as well as
+the directories searched for discovering \fB\&.cargo/config.toml\fR, for example. This option must
+appear before the command name, for example \fBcargo \-C path/to/my\-project build\fR\&.
+.sp
+This option is only available on the \fInightly
+channel\fR <https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html> and
+requires the \fB\-Z unstable\-options\fR flag to enable (see
+\fI#10098\fR <https://github.com/rust\-lang/cargo/issues/10098>).
+.RE
+.sp
+\fB\-h\fR,
+\fB\-\-help\fR
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fR \fIflag\fR
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details.
+.RE
+.SH "ENVIRONMENT"
+See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/environment\-variables.html> for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete.
+.RE
+.SH "FILES"
+\fB~/.cargo/\fR
+.br
+\ \ \ \ Default location for Cargo\[cq]s \[lq]home\[rq] directory where it
+stores various files. The location can be changed with the \fBCARGO_HOME\fR
+environment variable.
+.sp
+\fB$CARGO_HOME/bin/\fR
+.br
+\ \ \ \ Binaries installed by \fBcargo\-install\fR(1) will be located here. If using
+\fIrustup\fR <https://rust\-lang.github.io/rustup/>, executables distributed with Rust are also located here.
+.sp
+\fB$CARGO_HOME/config.toml\fR
+.br
+\ \ \ \ The global configuration file. See \fIthe reference\fR <https://doc.rust\-lang.org/cargo/reference/config.html>
+for more information about configuration files.
+.sp
+\fB\&.cargo/config.toml\fR
+.br
+\ \ \ \ Cargo automatically searches for a file named \fB\&.cargo/config.toml\fR in the
+current directory, and all parent directories. These configuration files
+will be merged with the global configuration file.
+.sp
+\fB$CARGO_HOME/credentials.toml\fR
+.br
+\ \ \ \ Private authentication information for logging in to a registry.
+.sp
+\fB$CARGO_HOME/registry/\fR
+.br
+\ \ \ \ This directory contains cached downloads of the registry index and any
+downloaded dependencies.
+.sp
+\fB$CARGO_HOME/git/\fR
+.br
+\ \ \ \ This directory contains cached downloads of git dependencies.
+.sp
+Please note that the internal structure of the \fB$CARGO_HOME\fR directory is not
+stable yet and may be subject to change.
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Build a local package and all of its dependencies:
+.sp
+.RS 4
+.nf
+cargo build
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Build a package with optimizations:
+.sp
+.RS 4
+.nf
+cargo build \-\-release
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Run tests for a cross\-compiled target:
+.sp
+.RS 4
+.nf
+cargo test \-\-target i686\-unknown\-linux\-gnu
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 4.\h'+01'Create a new package that builds an executable:
+.sp
+.RS 4
+.nf
+cargo new foobar
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 5.\h'+01'Create a package in the current directory:
+.sp
+.RS 4
+.nf
+mkdir foo && cd foo
+cargo init .
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 6.\h'+01'Learn about a command\[cq]s options and usage:
+.sp
+.RS 4
+.nf
+cargo help clean
+.fi
+.RE
+.RE
+.SH "BUGS"
+See <https://github.com/rust\-lang/cargo/issues> for issues.
+.SH "SEE ALSO"
+\fBrustc\fR(1), \fBrustdoc\fR(1)
diff --git a/src/tools/cargo/tests/build-std/main.rs b/src/tools/cargo/tests/build-std/main.rs
new file mode 100644
index 000000000..47a4bb671
--- /dev/null
+++ b/src/tools/cargo/tests/build-std/main.rs
@@ -0,0 +1,229 @@
+//! A test suite for `-Zbuild-std` which is much more expensive than the
+//! standard test suite.
+//!
+//! This test suite attempts to perform full integration tests in which the
+//! standard library is actually compiled from source (as in a real build) and
+//! the various tests associated with that are run.
+//!
+//! YOU SHOULD IDEALLY NOT WRITE TESTS HERE.
+//!
+//! If possible, use `tests/testsuite/standard_lib.rs` instead. That uses a
+//! 'mock' sysroot which is much faster to compile. The tests here are
+//! extremely resource-intensive, are only intended to run on CI, and in theory
+//! should not catch any regressions that `tests/testsuite/standard_lib.rs`
+//! doesn't already catch.
+//!
+//! All tests here should use `#[cargo_test(build_std_real)]` to indicate that
+//! boilerplate should be generated to require the nightly toolchain and the
+//! `CARGO_RUN_BUILD_STD_TESTS` env var to be set to actually run these tests.
+//! Otherwise the tests are skipped.
+
+use cargo_test_support::*;
+use std::env;
+use std::path::Path;
+
+fn enable_build_std(e: &mut Execs, arg: Option<&str>) {
+ e.env_remove("CARGO_HOME");
+ e.env_remove("HOME");
+
+    // Finally, actually enable `build-std`.
+ let arg = match arg {
+ Some(s) => format!("-Zbuild-std={}", s),
+ None => "-Zbuild-std".to_string(),
+ };
+ e.arg(arg);
+ e.masquerade_as_nightly_cargo(&["build-std"]);
+}
+
+// Helper methods used in the tests below
+trait BuildStd: Sized {
+ fn build_std(&mut self) -> &mut Self;
+ fn build_std_arg(&mut self, arg: &str) -> &mut Self;
+ fn target_host(&mut self) -> &mut Self;
+}
+
+impl BuildStd for Execs {
+ fn build_std(&mut self) -> &mut Self {
+ enable_build_std(self, None);
+ self
+ }
+
+ fn build_std_arg(&mut self, arg: &str) -> &mut Self {
+ enable_build_std(self, Some(arg));
+ self
+ }
+
+ fn target_host(&mut self) -> &mut Self {
+ self.arg("--target").arg(rustc_host());
+ self
+ }
+}
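+
+// Usage sketch (comment only): the helpers above are chained onto an `Execs`,
+// e.g. `p.cargo("build").build_std().target_host().run()`, which appends
+// `-Zbuild-std`, masquerades as nightly cargo, and targets the host triple.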
+
+#[cargo_test(build_std_real)]
+fn basic() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ "
+ fn main() {
+ foo::f();
+ }
+
+ #[test]
+ fn smoke_bin_unit() {
+ foo::f();
+ }
+ ",
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate alloc;
+ extern crate proc_macro;
+
+ /// ```
+ /// foo::f();
+ /// ```
+ pub fn f() {
+ }
+
+ #[test]
+ fn smoke_lib_unit() {
+ f();
+ }
+ ",
+ )
+ .file(
+ "tests/smoke.rs",
+ "
+ #[test]
+ fn smoke_integration() {
+ foo::f();
+ }
+ ",
+ )
+ .build();
+
+ p.cargo("check").build_std().target_host().run();
+ p.cargo("build")
+ .build_std()
+ .target_host()
+        // Importantly, this should not say [UPDATING].
+        // There have been multiple bugs where every build triggers an update.
+ .with_stderr(
+ "[COMPILING] foo v0.0.1 [..]\n\
+ [FINISHED] dev [..]",
+ )
+ .run();
+ p.cargo("run").build_std().target_host().run();
+ p.cargo("test").build_std().target_host().run();
+
+ // Check for hack that removes dylibs.
+ let deps_dir = Path::new("target")
+ .join(rustc_host())
+ .join("debug")
+ .join("deps");
+ assert!(p.glob(deps_dir.join("*.rlib")).count() > 0);
+ assert_eq!(p.glob(deps_dir.join("*.dylib")).count(), 0);
+}
+
+#[cargo_test(build_std_real)]
+fn cross_custom() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [target.custom-target.dependencies]
+ dep = { path = "dep" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#![no_std] pub fn f() -> u32 { dep::answer() }",
+ )
+ .file("dep/Cargo.toml", &basic_manifest("dep", "0.1.0"))
+ .file("dep/src/lib.rs", "#![no_std] pub fn answer() -> u32 { 42 }")
+ .file(
+ "custom-target.json",
+ r#"
+ {
+ "llvm-target": "x86_64-unknown-none-gnu",
+ "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128",
+ "arch": "x86_64",
+ "target-endian": "little",
+ "target-pointer-width": "64",
+ "target-c-int-width": "32",
+ "os": "none",
+ "linker-flavor": "ld.lld"
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build --target custom-target.json -v")
+ .build_std_arg("core")
+ .run();
+}
+
+#[cargo_test(build_std_real)]
+fn custom_test_framework() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #![no_std]
+ #![cfg_attr(test, no_main)]
+ #![feature(custom_test_frameworks)]
+ #![test_runner(crate::test_runner)]
+
+ pub fn test_runner(_tests: &[&dyn Fn()]) {}
+
+ #[panic_handler]
+ fn panic(_info: &core::panic::PanicInfo) -> ! {
+ loop {}
+ }
+ "#,
+ )
+ .file(
+ "target.json",
+ r#"
+ {
+ "llvm-target": "x86_64-unknown-none-gnu",
+ "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128",
+ "arch": "x86_64",
+ "target-endian": "little",
+ "target-pointer-width": "64",
+ "target-c-int-width": "32",
+ "os": "none",
+ "linker-flavor": "ld.lld",
+ "linker": "rust-lld",
+ "executables": true,
+ "panic-strategy": "abort"
+ }
+ "#,
+ )
+ .build();
+
+ // This is a bit of a hack to use the rust-lld that ships with most toolchains.
+ let sysroot = paths::sysroot();
+ let sysroot = Path::new(&sysroot);
+ let sysroot_bin = sysroot
+ .join("lib")
+ .join("rustlib")
+ .join(rustc_host())
+ .join("bin");
+ let path = env::var_os("PATH").unwrap_or_default();
+ let mut paths = env::split_paths(&path).collect::<Vec<_>>();
+ paths.insert(0, sysroot_bin);
+ let new_path = env::join_paths(paths).unwrap();
+
+ p.cargo("test --target target.json --no-run -v")
+ .env("PATH", new_path)
+ .build_std_arg("core")
+ .run();
+}
diff --git a/src/tools/cargo/tests/internal.rs b/src/tools/cargo/tests/internal.rs
new file mode 100644
index 000000000..c42cfa8f0
--- /dev/null
+++ b/src/tools/cargo/tests/internal.rs
@@ -0,0 +1,107 @@
+//! Tests for internal code checks.
+
+#![allow(clippy::all)]
+
+use std::fs;
+
+#[test]
+fn check_forbidden_code() {
+ // Do not use certain macros, functions, etc.
+ if !cargo_util::is_ci() {
+ // Only check these on CI, otherwise it could be annoying.
+ use std::io::Write;
+ writeln!(
+ std::io::stderr(),
+ "\nSkipping check_forbidden_code test, set CI=1 to enable"
+ )
+ .unwrap();
+ return;
+ }
+ let root_path = std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("src");
+ for entry in walkdir::WalkDir::new(&root_path)
+ .into_iter()
+ .filter_entry(|e| e.path() != root_path.join("doc"))
+ .filter_map(|e| e.ok())
+ {
+ let path = entry.path();
+ if !entry
+ .file_name()
+ .to_str()
+ .map(|s| s.ends_with(".rs"))
+ .unwrap_or(false)
+ {
+ continue;
+ }
+ eprintln!("checking {}", path.display());
+ let c = fs::read_to_string(path).unwrap();
+ for (line_index, line) in c.lines().enumerate() {
+ if line.trim().starts_with("//") {
+ continue;
+ }
+ if line_has_print(line) {
+ if entry.file_name().to_str().unwrap() == "cargo_new.rs" && line.contains("Hello") {
+ // An exception.
+ continue;
+ }
+ panic!(
+ "found print macro in {}:{}\n\n{}\n\n\
+ print! macros should not be used in Cargo because they can panic.\n\
+ Use one of the drop_print macros instead.\n\
+ ",
+ path.display(),
+ line_index,
+ line
+ );
+ }
+ if line_has_macro(line, "dbg") {
+ panic!(
+ "found dbg! macro in {}:{}\n\n{}\n\n\
+ dbg! should not be used outside of debugging.",
+ path.display(),
+ line_index,
+ line
+ );
+ }
+ }
+ }
+}
+
+fn line_has_print(line: &str) -> bool {
+ line_has_macro(line, "print")
+ || line_has_macro(line, "eprint")
+ || line_has_macro(line, "println")
+ || line_has_macro(line, "eprintln")
+}
+
+#[test]
+fn line_has_print_works() {
+ assert!(line_has_print("print!"));
+ assert!(line_has_print("println!"));
+ assert!(line_has_print("eprint!"));
+ assert!(line_has_print("eprintln!"));
+ assert!(line_has_print("(print!(\"hi!\"))"));
+ assert!(!line_has_print("print"));
+ assert!(!line_has_print("i like to print things"));
+ assert!(!line_has_print("drop_print!"));
+ assert!(!line_has_print("drop_println!"));
+ assert!(!line_has_print("drop_eprint!"));
+ assert!(!line_has_print("drop_eprintln!"));
+}
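+
+// A minimal illustrative sketch (not part of the upstream suite) exercising the
+// identifier-boundary check in `line_has_macro` below.
+#[test]
+fn line_has_macro_respects_identifier_boundaries() {
+    assert!(line_has_macro("dbg!(x)", "dbg"));
+    assert!(!line_has_macro("my_dbg!(x)", "dbg"));
+    assert!(!line_has_macro("dbg(x)", "dbg"));
+}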
+
+fn line_has_macro(line: &str, mac: &str) -> bool {
+ for (i, _) in line.match_indices(mac) {
+ if line.get(i + mac.len()..i + mac.len() + 1) != Some("!") {
+ continue;
+ }
+ if i == 0 {
+ return true;
+ }
+ // Check for identifier boundary start.
+ let prev1 = line.get(i - 1..i).unwrap().chars().next().unwrap();
+ if prev1.is_alphanumeric() || prev1 == '_' {
+ continue;
+ }
+ return true;
+ }
+ false
+}
diff --git a/src/tools/cargo/tests/testsuite/advanced_env.rs b/src/tools/cargo/tests/testsuite/advanced_env.rs
new file mode 100644
index 000000000..8aab528ea
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/advanced_env.rs
@@ -0,0 +1,35 @@
+//! -Zadvanced-env tests
+
+use cargo_test_support::{paths, project, registry::Package};
+
+#[cargo_test]
+fn source_config_env() {
+ // Try to define [source] with environment variables.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ somedep = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("somedep", "1.0.0")
+ .local(true)
+ .file("src/lib.rs", "")
+ .publish();
+
+ let path = paths::root().join("registry");
+
+ p.cargo("check -Zadvanced-env")
+ .masquerade_as_nightly_cargo(&["advanced-env"])
+ .env("CARGO_SOURCE_crates-io_REPLACE_WITH", "my-local-source")
+ .env("CARGO_SOURCE_my-local-source_LOCAL_REGISTRY", path)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/alt_registry.rs b/src/tools/cargo/tests/testsuite/alt_registry.rs
new file mode 100644
index 000000000..97da909b8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/alt_registry.rs
@@ -0,0 +1,1496 @@
+//! Tests for alternative registries.
+
+use cargo_test_support::compare::assert_match_exact;
+use cargo_test_support::publish::validate_alt_upload;
+use cargo_test_support::registry::{self, Package, RegistryBuilder};
+use cargo_test_support::{basic_manifest, paths, project};
+use std::fs;
+
+#[cargo_test]
+fn depend_on_alt_registry() {
+ registry::alt_init();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "alternative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").alternative(true).publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `alternative`)
+[CHECKING] bar v0.0.1 (registry `alternative`)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+
+ p.cargo("clean").run();
+
+ // Don't download a second time
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.0.1 (registry `alternative`)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn depend_on_alt_registry_depends_on_same_registry_no_index() {
+ registry::alt_init();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "alternative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("baz", "0.0.1").alternative(true).publish();
+ Package::new("bar", "0.0.1")
+ .registry_dep("baz", "0.0.1")
+ .alternative(true)
+ .publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `alternative`)
+[DOWNLOADED] [..] v0.0.1 (registry `alternative`)
+[CHECKING] baz v0.0.1 (registry `alternative`)
+[CHECKING] bar v0.0.1 (registry `alternative`)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn depend_on_alt_registry_depends_on_same_registry() {
+ registry::alt_init();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "alternative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("baz", "0.0.1").alternative(true).publish();
+ Package::new("bar", "0.0.1")
+ .registry_dep("baz", "0.0.1")
+ .alternative(true)
+ .publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `alternative`)
+[DOWNLOADED] [..] v0.0.1 (registry `alternative`)
+[CHECKING] baz v0.0.1 (registry `alternative`)
+[CHECKING] bar v0.0.1 (registry `alternative`)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn depend_on_alt_registry_depends_on_crates_io() {
+ registry::alt_init();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "alternative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("baz", "0.0.1").publish();
+ Package::new("bar", "0.0.1")
+ .dep("baz", "0.0.1")
+ .alternative(true)
+ .publish();
+
+ p.cargo("check")
+ .with_stderr_unordered(
+ "\
+[UPDATING] `alternative` index
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.0.1 (registry `dummy-registry`)
+[DOWNLOADED] bar v0.0.1 (registry `alternative`)
+[CHECKING] baz v0.0.1
+[CHECKING] bar v0.0.1 (registry `alternative`)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn registry_and_path_dep_works() {
+ registry::alt_init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ registry = "alternative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.0.1 ([CWD]/bar)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn registry_incompatible_with_git() {
+ registry::alt_init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ git = ""
+ registry = "alternative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains(
+ " dependency (bar) specification is ambiguous. \
+ Only one of `git` or `registry` is allowed.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cannot_publish_to_crates_io_with_registry_dependency() {
+ let crates_io = registry::init();
+ let _alternative = RegistryBuilder::new().alternative().build();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "alternative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").alternative(true).publish();
+
+ p.cargo("publish")
+ .replace_crates_io(crates_io.index_url())
+ .with_status(101)
+ .with_stderr_contains("[ERROR] crates cannot be published to crates.io[..]")
+ .run();
+
+ p.cargo("publish")
+ .replace_crates_io(crates_io.index_url())
+ .arg("--token")
+ .arg(crates_io.token())
+ .arg("--index")
+ .arg(crates_io.index_url().as_str())
+ .with_status(101)
+ .with_stderr_contains("[ERROR] crates cannot be published to crates.io[..]")
+ .run();
+}
+
+#[cargo_test]
+fn publish_with_registry_dependency() {
+ let _reg = RegistryBuilder::new()
+ .http_api()
+ .http_index()
+ .alternative()
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "alternative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").alternative(true).publish();
+
+ p.cargo("publish --registry alternative")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[WARNING] [..]
+[..]
+[PACKAGING] foo v0.0.1 [..]
+[UPDATING] `alternative` index
+[VERIFYING] foo v0.0.1 [..]
+[DOWNLOADING] [..]
+[DOWNLOADED] bar v0.0.1 (registry `alternative`)
+[COMPILING] bar v0.0.1 (registry `alternative`)
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] [..]
+[PACKAGED] [..]
+[UPLOADING] foo v0.0.1 [..]
+[UPLOADED] foo v0.0.1 to registry `alternative`
+note: Waiting for `foo v0.0.1` to be available at registry `alternative`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] foo v0.0.1 at registry `alternative`
+",
+ )
+ .run();
+
+ validate_alt_upload(
+ r#"{
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "normal",
+ "name": "bar",
+ "optional": false,
+ "target": null,
+ "version_req": "^0.0.1"
+ }
+ ],
+ "description": null,
+ "documentation": null,
+ "features": {},
+ "homepage": null,
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "vers": "0.0.1"
+ }"#,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ );
+}
+
+#[cargo_test]
+fn alt_registry_and_crates_io_deps() {
+ registry::alt_init();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ crates_io_dep = "0.0.1"
+
+ [dependencies.alt_reg_dep]
+ version = "0.1.0"
+ registry = "alternative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("crates_io_dep", "0.0.1").publish();
+ Package::new("alt_reg_dep", "0.1.0")
+ .alternative(true)
+ .publish();
+
+ p.cargo("check")
+ .with_stderr_unordered(
+ "\
+[UPDATING] `alternative` index
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] crates_io_dep v0.0.1 (registry `dummy-registry`)
+[DOWNLOADED] alt_reg_dep v0.1.0 (registry `alternative`)
+[CHECKING] alt_reg_dep v0.1.0 (registry `alternative`)
+[CHECKING] crates_io_dep v0.0.1
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn block_publish_due_to_no_token() {
+ registry::alt_init();
+ let p = project().file("src/lib.rs", "").build();
+
+ fs::remove_file(paths::home().join(".cargo/credentials.toml")).unwrap();
+
+ // Now perform the actual publish
+ p.cargo("publish --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+error: no token found for `alternative`, please run `cargo login --registry alternative`
+or use environment variable CARGO_REGISTRIES_ALTERNATIVE_TOKEN",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_registries_crates_io_protocol() {
+ let _ = RegistryBuilder::new()
+ .no_configure_token()
+ .alternative()
+ .build();
+ // Should not produce a warning due to the registries.crates-io.protocol = 'sparse' configuration
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config.toml",
+ "[registries.crates-io]
+ protocol = 'sparse'",
+ )
+ .build();
+
+ p.cargo("publish --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+error: no token found for `alternative`, please run `cargo login --registry alternative`
+or use environment variable CARGO_REGISTRIES_ALTERNATIVE_TOKEN",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_to_alt_registry() {
+ let _reg = RegistryBuilder::new()
+ .http_api()
+ .http_index()
+ .alternative()
+ .build();
+
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ // Now perform the actual publish
+ p.cargo("publish --registry alternative")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[WARNING] [..]
+[..]
+[PACKAGING] foo v0.0.1 [..]
+[VERIFYING] foo v0.0.1 [..]
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] [..]
+[PACKAGED] [..]
+[UPLOADING] foo v0.0.1 [..]
+[UPLOADED] foo v0.0.1 to registry `alternative`
+note: Waiting for `foo v0.0.1` to be available at registry `alternative`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] foo v0.0.1 at registry `alternative`
+",
+ )
+ .run();
+
+ validate_alt_upload(
+ r#"{
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [],
+ "description": null,
+ "documentation": null,
+ "features": {},
+ "homepage": null,
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "vers": "0.0.1"
+ }"#,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ );
+}
+
+#[cargo_test]
+fn publish_with_crates_io_dep() {
+ // crates.io registry.
+ let _dummy_reg = registry::init();
+ // Alternative registry.
+ let _alt_reg = RegistryBuilder::new()
+ .http_api()
+ .http_index()
+ .alternative()
+ .build();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = ["me"]
+ license = "MIT"
+ description = "foo"
+
+ [dependencies.bar]
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").publish();
+
+ p.cargo("publish --registry alternative")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[WARNING] [..]
+[..]
+[PACKAGING] foo v0.0.1 [..]
+[UPDATING] `dummy-registry` index
+[VERIFYING] foo v0.0.1 [..]
+[DOWNLOADING] [..]
+[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`)
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] [..]
+[PACKAGED] [..]
+[UPLOADING] foo v0.0.1 [..]
+[UPLOADED] foo v0.0.1 to registry `alternative`
+note: Waiting for `foo v0.0.1` to be available at registry `alternative`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] foo v0.0.1 at registry `alternative`
+",
+ )
+ .run();
+
+ validate_alt_upload(
+ r#"{
+ "authors": ["me"],
+ "badges": {},
+ "categories": [],
+ "deps": [
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "normal",
+ "name": "bar",
+ "optional": false,
+ "registry": "https://github.com/rust-lang/crates.io-index",
+ "target": null,
+ "version_req": "^0.0.1"
+ }
+ ],
+ "description": "foo",
+ "documentation": null,
+ "features": {},
+ "homepage": null,
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "vers": "0.0.1"
+ }"#,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ );
+}
+
+#[cargo_test]
+fn passwords_in_registries_index_url_forbidden() {
+ registry::alt_init();
+
+ let config = paths::home().join(".cargo/config");
+
+ fs::write(
+ config,
+ r#"
+ [registries.alternative]
+ index = "ssh://git:secret@foobar.com"
+ "#,
+ )
+ .unwrap();
+
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ p.cargo("publish --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: invalid index URL for registry `alternative` defined in [..]/home/.cargo/config
+
+Caused by:
+ registry URLs may not contain passwords
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn patch_alt_reg() {
+ registry::alt_init();
+ Package::new("bar", "0.1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = { version = "0.1.0", registry = "alternative" }
+
+ [patch.alternative]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate bar;
+ pub fn f() { bar::bar(); }
+ ",
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[CHECKING] bar v0.1.0 ([CWD]/bar)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_registry_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "bad name"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ invalid character ` ` in registry name: `bad name`, [..]",
+ )
+ .run();
+
+ for cmd in &[
+ "init",
+ "install foo",
+ "login",
+ "owner",
+ "publish",
+ "search",
+ "yank --version 0.0.1",
+ ] {
+ p.cargo(cmd)
+ .arg("--registry")
+ .arg("bad name")
+ .with_status(101)
+ .with_stderr("[ERROR] invalid character ` ` in registry name: `bad name`, [..]")
+ .run();
+ }
+}
+
+#[cargo_test]
+fn no_api() {
+ let _registry = RegistryBuilder::new().alternative().no_api().build();
+ Package::new("bar", "0.0.1").alternative(true).publish();
+
+ // First check that a dependency works.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "alternative"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `alternative`)
+[CHECKING] bar v0.0.1 (registry `alternative`)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+
+ // Check all of the API commands.
+ let err = "[ERROR] registry `alternative` does not support API commands";
+
+ p.cargo("login --registry alternative TOKEN")
+ .with_status(101)
+ .with_stderr_contains(&err)
+ .run();
+
+ p.cargo("publish --registry alternative")
+ .with_status(101)
+ .with_stderr_contains(&err)
+ .run();
+
+ p.cargo("search --registry alternative")
+ .with_status(101)
+ .with_stderr_contains(&err)
+ .run();
+
+ p.cargo("owner --registry alternative --list")
+ .with_status(101)
+ .with_stderr_contains(&err)
+ .run();
+
+ p.cargo("yank --registry alternative --version=0.0.1 bar")
+ .with_status(101)
+ .with_stderr_contains(&err)
+ .run();
+
+ p.cargo("yank --registry alternative --version=0.0.1 bar")
+ .with_stderr_contains(&err)
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn alt_reg_metadata() {
+ // Check for "registry" entries in `cargo metadata` with alternative registries.
+ registry::alt_init();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ altdep = { version = "0.0.1", registry = "alternative" }
+ iodep = { version = "0.0.1" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("bar", "0.0.1").publish();
+ Package::new("altdep", "0.0.1")
+ .dep("bar", "0.0.1")
+ .alternative(true)
+ .publish();
+ Package::new("altdep2", "0.0.1").alternative(true).publish();
+ Package::new("iodep", "0.0.1")
+ .registry_dep("altdep2", "0.0.1")
+ .publish();
+
+    // The important things to check here are the "registry" values in `deps`.
+    // They should be:
+ // foo -> altdep: alternative-registry
+ // foo -> iodep: null (because it is in crates.io)
+ // altdep -> bar: null (because it is in crates.io)
+ // iodep -> altdep2: alternative-registry
+ p.cargo("metadata --format-version=1 --no-deps")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "name": "foo",
+ "version": "0.0.1",
+ "id": "foo 0.0.1 (path+file:[..]/foo)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [
+ {
+ "name": "altdep",
+ "source": "registry+file:[..]/alternative-registry",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": "file:[..]/alternative-registry"
+ },
+ {
+ "name": "iodep",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null
+ }
+ ],
+ "targets": "{...}",
+ "features": {},
+ "manifest_path": "[..]/foo/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ }
+ ],
+ "workspace_members": [
+ "foo 0.0.1 (path+file:[..]/foo)"
+ ],
+ "resolve": null,
+ "target_directory": "[..]/foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }"#,
+ )
+ .run();
+
+    // `--no-deps` uses a different code path; make sure both work.
+ p.cargo("metadata --format-version=1")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "name": "altdep",
+ "version": "0.0.1",
+ "id": "altdep 0.0.1 (registry+file:[..]/alternative-registry)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": "registry+file:[..]/alternative-registry",
+ "dependencies": [
+ {
+ "name": "bar",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null
+ }
+ ],
+ "targets": "{...}",
+ "features": {},
+ "manifest_path": "[..]/altdep-0.0.1/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ },
+ {
+ "name": "altdep2",
+ "version": "0.0.1",
+ "id": "altdep2 0.0.1 (registry+file:[..]/alternative-registry)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": "registry+file:[..]/alternative-registry",
+ "dependencies": [],
+ "targets": "{...}",
+ "features": {},
+ "manifest_path": "[..]/altdep2-0.0.1/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ },
+ {
+ "name": "bar",
+ "version": "0.0.1",
+ "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "dependencies": [],
+ "targets": "{...}",
+ "features": {},
+ "manifest_path": "[..]/bar-0.0.1/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ },
+ {
+ "name": "foo",
+ "version": "0.0.1",
+ "id": "foo 0.0.1 (path+file:[..]/foo)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [
+ {
+ "name": "altdep",
+ "source": "registry+file:[..]/alternative-registry",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": "file:[..]/alternative-registry"
+ },
+ {
+ "name": "iodep",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null
+ }
+ ],
+ "targets": "{...}",
+ "features": {},
+ "manifest_path": "[..]/foo/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ },
+ {
+ "name": "iodep",
+ "version": "0.0.1",
+ "id": "iodep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "dependencies": [
+ {
+ "name": "altdep2",
+ "source": "registry+file:[..]/alternative-registry",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": "file:[..]/alternative-registry"
+ }
+ ],
+ "targets": "{...}",
+ "features": {},
+ "manifest_path": "[..]/iodep-0.0.1/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ }
+ ],
+ "workspace_members": [
+ "foo 0.0.1 (path+file:[..]/foo)"
+ ],
+ "resolve": "{...}",
+ "target_directory": "[..]/foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn unknown_registry() {
+ // A known registry refers to an unknown registry.
+ // foo -> bar(crates.io) -> baz(alt)
+ registry::alt_init();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("baz", "0.0.1").alternative(true).publish();
+ Package::new("bar", "0.0.1")
+ .registry_dep("baz", "0.0.1")
+ .publish();
+
+ // Remove "alternative" from config.
+ let cfg_path = paths::home().join(".cargo/config");
+ let mut config = fs::read_to_string(&cfg_path).unwrap();
+ let start = config.find("[registries.alternative]").unwrap();
+ config.insert(start, '#');
+ let start_index = &config[start..].find("index =").unwrap();
+ config.insert(start + start_index, '#');
+ fs::write(&cfg_path, config).unwrap();
+
+ p.cargo("check").run();
+
+ // Important parts:
+ // foo -> bar registry = null
+ // bar -> baz registry = alternate
+ p.cargo("metadata --format-version=1")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "name": "bar",
+ "version": "0.0.1",
+ "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "dependencies": [
+ {
+ "name": "baz",
+ "source": "registry+file://[..]/alternative-registry",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": "file:[..]/alternative-registry"
+ }
+ ],
+ "targets": "{...}",
+ "features": {},
+ "manifest_path": "[..]",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ },
+ {
+ "name": "baz",
+ "version": "0.0.1",
+ "id": "baz 0.0.1 (registry+file://[..]/alternative-registry)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": "registry+file://[..]/alternative-registry",
+ "dependencies": [],
+ "targets": "{...}",
+ "features": {},
+ "manifest_path": "[..]",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ },
+ {
+ "name": "foo",
+ "version": "0.0.1",
+ "id": "foo 0.0.1 (path+file://[..]/foo)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [
+ {
+ "name": "bar",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null
+ }
+ ],
+ "targets": "{...}",
+ "features": {},
+ "manifest_path": "[..]/foo/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ }
+ ],
+ "workspace_members": [
+ "foo 0.0.1 (path+file://[..]/foo)"
+ ],
+ "resolve": "{...}",
+ "target_directory": "[..]/foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn registries_index_relative_url() {
+ registry::alt_init();
+ let config = paths::root().join(".cargo/config");
+ fs::create_dir_all(config.parent().unwrap()).unwrap();
+ fs::write(
+ &config,
+ r#"
+ [registries.relative]
+ index = "file:alternative-registry"
+ "#,
+ )
+ .unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "relative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").alternative(true).publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `relative` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `relative`)
+[CHECKING] bar v0.0.1 (registry `relative`)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn registries_index_relative_path_not_allowed() {
+ registry::alt_init();
+ let config = paths::root().join(".cargo/config");
+ fs::create_dir_all(config.parent().unwrap()).unwrap();
+ fs::write(
+ &config,
+ r#"
+ [registries.relative]
+ index = "alternative-registry"
+ "#,
+ )
+ .unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "relative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").alternative(true).publish();
+
+ p.cargo("check")
+ .with_stderr(&format!(
+ "\
+error: failed to parse manifest at `{root}/foo/Cargo.toml`
+
+Caused by:
+ invalid index URL for registry `relative` defined in [..]/.cargo/config
+
+Caused by:
+ invalid url `alternative-registry`: relative URL without a base
+",
+ root = paths::root().to_str().unwrap()
+ ))
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn both_index_and_registry() {
+ let p = project().file("src/lib.rs", "").build();
+ for cmd in &["publish", "owner", "search", "yank --version 1.0.0"] {
+ p.cargo(cmd)
+ .arg("--registry=foo")
+ .arg("--index=foo")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] both `--index` and `--registry` \
+ should not be set at the same time",
+ )
+ .run();
+ }
+}
+
+#[cargo_test]
+fn both_index_and_default() {
+ let p = project().file("src/lib.rs", "").build();
+ for cmd in &[
+ "publish",
+ "owner",
+ "search",
+ "yank --version 1.0.0",
+ "install foo",
+ ] {
+ p.cargo(cmd)
+ .env("CARGO_REGISTRY_DEFAULT", "undefined")
+ .arg(format!("--index=index_url"))
+ .with_status(101)
+ .with_stderr("[ERROR] invalid url `index_url`: relative URL without a base")
+ .run();
+ }
+}
+
+#[cargo_test]
+fn sparse_lockfile() {
+ let _registry = registry::RegistryBuilder::new()
+ .http_index()
+ .alternative()
+ .build();
+ Package::new("foo", "0.1.0").alternative(true).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = { registry = 'alternative', version = '0.1.0'}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+ assert_match_exact(
+ &p.read_lockfile(),
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "a"
+version = "0.5.0"
+dependencies = [
+ "foo",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "sparse+http://[..]/"
+checksum = "f6a200a9339fef960979d94d5c99cbbfd899b6f5a396a55d9775089119050203""#,
+ );
+}
+
+#[cargo_test]
+fn publish_with_transitive_dep() {
+ let _alt1 = RegistryBuilder::new()
+ .http_api()
+ .http_index()
+ .alternative_named("Alt-1")
+ .build();
+ let _alt2 = RegistryBuilder::new()
+ .http_api()
+ .http_index()
+ .alternative_named("Alt-2")
+ .build();
+
+ let p1 = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p1.cargo("publish --registry Alt-1").run();
+
+ let p2 = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.6.0"
+ publish = ["Alt-2"]
+
+ [dependencies]
+ a = { version = "0.5.0", registry = "Alt-1" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p2.cargo("publish").run();
+}
diff --git a/src/tools/cargo/tests/testsuite/artifact_dep.rs b/src/tools/cargo/tests/testsuite/artifact_dep.rs
new file mode 100644
index 000000000..ec6bb7103
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/artifact_dep.rs
@@ -0,0 +1,2901 @@
+//! Tests specific to artifact dependencies, designated using
+//! the new `dep = { artifact = "bin", … }` syntax in manifests.
+
+use cargo_test_support::compare::match_exact;
+use cargo_test_support::registry::{Package, RegistryBuilder};
+use cargo_test_support::{
+ basic_bin_manifest, basic_manifest, cross_compile, project, publish, registry, rustc_host,
+ Project,
+};
+
+#[cargo_test]
+fn check_with_invalid_artifact_dependency() {
+ // invalid name
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "unknown" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;") // this would fail but we don't get there, artifacts are no libs
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]/Cargo.toml`
+
+Caused by:
+ 'unknown' is not a valid artifact specifier
+",
+ )
+ .with_status(101)
+ .run();
+
+ fn run_cargo_with_and_without_bindeps_feature(
+ p: &Project,
+ cmd: &str,
+ assert: &dyn Fn(&mut cargo_test_support::Execs),
+ ) {
+ assert(
+ p.cargo(&format!("{} -Z bindeps", cmd))
+ .masquerade_as_nightly_cargo(&["bindeps"]),
+ );
+ assert(&mut p.cargo(cmd));
+ }
+
+ // lib specified without artifact
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar/", lib = true }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+ run_cargo_with_and_without_bindeps_feature(&p, "check", &|cargo| {
+ cargo
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]/Cargo.toml`
+
+Caused by:
+ 'lib' specifier cannot be used without an 'artifact = …' value (bar)
+",
+ )
+ .with_status(101)
+ .run();
+ });
+
+ // target specified without artifact
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar/", target = "target" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+ run_cargo_with_and_without_bindeps_feature(&p, "check", &|cargo| {
+ cargo
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]/Cargo.toml`
+
+Caused by:
+ 'target' specifier cannot be used without an 'artifact = …' value (bar)
+",
+ )
+ .with_status(101)
+ .run();
+ })
+}
+
+#[cargo_test]
+fn check_with_invalid_target_triple() {
+ // invalid name
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin", target = "unknown-target-triple" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_contains(
+ r#"[..]Could not find specification for target "unknown-target-triple"[..]"#,
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn build_without_nightly_aborts_with_error() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at [..]
+
+Caused by:
+ `artifact = …` requires `-Z bindeps` (bar)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn disallow_artifact_and_no_artifact_dep_to_same_package_within_the_same_dep_category() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+ bar_stable = { path = "bar/", package = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_status(101)
+ .with_stderr("\
+[WARNING] foo v0.0.0 ([CWD]) ignoring invalid dependency `bar_stable` which is missing a lib target
+[ERROR] the crate `foo v0.0.0 ([CWD])` depends on crate `bar v0.5.0 ([CWD]/bar)` multiple times with different names",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn features_are_unified_among_lib_and_bin_dep_of_same_target() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ resolver = "2"
+
+ [dependencies.d1]
+ path = "d1"
+ features = ["d1f1"]
+ artifact = "bin"
+ lib = true
+
+ [dependencies.d2]
+ path = "d2"
+ features = ["d2f2"]
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ d1::f1();
+ d1::f2();
+ d2::f1();
+ d2::f2();
+ }
+ "#,
+ )
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ d1f1 = ["d2"]
+
+ [dependencies.d2]
+ path = "../d2"
+ features = ["d2f1"]
+ optional = true
+ "#,
+ )
+ .file(
+ "d1/src/main.rs",
+ r#"fn main() {
+ #[cfg(feature = "d1f1")]
+ d2::f1();
+
+ // Using f2 is only possible because features are unified across the same target.
+ // Our own manifest would only enable f1, and f2 comes in because a parent crate
+ // enables the feature in its manifest.
+ #[cfg(feature = "d1f1")]
+ d2::f2();
+ }"#,
+ )
+ .file(
+ "d1/src/lib.rs",
+ r#"
+ #[cfg(feature = "d2")]
+ extern crate d2;
+ /// Importing f2 here shouldn't be possible unless features are unified.
+ #[cfg(feature = "d1f1")]
+ pub use d2::{f1, f2};
+ "#,
+ )
+ .file(
+ "d2/Cargo.toml",
+ r#"
+ [package]
+ name = "d2"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ d2f1 = []
+ d2f2 = []
+ "#,
+ )
+ .file(
+ "d2/src/lib.rs",
+ r#"
+ #[cfg(feature = "d2f1")] pub fn f1() {}
+ #[cfg(feature = "d2f2")] pub fn f2() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] d2 v0.0.1 ([CWD]/d2)
+[COMPILING] d1 v0.0.1 ([CWD]/d1)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn features_are_not_unified_among_lib_and_bin_dep_of_different_target() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let target = cross_compile::alternate();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ resolver = "2"
+
+ [dependencies.d1]
+ path = "d1"
+ features = ["d1f1"]
+ artifact = "bin"
+ lib = true
+ target = "$TARGET"
+
+ [dependencies.d2]
+ path = "d2"
+ features = ["d2f2"]
+ "#
+ .replace("$TARGET", target),
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ // the lib = true part always builds for our current target, unifying dependencies
+ d1::d2::f1();
+ d1::d2::f2();
+ d2::f1();
+ d2::f2();
+ }
+ "#,
+ )
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ d1f1 = ["d2"]
+
+ [dependencies.d2]
+ path = "../d2"
+ features = ["d2f1"]
+ optional = true
+ "#,
+ )
+ .file("d1/src/main.rs", r#"fn main() {
+ // f1 we set ourselves
+ d2::f1();
+ // Since 'main' is only compiled as part of the artifact dependency, and features are not unified
+ // when the target differs, accessing f2 is a compile-time error: the feature isn't enabled in our dependency tree.
+ d2::f2();
+ }"#)
+ .file(
+ "d1/src/lib.rs",
+ r#"
+ #[cfg(feature = "d2")]
+ pub extern crate d2;
+ "#,
+ )
+ .file(
+ "d2/Cargo.toml",
+ r#"
+ [package]
+ name = "d2"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ d2f1 = []
+ d2f2 = []
+ "#,
+ )
+ .file(
+ "d2/src/lib.rs",
+ r#"
+ #[cfg(feature = "d2f1")] pub fn f1() {}
+ #[cfg(feature = "d2f2")] pub fn f2() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_status(101)
+ .with_stderr_contains(
+ "error[E0425]: cannot find function `f2` in crate `d2`\n --> d1/src/main.rs:6:17",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn feature_resolution_works_for_cfg_target_specification() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let target = cross_compile::alternate();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ resolver = "2"
+
+ [dependencies.d1]
+ path = "d1"
+ artifact = "bin"
+ target = "$TARGET"
+ "#
+ .replace("$TARGET", target),
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_D1"));
+ }
+ "#,
+ )
+ .file(
+ "d1/Cargo.toml",
+ &r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ authors = []
+
+ [target.'$TARGET'.dependencies]
+ d2 = { path = "../d2" }
+ "#
+ .replace("$TARGET", target),
+ )
+ .file(
+ "d1/src/main.rs",
+ r#"fn main() {
+ d1::f();
+ }"#,
+ )
+ .file("d1/build.rs", r#"fn main() { }"#)
+ .file(
+ "d1/src/lib.rs",
+ &r#"pub fn f() {
+ #[cfg(target = "$TARGET")]
+ d2::f();
+ }
+ "#
+ .replace("$TARGET", target),
+ )
+ .file(
+ "d2/Cargo.toml",
+ r#"
+ [package]
+ name = "d2"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("d2/build.rs", r#"fn main() { }"#)
+ .file("d2/src/lib.rs", "pub fn f() {}")
+ .build();
+
+ p.cargo("test -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .run();
+}
+
+#[cargo_test]
+fn build_script_with_bin_artifacts() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [build-dependencies]
+ bar = { path = "bar/", artifact = ["bin", "staticlib", "cdylib"] }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", r#"
+ fn main() {
+ let baz: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR_baz").expect("CARGO_BIN_FILE_BAR_baz").into();
+ println!("{}", baz.display());
+ assert!(&baz.is_file());
+
+ let lib: std::path::PathBuf = std::env::var("CARGO_STATICLIB_FILE_BAR_bar").expect("CARGO_STATICLIB_FILE_BAR_bar").into();
+ println!("{}", lib.display());
+ assert!(&lib.is_file());
+
+ let lib: std::path::PathBuf = std::env::var("CARGO_CDYLIB_FILE_BAR_bar").expect("CARGO_CDYLIB_FILE_BAR_bar").into();
+ println!("{}", lib.display());
+ assert!(&lib.is_file());
+
+ let dir: std::path::PathBuf = std::env::var("CARGO_BIN_DIR_BAR").expect("CARGO_BIN_DIR_BAR").into();
+ println!("{}", dir.display());
+ assert!(dir.is_dir());
+
+ let bar: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR").into();
+ println!("{}", bar.display());
+ assert!(&bar.is_file());
+
+ let bar2: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR_bar").expect("CARGO_BIN_FILE_BAR_bar").into();
+ println!("{}", bar2.display());
+ assert_eq!(bar, bar2);
+ }
+ "#)
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [lib]
+ crate-type = ["staticlib", "cdylib"]
+ "#,
+ )
+ // compilation target is native for build scripts unless overridden
+ .file("bar/src/bin/bar.rs", &format!(r#"fn main() {{ assert_eq!(std::env::var("TARGET").unwrap(), "{}"); }}"#, cross_compile::native()))
+ .file("bar/src/bin/baz.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "")
+ .build();
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_contains("[COMPILING] foo [..]")
+ .with_stderr_contains("[COMPILING] bar v0.5.0 ([CWD]/bar)")
+ .with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+
+ let build_script_output = build_script_output_string(&p, "foo");
+ let msg = "we need the binary directory for this artifact along with all binary paths";
+ if cfg!(target_env = "msvc") {
+ match_exact(
+ "[..]/artifact/bar-[..]/bin/baz.exe\n\
+ [..]/artifact/bar-[..]/staticlib/bar-[..].lib\n\
+ [..]/artifact/bar-[..]/cdylib/bar.dll\n\
+ [..]/artifact/bar-[..]/bin\n\
+ [..]/artifact/bar-[..]/bin/bar.exe\n\
+ [..]/artifact/bar-[..]/bin/bar.exe",
+ &build_script_output,
+ msg,
+ "",
+ None,
+ )
+ .unwrap();
+ } else {
+ match_exact(
+ "[..]/artifact/bar-[..]/bin/baz-[..]\n\
+ [..]/artifact/bar-[..]/staticlib/libbar-[..].a\n\
+ [..]/artifact/bar-[..]/cdylib/[..]bar.[..]\n\
+ [..]/artifact/bar-[..]/bin\n\
+ [..]/artifact/bar-[..]/bin/bar-[..]\n\
+ [..]/artifact/bar-[..]/bin/bar-[..]",
+ &build_script_output,
+ msg,
+ "",
+ None,
+ )
+ .unwrap();
+ }
+
+ assert!(
+ !p.bin("bar").is_file(),
+ "artifacts are located in their own directory, exclusively, and won't be lifted up"
+ );
+ assert!(!p.bin("baz").is_file(),);
+ assert_artifact_executable_output(&p, "debug", "bar", "bar");
+}
+
+#[cargo_test]
+fn build_script_with_bin_artifact_and_lib_false() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [build-dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ bar::doit()
+ }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() { bar::doit(); }")
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ pub fn doit() {
+ panic!("sentinel");
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_status(101)
+ .with_stderr_does_not_contain("[..]sentinel[..]")
+ .run();
+}
+
+#[cargo_test]
+fn lib_with_bin_artifact_and_lib_false() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() {
+ bar::doit()
+ }"#,
+ )
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() { bar::doit(); }")
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ pub fn doit() {
+ panic!("sentinel");
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_status(101)
+ .with_stderr_does_not_contain("[..]sentinel[..]")
+ .run();
+}
+
+#[cargo_test]
+fn build_script_with_selected_dashed_bin_artifact_and_lib_true() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [build-dependencies]
+ bar-baz = { path = "bar/", artifact = "bin:baz-suffix", lib = true }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", r#"
+ fn main() {
+ bar_baz::print_env()
+ }
+ "#)
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar-baz"
+ version = "0.5.0"
+ authors = []
+
+ [[bin]]
+ name = "bar"
+
+ [[bin]]
+ name = "baz-suffix"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", r#"
+ pub fn print_env() {
+ let dir: std::path::PathBuf = std::env::var("CARGO_BIN_DIR_BAR_BAZ").expect("CARGO_BIN_DIR_BAR_BAZ").into();
+ let bin: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR_BAZ_baz-suffix").expect("CARGO_BIN_FILE_BAR_BAZ_baz-suffix").into();
+ println!("{}", dir.display());
+ println!("{}", bin.display());
+ assert!(dir.is_dir());
+ assert!(&bin.is_file());
+ assert!(std::env::var("CARGO_BIN_FILE_BAR_BAZ").is_err(), "CARGO_BIN_FILE_BAR_BAZ isn't set due to name mismatch");
+ assert!(std::env::var("CARGO_BIN_FILE_BAR_BAZ_bar").is_err(), "CARGO_BIN_FILE_BAR_BAZ_bar isn't set as binary isn't selected");
+ }
+ "#)
+ .build();
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] bar-baz v0.5.0 ([CWD]/bar)
+[COMPILING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+
+ let build_script_output = build_script_output_string(&p, "foo");
+ let msg = "we need the binary directory for this artifact and the binary itself";
+
+ if cfg!(target_env = "msvc") {
+ cargo_test_support::compare::match_exact(
+ &format!(
+ "[..]/artifact/bar-baz-[..]/bin\n\
+ [..]/artifact/bar-baz-[..]/bin/baz_suffix{}",
+ std::env::consts::EXE_SUFFIX,
+ ),
+ &build_script_output,
+ msg,
+ "",
+ None,
+ )
+ .unwrap();
+ } else {
+ cargo_test_support::compare::match_exact(
+ "[..]/artifact/bar-baz-[..]/bin\n\
+ [..]/artifact/bar-baz-[..]/bin/baz_suffix-[..]",
+ &build_script_output,
+ msg,
+ "",
+ None,
+ )
+ .unwrap();
+ }
+
+ assert!(
+ !p.bin("bar").is_file(),
+ "artifacts are located in their own directory, exclusively, and won't be lifted up"
+ );
+ assert_artifact_executable_output(&p, "debug", "bar", "baz_suffix");
+}
+
+#[cargo_test]
+fn lib_with_selected_dashed_bin_artifact_and_lib_true() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar-baz = { path = "bar/", artifact = ["bin:baz-suffix", "staticlib", "cdylib"], lib = true }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() {
+ bar_baz::exists();
+
+ env!("CARGO_BIN_DIR_BAR_BAZ");
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_BAZ_baz-suffix"));
+ let _b = include_bytes!(env!("CARGO_STATICLIB_FILE_BAR_BAZ"));
+ let _b = include_bytes!(env!("CARGO_STATICLIB_FILE_BAR_BAZ_bar-baz"));
+ let _b = include_bytes!(env!("CARGO_CDYLIB_FILE_BAR_BAZ"));
+ let _b = include_bytes!(env!("CARGO_CDYLIB_FILE_BAR_BAZ_bar-baz"));
+ }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar-baz"
+ version = "0.5.0"
+ authors = []
+
+ [lib]
+ crate-type = ["rlib", "staticlib", "cdylib"]
+
+ [[bin]]
+ name = "bar"
+
+ [[bin]]
+ name = "baz-suffix"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "pub fn exists() {}")
+ .build();
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] bar-baz v0.5.0 ([CWD]/bar)
+[COMPILING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+
+ assert!(
+ !p.bin("bar").is_file(),
+ "artifacts are located in their own directory, exclusively, and won't be lifted up"
+ );
+ assert_artifact_executable_output(&p, "debug", "bar", "baz_suffix");
+}
+
+#[cargo_test]
+fn allow_artifact_and_no_artifact_dep_to_same_package_within_different_dep_categories() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+
+ [dev-dependencies]
+ bar = { path = "bar/", package = "bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(test)] extern crate bar;
+ pub fn foo() {
+ env!("CARGO_BIN_DIR_BAR");
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR"));
+ }"#,
+ )
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "")
+ .build();
+ p.cargo("test -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_contains("[COMPILING] bar v0.5.0 ([CWD]/bar)")
+ .with_stderr_contains("[FINISHED] test [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+}
+
+#[cargo_test]
+fn normal_build_deps_are_picked_up_in_presence_of_an_artifact_build_dep_to_the_same_package() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar", artifact = "bin:bar" }
+
+ [build-dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("build.rs", "fn main() { bar::f(); }")
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() {
+ env!("CARGO_BIN_DIR_BAR");
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR"));
+ }"#,
+ )
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "pub fn f() {}")
+ .build();
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .run();
+}
+
+#[cargo_test]
+fn disallow_using_example_binaries_as_artifacts() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin:one-example" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/examples/one-example.rs", "fn main() {}")
+ .build();
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_status(101)
+ .with_stderr(r#"[ERROR] dependency `bar` in package `foo` requires a `bin:one-example` artifact to be present."#)
+ .run();
+}
+
+/// From RFC 3028
+///
+/// > You may also specify separate dependencies with different artifact values, as well as
+/// > dependencies on the same crate without artifact specified; for instance, you may have a
+/// > build dependency on the binary of a crate and a normal dependency on the Rust library of the same crate.
+#[cargo_test]
+fn allow_artifact_and_non_artifact_dependency_to_same_crate() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [build-dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+
+ [dependencies]
+ bar = { path = "bar/" }
+ "#,
+ )
+ .file("src/lib.rs", r#"
+ pub fn foo() {
+ bar::doit();
+ assert!(option_env!("CARGO_BIN_FILE_BAR").is_none());
+ }"#)
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ assert!(option_env!("CARGO_BIN_FILE_BAR").is_none(), "no environment variables at build time");
+ std::process::Command::new(std::env::var("CARGO_BIN_FILE_BAR").expect("BAR present")).status().unwrap();
+ }"#,
+ )
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "pub fn doit() {}")
+ .build();
+
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_contains("[COMPILING] bar [..]")
+ .with_stderr_contains("[COMPILING] foo [..]")
+ .run();
+}
+
+#[cargo_test]
+fn build_script_deps_adopt_specified_target_unconditionally() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let target = cross_compile::alternate();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [build-dependencies.bar]
+ path = "bar/"
+ artifact = "bin"
+ target = "{}"
+ "#,
+ target
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", r#"
+ fn main() {
+ let bar: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR").into();
+ assert!(&bar.is_file());
+ }"#)
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "pub fn doit() {}")
+ .build();
+
+ p.cargo("check -v -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_does_not_contain(format!(
+ "[RUNNING] `rustc --crate-name build_script_build build.rs [..]--target {} [..]",
+ target
+ ))
+ .with_stderr_contains("[RUNNING] `rustc --crate-name build_script_build build.rs [..]")
+ .with_stderr_contains(format!(
+ "[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--target {} [..]",
+ target
+ ))
+ .with_stderr_contains(format!(
+ "[RUNNING] `rustc --crate-name bar bar/src/main.rs [..]--target {} [..]",
+ target
+ ))
+ .with_stderr_does_not_contain(format!(
+ "[RUNNING] `rustc --crate-name foo [..]--target {} [..]",
+ target
+ ))
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]")
+ .run();
+}
+
+/// The inverse of RFC-3176: multiple artifact deps on the same package with different targets are not allowed.
+#[cargo_test]
+fn build_script_deps_adopt_do_not_allow_multiple_targets_under_different_name_and_same_version() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let alternate = cross_compile::alternate();
+ let native = cross_compile::native();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [build-dependencies.bar]
+ path = "bar/"
+ artifact = "bin"
+ target = "{}"
+
+ [build-dependencies.bar-native]
+ package = "bar"
+ path = "bar/"
+ artifact = "bin"
+ target = "{}"
+ "#,
+ alternate,
+ native
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", r#"
+ fn main() {
+ let bar: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR").into();
+ assert!(&bar.is_file());
+ let bar_native: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR_NATIVE_bar").expect("CARGO_BIN_FILE_BAR_NATIVE_bar").into();
+ assert!(&bar_native.is_file());
+ assert_ne!(bar_native, bar, "should build different binaries due to different targets");
+ }"#)
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -v -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_status(101)
+ .with_stderr(format!(
+ "error: the crate `foo v0.0.0 ([CWD])` depends on crate `bar v0.5.0 ([CWD]/bar)` multiple times with different names",
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn non_build_script_deps_adopt_specified_target_unconditionally() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let target = cross_compile::alternate();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies.bar]
+ path = "bar/"
+ artifact = "bin"
+ target = "{}"
+ "#,
+ target
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ r#"pub fn foo() { let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); }"#,
+ )
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "pub fn doit() {}")
+ .build();
+
+ p.cargo("check -v -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_contains(format!(
+ "[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--target {} [..]",
+ target
+ ))
+ .with_stderr_contains(format!(
+ "[RUNNING] `rustc --crate-name bar bar/src/main.rs [..]--target {} [..]",
+ target
+ ))
+ .with_stderr_does_not_contain(format!(
+ "[RUNNING] `rustc --crate-name foo [..]--target {} [..]",
+ target
+ ))
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]")
+ .run();
+}
+
+#[cargo_test]
+fn no_cross_doctests_works_with_artifacts() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin", lib = true }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ //! ```
+ //! env!("CARGO_BIN_DIR_BAR");
+ //! let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR"));
+ //! ```
+ pub fn foo() {
+ env!("CARGO_BIN_DIR_BAR");
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR"));
+ }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/lib.rs", r#"pub extern "C" fn c() {}"#)
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ let target = rustc_host();
+ p.cargo("test -Z bindeps --target")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(&format!(
+ "\
+[COMPILING] bar v0.5.0 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/{triple}/debug/deps/foo-[..][EXE])
+[DOCTEST] foo
+",
+ triple = target
+ ))
+ .run();
+
+ println!("c");
+ let target = cross_compile::alternate();
+
+ // This will build the library, but does not build or run doc tests.
+ // This should probably be a warning or error.
+ p.cargo("test -Z bindeps -v --doc --target")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_contains(format!(
+ "[COMPILING] bar v0.5.0 ([CWD]/bar)
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--target {triple} [..]
+[RUNNING] `rustc --crate-name bar bar/src/main.rs [..]--target {triple} [..]
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]",
+ triple = target
+ ))
+ .run();
+
+ if !cross_compile::can_run_on_host() {
+ return;
+ }
+
+ // This tests the library, but does not run the doc tests.
+ p.cargo("test -Z bindeps -v --target")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_contains(&format!(
+ "[FRESH] bar v0.5.0 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo [..]--test[..]
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[CWD]/target/{triple}/debug/deps/foo-[..][EXE]`",
+ triple = target
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn build_script_deps_adopts_target_platform_if_target_equals_target() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [build-dependencies]
+ bar = { path = "bar/", artifact = "bin", target = "target" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", r#"
+ fn main() {
+ let bar: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR").into();
+ assert!(&bar.is_file());
+ }"#)
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "pub fn doit() {}")
+ .build();
+
+ let alternate_target = cross_compile::alternate();
+ p.cargo("check -v -Z bindeps --target")
+ .arg(alternate_target)
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_does_not_contain(format!(
+ "[RUNNING] `rustc --crate-name build_script_build build.rs [..]--target {} [..]",
+ alternate_target
+ ))
+ .with_stderr_contains("[RUNNING] `rustc --crate-name build_script_build build.rs [..]")
+ .with_stderr_contains(format!(
+ "[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--target {} [..]",
+ alternate_target
+ ))
+ .with_stderr_contains(format!(
+ "[RUNNING] `rustc --crate-name bar bar/src/main.rs [..]--target {} [..]",
+ alternate_target
+ ))
+ .with_stderr_contains(format!(
+ "[RUNNING] `rustc --crate-name foo [..]--target {} [..]",
+ alternate_target
+ ))
+ .run();
+}
+
+#[cargo_test]
+// TODO(ST): rename bar (dependency) to something else and un-ignore this with RFC-3176
+#[cfg_attr(target_env = "msvc", ignore = "msvc not working")]
+fn profile_override_basic() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [build-dependencies]
+ bar = { path = "bar", artifact = "bin" }
+
+ [dependencies]
+ bar = { path = "bar", artifact = "bin" }
+
+ [profile.dev.build-override]
+ opt-level = 1
+
+ [profile.dev]
+ opt-level = 3
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("build -v -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name build_script_build [..] -C opt-level=1 [..]`",
+ )
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name bar bar/src/main.rs [..] -C opt-level=3 [..]`",
+ )
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name bar bar/src/main.rs [..] -C opt-level=1 [..]`",
+ )
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..] -C opt-level=1 [..]`",
+ )
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..] -C opt-level=3 [..]`",
+ )
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..] -C opt-level=3 [..]`")
+ .run();
+}
+
+#[cargo_test]
+fn dependencies_of_dependencies_work_in_artifacts() {
+ Package::new("baz", "1.0.0")
+ .file("src/lib.rs", "pub fn baz() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [build-dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ std::process::Command::new(std::env::var("CARGO_BIN_FILE_BAR").expect("BAR present")).status().unwrap();
+ }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ baz = "1.0.0"
+ "#,
+ )
+ .file("bar/src/lib.rs", r#"pub fn bar() {baz::baz()}"#)
+ .file("bar/src/main.rs", r#"fn main() {bar::bar()}"#)
+ .build();
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .run();
+
+ // `cargo tree` shows artifact dependencies under the dependency kind they belong to and doesn't treat them specially.
+ p.cargo("tree -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stdout(
+ "\
+foo v0.0.0 ([CWD])
+[build-dependencies]
+└── bar v0.5.0 ([CWD]/bar)
+ └── baz v1.0.0
+",
+ )
+ .run();
+}
+
+// TODO: Fix this potentially by reverting 887562bfeb8c540594d7d08e6e9a4ab7eb255865 which adds artifact information to the registry
+// followed by 0ff93733626f7cbecaf9dce9ab62b4ced0be088e which picks it up.
+// For reference, see comments by ehuss https://github.com/rust-lang/cargo/pull/9992#discussion_r801086315 and
+// joshtriplett https://github.com/rust-lang/cargo/pull/9992#issuecomment-1033394197 .
+#[cargo_test]
+#[ignore = "broken, need artifact info in index"]
+fn targets_are_picked_up_from_non_workspace_artifact_deps() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let target = cross_compile::alternate();
+ Package::new("artifact", "1.0.0")
+ .file("src/main.rs", r#"fn main() {}"#)
+ .file("src/lib.rs", r#"pub fn lib() {}"#)
+ .publish();
+
+ let mut dep = registry::Dependency::new("artifact", "1.0.0");
+ Package::new("uses-artifact", "1.0.0")
+ .file(
+ "src/lib.rs",
+ r#"pub fn uses_artifact() { let _b = include_bytes!(env!("CARGO_BIN_FILE_ARTIFACT")); }"#,
+ )
+ .add_dep(dep.artifact("bin", Some(target.to_string())))
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ uses-artifact = { version = "1.0.0" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"pub fn foo() { uses_artifact::uses_artifact(); }"#,
+ )
+ .build();
+
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .run();
+}
+
+#[cargo_test]
+fn allow_dep_renames_with_multiple_versions() {
+ Package::new("bar", "1.0.0")
+ .file("src/main.rs", r#"fn main() {println!("1.0.0")}"#)
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [build-dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+ bar_stable = { package = "bar", version = "1.0.0", artifact = "bin" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ std::process::Command::new(std::env::var("CARGO_BIN_FILE_BAR").expect("BAR present")).status().unwrap();
+ std::process::Command::new(std::env::var("CARGO_BIN_FILE_BAR_STABLE_bar").expect("BAR STABLE present")).status().unwrap();
+ }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", r#"fn main() {println!("0.5.0")}"#)
+ .build();
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_contains("[COMPILING] bar [..]")
+ .with_stderr_contains("[COMPILING] foo [..]")
+ .run();
+ let build_script_output = build_script_output_string(&p, "foo");
+ match_exact(
+ "0.5.0\n1.0.0",
+ &build_script_output,
+ "build script output",
+ "",
+ None,
+ )
+ .unwrap();
+}
+
+#[cargo_test]
+fn allow_artifact_and_non_artifact_dependency_to_same_crate_if_these_are_not_the_same_dep_kind() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [build-dependencies]
+ bar = { path = "bar/", artifact = "bin", lib = false }
+
+ [dependencies]
+ bar = { path = "bar/" }
+ "#,
+ )
+ .file("src/lib.rs", r#"
+ pub fn foo() {
+ bar::doit();
+ assert!(option_env!("CARGO_BIN_FILE_BAR").is_none());
+ }"#)
+ .file(
+ "build.rs",
+ r#"fn main() {
+ println!("{}", std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR"));
+ println!("{}", std::env::var("CARGO_BIN_FILE_BAR_bar").expect("CARGO_BIN_FILE_BAR_bar"));
+ }"#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn doit() {}")
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] bar [..]
+[COMPILING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn prevent_no_lib_warning_with_artifact_dependencies() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"pub fn foo() { let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); }"#,
+ )
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+ [COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+ [CHECKING] foo v0.0.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn show_no_lib_warning_with_artifact_dependencies_that_have_no_lib_but_lib_true() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [build-dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin", lib = true }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/build.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_contains("[WARNING] foo v0.0.0 ([CWD]) ignoring invalid dependency `bar` which is missing a lib target")
+ .with_stderr_contains("[COMPILING] bar v0.5.0 ([CWD]/bar)")
+ .with_stderr_contains("[CHECKING] foo [..]")
+ .with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+}
+
+#[cargo_test]
+fn resolver_2_build_dep_without_lib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ edition = "2021"
+
+ [build-dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", r#"
+ fn main() {
+ let bar: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR").into();
+ assert!(&bar.is_file());
+ }"#)
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .run();
+}
+
+#[cargo_test]
+fn check_missing_crate_type_in_package_fails() {
+ for crate_type in &["cdylib", "staticlib", "bin"] {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = {{ path = "bar/", artifact = "{}" }}
+ "#,
+ crate_type
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) //no bin, just rlib
+ .file("bar/src/lib.rs", "")
+ .build();
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] dependency `bar` in package `foo` requires a `[..]` artifact to be present.",
+ )
+ .run();
+ }
+}
+
+#[cargo_test]
+fn check_target_equals_target_in_non_build_dependency_errors() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin", target = "target" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_status(101)
+ .with_stderr_contains(
+ " `target = \"target\"` in normal- or dev-dependencies has no effect (bar)",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn env_vars_and_build_products_for_various_build_targets() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ resolver = "2"
+
+ [lib]
+ doctest = true
+
+ [build-dependencies]
+ bar = { path = "bar/", artifact = ["cdylib", "staticlib"] }
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin", lib = true }
+
+ [dev-dependencies]
+ bar = { path = "bar/", artifact = "bin:baz" }
+ "#,
+ )
+ .file("build.rs", r#"
+ fn main() {
+ let file: std::path::PathBuf = std::env::var("CARGO_CDYLIB_FILE_BAR").expect("CARGO_CDYLIB_FILE_BAR").into();
+ assert!(&file.is_file());
+
+ let file: std::path::PathBuf = std::env::var("CARGO_STATICLIB_FILE_BAR").expect("CARGO_STATICLIB_FILE_BAR").into();
+ assert!(&file.is_file());
+
+ assert!(std::env::var("CARGO_BIN_FILE_BAR").is_err());
+ assert!(std::env::var("CARGO_BIN_FILE_BAR_baz").is_err());
+ }
+ "#)
+ .file(
+ "src/lib.rs",
+ r#"
+ //! ```
+ //! bar::c();
+ //! env!("CARGO_BIN_DIR_BAR");
+ //! let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR"));
+ //! let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_bar"));
+ //! let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_baz"));
+ //! assert!(option_env!("CARGO_STATICLIB_FILE_BAR").is_none());
+ //! assert!(option_env!("CARGO_CDYLIB_FILE_BAR").is_none());
+ //! ```
+ pub fn foo() {
+ bar::c();
+ env!("CARGO_BIN_DIR_BAR");
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR"));
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_bar"));
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_baz"));
+ assert!(option_env!("CARGO_STATICLIB_FILE_BAR").is_none());
+ assert!(option_env!("CARGO_CDYLIB_FILE_BAR").is_none());
+ }
+
+ #[cfg(test)]
+ #[test]
+ fn env_unit() {
+ env!("CARGO_BIN_DIR_BAR");
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR"));
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_bar"));
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_baz"));
+ assert!(option_env!("CARGO_STATICLIB_FILE_BAR").is_none());
+ assert!(option_env!("CARGO_CDYLIB_FILE_BAR").is_none());
+ }
+ "#,
+ )
+ .file(
+ "tests/main.rs",
+ r#"
+ #[test]
+ fn env_integration() {
+ env!("CARGO_BIN_DIR_BAR");
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR"));
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_bar"));
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_baz"));
+ }"#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [lib]
+ crate-type = ["staticlib", "cdylib", "rlib"]
+
+ [[bin]]
+ name = "bar"
+
+ [[bin]]
+ name = "baz"
+ "#,
+ )
+ .file("bar/src/lib.rs", r#"pub extern "C" fn c() {}"#)
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("test -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] bar [..]
+[COMPILING] foo [..]
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] unittests [..]
+[RUNNING] tests/main.rs [..]
+[DOCTEST] foo
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_artifact_dep() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+
+ Package::new("bar", "1.0.0").publish();
+ Package::new("baz", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ resolver = "2"
+
+ [dependencies]
+ bar = { version = "1.0", artifact = "bin", lib = true }
+
+ [build-dependencies]
+ baz = { version = "1.0", artifact = ["bin:a", "cdylib", "staticlib"], target = "target" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish -Z bindeps --no-verify")
+ .replace_crates_io(registry.index_url())
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.1.0 [..]
+[PACKAGED] [..]
+[UPLOADING] foo v0.1.0 [..]
+[UPLOADED] foo v0.1.0 [..]
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.1.0 [..]
+",
+ )
+ .run();
+
+ publish::validate_upload_with_contents(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [{
+ "default_features": true,
+ "features": [],
+ "kind": "normal",
+ "name": "bar",
+ "optional": false,
+ "target": null,
+ "version_req": "^1.0"
+ },
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "build",
+ "name": "baz",
+ "optional": false,
+ "target": null,
+ "version_req": "^1.0"
+ }
+ ],
+ "description": "foo",
+ "documentation": "foo",
+ "features": {},
+ "homepage": "foo",
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": "foo",
+ "vers": "0.1.0"
+ }
+ "#,
+ "foo-0.1.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
+ &[(
+ "Cargo.toml",
+ &format!(
+ r#"{}
+[package]
+name = "foo"
+version = "0.1.0"
+authors = []
+description = "foo"
+homepage = "foo"
+documentation = "foo"
+license = "MIT"
+repository = "foo"
+resolver = "2"
+
+[dependencies.bar]
+version = "1.0"
+artifact = ["bin"]
+lib = true
+
+[build-dependencies.baz]
+version = "1.0"
+artifact = [
+ "bin:a",
+ "cdylib",
+ "staticlib",
+]
+target = "target""#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ ),
+ )],
+ );
+}
+
+#[cargo_test]
+fn doc_lib_true() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ resolver = "2"
+
+ [dependencies.bar]
+ path = "bar"
+ artifact = "bin"
+ lib = true
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar; pub fn foo() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("doc -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[DOCUMENTING] bar v0.0.1 ([CWD]/bar)
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+ assert!(p.root().join("target/doc/bar/index.html").is_file());
+
+ // Verify that it emits rmeta for the bin and lib dependency.
+ assert_eq!(p.glob("target/debug/artifact/*.rlib").count(), 0);
+ assert_eq!(p.glob("target/debug/deps/libbar-*.rmeta").count(), 2);
+
+ p.cargo("doc -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint")
+ .with_stdout("")
+ .run();
+
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+ assert!(p.root().join("target/doc/bar/index.html").is_file());
+}
+
+#[cargo_test]
+fn rustdoc_works_on_libs_with_artifacts_and_lib_false() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ resolver = "2"
+
+ [dependencies.bar]
+ path = "bar"
+ artifact = ["bin", "staticlib", "cdylib"]
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() {
+ env!("CARGO_BIN_DIR_BAR");
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR"));
+ let _b = include_bytes!(env!("CARGO_CDYLIB_FILE_BAR"));
+ let _b = include_bytes!(env!("CARGO_CDYLIB_FILE_BAR_bar"));
+ let _b = include_bytes!(env!("CARGO_STATICLIB_FILE_BAR"));
+ let _b = include_bytes!(env!("CARGO_STATICLIB_FILE_BAR_bar"));
+ }"#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [lib]
+ crate-type = ["staticlib", "cdylib"]
+ "#,
+ )
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("doc -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] bar v0.5.0 ([CWD]/bar)
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+ assert!(
+ !p.root().join("target/doc/bar/index.html").is_file(),
+ "bar is not a lib dependency and thus remains undocumented"
+ );
+}
+
+fn assert_artifact_executable_output(
+ p: &Project,
+ target_name: &str,
+ dep_name: &str,
+ bin_name: &str,
+) {
+ if cfg!(target_env = "msvc") {
+ assert_eq!(
+ p.glob(format!(
+ "target/{}/deps/artifact/{}-*/bin/{}{}",
+ target_name,
+ dep_name,
+ bin_name,
+ std::env::consts::EXE_SUFFIX
+ ))
+ .count(),
+ 1,
+ "artifacts are placed into their own output directory to not possibly clash"
+ );
+ } else {
+ assert_eq!(
+ p.glob(format!(
+ "target/{}/deps/artifact/{}-*/bin/{}-*{}",
+ target_name,
+ dep_name,
+ bin_name,
+ std::env::consts::EXE_SUFFIX
+ ))
+ .filter_map(Result::ok)
+ .filter(|f| f.extension().map_or(true, |ext| ext != "o" && ext != "d"))
+ .count(),
+ 1,
+ "artifacts are placed into their own output directory to not possibly clash"
+ );
+ }
+}
+
+fn build_script_output_string(p: &Project, package_name: &str) -> String {
+ let paths = p
+ .glob(format!("target/debug/build/{}-*/output", package_name))
+ .collect::<Result<Vec<_>, _>>()
+ .unwrap();
+ assert_eq!(paths.len(), 1);
+ std::fs::read_to_string(&paths[0]).unwrap()
+}
+
+#[cargo_test]
+fn build_script_features_for_shared_dependency() {
+ // When a build script is built and run, its features should match. Here:
+ //
+ // foo
+ // -> artifact on d1 with target
+ // -> common with features f1
+ //
+ // d1
+ // -> common with features f2
+ //
+ // common has features f1 and f2, with a build script.
+ //
+ // When common is built as a dependency of d1, it should have features
+ // `f2` (for the library and the build script).
+ //
+ // When common is built as a dependency of foo, it should have features
+ // `f1` (for the library and the build script).
+ if cross_compile::disabled() {
+ return;
+ }
+ let target = cross_compile::alternate();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ resolver = "2"
+
+ [dependencies]
+ d1 = { path = "d1", artifact = "bin", target = "$TARGET" }
+ common = { path = "common", features = ["f1"] }
+ "#
+ .replace("$TARGET", target),
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_D1"));
+ common::f1();
+ }
+ "#,
+ )
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+
+ [dependencies]
+ common = { path = "../common", features = ["f2"] }
+ "#,
+ )
+ .file(
+ "d1/src/main.rs",
+ r#"fn main() {
+ common::f2();
+ }"#,
+ )
+ .file(
+ "common/Cargo.toml",
+ r#"
+ [package]
+ name = "common"
+ version = "0.0.1"
+
+ [features]
+ f1 = []
+ f2 = []
+ "#,
+ )
+ .file(
+ "common/src/lib.rs",
+ r#"
+ #[cfg(feature = "f1")]
+ pub fn f1() {}
+
+ #[cfg(feature = "f2")]
+ pub fn f2() {}
+ "#,
+ )
+ .file(
+ "common/build.rs",
+ &r#"
+ use std::env::var_os;
+ fn main() {
+ assert_eq!(var_os("CARGO_FEATURE_F1").is_some(), cfg!(feature="f1"));
+ assert_eq!(var_os("CARGO_FEATURE_F2").is_some(), cfg!(feature="f2"));
+ if std::env::var("TARGET").unwrap() == "$TARGET" {
+ assert!(var_os("CARGO_FEATURE_F1").is_none());
+ assert!(var_os("CARGO_FEATURE_F2").is_some());
+ } else {
+ assert!(var_os("CARGO_FEATURE_F1").is_some());
+ assert!(var_os("CARGO_FEATURE_F2").is_none());
+ }
+ }
+ "#
+ .replace("$TARGET", target),
+ )
+ .build();
+
+ p.cargo("build -Z bindeps -v")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .run();
+}
+
+#[cargo_test]
+fn calc_bin_artifact_fingerprint() {
+ // See rust-lang/cargo#10527
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "2"
+
+ [dependencies]
+ bar = { path = "bar/", artifact = "bin" }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR"));
+ }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/main.rs", r#"fn main() { println!("foo") }"#)
+ .build();
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] bar v0.5.0 ([CWD]/bar)
+[CHECKING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.change_file("bar/src/main.rs", r#"fn main() { println!("bar") }"#);
+ // Change in artifact bin dep `bar` propagates to `foo`, triggering recompile.
+ p.cargo("check -v -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[DIRTY] bar v0.5.0 ([CWD]/bar): the file `bar/src/main.rs` has changed ([..])
+[COMPILING] bar v0.5.0 ([CWD]/bar)
+[RUNNING] `rustc --crate-name bar [..]`
+[DIRTY] foo v0.1.0 ([CWD]): the dependency bar was rebuilt
+[CHECKING] foo v0.1.0 ([CWD])
+[RUNNING] `rustc --crate-name foo [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ // All units are fresh. No recompile.
+ p.cargo("check -v -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[FRESH] bar v0.5.0 ([CWD]/bar)
+[FRESH] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn with_target_and_optional() {
+ // See rust-lang/cargo#10526
+ if cross_compile::disabled() {
+ return;
+ }
+ let target = cross_compile::alternate();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ edition = "2021"
+ [dependencies]
+ d1 = { path = "d1", artifact = "bin", optional = true, target = "$TARGET" }
+ "#
+ .replace("$TARGET", target),
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let _b = include_bytes!(env!("CARGO_BIN_FILE_D1"));
+ }
+ "#,
+ )
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ edition = "2021"
+ "#,
+ )
+ .file("d1/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -Z bindeps -F d1 -v")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] d1 v0.0.1 [..]
+[RUNNING] `rustc --crate-name d1 [..]--crate-type bin[..]
+[CHECKING] foo v0.0.1 [..]
+[RUNNING] `rustc --crate-name foo [..]--cfg[..]d1[..]
+[FINISHED] dev [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn with_assumed_host_target_and_optional_build_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ edition = "2021"
+ [build-dependencies]
+ d1 = { path = "d1", artifact = "bin", optional = true, target = "target" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ std::env::var("CARGO_BIN_FILE_D1").unwrap();
+ }
+ "#,
+ )
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ edition = "2021"
+ "#,
+ )
+ .file("d1/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -Z bindeps -F d1 -v")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_unordered(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[COMPILING] d1 v0.0.1 ([CWD]/d1)
+[RUNNING] `rustc --crate-name build_script_build [..]--crate-type bin[..]
+[RUNNING] `rustc --crate-name d1 [..]--crate-type bin[..]
+[RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..]--cfg[..]d1[..]
+[FINISHED] dev [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn decouple_same_target_transitive_dep_from_artifact_dep() {
+ // See https://github.com/rust-lang/cargo/issues/11463
+ let target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2021"
+
+ [dependencies]
+ a = {{ path = "a" }}
+ bar = {{ path = "bar", artifact = "bin", target = "{target}" }}
+ "#
+ ),
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {}
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ a = { path = "../a", features = ["feature"] }
+ "#,
+ )
+ .file(
+ "bar/src/main.rs",
+ r#"
+ fn main() {}
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ edition = "2021"
+
+ [dependencies]
+ b = { path = "../b" }
+ c = { path = "../c" }
+
+ [features]
+ feature = ["c/feature"]
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
+ use b::Trait as _;
+
+ pub fn use_b_trait(x: &impl c::Trait) {
+ x.b();
+ }
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [dependencies]
+ c = { path = "../c" }
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
+ pub trait Trait {
+ fn b(&self) {}
+ }
+
+ impl<T: c::Trait> Trait for T {}
+ "#,
+ )
+ .file(
+ "c/Cargo.toml",
+ r#"
+ [package]
+ name = "c"
+ version = "0.1.0"
+
+ [features]
+ feature = []
+ "#,
+ )
+ .file(
+ "c/src/lib.rs",
+ r#"
+ pub trait Trait {}
+ "#,
+ )
+ .build();
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] c v0.1.0 ([CWD]/c)
+[COMPILING] b v0.1.0 ([CWD]/b)
+[COMPILING] a v0.1.0 ([CWD]/a)
+[COMPILING] bar v0.1.0 ([CWD]/bar)
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn decouple_same_target_transitive_dep_from_artifact_dep_lib() {
+ // See https://github.com/rust-lang/cargo/issues/10837
+ let target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2021"
+
+ [dependencies]
+ a = {{ path = "a" }}
+ b = {{ path = "b", features = ["feature"] }}
+ bar = {{ path = "bar", artifact = "bin", lib = true, target = "{target}" }}
+ "#
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ edition = "2021"
+
+ [dependencies]
+ a = { path = "../a", features = ["b"] }
+ b = { path = "../b" }
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file(
+ "bar/src/main.rs",
+ r#"
+ use b::Trait;
+
+ fn main() {
+ a::A.b()
+ }
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ b = { path = "../b", optional = true }
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
+ pub struct A;
+
+ #[cfg(feature = "b")]
+ impl b::Trait for A {}
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [features]
+ feature = []
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
+ pub trait Trait {
+ fn b(&self) {}
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] b v0.1.0 ([CWD]/b)
+[COMPILING] a v0.1.0 ([CWD]/a)
+[COMPILING] bar v0.1.0 ([CWD]/bar)
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn decouple_same_target_transitive_dep_from_artifact_dep_and_proc_macro() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2021"
+
+ [dependencies]
+ c = {{ path = "c" }}
+ bar = {{ path = "bar", artifact = "bin", target = "{target}" }}
+ "#
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ b = { path = "../b" }
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+ edition = "2021"
+
+ [dependencies]
+ a = { path = "../a" }
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file(
+ "c/Cargo.toml",
+ r#"
+ [package]
+ name = "c"
+ version = "0.1.0"
+ edition = "2021"
+
+ [dependencies]
+ d = { path = "../d", features = ["feature"] }
+ a = { path = "../a" }
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "c/src/lib.rs",
+ r#"
+ use a::Trait;
+
+ fn _c() {
+ d::D.a()
+ }
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ d = { path = "../d" }
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
+ pub trait Trait {
+ fn a(&self) {}
+ }
+
+ impl Trait for d::D {}
+ "#,
+ )
+ .file(
+ "d/Cargo.toml",
+ r#"
+ [package]
+ name = "d"
+ version = "0.1.0"
+
+ [features]
+ feature = []
+ "#,
+ )
+ .file("d/src/lib.rs", "pub struct D;")
+ .build();
+
+ p.cargo("build -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_unordered(
+ "\
+[COMPILING] d v0.1.0 ([CWD]/d)
+[COMPILING] a v0.1.0 ([CWD]/a)
+[COMPILING] b v0.1.0 ([CWD]/b)
+[COMPILING] c v0.1.0 ([CWD]/c)
+[COMPILING] bar v0.1.0 ([CWD]/bar)
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn same_target_artifact_dep_sharing() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = {{ path = "a" }}
+ bar = {{ path = "bar", artifact = "bin", target = "{target}" }}
+ "#
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ a = { path = "../a" }
+ "#,
+ )
+ .file(
+ "bar/src/main.rs",
+ r#"
+ fn main() {}
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+ p.cargo(&format!("build -Z bindeps --target {target}"))
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr(
+ "\
+[COMPILING] a v0.1.0 ([CWD]/a)
+[COMPILING] bar v0.1.0 ([CWD]/bar)
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn check_transitive_artifact_dependency_with_different_target() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+
+ [dependencies]
+ bar = { path = "bar/" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.0"
+
+ [dependencies]
+ baz = { path = "baz/", artifact = "bin", target = "custom-target" }
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file(
+ "bar/baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.0.0"
+
+ [dependencies]
+ "#,
+ )
+ .file("bar/baz/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_stderr_contains(
+ "error: could not find specification for target `custom-target`.\n \
+ Dependency `baz v0.0.0 [..]` requires to build for target `custom-target`.",
+ )
+ .with_status(101)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/bad_config.rs b/src/tools/cargo/tests/testsuite/bad_config.rs
new file mode 100644
index 000000000..ca51b101e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/bad_config.rs
@@ -0,0 +1,1514 @@
+//! Tests for some invalid .cargo/config files.
+
+use cargo_test_support::git::cargo_uses_gitoxide;
+use cargo_test_support::registry::{self, Package};
+use cargo_test_support::{basic_manifest, project, rustc_host};
+
+#[cargo_test]
+fn bad1() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [target]
+ nonexistent-target = "foo"
+ "#,
+ )
+ .build();
+ p.cargo("check -v --target=nonexistent-target")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] expected table for configuration key `target.nonexistent-target`, \
+but found string in [..]/config
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad2() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [http]
+ proxy = 3.0
+ "#,
+ )
+ .build();
+ p.cargo("publish -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] could not load Cargo configuration
+
+Caused by:
+ failed to load TOML configuration from `[..]config`
+
+Caused by:
+ failed to parse key `http`
+
+Caused by:
+ failed to parse key `proxy`
+
+Caused by:
+ found TOML configuration value of unknown type `float`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad3() {
+ let registry = registry::init();
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [http]
+ proxy = true
+ "#,
+ )
+ .build();
+ Package::new("foo", "1.0.0").publish();
+
+ p.cargo("publish -v")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to update registry [..]
+
+Caused by:
+ error in [..]config: `http.proxy` expected a string, but found a boolean
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad4() {
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [cargo-new]
+ vcs = false
+ "#,
+ )
+ .build();
+ p.cargo("new -v foo")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] Failed to create package `foo` at `[..]`
+
+Caused by:
+ error in [..]config: `cargo-new.vcs` expected a string, but found a boolean
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad6() {
+ let registry = registry::init();
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [http]
+ user-agent = true
+ "#,
+ )
+ .build();
+ Package::new("foo", "1.0.0").publish();
+
+ p.cargo("publish -v")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to update registry [..]
+
+Caused by:
+ error in [..]config: `http.user-agent` expected a string, but found a boolean
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_global_config() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ foo = "0.1.0"
+ "#,
+ )
+ .file(".cargo/config", "4")
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] could not load Cargo configuration
+
+Caused by:
+ could not parse TOML configuration in `[..]`
+
+Caused by:
+ could not parse input as TOML
+
+Caused by:
+ TOML parse error at line 1, column 2
+ |
+ 1 | 4
+ | ^
+ expected `.`, `=`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_cargo_lock() {
+ let p = project()
+ .file("Cargo.lock", "[[package]]\nfoo = 92")
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse lock file at: [..]Cargo.lock
+
+Caused by:
+ missing field `name`
+ in `package`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn duplicate_packages_in_cargo_lock() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.lock",
+ r#"
+ [[package]]
+ name = "foo"
+ version = "0.0.1"
+ dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ ]
+
+ [[package]]
+ name = "bar"
+ version = "0.1.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+
+ [[package]]
+ name = "bar"
+ version = "0.1.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse lock file at: [..]
+
+Caused by:
+ package `bar` is specified twice in the lockfile
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_source_in_cargo_lock() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.lock",
+ r#"
+ [[package]]
+ name = "foo"
+ version = "0.0.1"
+ dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ ]
+
+ [[package]]
+ name = "bar"
+ version = "0.1.0"
+ source = "You shall not parse"
+ "#,
+ )
+ .build();
+
+ p.cargo("check --verbose")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse lock file at: [..]
+
+Caused by:
+ invalid source `You shall not parse`
+ in `package.source`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_dependency_in_lockfile() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.lock",
+ r#"
+ [[package]]
+ name = "foo"
+ version = "0.0.1"
+ dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ ]
+ "#,
+ )
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn bad_git_dependency() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ foo = {{ git = "{url}" }}
+ "#,
+ url = if cargo_uses_gitoxide() {
+ "git://host.xz"
+ } else {
+ "file:.."
+ }
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let expected_stderr = if cargo_uses_gitoxide() {
+ "\
+[UPDATING] git repository `git://host.xz`
+[ERROR] failed to get `foo` as a dependency of package `foo v0.0.0 [..]`
+
+Caused by:
+ failed to load source for dependency `foo`
+
+Caused by:
+ Unable to update git://host.xz
+
+Caused by:
+ failed to clone into: [..]
+
+Caused by:
+ URLs need to specify the path to the repository
+"
+ } else {
+ "\
+[UPDATING] git repository `file:///`
+[ERROR] failed to get `foo` as a dependency of package `foo v0.0.0 [..]`
+
+Caused by:
+ failed to load source for dependency `foo`
+
+Caused by:
+ Unable to update file:///
+
+Caused by:
+ failed to clone into: [..]
+
+Caused by:
+ [..]'file:///' is not a valid local file URI[..]
+"
+ };
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr(expected_stderr)
+ .run();
+}
+
+#[cargo_test]
+fn bad_crate_type() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [lib]
+ crate-type = ["bad_type", "rlib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr_contains(
+ "error: failed to run `rustc` to learn about crate-type bad_type information",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn malformed_override() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [target.x86_64-apple-darwin.freetype]
+ native = {
+ foo: "bar"
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ could not parse input as TOML
+
+Caused by:
+ TOML parse error at line 8, column 27
+ |
+ 8 | native = {
+ | ^
+ invalid inline table
+ expected `}`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn duplicate_binary_names() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "qqq"
+ version = "0.1.0"
+ authors = ["A <a@a.a>"]
+
+ [[bin]]
+ name = "e"
+ path = "a.rs"
+
+ [[bin]]
+ name = "e"
+ path = "b.rs"
+ "#,
+ )
+ .file("a.rs", r#"fn main() -> () {}"#)
+ .file("b.rs", r#"fn main() -> () {}"#)
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ found duplicate binary name e, but all binary targets must have a unique name
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn duplicate_example_names() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "qqq"
+ version = "0.1.0"
+ authors = ["A <a@a.a>"]
+
+ [[example]]
+ name = "ex"
+ path = "examples/ex.rs"
+
+ [[example]]
+ name = "ex"
+ path = "examples/ex2.rs"
+ "#,
+ )
+ .file("examples/ex.rs", r#"fn main () -> () {}"#)
+ .file("examples/ex2.rs", r#"fn main () -> () {}"#)
+ .build();
+
+ p.cargo("check --example ex")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ found duplicate example name ex, but all example targets must have a unique name
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn duplicate_bench_names() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "qqq"
+ version = "0.1.0"
+ authors = ["A <a@a.a>"]
+
+ [[bench]]
+ name = "ex"
+ path = "benches/ex.rs"
+
+ [[bench]]
+ name = "ex"
+ path = "benches/ex2.rs"
+ "#,
+ )
+ .file("benches/ex.rs", r#"fn main () {}"#)
+ .file("benches/ex2.rs", r#"fn main () {}"#)
+ .build();
+
+ p.cargo("bench")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ found duplicate bench name ex, but all bench targets must have a unique name
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn duplicate_deps() {
+ let p = project()
+ .file("shim-bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("shim-bar/src/lib.rs", "pub fn a() {}")
+ .file("linux-bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("linux-bar/src/lib.rs", "pub fn a() {}")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "qqq"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { path = "shim-bar" }
+
+ [target.x86_64-unknown-linux-gnu.dependencies]
+ bar = { path = "linux-bar" }
+ "#,
+ )
+ .file("src/main.rs", r#"fn main () {}"#)
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ Dependency 'bar' has different source paths depending on the build target. Each dependency must \
+have a single canonical source path irrespective of build target.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn duplicate_deps_diff_sources() {
+ let p = project()
+ .file("shim-bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("shim-bar/src/lib.rs", "pub fn a() {}")
+ .file("linux-bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("linux-bar/src/lib.rs", "pub fn a() {}")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "qqq"
+ version = "0.0.1"
+ authors = []
+
+ [target.i686-unknown-linux-gnu.dependencies]
+ bar = { path = "shim-bar" }
+
+ [target.x86_64-unknown-linux-gnu.dependencies]
+ bar = { path = "linux-bar" }
+ "#,
+ )
+ .file("src/main.rs", r#"fn main () {}"#)
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ Dependency 'bar' has different source paths depending on the build target. Each dependency must \
+have a single canonical source path irrespective of build target.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn unused_keys() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [target.foo]
+ bar = "3"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+warning: unused manifest key: target.foo.bar
+[CHECKING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ bulid = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+ p.cargo("check")
+ .with_stderr(
+ "\
+warning: unused manifest key: package.bulid
+[CHECKING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ let p = project()
+ .at("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [lib]
+ build = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+ p.cargo("check")
+ .with_stderr(
+ "\
+warning: unused manifest key: lib.build
+[CHECKING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn unused_keys_in_virtual_manifest() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ bulid = "foo"
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+ p.cargo("check --workspace")
+ .with_stderr(
+ "\
+[WARNING] [..]/foo/Cargo.toml: unused manifest key: workspace.bulid
+[CHECKING] bar [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn empty_dependencies() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = {}
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").publish();
+
+ p.cargo("check")
+ .with_stderr_contains(
+ "\
+warning: dependency (bar) specified without providing a local path, Git repository, or version \
+to use. This will be considered an error in future versions
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_toml_historically_allowed_fails() {
+ let p = project()
+ .file(".cargo/config", "[bar] baz = 2")
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: could not load Cargo configuration
+
+Caused by:
+ could not parse TOML configuration in `[..]`
+
+Caused by:
+ could not parse input as TOML
+
+Caused by:
+ TOML parse error at line 1, column 7
+ |
+ 1 | [bar] baz = 2
+ | ^
+ invalid table header
+ expected newline, `#`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn ambiguous_git_reference() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.bar]
+ git = "http://127.0.0.1"
+ branch = "master"
+ tag = "some-tag"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ dependency (bar) specification is ambiguous. Only one of `branch`, `tag` or `rev` is allowed.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fragment_in_git_url() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.bar]
+ git = "http://127.0.0.1#foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[WARNING] URL fragment `#foo` in git URL is ignored for dependency (bar). \
+If you were trying to specify a specific git revision, \
+use `rev = \"foo\"` in the dependency declaration.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_source_config1() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(".cargo/config", "[source.foo]")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr("error: no source location specified for `source.foo`, need [..]")
+ .run();
+}
+
+#[cargo_test]
+fn bad_source_config2() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [source.crates-io]
+ registry = 'http://example.com'
+ replace-with = 'bar'
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to get `bar` as a dependency of package `foo v0.0.0 [..]`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update registry `crates-io`
+
+Caused by:
+ could not find a configured source with the name `bar` \
+ when attempting to lookup `crates-io` (configuration in [..])
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_source_config3() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [source.crates-io]
+ registry = 'https://example.com'
+ replace-with = 'crates-io'
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to get `bar` as a dependency of package `foo v0.0.0 [..]`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update registry `crates-io`
+
+Caused by:
+ detected a cycle of `replace-with` sources, [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_source_config4() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [source.crates-io]
+ replace-with = 'bar'
+
+ [source.bar]
+ registry = 'https://example.com'
+ replace-with = 'crates-io'
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to get `bar` as a dependency of package `foo v0.0.0 ([..])`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update registry `crates-io`
+
+Caused by:
+ detected a cycle of `replace-with` sources, the source `crates-io` is \
+ eventually replaced with itself (configuration in [..])
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_source_config5() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [source.crates-io]
+ registry = 'https://example.com'
+ replace-with = 'bar'
+
+ [source.bar]
+ registry = 'not a url'
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: configuration key `source.bar.registry` specified an invalid URL (in [..])
+
+Caused by:
+ invalid url `not a url`: [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn both_git_and_path_specified() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.bar]
+ git = "http://127.0.0.1"
+ path = "bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ foo.cargo("check -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ dependency (bar) specification is ambiguous. Only one of `git` or `path` is allowed.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_source_config6() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [source.crates-io]
+ registry = 'https://example.com'
+ replace-with = ['not', 'a', 'string']
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] error in [..]/foo/.cargo/config: could not load config key `source.crates-io.replace-with`
+
+Caused by:
+ error in [..]/foo/.cargo/config: `source.crates-io.replace-with` expected a string, but found a array
+"
+ )
+ .run();
+}
+
+#[cargo_test]
+fn ignored_git_revision() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ branch = "spam"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let err_msg = "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ key `branch` is ignored for dependency (bar).
+";
+ foo.cargo("check -v")
+ .with_status(101)
+ .with_stderr(err_msg)
+ .run();
+
+ // #11540, check that [target] dependencies fail the same way.
+ foo.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+
+ [target.some-target.dependencies]
+ bar = { path = "bar", branch = "spam" }
+ "#,
+ );
+ foo.cargo("check")
+ .with_status(101)
+ .with_stderr(err_msg)
+ .run();
+}
+
+#[cargo_test]
+fn bad_source_config7() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [source.foo]
+ registry = 'https://example.com'
+ local-registry = 'file:///another/file'
+ "#,
+ )
+ .build();
+
+ Package::new("bar", "0.1.0").publish();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr("error: more than one source location specified for `source.foo`")
+ .run();
+}
+
+#[cargo_test]
+fn bad_source_config8() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [source.foo]
+ branch = "somebranch"
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] source definition `source.foo` specifies `branch`, \
+ but that requires a `git` key to be specified (in [..]/foo/.cargo/config)",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_dependency() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ bar = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ invalid type: integer `3`, expected a version string like [..]
+ in `dependencies.bar`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_debuginfo() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [profile.dev]
+ debug = 'a'
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ expected a boolean or an integer
+ in `profile.dev.debug`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_opt_level() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ build = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ expected a boolean or a string
+ in `package.build`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warn_semver_metadata() {
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [dependencies]
+ bar = "1.0.0+1234"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .with_stderr_contains("[WARNING] version requirement `1.0.0+1234` for dependency `bar`[..]")
+ .run();
+}
+
+#[cargo_test]
+fn bad_target_cfg() {
+ // Invalid type in a StringList.
+ //
+ // The error message is a bit unfortunate here. The type here ends up
+ // being essentially Value<Value<StringList>>, and each layer of "Value"
+ // adds some context to the error message. Also, untagged enums provide
+ // strange error messages. Hopefully most users will be able to untangle
+ // the message.
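+    //
+    // For contrast, a well-formed value for this key is a string or an array
+    // of strings, e.g. (the runner command is illustrative only):
+    //
+    //     [target.'cfg(not(target_os = "none"))']
+    //     runner = "my-runner"    # or: runner = ["my-runner", "--flag"]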
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [target.'cfg(not(target_os = "none"))']
+ runner = false
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] error in [..]/foo/.cargo/config: \
+could not load config key `target.\"cfg(not(target_os = \\\"none\\\"))\".runner`
+
+Caused by:
+ error in [..]/foo/.cargo/config: \
+ could not load config key `target.\"cfg(not(target_os = \\\"none\\\"))\".runner`
+
+Caused by:
+ invalid configuration for key `target.\"cfg(not(target_os = \\\"none\\\"))\".runner`
+ expected a string or array of strings, but found a boolean for \
+ `target.\"cfg(not(target_os = \\\"none\\\"))\".runner` in [..]/foo/.cargo/config
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_target_links_overrides() {
+ // Invalid parsing of links overrides.
+ //
+ // This error message is terrible. Nothing in the deserialization path is
+    // using config::Value<>, so nothing is able to report the location. I
+    // think this illustrates how things break down with the way this is
+    // currently designed around serde.
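+    //
+    // For reference, the `rustc-flags` override only accepts `-l` and `-L`
+    // flags, so a well-formed entry would look roughly like the following
+    // (library name and path are illustrative only):
+    //
+    //     [target.x86_64-unknown-linux-gnu.somelib]
+    //     rustc-flags = "-L /opt/somelib/lib -l somelib"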
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}.somelib]
+ rustc-flags = 'foo'
+ "#,
+ rustc_host()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] Only `-l` and `-L` flags are allowed in target config \
+ `target.[..].rustc-flags` (in [..]foo/.cargo/config): `foo`",
+ )
+ .run();
+
+ p.change_file(
+ ".cargo/config",
+ &format!(
+ "[target.{}.somelib]
+ warning = \"foo\"
+ ",
+ rustc_host(),
+ ),
+ );
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr("[ERROR] `warning` is not supported in build script overrides")
+ .run();
+}
+
+#[cargo_test]
+fn redefined_sources() {
+ // Cannot define a source multiple times.
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [source.foo]
+ registry = "https://github.com/rust-lang/crates.io-index"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] source `foo` defines source registry `crates-io`, \
+ but that source is already defined by `crates-io`
+note: Sources are not allowed to be defined multiple times.
+",
+ )
+ .run();
+
+ p.change_file(
+ ".cargo/config",
+ r#"
+ [source.one]
+ directory = "index"
+
+ [source.two]
+ directory = "index"
+ "#,
+ );
+
+ // Name is `[..]` because we can't guarantee the order.
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] source `[..]` defines source dir [..]/foo/index, \
+ but that source is already defined by `[..]`
+note: Sources are not allowed to be defined multiple times.
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/bad_manifest_path.rs b/src/tools/cargo/tests/testsuite/bad_manifest_path.rs
new file mode 100644
index 000000000..fb214e321
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/bad_manifest_path.rs
@@ -0,0 +1,386 @@
+//! Tests for invalid --manifest-path arguments.
+
+use cargo_test_support::{basic_bin_manifest, main_file, project};
+
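+/// Runs `cargo <command> --manifest-path <manifest_path_argument>` from the
+/// directory above the project and asserts that it fails because the argument
+/// does not point at a `Cargo.toml` file.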
+#[track_caller]
+fn assert_not_a_cargo_toml(command: &str, manifest_path_argument: &str) {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo(command)
+ .arg("--manifest-path")
+ .arg(manifest_path_argument)
+ .cwd(p.root().parent().unwrap())
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] the manifest-path must be a path \
+ to a Cargo.toml file",
+ )
+ .run();
+}
+
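+/// Runs `cargo <command> --manifest-path <manifest_path_argument>` from the
+/// directory above the project and asserts that it fails because the
+/// referenced `Cargo.toml` does not exist.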
+#[track_caller]
+fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) {
+ let p = project().build();
+ let expected_path = manifest_path_argument
+ .split('/')
+ .collect::<Vec<_>>()
+ .join("[..]");
+
+ p.cargo(command)
+ .arg("--manifest-path")
+ .arg(manifest_path_argument)
+ .cwd(p.root().parent().unwrap())
+ .with_status(101)
+ .with_stderr(format!(
+ "[ERROR] manifest path `{}` does not exist",
+ expected_path
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn bench_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("bench", "foo");
+}
+
+#[cargo_test]
+fn bench_dir_plus_file() {
+ assert_not_a_cargo_toml("bench", "foo/bar");
+}
+
+#[cargo_test]
+fn bench_dir_plus_path() {
+ assert_not_a_cargo_toml("bench", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn bench_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("bench", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn build_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("check", "foo");
+}
+
+#[cargo_test]
+fn build_dir_plus_file() {
+ assert_not_a_cargo_toml("bench", "foo/bar");
+}
+
+#[cargo_test]
+fn build_dir_plus_path() {
+ assert_not_a_cargo_toml("bench", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn build_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("check", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn clean_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("clean", "foo");
+}
+
+#[cargo_test]
+fn clean_dir_plus_file() {
+ assert_not_a_cargo_toml("clean", "foo/bar");
+}
+
+#[cargo_test]
+fn clean_dir_plus_path() {
+ assert_not_a_cargo_toml("clean", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn clean_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("clean", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn doc_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("doc", "foo");
+}
+
+#[cargo_test]
+fn doc_dir_plus_file() {
+ assert_not_a_cargo_toml("doc", "foo/bar");
+}
+
+#[cargo_test]
+fn doc_dir_plus_path() {
+ assert_not_a_cargo_toml("doc", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn doc_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("doc", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn fetch_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("fetch", "foo");
+}
+
+#[cargo_test]
+fn fetch_dir_plus_file() {
+ assert_not_a_cargo_toml("fetch", "foo/bar");
+}
+
+#[cargo_test]
+fn fetch_dir_plus_path() {
+ assert_not_a_cargo_toml("fetch", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn fetch_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("fetch", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn generate_lockfile_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("generate-lockfile", "foo");
+}
+
+#[cargo_test]
+fn generate_lockfile_dir_plus_file() {
+ assert_not_a_cargo_toml("generate-lockfile", "foo/bar");
+}
+
+#[cargo_test]
+fn generate_lockfile_dir_plus_path() {
+ assert_not_a_cargo_toml("generate-lockfile", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn generate_lockfile_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("generate-lockfile", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn package_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("package", "foo");
+}
+
+#[cargo_test]
+fn package_dir_plus_file() {
+ assert_not_a_cargo_toml("package", "foo/bar");
+}
+
+#[cargo_test]
+fn package_dir_plus_path() {
+ assert_not_a_cargo_toml("package", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn package_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("package", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn pkgid_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("pkgid", "foo");
+}
+
+#[cargo_test]
+fn pkgid_dir_plus_file() {
+ assert_not_a_cargo_toml("pkgid", "foo/bar");
+}
+
+#[cargo_test]
+fn pkgid_dir_plus_path() {
+ assert_not_a_cargo_toml("pkgid", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn pkgid_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("pkgid", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn publish_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("publish", "foo");
+}
+
+#[cargo_test]
+fn publish_dir_plus_file() {
+ assert_not_a_cargo_toml("publish", "foo/bar");
+}
+
+#[cargo_test]
+fn publish_dir_plus_path() {
+ assert_not_a_cargo_toml("publish", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn publish_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("publish", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn read_manifest_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("read-manifest", "foo");
+}
+
+#[cargo_test]
+fn read_manifest_dir_plus_file() {
+ assert_not_a_cargo_toml("read-manifest", "foo/bar");
+}
+
+#[cargo_test]
+fn read_manifest_dir_plus_path() {
+ assert_not_a_cargo_toml("read-manifest", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn read_manifest_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("read-manifest", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn run_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("run", "foo");
+}
+
+#[cargo_test]
+fn run_dir_plus_file() {
+ assert_not_a_cargo_toml("run", "foo/bar");
+}
+
+#[cargo_test]
+fn run_dir_plus_path() {
+ assert_not_a_cargo_toml("run", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn run_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("run", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn rustc_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("rustc", "foo");
+}
+
+#[cargo_test]
+fn rustc_dir_plus_file() {
+ assert_not_a_cargo_toml("rustc", "foo/bar");
+}
+
+#[cargo_test]
+fn rustc_dir_plus_path() {
+ assert_not_a_cargo_toml("rustc", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn rustc_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("rustc", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn test_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("test", "foo");
+}
+
+#[cargo_test]
+fn test_dir_plus_file() {
+ assert_not_a_cargo_toml("test", "foo/bar");
+}
+
+#[cargo_test]
+fn test_dir_plus_path() {
+ assert_not_a_cargo_toml("test", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn test_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("test", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn update_dir_containing_cargo_toml() {
+ assert_not_a_cargo_toml("update", "foo");
+}
+
+#[cargo_test]
+fn update_dir_plus_file() {
+ assert_not_a_cargo_toml("update", "foo/bar");
+}
+
+#[cargo_test]
+fn update_dir_plus_path() {
+ assert_not_a_cargo_toml("update", "foo/bar/baz");
+}
+
+#[cargo_test]
+fn update_dir_to_nonexistent_cargo_toml() {
+ assert_cargo_toml_doesnt_exist("update", "foo/bar/baz/Cargo.toml");
+}
+
+#[cargo_test]
+fn verify_project_dir_containing_cargo_toml() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("verify-project --manifest-path foo")
+ .cwd(p.root().parent().unwrap())
+ .with_status(1)
+ .with_stdout(
+ "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
+ ",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn verify_project_dir_plus_file() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("verify-project --manifest-path foo/bar")
+ .cwd(p.root().parent().unwrap())
+ .with_status(1)
+ .with_stdout(
+ "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
+ ",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn verify_project_dir_plus_path() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("verify-project --manifest-path foo/bar/baz")
+ .cwd(p.root().parent().unwrap())
+ .with_status(1)
+ .with_stdout(
+ "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
+ ",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn verify_project_dir_to_nonexistent_cargo_toml() {
+ let p = project().build();
+ p.cargo("verify-project --manifest-path foo/bar/baz/Cargo.toml")
+ .cwd(p.root().parent().unwrap())
+ .with_status(1)
+ .with_stdout(
+ "{\"invalid\":\"manifest path `foo[..]bar[..]baz[..]Cargo.toml` does not exist\"}\
+ ",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/bench.rs b/src/tools/cargo/tests/testsuite/bench.rs
new file mode 100644
index 000000000..60ad2b60d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/bench.rs
@@ -0,0 +1,1673 @@
+//! Tests for the `cargo bench` command.
+
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project};
+
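+// `#[bench]` and `test::Bencher` are unstable, so every test in this file is
+// gated to nightly via `#[cargo_test(nightly, reason = "bench")]`.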
+#[cargo_test(nightly, reason = "bench")]
+fn cargo_bench_simple() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+
+ fn hello() -> &'static str {
+ "hello"
+ }
+
+ pub fn main() {
+ println!("{}", hello())
+ }
+
+ #[bench]
+ fn bench_hello(_b: &mut test::Bencher) {
+ assert_eq!(hello(), "hello")
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("hello\n").run();
+
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test bench_hello ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_bench_implicit() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+ #[bench] fn run1(_ben: &mut test::Bencher) { }
+ fn main() { println!("Hello main!"); }
+ "#,
+ )
+ .file(
+ "tests/other.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run3(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .file(
+ "benches/mybench.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run2(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench --benches")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])
+[RUNNING] [..] (target/release/deps/mybench-[..][EXE])
+",
+ )
+ .with_stdout_contains("test run2 ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_bin_implicit() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+ #[bench] fn run1(_ben: &mut test::Bencher) { }
+ fn main() { println!("Hello main!"); }
+ "#,
+ )
+ .file(
+ "tests/other.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run3(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .file(
+ "benches/mybench.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run2(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench --bins")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])
+",
+ )
+ .with_stdout_contains("test run1 ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_tarname() {
+ let p = project()
+ .file(
+ "benches/bin1.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run1(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .file(
+ "benches/bin2.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run2(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench --bench bin2")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/bin2-[..][EXE])
+",
+ )
+ .with_stdout_contains("test run2 ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_multiple_targets() {
+ let p = project()
+ .file(
+ "benches/bin1.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run1(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .file(
+ "benches/bin2.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run2(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .file(
+ "benches/bin3.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run3(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench --bench bin1 --bench bin2")
+ .with_stdout_contains("test run1 ... bench: [..]")
+ .with_stdout_contains("test run2 ... bench: [..]")
+ .with_stdout_does_not_contain("[..]run3[..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn cargo_bench_verbose() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+ fn main() {}
+ #[bench] fn bench_hello(_b: &mut test::Bencher) {}
+ "#,
+ )
+ .build();
+
+ p.cargo("bench -v hello")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] src/main.rs [..]`
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] `[..]target/release/deps/foo-[..][EXE] hello --bench`",
+ )
+ .with_stdout_contains("test bench_hello ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn many_similar_names() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+ pub fn foo() {}
+ #[bench] fn lib_bench(_b: &mut test::Bencher) {}
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate foo;
+ #[cfg(test)]
+ extern crate test;
+ fn main() {}
+ #[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() }
+ ",
+ )
+ .file(
+ "benches/foo.rs",
+ r#"
+ #![feature(test)]
+ extern crate foo;
+ extern crate test;
+ #[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stdout_contains("test bin_bench ... bench: 0 ns/iter (+/- 0)")
+ .with_stdout_contains("test lib_bench ... bench: 0 ns/iter (+/- 0)")
+ .with_stdout_contains("test bench_bench ... bench: 0 ns/iter (+/- 0)")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn cargo_bench_failing_test() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+ fn hello() -> &'static str {
+ "hello"
+ }
+
+ pub fn main() {
+ println!("{}", hello())
+ }
+
+ #[bench]
+ fn bench_hello(_b: &mut test::Bencher) {
+ assert_eq!(hello(), "nope")
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("hello\n").run();
+
+ // Force libtest into serial execution so that the test header will be printed.
+ p.cargo("bench -- --test-threads=1")
+ .with_stdout_contains("test bench_hello ...[..]")
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])[..]
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains(
+ "[..]thread '[..]' panicked at 'assertion failed: `(left == right)`[..]",
+ )
+ .with_stdout_contains("[..]left: `\"hello\"`[..]")
+ .with_stdout_contains("[..]right: `\"nope\"`[..]")
+ .with_stdout_contains("[..]src/main.rs:15[..]")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_with_lib_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "baz"
+ path = "src/main.rs"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+ ///
+ /// ```rust
+ /// extern crate foo;
+ /// fn main() {
+ /// println!("{}", foo::foo());
+ /// }
+ /// ```
+ ///
+ pub fn foo(){}
+ #[bench] fn lib_bench(_b: &mut test::Bencher) {}
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
+ #![feature(test)]
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ #[cfg(test)]
+ extern crate test;
+
+ fn main() {}
+
+ #[bench]
+ fn bin_bench(_b: &mut test::Bencher) {}
+ ",
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])
+[RUNNING] [..] (target/release/deps/baz-[..][EXE])",
+ )
+ .with_stdout_contains("test lib_bench ... bench: [..]")
+ .with_stdout_contains("test bin_bench ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_with_deep_lib_dep() {
+ let p = project()
+ .at("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.foo]
+ path = "../foo"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate foo;
+ #[cfg(test)]
+ extern crate test;
+ #[bench]
+ fn bar_bench(_b: &mut test::Bencher) {
+ foo::foo();
+ }
+ ",
+ )
+ .build();
+ let _p2 = project()
+ .file(
+ "src/lib.rs",
+ "
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+
+ pub fn foo() {}
+
+ #[bench]
+ fn foo_bench(_b: &mut test::Bencher) {}
+ ",
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[COMPILING] bar v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/bar-[..][EXE])",
+ )
+ .with_stdout_contains("test bar_bench ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn external_bench_explicit() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bench]]
+ name = "bench"
+ path = "src/bench.rs"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+ pub fn get_hello() -> &'static str { "Hello" }
+
+ #[bench]
+ fn internal_bench(_b: &mut test::Bencher) {}
+ "#,
+ )
+ .file(
+ "src/bench.rs",
+ r#"
+ #![feature(test)]
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ extern crate test;
+
+ #[bench]
+ fn external_bench(_b: &mut test::Bencher) {}
+ "#,
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])
+[RUNNING] [..] (target/release/deps/bench-[..][EXE])",
+ )
+ .with_stdout_contains("test internal_bench ... bench: [..]")
+ .with_stdout_contains("test external_bench ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn external_bench_implicit() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+
+ pub fn get_hello() -> &'static str { "Hello" }
+
+ #[bench]
+ fn internal_bench(_b: &mut test::Bencher) {}
+ "#,
+ )
+ .file(
+ "benches/external.rs",
+ r#"
+ #![feature(test)]
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ extern crate test;
+
+ #[bench]
+ fn external_bench(_b: &mut test::Bencher) {}
+ "#,
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])
+[RUNNING] [..] (target/release/deps/external-[..][EXE])",
+ )
+ .with_stdout_contains("test internal_bench ... bench: [..]")
+ .with_stdout_contains("test external_bench ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_autodiscover_2015() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ edition = "2015"
+
+ [features]
+ magic = []
+
+ [[bench]]
+ name = "bench_magic"
+ required-features = ["magic"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "benches/bench_basic.rs",
+ r#"
+ #![feature(test)]
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ extern crate test;
+
+ #[bench]
+ fn bench_basic(_b: &mut test::Bencher) {}
+ "#,
+ )
+ .file(
+ "benches/bench_magic.rs",
+ r#"
+ #![feature(test)]
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ extern crate test;
+
+ #[bench]
+ fn bench_magic(_b: &mut test::Bencher) {}
+ "#,
+ )
+ .build();
+
+ p.cargo("bench bench_basic")
+ .with_stderr(
+ "warning: \
+An explicit [[bench]] section is specified in Cargo.toml which currently
+disables Cargo from automatically inferring other benchmark targets.
+This inference behavior will change in the Rust 2018 edition and the following
+files will be included as a benchmark target:
+
+* [..]bench_basic.rs
+
+This is likely to break cargo build or cargo test as these files may not be
+ready to be compiled as a benchmark target today. You can future-proof yourself
+and disable this warning by adding `autobenches = false` to your [package]
+section. You may also move the files to a location where Cargo would not
+automatically infer them to be a target, such as in subfolders.
+
+For more information on this warning you can consult
+https://github.com/rust-lang/cargo/issues/5330
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])
+",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn dont_run_examples() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "examples/dont-run-me-i-will-fail.rs",
+ r#"fn main() { panic!("Examples should not be run by 'cargo test'"); }"#,
+ )
+ .build();
+ p.cargo("bench").run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn pass_through_command_line() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+
+ #[bench] fn foo(_b: &mut test::Bencher) {}
+ #[bench] fn bar(_b: &mut test::Bencher) {}
+ ",
+ )
+ .build();
+
+ p.cargo("bench bar")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test bar ... bench: [..]")
+ .run();
+
+ p.cargo("bench foo")
+ .with_stderr(
+ "[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test foo ... bench: [..]")
+ .run();
+}
+
+// Regression test for running cargo-bench twice with
+// tests in an rlib
+#[cargo_test(nightly, reason = "bench")]
+fn cargo_bench_twice() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file(
+ "src/foo.rs",
+ r#"
+ #![crate_type = "rlib"]
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+
+ #[bench]
+ fn dummy_bench(b: &mut test::Bencher) { }
+ "#,
+ )
+ .build();
+
+ for _ in 0..2 {
+ p.cargo("bench").run();
+ }
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn lib_bin_same_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "foo"
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+ #[bench] fn lib_bench(_b: &mut test::Bencher) {}
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
+ #![feature(test)]
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ #[cfg(test)]
+ extern crate test;
+
+ #[bench]
+ fn bin_bench(_b: &mut test::Bencher) {}
+ ",
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains_n("test [..] ... bench: [..]", 2)
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn lib_with_standard_name() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("syntax", "0.0.1"))
+ .file(
+ "src/lib.rs",
+ "
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+
+ /// ```
+ /// syntax::foo();
+ /// ```
+ pub fn foo() {}
+
+ #[bench]
+ fn foo_bench(_b: &mut test::Bencher) {}
+ ",
+ )
+ .file(
+ "benches/bench.rs",
+ "
+ #![feature(test)]
+ extern crate syntax;
+ extern crate test;
+
+ #[bench]
+ fn bench(_b: &mut test::Bencher) { syntax::foo() }
+ ",
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/syntax-[..][EXE])
+[RUNNING] [..] (target/release/deps/bench-[..][EXE])",
+ )
+ .with_stdout_contains("test foo_bench ... bench: [..]")
+ .with_stdout_contains("test bench ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn lib_with_standard_name2() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "syntax"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "syntax"
+ bench = false
+ doctest = false
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file(
+ "src/main.rs",
+ "
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate syntax;
+ #[cfg(test)]
+ extern crate test;
+
+ fn main() {}
+
+ #[bench]
+ fn bench(_b: &mut test::Bencher) { syntax::foo() }
+ ",
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/syntax-[..][EXE])",
+ )
+ .with_stdout_contains("test bench ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_dylib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "foo"
+ crate_type = ["dylib"]
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(test)]
+ extern crate bar as the_bar;
+ #[cfg(test)]
+ extern crate test;
+
+ pub fn bar() { the_bar::baz(); }
+
+ #[bench]
+ fn foo(_b: &mut test::Bencher) {}
+ "#,
+ )
+ .file(
+ "benches/bench.rs",
+ r#"
+ #![feature(test)]
+ extern crate foo as the_foo;
+ extern crate test;
+
+ #[bench]
+ fn foo(_b: &mut test::Bencher) { the_foo::bar(); }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "bar"
+ crate_type = ["dylib"]
+ "#,
+ )
+ .file("bar/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("bench -v")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[RUNNING] [..] -C opt-level=3 [..]
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] [..] -C opt-level=3 [..]
+[RUNNING] [..] -C opt-level=3 [..]
+[RUNNING] [..] -C opt-level=3 [..]
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench`
+[RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`",
+ )
+ .with_stdout_contains_n("test foo ... bench: [..]", 2)
+ .run();
+
+ p.root().move_into_the_past();
+ p.cargo("bench -v")
+ .with_stderr(
+ "\
+[FRESH] bar v0.0.1 ([CWD]/bar)
+[FRESH] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench`
+[RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`",
+ )
+ .with_stdout_contains_n("test foo ... bench: [..]", 2)
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_twice_with_build_cmd() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file(
+ "src/lib.rs",
+ "
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+ #[bench]
+ fn foo(_b: &mut test::Bencher) {}
+ ",
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test foo ... bench: [..]")
+ .run();
+
+ p.cargo("bench")
+ .with_stderr(
+ "[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test foo ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_with_examples() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "6.6.6"
+ authors = []
+
+ [[example]]
+ name = "teste1"
+
+ [[bench]]
+ name = "testb1"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+ #[cfg(test)]
+ use test::Bencher;
+
+ pub fn f1() {
+ println!("f1");
+ }
+
+ pub fn f2() {}
+
+ #[bench]
+ fn bench_bench1(_b: &mut Bencher) {
+ f2();
+ }
+ "#,
+ )
+ .file(
+ "benches/testb1.rs",
+ "
+ #![feature(test)]
+ extern crate foo;
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_bench2(_b: &mut Bencher) {
+ foo::f2();
+ }
+ ",
+ )
+ .file(
+ "examples/teste1.rs",
+ r#"
+ extern crate foo;
+
+ fn main() {
+ println!("example1");
+ foo::f1();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v6.6.6 ([CWD])
+[RUNNING] `rustc [..]`
+[RUNNING] `rustc [..]`
+[RUNNING] `rustc [..]`
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] `[CWD]/target/release/deps/foo-[..][EXE] --bench`
+[RUNNING] `[CWD]/target/release/deps/testb1-[..][EXE] --bench`",
+ )
+ .with_stdout_contains("test bench_bench1 ... bench: [..]")
+ .with_stdout_contains("test bench_bench2 ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn test_a_bench() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.1.0"
+
+ [lib]
+ name = "foo"
+ test = false
+ doctest = false
+
+ [[bench]]
+ name = "b"
+ test = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("benches/b.rs", "#[test] fn foo() {}")
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.1.0 ([..])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/b-[..][EXE])",
+ )
+ .with_stdout_contains("test foo ... ok")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn test_bench_no_run() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "benches/bbaz.rs",
+ r#"
+ #![feature(test)]
+
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_baz(_: &mut Bencher) {}
+ "#,
+ )
+ .build();
+
+ p.cargo("bench --no-run")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] bench [optimized] target(s) in [..]
+[EXECUTABLE] benches src/lib.rs (target/release/deps/foo-[..][EXE])
+[EXECUTABLE] benches/bbaz.rs (target/release/deps/bbaz-[..][EXE])
+",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn test_bench_no_run_emit_json() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "benches/bbaz.rs",
+ r#"
+ #![feature(test)]
+
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_baz(_: &mut Bencher) {}
+ "#,
+ )
+ .build();
+
+ p.cargo("bench --no-run --message-format json")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] bench [optimized] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn test_bench_no_fail_fast() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+ fn hello() -> &'static str {
+ "hello"
+ }
+
+ pub fn main() {
+ println!("{}", hello())
+ }
+
+ #[bench]
+ fn bench_hello(_b: &mut test::Bencher) {
+ assert_eq!(hello(), "hello")
+ }
+
+ #[bench]
+ fn bench_nope(_b: &mut test::Bencher) {
+ assert_eq!("nope", hello())
+ }
+ "#,
+ )
+ .file(
+ "benches/b1.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench]
+ fn b1_fail(_b: &mut test::Bencher) { assert_eq!(1, 2); }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench --no-fail-fast -- --test-threads=1")
+ .with_status(101)
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 [..]
+[FINISHED] bench [..]
+[RUNNING] unittests src/main.rs (target/release/deps/foo[..])
+[ERROR] bench failed, to rerun pass `--bin foo`
+[RUNNING] benches/b1.rs (target/release/deps/b1[..])
+[ERROR] bench failed, to rerun pass `--bench b1`
+[ERROR] 2 targets failed:
+ `--bin foo`
+ `--bench b1`
+",
+ )
+ .with_stdout_contains("running 2 tests")
+ .with_stdout_contains("test bench_hello [..]")
+ .with_stdout_contains("test bench_nope [..]")
+ .with_stdout_contains("test b1_fail [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn test_bench_multiple_packages() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.1.0"
+
+ [dependencies.bar]
+ path = "../bar"
+
+ [dependencies.baz]
+ path = "../baz"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let _bar = project()
+ .at("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ authors = []
+ version = "0.1.0"
+
+ [[bench]]
+ name = "bbar"
+ test = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "benches/bbar.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_bar(_b: &mut Bencher) {}
+ "#,
+ )
+ .build();
+
+ let _baz = project()
+ .at("baz")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ authors = []
+ version = "0.1.0"
+
+ [[bench]]
+ name = "bbaz"
+ test = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "benches/bbaz.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_baz(_b: &mut Bencher) {}
+ "#,
+ )
+ .build();
+
+ p.cargo("bench -p bar -p baz")
+ .with_stderr_contains("[RUNNING] [..] (target/release/deps/bbaz-[..][EXE])")
+ .with_stdout_contains("test bench_baz ... bench: [..]")
+ .with_stderr_contains("[RUNNING] [..] (target/release/deps/bbar-[..][EXE])")
+ .with_stdout_contains("test bench_bar ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_all_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "benches/foo.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_foo(_: &mut Bencher) -> () { () }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file(
+ "bar/benches/bar.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_bar(_: &mut Bencher) -> () { () }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench --workspace")
+ .with_stderr_contains("[RUNNING] [..] (target/release/deps/bar-[..][EXE])")
+ .with_stdout_contains("test bench_bar ... bench: [..]")
+ .with_stderr_contains("[RUNNING] [..] (target/release/deps/foo-[..][EXE])")
+ .with_stdout_contains("test bench_foo ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_all_exclude() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+
+ #[bench]
+ pub fn bar(b: &mut test::Bencher) {
+ b.iter(|| {});
+ }
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file(
+ "baz/src/lib.rs",
+ "#[test] pub fn baz() { break_the_build(); }",
+ )
+ .build();
+
+ p.cargo("bench --workspace --exclude baz")
+ .with_stdout_contains(
+ "\
+running 1 test
+test bar ... bench: [..] ns/iter (+/- [..])",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_all_exclude_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+
+ #[bench]
+ pub fn bar(b: &mut test::Bencher) {
+ b.iter(|| {});
+ }
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file(
+ "baz/src/lib.rs",
+ "#[test] pub fn baz() { break_the_build(); }",
+ )
+ .build();
+
+ p.cargo("bench --workspace --exclude '*z'")
+ .with_stdout_contains(
+ "\
+running 1 test
+test bar ... bench: [..] ns/iter (+/- [..])",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_all_virtual_manifest() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file(
+ "bar/benches/bar.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_bar(_: &mut Bencher) -> () { () }
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .file(
+ "baz/benches/baz.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_baz(_: &mut Bencher) -> () { () }
+ "#,
+ )
+ .build();
+
+ // The order in which bar and baz are built is not guaranteed
+ p.cargo("bench --workspace")
+ .with_stderr_contains("[RUNNING] [..] (target/release/deps/baz-[..][EXE])")
+ .with_stdout_contains("test bench_baz ... bench: [..]")
+ .with_stderr_contains("[RUNNING] [..] (target/release/deps/bar-[..][EXE])")
+ .with_stdout_contains("test bench_bar ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_virtual_manifest_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }")
+ .file(
+ "bar/benches/bar.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_bar(_: &mut Bencher) -> () { break_the_build(); }
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .file(
+ "baz/benches/baz.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_baz(_: &mut Bencher) -> () { () }
+ "#,
+ )
+ .build();
+
+ // The order in which bar and baz are built is not guaranteed
+ p.cargo("bench -p '*z'")
+ .with_stderr_contains("[RUNNING] [..] (target/release/deps/baz-[..][EXE])")
+ .with_stdout_contains("test bench_baz ... bench: [..]")
+ .with_stderr_does_not_contain("[RUNNING] [..] (target/release/deps/bar-[..][EXE])")
+ .with_stdout_does_not_contain("test bench_bar ... bench: [..]")
+ .run();
+}
+
+// https://github.com/rust-lang/cargo/issues/4287
+#[cargo_test(nightly, reason = "bench")]
+fn legacy_bench_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [[bench]]
+ name = "bench"
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file(
+ "src/bench.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_foo(_: &mut Bencher) -> () { () }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stderr_contains(
+ "\
+[WARNING] path `[..]src/bench.rs` was erroneously implicitly accepted for benchmark `bench`,
+please set bench.path in Cargo.toml",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_virtual_manifest_all_implied() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn foo() {}")
+ .file(
+ "bar/benches/bar.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ use test::Bencher;
+ #[bench]
+ fn bench_bar(_: &mut Bencher) -> () { () }
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .file(
+ "baz/benches/baz.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ use test::Bencher;
+ #[bench]
+ fn bench_baz(_: &mut Bencher) -> () { () }
+ "#,
+ )
+ .build();
+
+ // The order in which bar and baz are built is not guaranteed
+
+ p.cargo("bench")
+ .with_stderr_contains("[RUNNING] [..] (target/release/deps/baz-[..][EXE])")
+ .with_stdout_contains("test bench_baz ... bench: [..]")
+ .with_stderr_contains("[RUNNING] [..] (target/release/deps/bar-[..][EXE])")
+ .with_stdout_contains("test bench_bar ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn json_artifact_includes_executable_for_benchmark() {
+ let p = project()
+ .file(
+ "benches/benchmark.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ use test::Bencher;
+
+ #[bench]
+ fn bench_foo(_: &mut Bencher) -> () { () }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench --no-run --message-format=json")
+ .with_json(
+ r#"
+ {
+ "executable": "[..]/foo/target/release/deps/benchmark-[..][EXE]",
+ "features": [],
+ "filenames": "{...}",
+ "fresh": false,
+ "package_id": "foo 0.0.1 ([..])",
+ "manifest_path": "[..]",
+ "profile": "{...}",
+ "reason": "compiler-artifact",
+ "target": {
+ "crate_types": [ "bin" ],
+ "kind": [ "bench" ],
+ "doc": false,
+ "doctest": false,
+ "edition": "2015",
+ "name": "benchmark",
+ "src_path": "[..]/foo/benches/benchmark.rs",
+ "test": false
+ }
+ }
+
+ {"reason": "build-finished", "success": true}
+ "#,
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/binary_name.rs b/src/tools/cargo/tests/testsuite/binary_name.rs
new file mode 100644
index 000000000..7735d6054
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/binary_name.rs
@@ -0,0 +1,301 @@
+use cargo_test_support::install::{
+ assert_has_installed_exe, assert_has_not_installed_exe, cargo_home,
+};
+use cargo_test_support::project;
+
+#[cargo_test]
+fn gated() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [[bin]]
+ name = "foo"
+ filename = "007bar"
+ path = "src/main.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() { assert!(true) }")
+ .build();
+
+ // Run cargo build.
+ p.cargo("build")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .with_status(101)
+ .with_stderr_contains("[..]feature `different-binary-name` is required")
+ .run();
+}
+
+#[cargo_test]
+// This test checks that:
+// 1. The correct binary is produced
+// 2. The deps file has the correct content
+// 3. Fingerprinting works
+// 4. The `cargo clean` command works
+fn binary_name1() {
+ // Create the project.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["different-binary-name"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [[bin]]
+ name = "foo"
+ filename = "007bar"
+ path = "src/main.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() { assert!(true) }")
+ .build();
+
+ // Run cargo build.
+ p.cargo("build")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .run();
+
+ // Check the name of the binary that cargo has generated.
+ // A binary with the name of the crate should NOT be created.
+ let foo_path = p.bin("foo");
+ assert!(!foo_path.is_file());
+    // A binary with the name provided in the `filename` parameter should be created.
+ let bar_path = p.bin("007bar");
+ assert!(bar_path.is_file());
+
+    // Check that the deps file exists.
+ let deps_path = p.bin("007bar").with_extension("d");
+ assert!(deps_path.is_file(), "{:?}", bar_path);
+
+ let depinfo = p.read_file(deps_path.to_str().unwrap());
+
+    // Prepare the content we expect to be present in the deps file.
+ let deps_exp = format!(
+ "{}: {}",
+ p.bin("007bar").to_str().unwrap(),
+ p.root().join("src").join("main.rs").to_str().unwrap()
+ );
+
+ // Compare actual deps content with expected deps content.
+ assert!(
+ depinfo.lines().any(|line| line == deps_exp),
+ "Content of `{}` is incorrect",
+ deps_path.to_string_lossy()
+ );
+
+    // Run cargo a second time to verify the fingerprint.
+ p.cargo("build -p foo -v")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .with_stderr(
+ "\
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // Run cargo clean.
+ p.cargo("clean -p foo")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .run();
+
+ // Check if the appropriate file was removed.
+ assert!(
+ !bar_path.is_file(),
+ "`cargo clean` did not remove the correct files"
+ );
+}
+
+#[cargo_test]
+// This test checks that:
+// 1. `cargo run` works
+// 2. `cargo test` works
+// 3. `cargo install/uninstall` works
+fn binary_name2() {
+ // Create the project.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["different-binary-name"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [[bin]]
+ name = "foo"
+ filename = "007bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn hello(name: &str) -> String {
+ format!("Hello, {}!", name)
+ }
+
+ fn main() {
+ println!("{}", hello("crabs"));
+ }
+
+ #[cfg(test)]
+ mod tests {
+ use super::*;
+
+ #[test]
+ fn check_crabs() {
+ assert_eq!(hello("crabs"), "Hello, crabs!");
+ }
+ }
+ "#,
+ )
+ .build();
+
+ // Run cargo build.
+ p.cargo("build")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .run();
+
+ // Check the name of the binary that cargo has generated.
+ // A binary with the name of the crate should NOT be created.
+ let foo_path = p.bin("foo");
+ assert!(!foo_path.is_file());
+    // A binary with the name provided in the `filename` parameter should be created.
+ let bar_path = p.bin("007bar");
+ assert!(bar_path.is_file());
+
+ // Check if `cargo test` works
+ p.cargo("test")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test tests::check_crabs ... ok")
+ .run();
+
+ // Check if `cargo run` is able to execute the binary
+ p.cargo("run")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .with_stdout("Hello, crabs!")
+ .run();
+
+ p.cargo("install")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .run();
+
+ assert_has_installed_exe(cargo_home(), "007bar");
+
+ p.cargo("uninstall")
+ .with_stderr("[REMOVING] [ROOT]/home/.cargo/bin/007bar[EXE]")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .run();
+
+ assert_has_not_installed_exe(cargo_home(), "007bar");
+}
+
+#[cargo_test]
+fn check_env_vars() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["different-binary-name"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [[bin]]
+ name = "foo"
+ filename = "007bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ println!("{}", option_env!("CARGO_BIN_NAME").unwrap());
+ }
+ "#,
+ )
+ .file(
+ "tests/integration.rs",
+ r#"
+ #[test]
+ fn check_env_vars2() {
+ let value = option_env!("CARGO_BIN_EXE_007bar").expect("Could not find environment variable.");
+ assert!(value.contains("007bar"));
+ }
+ "#
+ )
+ .build();
+
+ // Run cargo build.
+ p.cargo("build")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .run();
+ p.cargo("run")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .with_stdout("007bar")
+ .run();
+ p.cargo("test")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .with_status(0)
+ .run();
+}
+
+#[cargo_test]
+fn check_msg_format_json() {
+ // Create the project.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["different-binary-name"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [[bin]]
+ name = "foo"
+ filename = "007bar"
+ path = "src/main.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() { assert!(true) }")
+ .build();
+
+ let output = r#"
+{
+ "reason": "compiler-artifact",
+ "package_id": "foo 0.0.1 [..]",
+ "manifest_path": "[CWD]/Cargo.toml",
+ "target": "{...}",
+ "profile": "{...}",
+ "features": [],
+ "filenames": "{...}",
+ "executable": "[ROOT]/foo/target/debug/007bar[EXE]",
+ "fresh": false
+}
+
+{"reason":"build-finished", "success":true}
+"#;
+
+ // Run cargo build.
+ p.cargo("build --message-format=json")
+ .masquerade_as_nightly_cargo(&["different-binary-name"])
+ .with_json(output)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/build.rs b/src/tools/cargo/tests/testsuite/build.rs
new file mode 100644
index 000000000..8a1b6ca86
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/build.rs
@@ -0,0 +1,6409 @@
+//! Tests for the `cargo build` command.
+
+use cargo::{
+ core::compiler::CompileMode,
+ core::{Shell, Workspace},
+ ops::CompileOptions,
+ Config,
+};
+use cargo_test_support::compare;
+use cargo_test_support::paths::{root, CargoPathExt};
+use cargo_test_support::registry::Package;
+use cargo_test_support::tools;
+use cargo_test_support::{
+ basic_bin_manifest, basic_lib_manifest, basic_manifest, cargo_exe, git, is_nightly, main_file,
+ paths, process, project, rustc_host, sleep_ms, symlink_supported, t, Execs, ProjectBuilder,
+};
+use cargo_util::paths::dylib_path_envvar;
+use std::env;
+use std::fs;
+use std::io::Read;
+use std::process::Stdio;
+
+#[cargo_test]
+fn cargo_compile_simple() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("i am foo\n").run();
+}
+
+#[cargo_test]
+fn cargo_fail_with_no_stderr() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &String::from("refusal"))
+ .build();
+ p.cargo("build --message-format=json")
+ .with_status(101)
+ .with_stderr_does_not_contain("--- stderr")
+ .run();
+}
+
+/// Checks that the `CARGO_INCREMENTAL` environment variable results in
+/// `rustc` getting `-C incremental` passed to it.
+#[cargo_test]
+fn cargo_compile_incremental() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("build -v")
+ .env("CARGO_INCREMENTAL", "1")
+ .with_stderr_contains(
+ "[RUNNING] `rustc [..] -C incremental=[..]/target/debug/incremental[..]`\n",
+ )
+ .run();
+
+ p.cargo("test -v")
+ .env("CARGO_INCREMENTAL", "1")
+ .with_stderr_contains(
+ "[RUNNING] `rustc [..] -C incremental=[..]/target/debug/incremental[..]`\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn incremental_profile() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [profile.dev]
+ incremental = false
+
+ [profile.release]
+ incremental = true
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .env_remove("CARGO_INCREMENTAL")
+ .with_stderr_does_not_contain("[..]C incremental=[..]")
+ .run();
+
+ p.cargo("build -v")
+ .env("CARGO_INCREMENTAL", "1")
+ .with_stderr_contains("[..]C incremental=[..]")
+ .run();
+
+ p.cargo("build --release -v")
+ .env_remove("CARGO_INCREMENTAL")
+ .with_stderr_contains("[..]C incremental=[..]")
+ .run();
+
+ p.cargo("build --release -v")
+ .env("CARGO_INCREMENTAL", "0")
+ .with_stderr_does_not_contain("[..]C incremental=[..]")
+ .run();
+}
+
+#[cargo_test]
+fn incremental_config() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ incremental = false
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v")
+ .env_remove("CARGO_INCREMENTAL")
+ .with_stderr_does_not_contain("[..]C incremental=[..]")
+ .run();
+
+ p.cargo("build -v")
+ .env("CARGO_INCREMENTAL", "1")
+ .with_stderr_contains("[..]C incremental=[..]")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_workspace_excluded() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ p.cargo("build --workspace --exclude foo")
+ .with_stderr_does_not_contain("[..]virtual[..]")
+ .with_stderr_contains("[..]no packages to compile")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_manifest_path() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("build --manifest-path foo/Cargo.toml")
+ .cwd(p.root().parent().unwrap())
+ .run();
+ assert!(p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn chdir_gated() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .build();
+ p.cargo("-C foo build")
+ .cwd(p.root().parent().unwrap())
+ .with_stderr(
+ "error: the `-C` flag is unstable, \
+ pass `-Z unstable-options` on the nightly channel to enable it",
+ )
+ .with_status(101)
+ .run();
+    // Without masquerading as nightly cargo, this should also fail.
+ p.cargo("-C foo -Z unstable-options build")
+ .cwd(p.root().parent().unwrap())
+ .with_stderr(
+ "error: the `-C` flag is unstable, \
+ pass `-Z unstable-options` on the nightly channel to enable it",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_directory_not_cwd() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .file(".cargo/config.toml", &"")
+ .build();
+
+ p.cargo("-Zunstable-options -C foo build")
+ .masquerade_as_nightly_cargo(&["chdir"])
+ .cwd(p.root().parent().unwrap())
+ .run();
+ assert!(p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn cargo_compile_directory_not_cwd_with_invalid_config() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .file(".cargo/config.toml", &"!")
+ .build();
+
+ p.cargo("-Zunstable-options -C foo build")
+ .masquerade_as_nightly_cargo(&["chdir"])
+ .cwd(p.root().parent().unwrap())
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+Caused by:
+ TOML parse error at line 1, column 1
+ |
+ 1 | !
+ | ^
+ invalid key
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_invalid_manifest() {
+ let p = project().file("Cargo.toml", "").build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ virtual manifests must be configured with [workspace]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_invalid_manifest2() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ "
+ [package]
+ foo = bar
+ ",
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ could not parse input as TOML
+
+Caused by:
+ TOML parse error at line 3, column 23
+ |
+ 3 | foo = bar
+ | ^
+ invalid string
+ expected `\"`, `'`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_invalid_manifest3() {
+ let p = project().file("src/Cargo.toml", "a = bar").build();
+
+ p.cargo("build --manifest-path src/Cargo.toml")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ could not parse input as TOML
+
+Caused by:
+ TOML parse error at line 1, column 5
+ |
+ 1 | a = bar
+ | ^
+ invalid string
+ expected `\"`, `'`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_duplicate_build_targets() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "main"
+ path = "src/main.rs"
+ crate-type = ["dylib"]
+
+ [dependencies]
+ "#,
+ )
+ .file("src/main.rs", "#![allow(warnings)] fn main() {}")
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+warning: file `[..]main.rs` found to be present in multiple build targets:
+ * `lib` target `main`
+ * `bin` target `foo`
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_invalid_version() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0"))
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ unexpected end of input while parsing minor version number
+ in `package.version`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_empty_package_name() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("", "0.0.0"))
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ package name cannot be an empty string
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_invalid_package_name() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo::bar", "0.0.0"))
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ invalid character `:` in package name: `foo::bar`, [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_invalid_bin_target_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+
+ [[bin]]
+ name = ""
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ binary target names cannot be empty
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_forbidden_bin_target_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+
+ [[bin]]
+ name = "build"
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ the binary target name `build` is forbidden, it conflicts with with cargo's build directory names
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_bin_and_crate_type() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+
+ [[bin]]
+ name = "the_foo_bin"
+ path = "src/foo.rs"
+ crate-type = ["cdylib", "rlib"]
+ "#,
+ )
+ .file("src/foo.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ the target `the_foo_bin` is a binary and can't have any crate-types set \
+(currently \"cdylib, rlib\")",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_api_exposes_artifact_paths() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+
+ [[bin]]
+ name = "the_foo_bin"
+ path = "src/bin.rs"
+
+ [lib]
+ name = "the_foo_lib"
+ path = "src/foo.rs"
+ crate-type = ["cdylib", "rlib"]
+ "#,
+ )
+ .file("src/foo.rs", "pub fn bar() {}")
+ .file("src/bin.rs", "pub fn main() {}")
+ .build();
+
+ let shell = Shell::from_write(Box::new(Vec::new()));
+ let config = Config::new(shell, env::current_dir().unwrap(), paths::home());
+ let ws = Workspace::new(&p.root().join("Cargo.toml"), &config).unwrap();
+ let compile_options = CompileOptions::new(ws.config(), CompileMode::Build).unwrap();
+
+ let result = cargo::ops::compile(&ws, &compile_options).unwrap();
+
+ assert_eq!(1, result.binaries.len());
+ assert!(result.binaries[0].path.exists());
+ assert!(result.binaries[0]
+ .path
+ .to_str()
+ .unwrap()
+ .contains("the_foo_bin"));
+
+ assert_eq!(1, result.cdylibs.len());
+    // The exact library path varies by platform, but it should certainly exist
+ assert!(result.cdylibs[0].path.exists());
+ assert!(result.cdylibs[0]
+ .path
+ .to_str()
+ .unwrap()
+ .contains("the_foo_lib"));
+}
+
+#[cargo_test]
+fn cargo_compile_with_bin_and_proc() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+
+ [[bin]]
+ name = "the_foo_bin"
+ path = "src/foo.rs"
+ proc-macro = true
+ "#,
+ )
+ .file("src/foo.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ the target `the_foo_bin` is a binary and can't have `proc-macro` set `true`",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_invalid_lib_target_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+
+ [lib]
+ name = ""
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ library target names cannot be empty
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_invalid_non_numeric_dep_version() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ crossbeam = "y"
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ failed to parse the version requirement `y` for dependency `crossbeam`
+
+Caused by:
+ unexpected character 'y' while parsing major version number
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_without_manifest() {
+ let p = project().no_manifest().build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr("[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory")
+ .run();
+}
+
+#[cargo_test]
+#[cfg(target_os = "linux")]
+fn cargo_compile_with_lowercase_cargo_toml() {
+ let p = project()
+ .no_manifest()
+ .file("cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/lib.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory, \
+ but found cargo.toml please try to rename it to Cargo.toml",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_invalid_code() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", "invalid rust code!")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains(
+ "[ERROR] could not compile `foo` (bin \"foo\") due to previous error\n",
+ )
+ .run();
+ assert!(p.root().join("Cargo.lock").is_file());
+}
+
+#[cargo_test]
+fn cargo_compile_with_invalid_code_in_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ [dependencies.baz]
+ path = "../baz"
+ "#,
+ )
+ .file("src/main.rs", "invalid rust code!")
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "invalid rust code!")
+ .build();
+ let _baz = project()
+ .at("baz")
+ .file("Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("src/lib.rs", "invalid rust code!")
+ .build();
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains("[..]invalid rust code[..]")
+ .with_stderr_contains("[ERROR] could not compile [..]")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_warnings_in_the_root_package() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", "fn main() {} fn dead() {}")
+ .build();
+
+ p.cargo("build")
+ .with_stderr_contains("[WARNING] [..]dead[..]")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_warnings_in_a_dep_package() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ path = "bar"
+
+ [[bin]]
+
+ name = "foo"
+ "#,
+ )
+ .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file(
+ "bar/src/bar.rs",
+ r#"
+ pub fn gimme() -> &'static str {
+ "test passed"
+ }
+
+ fn dead() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_stderr_contains("[WARNING] [..]dead[..]")
+ .run();
+
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_nested_deps_inferred() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ path = 'bar'
+
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.baz]
+ path = "../baz"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ extern crate baz;
+
+ pub fn gimme() -> String {
+ baz::gimme()
+ }
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0"))
+ .file(
+ "baz/src/lib.rs",
+ r#"
+ pub fn gimme() -> String {
+ "test passed".to_string()
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.bin("foo").is_file());
+ assert!(!p.bin("libbar.rlib").is_file());
+ assert!(!p.bin("libbaz.rlib").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_nested_deps_correct_bin() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ path = "bar"
+
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.baz]
+ path = "../baz"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ extern crate baz;
+
+ pub fn gimme() -> String {
+ baz::gimme()
+ }
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0"))
+ .file(
+ "baz/src/lib.rs",
+ r#"
+ pub fn gimme() -> String {
+ "test passed".to_string()
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.bin("foo").is_file());
+ assert!(!p.bin("libbar.rlib").is_file());
+ assert!(!p.bin("libbaz.rlib").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_nested_deps_shorthand() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.baz]
+ path = "../baz"
+
+ [lib]
+
+ name = "bar"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
+ extern crate baz;
+
+ pub fn gimme() -> String {
+ baz::gimme()
+ }
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+ .file(
+ "baz/src/baz.rs",
+ r#"
+ pub fn gimme() -> String {
+ "test passed".to_string()
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.bin("foo").is_file());
+ assert!(!p.bin("libbar.rlib").is_file());
+ assert!(!p.bin("libbaz.rlib").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_nested_deps_longhand() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ path = "bar"
+ version = "0.5.0"
+
+ [[bin]]
+
+ name = "foo"
+ "#,
+ )
+ .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.baz]
+ path = "../baz"
+ version = "0.5.0"
+
+ [lib]
+
+ name = "bar"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
+ extern crate baz;
+
+ pub fn gimme() -> String {
+ baz::gimme()
+ }
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+ .file(
+ "baz/src/baz.rs",
+ r#"
+ pub fn gimme() -> String {
+ "test passed".to_string()
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.bin("foo").is_file());
+ assert!(!p.bin("libbar.rlib").is_file());
+ assert!(!p.bin("libbaz.rlib").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+}
+
+// Check that Cargo gives a sensible error if a dependency can't be found
+// because of a name mismatch.
+#[cargo_test]
+fn cargo_compile_with_dep_name_mismatch() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.0.1"
+ authors = ["wycats@example.com"]
+
+ [[bin]]
+
+ name = "foo"
+
+ [dependencies.notquitebar]
+
+ path = "bar"
+ "#,
+ )
+ .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &["bar"]))
+ .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+ .file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[]))
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: no matching package named `notquitebar` found
+location searched: [CWD]/bar
+required by package `foo v0.0.1 ([CWD])`
+",
+ )
+ .run();
+}
+
+// Ensure that renamed deps have a valid name
+#[cargo_test]
+fn cargo_compile_with_invalid_dep_rename() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "buggin"
+ version = "0.1.0"
+
+ [dependencies]
+ "haha this isn't a valid name 🐛" = { package = "libc", version = "0.1" }
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""What's good?""#, &[]))
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ invalid character ` ` in dependency name: `haha this isn't a valid name 🐛`, characters must be Unicode XID characters (numbers, `-`, `_`, or most letters)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_filename() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "src/bin/a.rs",
+ r#"
+ extern crate foo;
+ fn main() { println!("hello a.rs"); }
+ "#,
+ )
+ .file("examples/a.rs", r#"fn main() { println!("example"); }"#)
+ .build();
+
+ p.cargo("build --bin bin.rs")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no bin target named `bin.rs`.
+Available bin targets:
+ a
+
+",
+ )
+ .run();
+
+ p.cargo("build --bin a.rs")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no bin target named `a.rs`
+
+<tab>Did you mean `a`?",
+ )
+ .run();
+
+ p.cargo("build --example example.rs")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no example target named `example.rs`.
+Available example targets:
+ a
+
+",
+ )
+ .run();
+
+ p.cargo("build --example a.rs")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no example target named `a.rs`
+
+<tab>Did you mean `a`?",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn incompatible_dependencies() {
+ Package::new("bad", "0.1.0").publish();
+ Package::new("bad", "1.0.0").publish();
+ Package::new("bad", "1.0.1").publish();
+ Package::new("bad", "1.0.2").publish();
+ Package::new("bar", "0.1.0").dep("bad", "0.1.0").publish();
+ Package::new("baz", "0.1.1").dep("bad", "=1.0.0").publish();
+ Package::new("baz", "0.1.0").dep("bad", "=1.0.0").publish();
+ Package::new("qux", "0.1.2").dep("bad", ">=1.0.1").publish();
+ Package::new("qux", "0.1.1").dep("bad", ">=1.0.1").publish();
+ Package::new("qux", "0.1.0").dep("bad", ">=1.0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = "0.1.0"
+ baz = "0.1.0"
+ qux = "0.1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main(){}")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: failed to select a version for `bad`.
+ ... required by package `qux v0.1.0`
+ ... which satisfies dependency `qux = \"^0.1.0\"` of package `foo v0.0.1 ([..])`
+versions that meet the requirements `>=1.0.1` are: 1.0.2, 1.0.1
+
+all possible versions conflict with previously selected packages.
+
+ previously selected package `bad v1.0.0`
+ ... which satisfies dependency `bad = \"=1.0.0\"` of package `baz v0.1.0`
+ ... which satisfies dependency `baz = \"^0.1.0\"` of package `foo v0.0.1 ([..])`
+
+failed to select a version for `bad` which could resolve this conflict",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn incompatible_dependencies_with_multi_semver() {
+ Package::new("bad", "1.0.0").publish();
+ Package::new("bad", "1.0.1").publish();
+ Package::new("bad", "2.0.0").publish();
+ Package::new("bad", "2.0.1").publish();
+ Package::new("bar", "0.1.0").dep("bad", "=1.0.0").publish();
+ Package::new("baz", "0.1.0").dep("bad", ">=2.0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = "0.1.0"
+ baz = "0.1.0"
+ bad = ">=1.0.1, <=2.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main(){}")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: failed to select a version for `bad`.
+ ... required by package `foo v0.0.1 ([..])`
+versions that meet the requirements `>=1.0.1, <=2.0.0` are: 2.0.0, 1.0.1
+
+all possible versions conflict with previously selected packages.
+
+ previously selected package `bad v2.0.1`
+ ... which satisfies dependency `bad = \">=2.0.1\"` of package `baz v0.1.0`
+ ... which satisfies dependency `baz = \"^0.1.0\"` of package `foo v0.0.1 ([..])`
+
+ previously selected package `bad v1.0.0`
+ ... which satisfies dependency `bad = \"=1.0.0\"` of package `bar v0.1.0`
+ ... which satisfies dependency `bar = \"^0.1.0\"` of package `foo v0.0.1 ([..])`
+
+failed to select a version for `bad` which could resolve this conflict",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn compile_path_dep_then_change_version() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+
+ p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.0.2"));
+
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn ignores_carriage_return_in_lockfile() {
+ let p = project()
+ .file("src/main.rs", "mod a; fn main() {}")
+ .file("src/a.rs", "")
+ .build();
+
+ p.cargo("build").run();
+
+ let lock = p.read_lockfile();
+ p.change_file("Cargo.lock", &lock.replace("\n", "\r\n"));
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn cargo_default_env_metadata_env_var() {
+ // Ensure that path dep + dylib + env_var get metadata
+ // (even though path_dep + dylib should not)
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/lib.rs", "// hi")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "bar"
+ crate_type = ["dylib"]
+ "#,
+ )
+ .file("bar/src/lib.rs", "// hello")
+ .build();
+
+ // No metadata on libbar since it's a dylib path dependency
+ p.cargo("build -v")
+ .with_stderr(&format!(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type dylib \
+ --emit=[..]link \
+ -C prefer-dynamic[..]-C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps`
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]-C debuginfo=2 \
+ -C metadata=[..] \
+ -C extra-filename=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps \
+ --extern bar=[CWD]/target/debug/deps/{prefix}bar{suffix}`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ prefix = env::consts::DLL_PREFIX,
+ suffix = env::consts::DLL_SUFFIX,
+ ))
+ .run();
+
+ p.cargo("clean").run();
+
+ // If you set the env-var, then we expect metadata on libbar
+ p.cargo("build -v")
+ .env("__CARGO_DEFAULT_LIB_METADATA", "stable")
+ .with_stderr(&format!(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type dylib \
+ --emit=[..]link \
+ -C prefer-dynamic[..]-C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps`
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]-C debuginfo=2 \
+ -C metadata=[..] \
+ -C extra-filename=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps \
+ --extern bar=[CWD]/target/debug/deps/{prefix}bar-[..]{suffix}`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ prefix = env::consts::DLL_PREFIX,
+ suffix = env::consts::DLL_SUFFIX,
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn crate_env_vars() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.1-alpha.1"
+ description = "This is foo"
+ homepage = "https://example.com"
+ repository = "https://example.com/repo.git"
+ authors = ["wycats@example.com"]
+ license = "MIT OR Apache-2.0"
+ license-file = "license.txt"
+ rust-version = "1.61.0"
+ readme = "../../README.md"
+
+ [[bin]]
+ name = "foo-bar"
+ path = "src/main.rs"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ extern crate foo;
+
+
+ static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR");
+ static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR");
+ static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH");
+ static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE");
+ static VERSION: &'static str = env!("CARGO_PKG_VERSION");
+ static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR");
+ static PKG_NAME: &'static str = env!("CARGO_PKG_NAME");
+ static HOMEPAGE: &'static str = env!("CARGO_PKG_HOMEPAGE");
+ static REPOSITORY: &'static str = env!("CARGO_PKG_REPOSITORY");
+ static LICENSE: &'static str = env!("CARGO_PKG_LICENSE");
+ static LICENSE_FILE: &'static str = env!("CARGO_PKG_LICENSE_FILE");
+ static DESCRIPTION: &'static str = env!("CARGO_PKG_DESCRIPTION");
+ static RUST_VERSION: &'static str = env!("CARGO_PKG_RUST_VERSION");
+ static README: &'static str = env!("CARGO_PKG_README");
+ static BIN_NAME: &'static str = env!("CARGO_BIN_NAME");
+ static CRATE_NAME: &'static str = env!("CARGO_CRATE_NAME");
+
+
+ fn main() {
+ let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR,
+ VERSION_MINOR, VERSION_PATCH, VERSION_PRE,
+ CARGO_MANIFEST_DIR);
+ assert_eq!(s, foo::version());
+ println!("{}", s);
+ assert_eq!("foo", PKG_NAME);
+ assert_eq!("foo-bar", BIN_NAME);
+ assert_eq!("foo_bar", CRATE_NAME);
+ assert_eq!("https://example.com", HOMEPAGE);
+ assert_eq!("https://example.com/repo.git", REPOSITORY);
+ assert_eq!("MIT OR Apache-2.0", LICENSE);
+ assert_eq!("license.txt", LICENSE_FILE);
+ assert_eq!("This is foo", DESCRIPTION);
+ assert_eq!("1.61.0", RUST_VERSION);
+ assert_eq!("../../README.md", README);
+ let s = format!("{}.{}.{}-{}", VERSION_MAJOR,
+ VERSION_MINOR, VERSION_PATCH, VERSION_PRE);
+ assert_eq!(s, VERSION);
+
+ // Verify CARGO_TARGET_TMPDIR isn't set for bins
+ assert!(option_env!("CARGO_TARGET_TMPDIR").is_none());
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ use std::env;
+ use std::path::PathBuf;
+
+ pub fn version() -> String {
+ format!("{}-{}-{} @ {} in {}",
+ env!("CARGO_PKG_VERSION_MAJOR"),
+ env!("CARGO_PKG_VERSION_MINOR"),
+ env!("CARGO_PKG_VERSION_PATCH"),
+ env!("CARGO_PKG_VERSION_PRE"),
+ env!("CARGO_MANIFEST_DIR"))
+ }
+
+ pub fn check_no_int_test_env() {
+ env::var("CARGO_TARGET_DIR").unwrap_err();
+ }
+
+ pub fn check_tmpdir(tmp: Option<&'static str>) {
+ let tmpdir: PathBuf = tmp.unwrap().into();
+
+ let exe: PathBuf = env::current_exe().unwrap().into();
+ let mut expected: PathBuf = exe.parent().unwrap()
+ .parent().unwrap()
+ .parent().unwrap()
+ .into();
+ expected.push("tmp");
+ assert_eq!(tmpdir, expected);
+
+ // Check that CARGO_TARGET_TMPDIR isn't set for lib code
+ assert!(option_env!("CARGO_TARGET_TMPDIR").is_none());
+ env::var("CARGO_TARGET_TMPDIR").unwrap_err();
+ }
+
+ #[test]
+ fn env() {
+ // Check that CARGO_TARGET_TMPDIR isn't set for unit tests
+ assert!(option_env!("CARGO_TARGET_TMPDIR").is_none());
+ env::var("CARGO_TARGET_TMPDIR").unwrap_err();
+ }
+ "#,
+ )
+ .file(
+ "examples/ex-env-vars.rs",
+ r#"
+ static PKG_NAME: &'static str = env!("CARGO_PKG_NAME");
+ static BIN_NAME: &'static str = env!("CARGO_BIN_NAME");
+ static CRATE_NAME: &'static str = env!("CARGO_CRATE_NAME");
+
+ fn main() {
+ assert_eq!("foo", PKG_NAME);
+ assert_eq!("ex-env-vars", BIN_NAME);
+ assert_eq!("ex_env_vars", CRATE_NAME);
+
+ // Verify CARGO_TARGET_TMPDIR isn't set for examples
+ assert!(option_env!("CARGO_TARGET_TMPDIR").is_none());
+ }
+ "#,
+ )
+ .file(
+ "tests/env.rs",
+ r#"
+ #[test]
+ fn env() {
+ foo::check_tmpdir(option_env!("CARGO_TARGET_TMPDIR"));
+ }
+ "#,
+ );
+
+ let p = if is_nightly() {
+ p.file(
+ "benches/env.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ use test::Bencher;
+
+ #[bench]
+ fn env(_: &mut Bencher) {
+ foo::check_tmpdir(option_env!("CARGO_TARGET_TMPDIR"));
+ }
+ "#,
+ )
+ .build()
+ } else {
+ p.build()
+ };
+
+ println!("build");
+ p.cargo("build -v").run();
+
+ println!("bin");
+ p.process(&p.bin("foo-bar"))
+ .with_stdout("0-5-1 @ alpha.1 in [CWD]")
+ .run();
+
+ println!("example");
+ p.cargo("run --example ex-env-vars -v").run();
+
+ println!("test");
+ p.cargo("test -v").run();
+
+ if is_nightly() {
+ println!("bench");
+ p.cargo("bench -v").run();
+ }
+}
+
+#[cargo_test]
+fn crate_authors_env_vars() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.1-alpha.1"
+ authors = ["wycats@example.com", "neikos@example.com"]
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ extern crate foo;
+
+ static AUTHORS: &'static str = env!("CARGO_PKG_AUTHORS");
+
+ fn main() {
+ let s = "wycats@example.com:neikos@example.com";
+ assert_eq!(AUTHORS, foo::authors());
+ println!("{}", AUTHORS);
+ assert_eq!(s, AUTHORS);
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn authors() -> String {
+ format!("{}", env!("CARGO_PKG_AUTHORS"))
+ }
+ "#,
+ )
+ .build();
+
+ println!("build");
+ p.cargo("build -v").run();
+
+ println!("bin");
+ p.process(&p.bin("foo"))
+ .with_stdout("wycats@example.com:neikos@example.com")
+ .run();
+
+ println!("test");
+ p.cargo("test -v").run();
+}
+
+#[cargo_test]
+fn vv_prints_rustc_env_vars() {
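+    // `-vv` echoes the CARGO_PKG_* variables passed to rustc; the expected
+    // quoting differs between cmd.exe (`set VAR=...&&`) and POSIX shells.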
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = ["escape='\"@example.com"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ let mut b = p.cargo("build -vv");
+
+ if cfg!(windows) {
+ b.with_stderr_contains(
+ "[RUNNING] `[..]set CARGO_PKG_NAME=foo&& [..]rustc [..]`"
+ ).with_stderr_contains(
+ r#"[RUNNING] `[..]set CARGO_PKG_AUTHORS="escape='\"@example.com"&& [..]rustc [..]`"#
+ )
+ } else {
+ b.with_stderr_contains("[RUNNING] `[..]CARGO_PKG_NAME=foo [..]rustc [..]`")
+ .with_stderr_contains(
+ r#"[RUNNING] `[..]CARGO_PKG_AUTHORS='escape='\''"@example.com' [..]rustc [..]`"#,
+ )
+ };
+
+ b.run();
+}
+
+// The tester may already have LD_LIBRARY_PATH=::/foo/bar set, which would lead to a false positive, so strip any empty components first.
+fn setenv_for_removing_empty_component(mut execs: Execs) -> Execs {
+ let v = dylib_path_envvar();
+ if let Ok(search_path) = env::var(v) {
+ let new_search_path =
+ env::join_paths(env::split_paths(&search_path).filter(|e| !e.as_os_str().is_empty()))
+ .expect("join_paths");
+ execs.env(v, new_search_path); // build_command() will override LD_LIBRARY_PATH accordingly
+ }
+ execs
+}
+
+// Regression test for #4277
+#[cargo_test]
+fn crate_library_path_env_var() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
+ fn main() {{
+ let search_path = env!("{}");
+ let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
+ assert!(!paths.contains(&"".into()));
+ }}
+ "#,
+ dylib_path_envvar()
+ ),
+ )
+ .build();
+
+ setenv_for_removing_empty_component(p.cargo("run")).run();
+}
+
+// Regression test for #4277
+#[cargo_test]
+fn build_with_fake_libc_not_loading() {
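+    // The empty fake `libc.so.6` in the package root must never be loaded;
+    // that could only happen if an empty component (i.e. the current
+    // directory) slipped into the dynamic library search path.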
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", r#" "#)
+ .file("libc.so.6", r#""#)
+ .build();
+
+ setenv_for_removing_empty_component(p.cargo("build")).run();
+}
+
+// This tests that the legacy `src/<pkg-name>.rs` library location still works (for now).
+#[cargo_test]
+fn many_crate_types_old_style_lib_location() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [lib]
+
+ name = "foo"
+ crate_type = ["rlib", "dylib"]
+ "#,
+ )
+ .file("src/foo.rs", "pub fn foo() {}")
+ .build();
+ p.cargo("build")
+ .with_stderr_contains(
+ "\
+[WARNING] path `[..]src/foo.rs` was erroneously implicitly accepted for library `foo`,
+please rename the file to `src/lib.rs` or set lib.path in Cargo.toml",
+ )
+ .run();
+
+ assert!(p.root().join("target/debug/libfoo.rlib").is_file());
+ let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+ assert!(p.root().join("target/debug").join(&fname).is_file());
+}
+
+#[cargo_test]
+fn many_crate_types_correct() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [lib]
+
+ name = "foo"
+ crate_type = ["rlib", "dylib"]
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+ p.cargo("build").run();
+
+ assert!(p.root().join("target/debug/libfoo.rlib").is_file());
+ let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+ assert!(p.root().join("target/debug").join(&fname).is_file());
+}
+
+#[cargo_test]
+fn set_both_dylib_and_cdylib_crate_types() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [lib]
+
+ name = "foo"
+ crate_type = ["cdylib", "dylib"]
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ library `foo` cannot set the crate type of both `dylib` and `cdylib`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dev_dependencies_conflicting_warning() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dev-dependencies]
+ a = {path = "a"}
+ [dev_dependencies]
+ a = {path = "a"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+ p.cargo("build")
+ .with_stderr_contains(
+"[WARNING] conflicting between `dev-dependencies` and `dev_dependencies` in the `foo` package.\n
+ `dev_dependencies` is ignored and not recommended for use in the future"
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_dependencies_conflicting_warning() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [build-dependencies]
+ a = {path = "a"}
+ [build_dependencies]
+ a = {path = "a"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+ p.cargo("build")
+ .with_stderr_contains(
+"[WARNING] conflicting between `build-dependencies` and `build_dependencies` in the `foo` package.\n
+ `build_dependencies` is ignored and not recommended for use in the future"
+ )
+ .run();
+}
+
+#[cargo_test]
+fn lib_crate_types_conflicting_warning() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [lib]
+ name = "foo"
+ crate-type = ["rlib", "dylib"]
+ crate_type = ["staticlib", "dylib"]
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+ p.cargo("build")
+ .with_stderr_contains(
+"[WARNING] conflicting between `crate-type` and `crate_type` in the `foo` library target.\n
+ `crate_type` is ignored and not recommended for use in the future",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn examples_crate_types_conflicting_warning() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [[example]]
+ name = "ex"
+ path = "examples/ex.rs"
+ crate-type = ["rlib", "dylib"]
+ crate_type = ["proc_macro"]
+ [[example]]
+ name = "goodbye"
+ path = "examples/ex-goodbye.rs"
+ crate-type = ["rlib", "dylib"]
+ crate_type = ["rlib", "staticlib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "examples/ex.rs",
+ r#"
+ fn main() { println!("ex"); }
+ "#,
+ )
+ .file(
+ "examples/ex-goodbye.rs",
+ r#"
+ fn main() { println!("goodbye"); }
+ "#,
+ )
+ .build();
+ p.cargo("build")
+ .with_stderr_contains(
+ "\
+[WARNING] conflicting between `crate-type` and `crate_type` in the `ex` example target.\n
+ `crate_type` is ignored and not recommended for use in the future
+[WARNING] conflicting between `crate-type` and `crate_type` in the `goodbye` example target.\n
+ `crate_type` is ignored and not recommended for use in the future",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn self_dependency() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "test"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.test]
+
+ path = "."
+
+ [lib]
+ name = "test"
+ path = "src/test.rs"
+ "#,
+ )
+ .file("src/test.rs", "fn main() {}")
+ .build();
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] cyclic package dependency: package `test v0.0.0 ([CWD])` depends on itself. Cycle:
+package `test v0.0.0 ([CWD])`
+ ... which satisfies path dependency `test` of package `test v0.0.0 ([..])`",
+ )
+ .run();
+}
+
+#[cargo_test]
+/// Make sure broken and looping symlinks don't break the build.
+///
+/// This test requires the ability to create symlinks.
+/// On Windows, that may require enabling developer mode.
+fn ignore_broken_symlinks() {
+ if !symlink_supported() {
+ return;
+ }
+
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .symlink("Notafile", "bar")
+ // To hit the symlink directory, we need a build script
+ // to trigger a full scan of package files.
+ .file("build.rs", &main_file(r#""build script""#, &[]))
+ .symlink_dir("a/b", "a/b/c/d/foo")
+ .build();
+
+ p.cargo("build")
+ .with_stderr_contains(
+ "[WARNING] File system loop found: [..]/a/b/c/d/foo points to an ancestor [..]/a/b",
+ )
+ .run();
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("i am foo\n").run();
+}
+
+#[cargo_test]
+fn missing_lib_and_bin() {
+ let p = project().build();
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]Cargo.toml`
+
+Caused by:
+ no targets specified in the manifest
+ either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn lto_build() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "test"
+ version = "0.0.0"
+ authors = []
+
+ [profile.release]
+ lto = true
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("build -v --release")
+ .with_stderr(
+ "\
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/main.rs [..]--crate-type bin \
+ --emit=[..]link \
+ -C opt-level=3 \
+ -C lto \
+ [..]
+[FINISHED] release [optimized] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn verbose_build() {
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]-C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn verbose_release_build() {
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("build -v --release")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]\
+ -C opt-level=3[..]\
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/release/deps`
+[FINISHED] release [optimized] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn verbose_release_build_short() {
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("build -v -r")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]\
+ -C opt-level=3[..]\
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/release/deps`
+[FINISHED] release [optimized] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn verbose_release_build_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "test"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.foo]
+ path = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [lib]
+ name = "foo"
+ crate_type = ["dylib", "rlib"]
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+ p.cargo("build -v --release")
+ .with_stderr(&format!(
+ "\
+[COMPILING] foo v0.0.0 ([CWD]/foo)
+[RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\
+ --crate-type dylib --crate-type rlib \
+ --emit=[..]link \
+ -C prefer-dynamic[..]\
+ -C opt-level=3[..]\
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/release/deps`
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]\
+ -C opt-level=3[..]\
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/release/deps \
+ --extern foo=[CWD]/target/release/deps/{prefix}foo{suffix} \
+ --extern foo=[CWD]/target/release/deps/libfoo.rlib`
+[FINISHED] release [optimized] target(s) in [..]
+",
+ prefix = env::consts::DLL_PREFIX,
+ suffix = env::consts::DLL_SUFFIX
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn explicit_examples() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ authors = []
+
+ [lib]
+ name = "foo"
+ path = "src/lib.rs"
+
+ [[example]]
+ name = "hello"
+ path = "examples/ex-hello.rs"
+
+ [[example]]
+ name = "goodbye"
+ path = "examples/ex-goodbye.rs"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn get_hello() -> &'static str { "Hello" }
+ pub fn get_goodbye() -> &'static str { "Goodbye" }
+ pub fn get_world() -> &'static str { "World" }
+ "#,
+ )
+ .file(
+ "examples/ex-hello.rs",
+ r#"
+ extern crate foo;
+ fn main() { println!("{}, {}!", foo::get_hello(), foo::get_world()); }
+ "#,
+ )
+ .file(
+ "examples/ex-goodbye.rs",
+ r#"
+ extern crate foo;
+ fn main() { println!("{}, {}!", foo::get_goodbye(), foo::get_world()); }
+ "#,
+ )
+ .build();
+
+ p.cargo("build --examples").run();
+ p.process(&p.bin("examples/hello"))
+ .with_stdout("Hello, World!\n")
+ .run();
+ p.process(&p.bin("examples/goodbye"))
+ .with_stdout("Goodbye, World!\n")
+ .run();
+}
+
+#[cargo_test]
+fn non_existing_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [lib]
+ name = "foo"
+ path = "src/lib.rs"
+
+ [[test]]
+ name = "hello"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build --tests -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ can't find `hello` test at `tests/hello.rs` or `tests/hello/main.rs`. \
+ Please specify test.path if you want to use a non-default path.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn non_existing_example() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [lib]
+ name = "foo"
+ path = "src/lib.rs"
+
+ [[example]]
+ name = "hello"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build --examples -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ can't find `hello` example at `examples/hello.rs` or `examples/hello/main.rs`. \
+ Please specify example.path if you want to use a non-default path.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn non_existing_benchmark() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [lib]
+ name = "foo"
+ path = "src/lib.rs"
+
+ [[bench]]
+ name = "hello"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build --benches -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ can't find `hello` bench at `benches/hello.rs` or `benches/hello/main.rs`. \
+ Please specify bench.path if you want to use a non-default path.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn non_existing_binary() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file("src/bin/ehlo.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ can't find `foo` bin at `src/bin/foo.rs` or `src/bin/foo/main.rs`. \
+ Please specify bin.path if you want to use a non-default path.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn commonly_wrong_path_of_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [lib]
+ name = "foo"
+ path = "src/lib.rs"
+
+ [[test]]
+ name = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("test/foo.rs", "")
+ .build();
+
+ p.cargo("build --tests -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ can't find `foo` test at default paths, but found a file at `test/foo.rs`.
+ Perhaps rename the file to `tests/foo.rs` for target auto-discovery, \
+ or specify test.path if you want to use a non-default path.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn commonly_wrong_path_of_example() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [lib]
+ name = "foo"
+ path = "src/lib.rs"
+
+ [[example]]
+ name = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("example/foo.rs", "")
+ .build();
+
+ p.cargo("build --examples -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ can't find `foo` example at default paths, but found a file at `example/foo.rs`.
+ Perhaps rename the file to `examples/foo.rs` for target auto-discovery, \
+ or specify example.path if you want to use a non-default path.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn commonly_wrong_path_of_benchmark() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [lib]
+ name = "foo"
+ path = "src/lib.rs"
+
+ [[bench]]
+ name = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bench/foo.rs", "")
+ .build();
+
+ p.cargo("build --benches -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ can't find `foo` bench at default paths, but found a file at `bench/foo.rs`.
+ Perhaps rename the file to `benches/foo.rs` for target auto-discovery, \
+ or specify bench.path if you want to use a non-default path.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn commonly_wrong_path_binary() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file("src/bins/foo.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ can't find `foo` bin at default paths, but found a file at `src/bins/foo.rs`.
+ Perhaps rename the file to `src/bin/foo.rs` for target auto-discovery, \
+ or specify bin.path if you want to use a non-default path.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn commonly_wrong_path_subdir_binary() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file("src/bins/foo/main.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ can't find `foo` bin at default paths, but found a file at `src/bins/foo/main.rs`.
+ Perhaps rename the file to `src/bin/foo/main.rs` for target auto-discovery, \
+ or specify bin.path if you want to use a non-default path.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn found_multiple_target_files() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file("src/bin/foo.rs", "")
+ .file("src/bin/foo/main.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ // Don't assert the inferred paths since the order is non-deterministic.
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ cannot infer path for `foo` bin
+ Cargo doesn't know which to use because multiple target files found \
+ at `src/bin/foo[..].rs` and `src/bin/foo[..].rs`.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn legacy_binary_paths_warnings() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ authors = []
+
+ [[bin]]
+ name = "bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr_contains(
+ "\
+[WARNING] path `[..]src/main.rs` was erroneously implicitly accepted for binary `bar`,
+please set bin.path in Cargo.toml",
+ )
+ .run();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ authors = []
+
+ [[bin]]
+ name = "bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/bin/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr_contains(
+ "\
+[WARNING] path `[..]src/bin/main.rs` was erroneously implicitly accepted for binary `bar`,
+please set bin.path in Cargo.toml",
+ )
+ .run();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ authors = []
+
+ [[bin]]
+ name = "bar"
+ "#,
+ )
+ .file("src/bar.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr_contains(
+ "\
+[WARNING] path `[..]src/bar.rs` was erroneously implicitly accepted for binary `bar`,
+please set bin.path in Cargo.toml",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn implicit_examples() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn get_hello() -> &'static str { "Hello" }
+ pub fn get_goodbye() -> &'static str { "Goodbye" }
+ pub fn get_world() -> &'static str { "World" }
+ "#,
+ )
+ .file(
+ "examples/hello.rs",
+ r#"
+ extern crate foo;
+ fn main() {
+ println!("{}, {}!", foo::get_hello(), foo::get_world());
+ }
+ "#,
+ )
+ .file(
+ "examples/goodbye.rs",
+ r#"
+ extern crate foo;
+ fn main() {
+ println!("{}, {}!", foo::get_goodbye(), foo::get_world());
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build --examples").run();
+ p.process(&p.bin("examples/hello"))
+ .with_stdout("Hello, World!\n")
+ .run();
+ p.process(&p.bin("examples/goodbye"))
+ .with_stdout("Goodbye, World!\n")
+ .run();
+}
+
+#[cargo_test]
+fn standard_build_no_ndebug() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/foo.rs",
+ r#"
+ fn main() {
+ if cfg!(debug_assertions) {
+ println!("slow")
+ } else {
+ println!("fast")
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+ p.process(&p.bin("foo")).with_stdout("slow\n").run();
+}
+
+#[cargo_test]
+fn release_build_ndebug() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/foo.rs",
+ r#"
+ fn main() {
+ if cfg!(debug_assertions) {
+ println!("slow")
+ } else {
+ println!("fast")
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build --release").run();
+ p.process(&p.release_bin("foo")).with_stdout("fast\n").run();
+}
+
+#[cargo_test]
+fn inferred_main_bin() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ p.cargo("build").run();
+ p.process(&p.bin("foo")).run();
+}
+
+#[cargo_test]
+fn deletion_causes_failure() {
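+    // After the manifest is replaced with one that drops the `bar` dependency,
+    // the rebuild must fail because `src/main.rs` still references the crate.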
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.change_file("Cargo.toml", &basic_manifest("foo", "0.0.1"));
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains("[..]can't find crate for `bar`")
+ .run();
+}
+
+#[cargo_test]
+fn bad_cargo_toml_in_target_dir() {
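+    // A malformed Cargo.toml inside `target/` is ignored; the target directory
+    // is not scanned as part of the package sources.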
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("target/Cargo.toml", "bad-toml")
+ .build();
+
+ p.cargo("build").run();
+ p.process(&p.bin("foo")).run();
+}
+
+#[cargo_test]
+fn lib_with_standard_name() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("syntax", "0.0.1"))
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file(
+ "src/main.rs",
+ "extern crate syntax; fn main() { syntax::foo() }",
+ )
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn simple_staticlib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ crate-type = ["staticlib"]
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ // env var is a test for #1381
+ p.cargo("build").env("CARGO_LOG", "nekoneko=trace").run();
+}
+
+#[cargo_test]
+fn staticlib_rlib_and_bin() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ crate-type = ["staticlib", "rlib"]
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file("src/main.rs", "extern crate foo; fn main() { foo::foo(); }")
+ .build();
+
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn opt_out_of_bin() {
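+    // `bin = []` declares zero binary targets, so the broken `src/main.rs`
+    // is never discovered or compiled.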
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ bin = []
+
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "bad syntax")
+ .build();
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn single_lib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ path = "src/bar.rs"
+ "#,
+ )
+ .file("src/bar.rs", "")
+ .build();
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn freshness_ignores_excluded() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ build = "build.rs"
+ exclude = ["src/b*.rs"]
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ .build();
+ foo.root().move_into_the_past();
+
+ foo.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ // Smoke test to make sure it doesn't compile again
+ println!("first pass");
+ foo.cargo("build").with_stdout("").run();
+
+ // Modify an ignored file and make sure we don't rebuild
+ println!("second pass");
+ foo.change_file("src/bar.rs", "");
+ foo.cargo("build").with_stdout("").run();
+}
+
+#[cargo_test]
+fn rebuild_preserves_out_dir() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ build = 'build.rs'
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ use std::fs::File;
+ use std::path::Path;
+
+ fn main() {
+ let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo");
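+                        // The first build (FIRST set) creates the file; the
+                        // rebuild must still be able to open it, proving that
+                        // OUT_DIR was preserved between runs.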
+ if env::var_os("FIRST").is_some() {
+ File::create(&path).unwrap();
+ } else {
+                            File::open(&path).unwrap();
+ }
+ }
+ "#,
+ )
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ .build();
+ foo.root().move_into_the_past();
+
+ foo.cargo("build")
+ .env("FIRST", "1")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ foo.change_file("src/bar.rs", "");
+ foo.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dep_no_libs() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.0"))
+ .file("bar/src/main.rs", "")
+ .build();
+ foo.cargo("build").run();
+}
+
+#[cargo_test]
+fn recompile_space_in_name() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [lib]
+ name = "foo"
+ path = "src/my lib.rs"
+ "#,
+ )
+ .file("src/my lib.rs", "")
+ .build();
+ foo.cargo("build").run();
+ foo.root().move_into_the_past();
+ foo.cargo("build").with_stdout("").run();
+}
+
+#[cfg(unix)]
+#[cargo_test]
+fn credentials_is_unreadable() {
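+    // A credentials file that cannot be read (mode 0o000) must not break an
+    // ordinary `cargo build` that never needs the token.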
+ use cargo_test_support::paths::home;
+ use std::os::unix::prelude::*;
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ let credentials = home().join(".cargo/credentials.toml");
+ t!(fs::create_dir_all(credentials.parent().unwrap()));
+ t!(fs::write(
+ &credentials,
+ r#"
+ [registry]
+ token = "api-token"
+ "#
+ ));
+ let stat = fs::metadata(credentials.as_path()).unwrap();
+ let mut perms = stat.permissions();
+ perms.set_mode(0o000);
+ fs::set_permissions(credentials, perms).unwrap();
+
+ p.cargo("build").run();
+}
+
+#[cfg(unix)]
+#[cargo_test]
+fn ignore_bad_directories() {
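+    // A directory without the execute bit (mode 0o644) cannot be traversed;
+    // the build should skip it instead of failing. Permissions are restored
+    // afterwards so the test directory can be cleaned up.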
+ use std::os::unix::prelude::*;
+ let foo = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.0.0"))
+ .file("src/lib.rs", "")
+ .build();
+ let dir = foo.root().join("tmp");
+ fs::create_dir(&dir).unwrap();
+ let stat = fs::metadata(&dir).unwrap();
+ let mut perms = stat.permissions();
+ perms.set_mode(0o644);
+ fs::set_permissions(&dir, perms.clone()).unwrap();
+ foo.cargo("build").run();
+ perms.set_mode(0o755);
+ fs::set_permissions(&dir, perms).unwrap();
+}
+
+#[cargo_test]
+fn bad_cargo_config() {
+ let foo = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.0.0"))
+ .file("src/lib.rs", "")
+ .file(".cargo/config", "this is not valid toml")
+ .build();
+ foo.cargo("build -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] could not load Cargo configuration
+
+Caused by:
+ could not parse TOML configuration in `[..]`
+
+Caused by:
+ could not parse input as TOML
+
+Caused by:
+ TOML parse error at line 1, column 6
+ |
+ 1 | this is not valid toml
+ | ^
+ expected `.`, `=`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_platform_specific_dependency() {
+ let host = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+
+ [target.{host}.dependencies]
+ dep = {{ path = "dep" }}
+ [target.{host}.build-dependencies]
+ build = {{ path = "build" }}
+ [target.{host}.dev-dependencies]
+ dev = {{ path = "dev" }}
+ "#,
+ host = host
+ ),
+ )
+ .file("src/main.rs", "extern crate dep; fn main() { dep::dep() }")
+ .file(
+ "tests/foo.rs",
+ "extern crate dev; #[test] fn foo() { dev::dev() }",
+ )
+ .file(
+ "build.rs",
+ "extern crate build; fn main() { build::build(); }",
+ )
+ .file("dep/Cargo.toml", &basic_manifest("dep", "0.5.0"))
+ .file("dep/src/lib.rs", "pub fn dep() {}")
+ .file("build/Cargo.toml", &basic_manifest("build", "0.5.0"))
+ .file("build/src/lib.rs", "pub fn build() {}")
+ .file("dev/Cargo.toml", &basic_manifest("dev", "0.5.0"))
+ .file("dev/src/lib.rs", "pub fn dev() {}")
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.bin("foo").is_file());
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn cargo_platform_specific_dependency_build_dependencies_conflicting_warning() {
+ let host = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+
+ [target.{host}.build-dependencies]
+ build = {{ path = "build" }}
+ [target.{host}.build_dependencies]
+ build = {{ path = "build" }}
+ "#,
+ host = host
+ ),
+ )
+ .file("src/main.rs", "fn main() { }")
+ .file(
+ "build.rs",
+ "extern crate build; fn main() { build::build(); }",
+ )
+ .file("build/Cargo.toml", &basic_manifest("build", "0.5.0"))
+ .file("build/src/lib.rs", "pub fn build() {}")
+ .build();
+
+ p.cargo("build")
+ .with_stderr_contains(
+ format!("[WARNING] conflicting between `build-dependencies` and `build_dependencies` in the `{}` platform target.\n
+ `build_dependencies` is ignored and not recommended for use in the future", host)
+ )
+ .run();
+
+ assert!(p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn cargo_platform_specific_dependency_dev_dependencies_conflicting_warning() {
+ let host = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [target.{host}.dev-dependencies]
+ dev = {{ path = "dev" }}
+ [target.{host}.dev_dependencies]
+ dev = {{ path = "dev" }}
+ "#,
+ host = host
+ ),
+ )
+ .file("src/main.rs", "fn main() { }")
+ .file(
+ "tests/foo.rs",
+ "extern crate dev; #[test] fn foo() { dev::dev() }",
+ )
+ .file("dev/Cargo.toml", &basic_manifest("dev", "0.5.0"))
+ .file("dev/src/lib.rs", "pub fn dev() {}")
+ .build();
+
+ p.cargo("build")
+ .with_stderr_contains(
+ format!("[WARNING] conflicting between `dev-dependencies` and `dev_dependencies` in the `{}` platform target.\n
+ `dev_dependencies` is ignored and not recommended for use in the future", host)
+ )
+ .run();
+
+ assert!(p.bin("foo").is_file());
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn bad_platform_specific_dependency() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [target.wrong-target.dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file(
+ "bar/src/lib.rs",
+ r#"pub fn gimme() -> String { format!("") }"#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains("[..]can't find crate for `bar`")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_platform_specific_dependency_wrong_platform() {
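+    // A dependency gated on a non-matching target triple is never compiled
+    // (its sources are intentionally invalid), but it is still resolved and
+    // recorded in the lockfile.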
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [target.non-existing-triplet.dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file(
+ "bar/src/lib.rs",
+ "invalid rust file, should not be compiled",
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.bin("foo").is_file());
+ p.process(&p.bin("foo")).run();
+
+ let lockfile = p.read_lockfile();
+ assert!(lockfile.contains("bar"));
+}
+
+#[cargo_test]
+fn example_as_lib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex"
+ crate-type = ["lib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "")
+ .build();
+
+ p.cargo("build --example=ex").run();
+ assert!(p.example_lib("ex", "lib").is_file());
+}
+
+#[cargo_test]
+fn example_as_rlib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex"
+ crate-type = ["rlib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "")
+ .build();
+
+ p.cargo("build --example=ex").run();
+ assert!(p.example_lib("ex", "rlib").is_file());
+}
+
+#[cargo_test]
+fn example_as_dylib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex"
+ crate-type = ["dylib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "")
+ .build();
+
+ p.cargo("build --example=ex").run();
+ assert!(p.example_lib("ex", "dylib").is_file());
+}
+
+#[cargo_test]
+fn example_as_proc_macro() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex"
+ crate-type = ["proc-macro"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "examples/ex.rs",
+ r#"
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[proc_macro]
+ pub fn eat(_item: TokenStream) -> TokenStream {
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build --example=ex").run();
+ assert!(p.example_lib("ex", "proc-macro").is_file());
+}
+
+#[cargo_test]
+fn example_bin_same_name() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("examples/foo.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --examples").run();
+
+ assert!(!p.bin("foo").is_file());
+ // We expect a file of the form bin/foo-{metadata_hash}
+ assert!(p.bin("examples/foo").is_file());
+
+ p.cargo("build --examples").run();
+
+ assert!(!p.bin("foo").is_file());
+ // We expect a file of the form bin/foo-{metadata_hash}
+ assert!(p.bin("examples/foo").is_file());
+}
+
+#[cargo_test]
+fn compile_then_delete() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ p.cargo("run -v").run();
+ assert!(p.bin("foo").is_file());
+ if cfg!(windows) {
+        // On Windows, unlinking immediately after running often fails, so sleep briefly.
+ sleep_ms(100);
+ }
+ fs::remove_file(&p.bin("foo")).unwrap();
+ p.cargo("run -v").run();
+}
+
+#[cargo_test]
+fn transitive_dependencies_not_available() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.aaaaa]
+ path = "a"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate bbbbb; extern crate aaaaa; fn main() {}",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "aaaaa"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bbbbb]
+ path = "../b"
+ "#,
+ )
+ .file("a/src/lib.rs", "extern crate bbbbb;")
+ .file("b/Cargo.toml", &basic_manifest("bbbbb", "0.0.1"))
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr_contains("[..] can't find crate for `bbbbb`[..]")
+ .run();
+}
+
+#[cargo_test]
+fn cyclic_deps_rejected() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.foo]
+ path = ".."
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr(
+"[ERROR] cyclic package dependency: package `a v0.0.1 ([CWD]/a)` depends on itself. Cycle:
+package `a v0.0.1 ([CWD]/a)`
+ ... which satisfies path dependency `a` of package `foo v0.0.1 ([CWD])`
+ ... which satisfies path dependency `foo` of package `a v0.0.1 ([..])`",
+ ).run();
+}
+
+#[cargo_test]
+fn predictable_filenames() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "foo"
+ crate-type = ["dylib", "rlib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v").run();
+ assert!(p.root().join("target/debug/libfoo.rlib").is_file());
+ let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+ assert!(p.root().join("target/debug").join(dylib_name).is_file());
+}
+
+#[cargo_test]
+fn dashes_to_underscores() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo-bar", "0.0.1"))
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "extern crate foo_bar; fn main() {}")
+ .build();
+
+ p.cargo("build -v").run();
+ assert!(p.bin("foo-bar").is_file());
+}
+
+#[cargo_test]
+fn dashes_in_crate_name_bad() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "foo-bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "extern crate foo_bar; fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+ library target names cannot contain hyphens: foo-bar
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustc_env_var() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("build -v")
+ .env("RUSTC", "rustc-that-does-not-exist")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] could not execute process `rustc-that-does-not-exist -vV` ([..])
+
+Caused by:
+[..]
+",
+ )
+ .run();
+ assert!(!p.bin("a").is_file());
+}
+
+#[cargo_test]
+fn filtering() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("src/bin/b.rs", "fn main() {}")
+ .file("examples/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --lib").run();
+ assert!(!p.bin("a").is_file());
+
+ p.cargo("build --bin=a --example=a").run();
+ assert!(p.bin("a").is_file());
+ assert!(!p.bin("b").is_file());
+ assert!(p.bin("examples/a").is_file());
+ assert!(!p.bin("examples/b").is_file());
+}
+
+#[cargo_test]
+fn filtering_implicit_bins() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("src/bin/b.rs", "fn main() {}")
+ .file("examples/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --bins").run();
+ assert!(p.bin("a").is_file());
+ assert!(p.bin("b").is_file());
+ assert!(!p.bin("examples/a").is_file());
+ assert!(!p.bin("examples/b").is_file());
+}
+
+#[cargo_test]
+fn filtering_implicit_examples() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("src/bin/b.rs", "fn main() {}")
+ .file("examples/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --examples").run();
+ assert!(!p.bin("a").is_file());
+ assert!(!p.bin("b").is_file());
+ assert!(p.bin("examples/a").is_file());
+ assert!(p.bin("examples/b").is_file());
+}
+
+#[cargo_test]
+fn ignore_dotfile() {
+ let p = project()
+ .file("src/bin/.a.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn ignore_dotdirs() {
+ let p = project()
+ .file("src/bin/a.rs", "fn main() {}")
+ .file(".git/Cargo.toml", "")
+ .file(".pc/dummy-fix.patch/Cargo.toml", "")
+ .build();
+
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn dotdir_root() {
+ let p = ProjectBuilder::new(root().join(".foo"))
+ .file("src/bin/a.rs", "fn main() {}")
+ .build();
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn custom_target_dir_env() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ let exe_name = format!("foo{}", env::consts::EXE_SUFFIX);
+
+ p.cargo("build").env("CARGO_TARGET_DIR", "foo/target").run();
+ assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+ assert!(!p.root().join("target/debug").join(&exe_name).is_file());
+
+ p.cargo("build").run();
+ assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+ assert!(p.root().join("target/debug").join(&exe_name).is_file());
+
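+    // CARGO_BUILD_TARGET_DIR (the environment form of `build.target-dir`)
+    // is honored as well.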
+ p.cargo("build")
+ .env("CARGO_BUILD_TARGET_DIR", "foo2/target")
+ .run();
+ assert!(p.root().join("foo2/target/debug").join(&exe_name).is_file());
+
+ p.change_file(
+ ".cargo/config",
+ r#"
+ [build]
+ target-dir = "foo/target"
+ "#,
+ );
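+    // The CARGO_TARGET_DIR environment variable takes precedence over the
+    // `build.target-dir` config value.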
+ p.cargo("build").env("CARGO_TARGET_DIR", "bar/target").run();
+ assert!(p.root().join("bar/target/debug").join(&exe_name).is_file());
+ assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+ assert!(p.root().join("target/debug").join(&exe_name).is_file());
+}
+
+#[cargo_test]
+fn custom_target_dir_line_parameter() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ let exe_name = format!("foo{}", env::consts::EXE_SUFFIX);
+
+ p.cargo("build --target-dir foo/target").run();
+ assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+ assert!(!p.root().join("target/debug").join(&exe_name).is_file());
+
+ p.cargo("build").run();
+ assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+ assert!(p.root().join("target/debug").join(&exe_name).is_file());
+
+ p.change_file(
+ ".cargo/config",
+ r#"
+ [build]
+ target-dir = "foo/target"
+ "#,
+ );
+ p.cargo("build --target-dir bar/target").run();
+ assert!(p.root().join("bar/target/debug").join(&exe_name).is_file());
+ assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+ assert!(p.root().join("target/debug").join(&exe_name).is_file());
+
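+    // The `--target-dir` flag takes precedence over both CARGO_TARGET_DIR
+    // and the `build.target-dir` config value.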
+ p.cargo("build --target-dir foobar/target")
+ .env("CARGO_TARGET_DIR", "bar/target")
+ .run();
+ assert!(p
+ .root()
+ .join("foobar/target/debug")
+ .join(&exe_name)
+ .is_file());
+ assert!(p.root().join("bar/target/debug").join(&exe_name).is_file());
+ assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+ assert!(p.root().join("target/debug").join(&exe_name).is_file());
+}
+
+#[cargo_test]
+fn build_multiple_packages() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.d1]
+ path = "d1"
+ [dependencies.d2]
+ path = "d2"
+
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .file("d1/Cargo.toml", &basic_bin_manifest("d1"))
+ .file("d1/src/lib.rs", "")
+ .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+ .file(
+ "d2/Cargo.toml",
+ r#"
+ [package]
+ name = "d2"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "d2"
+ doctest = false
+ "#,
+ )
+ .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }")
+ .build();
+
+ p.cargo("build -p d1 -p d2 -p foo").run();
+
+ assert!(p.bin("foo").is_file());
+ p.process(&p.bin("foo")).with_stdout("i am foo\n").run();
+
+ let d1_path = &p
+ .build_dir()
+ .join("debug")
+ .join(format!("d1{}", env::consts::EXE_SUFFIX));
+ let d2_path = &p
+ .build_dir()
+ .join("debug")
+ .join(format!("d2{}", env::consts::EXE_SUFFIX));
+
+ assert!(d1_path.is_file());
+ p.process(d1_path).with_stdout("d1").run();
+
+ assert!(d2_path.is_file());
+ p.process(d2_path).with_stdout("d2").run();
+}
+
+#[cargo_test]
+fn invalid_spec() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.d1]
+ path = "d1"
+
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .file("d1/Cargo.toml", &basic_bin_manifest("d1"))
+ .file("d1/src/lib.rs", "")
+ .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+ .build();
+
+ p.cargo("build -p notAValidDep")
+ .with_status(101)
+ .with_stderr("[ERROR] package ID specification `notAValidDep` did not match any packages")
+ .run();
+
+ p.cargo("build -p d1 -p notAValidDep")
+ .with_status(101)
+ .with_stderr("[ERROR] package ID specification `notAValidDep` did not match any packages")
+ .run();
+}
+
+#[cargo_test]
+fn manifest_with_bom_is_ok() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ "\u{FEFF}
+ [package]
+ name = \"foo\"
+ version = \"0.0.1\"
+ authors = []
+ ",
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn panic_abort_compiles_with_panic_abort() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.dev]
+ panic = 'abort'
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("build -v")
+ .with_stderr_contains("[..] -C panic=abort [..]")
+ .run();
+}
+
+#[cargo_test]
+fn compiler_json_error_format() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file(
+ "build.rs",
+ "fn main() { println!(\"cargo:rustc-cfg=xyz\") }",
+ )
+ .file("src/main.rs", "fn main() { let unused = 92; }")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("bar/src/lib.rs", r#"fn dead() {}"#)
+ .build();
+
+ let output = |fresh| {
+ r#"
+ {
+ "reason":"compiler-artifact",
+ "package_id":"foo 0.5.0 ([..])",
+ "manifest_path": "[..]",
+ "target":{
+ "kind":["custom-build"],
+ "crate_types":["bin"],
+ "doc": false,
+ "doctest": false,
+ "edition": "2015",
+ "name":"build-script-build",
+ "src_path":"[..]build.rs",
+ "test": false
+ },
+ "profile": {
+ "debug_assertions": true,
+ "debuginfo": null,
+ "opt_level": "0",
+ "overflow_checks": true,
+ "test": false
+ },
+ "executable": null,
+ "features": [],
+ "filenames": "{...}",
+ "fresh": $FRESH
+ }
+
+ {
+ "reason":"compiler-message",
+ "package_id":"bar 0.5.0 ([..])",
+ "manifest_path": "[..]",
+ "target":{
+ "kind":["lib"],
+ "crate_types":["lib"],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "name":"bar",
+ "src_path":"[..]lib.rs",
+ "test": true
+ },
+ "message":"{...}"
+ }
+
+ {
+ "reason":"compiler-artifact",
+ "profile": {
+ "debug_assertions": true,
+ "debuginfo": 2,
+ "opt_level": "0",
+ "overflow_checks": true,
+ "test": false
+ },
+ "executable": null,
+ "features": [],
+ "package_id":"bar 0.5.0 ([..])",
+ "manifest_path": "[..]",
+ "target":{
+ "kind":["lib"],
+ "crate_types":["lib"],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "name":"bar",
+ "src_path":"[..]lib.rs",
+ "test": true
+ },
+ "filenames":[
+ "[..].rlib",
+ "[..].rmeta"
+ ],
+ "fresh": $FRESH
+ }
+
+ {
+ "reason":"build-script-executed",
+ "package_id":"foo 0.5.0 ([..])",
+ "linked_libs":[],
+ "linked_paths":[],
+ "env":[],
+ "cfgs":["xyz"],
+ "out_dir": "[..]target/debug/build/foo-[..]/out"
+ }
+
+ {
+ "reason":"compiler-message",
+ "package_id":"foo 0.5.0 ([..])",
+ "manifest_path": "[..]",
+ "target":{
+ "kind":["bin"],
+ "crate_types":["bin"],
+ "doc": true,
+ "doctest": false,
+ "edition": "2015",
+ "name":"foo",
+ "src_path":"[..]main.rs",
+ "test": true
+ },
+ "message":"{...}"
+ }
+
+ {
+ "reason":"compiler-artifact",
+ "package_id":"foo 0.5.0 ([..])",
+ "manifest_path": "[..]",
+ "target":{
+ "kind":["bin"],
+ "crate_types":["bin"],
+ "doc": true,
+ "doctest": false,
+ "edition": "2015",
+ "name":"foo",
+ "src_path":"[..]main.rs",
+ "test": true
+ },
+ "profile": {
+ "debug_assertions": true,
+ "debuginfo": 2,
+ "opt_level": "0",
+ "overflow_checks": true,
+ "test": false
+ },
+ "executable": "[..]/foo/target/debug/foo[EXE]",
+ "features": [],
+ "filenames": "{...}",
+ "fresh": $FRESH
+ }
+
+ {"reason": "build-finished", "success": true}
+ "#
+ .replace("$FRESH", fresh)
+ };
+
+ // Use `jobs=1` to ensure that the order of messages is consistent.
+ p.cargo("build -v --message-format=json --jobs=1")
+ .with_json_contains_unordered(&output("false"))
+ .run();
+
+ // With fresh build, we should repeat the artifacts,
+ // and replay the cached compiler warnings.
+ p.cargo("build -v --message-format=json --jobs=1")
+ .with_json_contains_unordered(&output("true"))
+ .run();
+}
+
+#[cargo_test]
+fn wrong_message_format_option() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --message-format XML")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: invalid message format specifier: `xml`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn message_format_json_forward_stderr() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() { let unused = 0; }")
+ .build();
+
+ p.cargo("rustc --release --bin foo --message-format JSON")
+ .with_json_contains_unordered(
+ r#"
+ {
+ "reason":"compiler-message",
+ "package_id":"foo 0.5.0 ([..])",
+ "manifest_path": "[..]",
+ "target":{
+ "kind":["bin"],
+ "crate_types":["bin"],
+ "doc": true,
+ "doctest": false,
+ "edition": "2015",
+ "name":"foo",
+ "src_path":"[..]",
+ "test": true
+ },
+ "message":"{...}"
+ }
+
+ {
+ "reason":"compiler-artifact",
+ "package_id":"foo 0.5.0 ([..])",
+ "manifest_path": "[..]",
+ "target":{
+ "kind":["bin"],
+ "crate_types":["bin"],
+ "doc": true,
+ "doctest": false,
+ "edition": "2015",
+ "name":"foo",
+ "src_path":"[..]",
+ "test": true
+ },
+ "profile":{
+ "debug_assertions":false,
+ "debuginfo":null,
+ "opt_level":"3",
+ "overflow_checks": false,
+ "test":false
+ },
+ "executable": "{...}",
+ "features":[],
+ "filenames": "{...}",
+ "fresh": false
+ }
+
+ {"reason": "build-finished", "success": true}
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_warn_about_package_metadata() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [package.metadata]
+ foo = "bar"
+ a = true
+ b = 3
+
+ [package.metadata.another]
+ bar = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("build")
+ .with_stderr(
+ "[..] foo v0.0.1 ([..])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_warn_about_workspace_metadata() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo"]
+
+ [workspace.metadata]
+ something = "something_else"
+ x = 1
+ y = 2
+
+ [workspace.metadata.another]
+ bar = 12
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "[..] foo v0.0.1 ([..])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_build_empty_target() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --target")
+ .arg("")
+ .with_status(101)
+ .with_stderr_contains("[..] target was empty")
+ .run();
+}
+
+#[cargo_test]
+fn build_all_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("build --workspace")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.1.0 ([..])
+[COMPILING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_all_exclude() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }")
+ .build();
+
+ p.cargo("build --workspace --exclude baz")
+ .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]")
+ .with_stderr_unordered(
+ "\
+[COMPILING] foo v0.1.0 ([..])
+[COMPILING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_all_exclude_not_found() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("build --workspace --exclude baz")
+ .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]")
+ .with_stderr_unordered(
+ "\
+[WARNING] excluded package(s) `baz` not found in workspace [..]
+[COMPILING] foo v0.1.0 ([..])
+[COMPILING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_all_exclude_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }")
+ .build();
+
+ p.cargo("build --workspace --exclude '*z'")
+ .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]")
+ .with_stderr_unordered(
+ "\
+[COMPILING] foo v0.1.0 ([..])
+[COMPILING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_all_exclude_glob_not_found() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("build --workspace --exclude '*z'")
+ .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]")
+ .with_stderr(
+ "\
+[WARNING] excluded package pattern(s) `*z` not found in workspace [..]
+[COMPILING] [..] v0.1.0 ([..])
+[COMPILING] [..] v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_all_exclude_broken_glob() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ p.cargo("build --workspace --exclude '[*z'")
+ .with_status(101)
+ .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`")
+ .run();
+}
+
+#[cargo_test]
+fn build_all_workspace_implicit_examples() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("src/bin/b.rs", "fn main() {}")
+ .file("examples/c.rs", "fn main() {}")
+ .file("examples/d.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .file("bar/src/bin/e.rs", "fn main() {}")
+ .file("bar/src/bin/f.rs", "fn main() {}")
+ .file("bar/examples/g.rs", "fn main() {}")
+ .file("bar/examples/h.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --workspace --examples")
+ .with_stderr(
+ "[..] Compiling bar v0.1.0 ([..])\n\
+ [..] Compiling foo v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+ assert!(!p.bin("a").is_file());
+ assert!(!p.bin("b").is_file());
+ assert!(p.bin("examples/c").is_file());
+ assert!(p.bin("examples/d").is_file());
+ assert!(!p.bin("e").is_file());
+ assert!(!p.bin("f").is_file());
+ assert!(p.bin("examples/g").is_file());
+ assert!(p.bin("examples/h").is_file());
+}
+
+#[cargo_test]
+fn build_all_virtual_manifest() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ // The order in which `bar` and `baz` are built is not guaranteed.
+ p.cargo("build --workspace")
+ .with_stderr_unordered(
+ "\
+[COMPILING] baz v0.1.0 ([..])
+[COMPILING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_virtual_manifest_all_implied() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ // The order in which `bar` and `baz` are built is not guaranteed.
+ p.cargo("build")
+ .with_stderr_unordered(
+ "\
+[COMPILING] baz v0.1.0 ([..])
+[COMPILING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_virtual_manifest_one_project() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }")
+ .build();
+
+ p.cargo("build -p bar")
+ .with_stderr_does_not_contain("[..]baz[..]")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_virtual_manifest_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("build -p '*z'")
+ .with_stderr_does_not_contain("[..]bar[..]")
+ .with_stderr(
+ "\
+[COMPILING] baz v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_virtual_manifest_glob_not_found() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("build -p bar -p '*z'")
+ .with_status(101)
+ .with_stderr("[ERROR] package pattern(s) `*z` not found in workspace [..]")
+ .run();
+}
+
+#[cargo_test]
+fn build_virtual_manifest_broken_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("build -p '[*z'")
+ .with_status(101)
+ .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`")
+ .run();
+}
+
+#[cargo_test]
+fn build_all_virtual_manifest_implicit_examples() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .file("bar/src/bin/a.rs", "fn main() {}")
+ .file("bar/src/bin/b.rs", "fn main() {}")
+ .file("bar/examples/c.rs", "fn main() {}")
+ .file("bar/examples/d.rs", "fn main() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "")
+ .file("baz/src/bin/e.rs", "fn main() {}")
+ .file("baz/src/bin/f.rs", "fn main() {}")
+ .file("baz/examples/g.rs", "fn main() {}")
+ .file("baz/examples/h.rs", "fn main() {}")
+ .build();
+
+ // The order in which `bar` and `baz` are built is not guaranteed.
+ p.cargo("build --workspace --examples")
+ .with_stderr_unordered(
+ "\
+[COMPILING] baz v0.1.0 ([..])
+[COMPILING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ assert!(!p.bin("a").is_file());
+ assert!(!p.bin("b").is_file());
+ assert!(p.bin("examples/c").is_file());
+ assert!(p.bin("examples/d").is_file());
+ assert!(!p.bin("e").is_file());
+ assert!(!p.bin("f").is_file());
+ assert!(p.bin("examples/g").is_file());
+ assert!(p.bin("examples/h").is_file());
+}
+
+#[cargo_test]
+fn build_all_member_dependency_same_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ a = "0.1.0"
+ "#,
+ )
+ .file("a/src/lib.rs", "pub fn a() {}")
+ .build();
+
+ Package::new("a", "0.1.0").publish();
+
+ p.cargo("build --workspace")
+ .with_stderr(
+ "[UPDATING] `[..]` index\n\
+ [DOWNLOADING] crates ...\n\
+ [DOWNLOADED] a v0.1.0 ([..])\n\
+ [COMPILING] a v0.1.0\n\
+ [COMPILING] a v0.1.0 ([..])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn run_proper_binary() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+ [[bin]]
+ name = "main"
+ [[bin]]
+ name = "other"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "src/bin/main.rs",
+ r#"fn main() { panic!("This should never be run."); }"#,
+ )
+ .file("src/bin/other.rs", "fn main() {}")
+ .build();
+
+ p.cargo("run --bin other").run();
+}
+
+#[cargo_test]
+fn run_proper_binary_main_rs() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file("src/bin/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("run --bin foo").run();
+}
+
+#[cargo_test]
+fn run_proper_alias_binary_from_src() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+ [[bin]]
+ name = "foo"
+ [[bin]]
+ name = "bar"
+ "#,
+ )
+ .file("src/foo.rs", r#"fn main() { println!("foo"); }"#)
+ .file("src/bar.rs", r#"fn main() { println!("bar"); }"#)
+ .build();
+
+ p.cargo("build --workspace").run();
+ p.process(&p.bin("foo")).with_stdout("foo\n").run();
+ p.process(&p.bin("bar")).with_stdout("bar\n").run();
+}
+
+#[cargo_test]
+fn run_proper_alias_binary_main_rs() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+ [[bin]]
+ name = "foo"
+ [[bin]]
+ name = "bar"
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("main"); }"#)
+ .build();
+
+ p.cargo("build --workspace").run();
+ p.process(&p.bin("foo")).with_stdout("main\n").run();
+ p.process(&p.bin("bar")).with_stdout("main\n").run();
+}
+
+#[cargo_test]
+fn run_proper_binary_main_rs_as_foo() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/foo.rs",
+ r#" fn main() { panic!("This should never be run."); }"#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("run --bin foo").run();
+}
+
+#[cargo_test]
+fn rustc_wrapper() {
+ let p = project().file("src/lib.rs", "").build();
+ let wrapper = tools::echo_wrapper();
+ let running = format!(
+ "[RUNNING] `{} rustc --crate-name foo [..]",
+ wrapper.display()
+ );
+ p.cargo("build -v")
+ .env("RUSTC_WRAPPER", &wrapper)
+ .with_stderr_contains(&running)
+ .run();
+ p.build_dir().rm_rf();
+ p.cargo("build -v")
+ .env("RUSTC_WORKSPACE_WRAPPER", &wrapper)
+ .with_stderr_contains(&running)
+ .run();
+}
+
+#[cargo_test]
+fn rustc_wrapper_relative() {
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ let wrapper = tools::echo_wrapper();
+ let exe_name = wrapper.file_name().unwrap().to_str().unwrap();
+ let relative_path = format!("./{}", exe_name);
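+ // Hard-link the wrapper into the project root so the relative `./<exe>` path
+ // resolves when cargo is invoked from that directory.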
+ fs::hard_link(&wrapper, p.root().join(exe_name)).unwrap();
+ let running = format!("[RUNNING] `[ROOT]/foo/./{} rustc[..]", exe_name);
+ p.cargo("build -v")
+ .env("RUSTC_WRAPPER", &relative_path)
+ .with_stderr_contains(&running)
+ .run();
+ p.build_dir().rm_rf();
+ p.cargo("build -v")
+ .env("RUSTC_WORKSPACE_WRAPPER", &relative_path)
+ .with_stderr_contains(&running)
+ .run();
+ p.build_dir().rm_rf();
+ p.change_file(
+ ".cargo/config.toml",
+ &format!(
+ r#"
+ build.rustc-wrapper = "./{}"
+ "#,
+ exe_name
+ ),
+ );
+ p.cargo("build -v").with_stderr_contains(&running).run();
+}
+
+#[cargo_test]
+fn rustc_wrapper_from_path() {
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("build -v")
+ .env("RUSTC_WRAPPER", "wannabe_sccache")
+ .with_status(101)
+ .with_stderr_contains("[..]`wannabe_sccache rustc [..]")
+ .run();
+ p.build_dir().rm_rf();
+ p.cargo("build -v")
+ .env("RUSTC_WORKSPACE_WRAPPER", "wannabe_sccache")
+ .with_status(101)
+ .with_stderr_contains("[..]`wannabe_sccache rustc [..]")
+ .run();
+}
+
+#[cargo_test]
+fn cdylib_not_lifted() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.1.0"
+
+ [lib]
+ crate-type = ["cdylib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+
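+ // Regardless of platform, the cdylib's per-target outputs are expected under
+ // target/debug/deps with the names listed below.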
+ let files = if cfg!(windows) {
+ if cfg!(target_env = "msvc") {
+ vec!["foo.dll.lib", "foo.dll.exp", "foo.dll"]
+ } else {
+ vec!["libfoo.dll.a", "foo.dll"]
+ }
+ } else if cfg!(target_os = "macos") {
+ vec!["libfoo.dylib"]
+ } else {
+ vec!["libfoo.so"]
+ };
+
+ for file in files {
+ println!("checking: {}", file);
+ assert!(p.root().join("target/debug/deps").join(&file).is_file());
+ }
+}
+
+#[cargo_test]
+fn cdylib_final_outputs() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo-bar"
+ authors = []
+ version = "0.1.0"
+
+ [lib]
+ crate-type = ["cdylib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+
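+ // The final artifacts for package `foo-bar` use underscores in the file names
+ // and are uplifted directly into target/debug.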
+ let files = if cfg!(windows) {
+ if cfg!(target_env = "msvc") {
+ vec!["foo_bar.dll.lib", "foo_bar.dll"]
+ } else {
+ vec!["foo_bar.dll", "libfoo_bar.dll.a"]
+ }
+ } else if cfg!(target_os = "macos") {
+ vec!["libfoo_bar.dylib"]
+ } else {
+ vec!["libfoo_bar.so"]
+ };
+
+ for file in files {
+ println!("checking: {}", file);
+ assert!(p.root().join("target/debug").join(&file).is_file());
+ }
+}
+
+#[cargo_test]
+// NOTE: Windows MSVC and wasm32-unknown-emscripten do not use metadata, so this test
+// is skipped there; only the MSVC case can be a test host, hence the single `cfg` below.
+// See <https://github.com/rust-lang/cargo/issues/9325#issuecomment-1030662699>
+#[cfg(not(all(target_os = "windows", target_env = "msvc")))]
+fn no_dep_info_collision_when_cdylib_and_bin_coexist() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [lib]
+ crate-type = ["cdylib"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr_unordered(
+ "\
+[COMPILING] foo v1.0.0 ([CWD])
+[RUNNING] `rustc [..] --crate-type bin [..] -C metadata=[..]`
+[RUNNING] `rustc [..] --crate-type cdylib [..] -C metadata=[..]`
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ let deps_dir = p.target_debug_dir().join("deps");
+ assert!(deps_dir.join("foo.d").exists());
+ let dep_info_count = deps_dir
+ .read_dir()
+ .unwrap()
+ .filter(|e| {
+ let filename = e.as_ref().unwrap().file_name();
+ let filename = filename.to_str().unwrap();
+ filename.starts_with("foo") && filename.ends_with(".d")
+ })
+ .count();
+ // cdylib -> foo.d
+ // bin -> foo-<meta>.d
+ assert_eq!(dep_info_count, 2);
+}
+
+#[cargo_test]
+fn deterministic_cfg_flags() {
+ // Regression test: the bug guarded against here was non-deterministic, so the
+ // --cfg flags must always reach rustc in a stable, reproducible order.
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ build = "build.rs"
+
+ [features]
+ default = ["f_a", "f_b", "f_c", "f_d"]
+ f_a = []
+ f_b = []
+ f_c = []
+ f_d = []
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-cfg=cfg_a");
+ println!("cargo:rustc-cfg=cfg_b");
+ println!("cargo:rustc-cfg=cfg_c");
+ println!("cargo:rustc-cfg=cfg_d");
+ println!("cargo:rustc-cfg=cfg_e");
+ }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] [..]
+[RUNNING] [..]
+[RUNNING] `rustc --crate-name foo [..] \
+--cfg[..]default[..]--cfg[..]f_a[..]--cfg[..]f_b[..]\
+--cfg[..]f_c[..]--cfg[..]f_d[..] \
+--cfg cfg_a --cfg cfg_b --cfg cfg_c --cfg cfg_d --cfg cfg_e`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn explicit_bins_without_paths() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [[bin]]
+ name = "foo"
+
+ [[bin]]
+ name = "bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .file("src/bin/bar.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn no_bin_in_src_with_lib() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file("src/foo.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ can't find `foo` bin at `src/bin/foo.rs` or `src/bin/foo/main.rs`. [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn inferred_bins() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/bin/bar.rs", "fn main() {}")
+ .file("src/bin/baz/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+ assert!(p.bin("bar").is_file());
+ assert!(p.bin("baz").is_file());
+}
+
+#[cargo_test]
+fn inferred_bins_duplicate_name() {
+ // This should fail because two binary targets end up with the same inferred name.
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/bin/bar.rs", "fn main() {}")
+ .file("src/bin/bar/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").with_status(101).with_stderr_contains(
+ "[..]found duplicate binary name bar, but all binary targets must have a unique name[..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn inferred_bin_path() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [[bin]]
+ name = "bar"
+ # Note, no `path` key!
+ "#,
+ )
+ .file("src/bin/bar/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.bin("bar").is_file());
+}
+
+#[cargo_test]
+fn inferred_examples() {
+ let p = project()
+ .file("src/lib.rs", "fn main() {}")
+ .file("examples/bar.rs", "fn main() {}")
+ .file("examples/baz/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --examples").run();
+ assert!(p.bin("examples/bar").is_file());
+ assert!(p.bin("examples/baz").is_file());
+}
+
+#[cargo_test]
+fn inferred_tests() {
+ let p = project()
+ .file("src/lib.rs", "fn main() {}")
+ .file("tests/bar.rs", "fn main() {}")
+ .file("tests/baz/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("test --test=bar --test=baz").run();
+}
+
+#[cargo_test]
+fn inferred_benchmarks() {
+ let p = project()
+ .file("src/lib.rs", "fn main() {}")
+ .file("benches/bar.rs", "fn main() {}")
+ .file("benches/baz/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("bench --bench=bar --bench=baz").run();
+}
+
+#[cargo_test]
+fn no_infer_dirs() {
+ let p = project()
+ .file("src/lib.rs", "fn main() {}")
+ .file("examples/dir.rs/dummy", "")
+ .file("benches/dir.rs/dummy", "")
+ .file("tests/dir.rs/dummy", "")
+ .build();
+
+ p.cargo("build --examples --benches --tests").run(); // should not fail with "is a directory"
+}
+
+#[cargo_test]
+fn target_edition() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ edition = "2018"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..]--edition=2018 [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn target_edition_override() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ edition = "2018"
+
+ [lib]
+ edition = "2015"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ pub fn async() {}
+ pub fn try() {}
+ pub fn await() {}
+ ",
+ )
+ .build();
+
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn same_metadata_different_directory() {
+ // A top-level crate built in two different workspaces should have the
+ // same metadata hash.
+ let p = project()
+ .at("foo1")
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+ let output = t!(String::from_utf8(
+ t!(p.cargo("build -v").exec_with_output()).stderr,
+ ));
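+ // Pull the `-C metadata=<hash>` argument out of the first build's verbose output
+ // so it can be matched against the second build's output below.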
+ let metadata = output
+ .split_whitespace()
+ .find(|arg| arg.starts_with("metadata="))
+ .unwrap();
+
+ let p = project()
+ .at("foo2")
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr_contains(format!("[..]{}[..]", metadata))
+ .run();
+}
+
+#[cargo_test]
+fn building_a_dependent_crate_without_bin_should_fail() {
+ Package::new("testless", "0.1.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "testless"
+ version = "0.1.0"
+
+ [[bin]]
+ name = "a_bin"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ testless = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains(
+ "[..]can't find `a_bin` bin at `src/bin/a_bin.rs` or `src/bin/a_bin/main.rs`[..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+#[cfg(any(target_os = "macos", target_os = "ios"))]
+fn uplift_dsym_of_bin_on_mac() {
+ let p = project()
+ .file("src/main.rs", "fn main() { panic!(); }")
+ .file("src/bin/b.rs", "fn main() { panic!(); }")
+ .file("examples/c.rs", "fn main() { panic!(); }")
+ .file("tests/d.rs", "fn main() { panic!(); }")
+ .build();
+
+ p.cargo("build --bins --examples --tests")
+ .enable_mac_dsym()
+ .run();
+ assert!(p.target_debug_dir().join("foo.dSYM").is_dir());
+ assert!(p.target_debug_dir().join("b.dSYM").is_dir());
+ assert!(p.target_debug_dir().join("b.dSYM").is_symlink());
+ assert!(p.target_debug_dir().join("examples/c.dSYM").is_dir());
+ assert!(!p.target_debug_dir().join("c.dSYM").exists());
+ assert!(!p.target_debug_dir().join("d.dSYM").exists());
+}
+
+#[cargo_test]
+#[cfg(any(target_os = "macos", target_os = "ios"))]
+fn uplift_dsym_of_bin_on_mac_when_broken_link_exists() {
+ let p = project()
+ .file("src/main.rs", "fn main() { panic!(); }")
+ .build();
+ let dsym = p.target_debug_dir().join("foo.dSYM");
+
+ p.cargo("build").enable_mac_dsym().run();
+ assert!(dsym.is_dir());
+
+ // Simulate the situation where the underlying dSYM bundle goes missing
+ // but the uplifted symlink to it remains. This would previously cause
+ // builds to permanently fail until the bad symlink was manually removed.
+ dsym.rm_rf();
+ p.symlink(
+ p.target_debug_dir()
+ .join("deps")
+ .join("foo-baaaaaadbaaaaaad.dSYM"),
+ &dsym,
+ );
+ assert!(dsym.is_symlink());
+ assert!(!dsym.exists());
+
+ p.cargo("build").enable_mac_dsym().run();
+ assert!(dsym.is_dir());
+}
+
+#[cargo_test]
+#[cfg(all(target_os = "windows", target_env = "msvc"))]
+fn uplift_pdb_of_bin_on_windows() {
+ let p = project()
+ .file("src/main.rs", "fn main() { panic!(); }")
+ .file("src/bin/b.rs", "fn main() { panic!(); }")
+ .file("src/bin/foo-bar.rs", "fn main() { panic!(); }")
+ .file("examples/c.rs", "fn main() { panic!(); }")
+ .file("tests/d.rs", "fn main() { panic!(); }")
+ .build();
+
+ p.cargo("build --bins --examples --tests").run();
+ assert!(p.target_debug_dir().join("foo.pdb").is_file());
+ assert!(p.target_debug_dir().join("b.pdb").is_file());
+ assert!(p.target_debug_dir().join("examples/c.pdb").exists());
+ assert!(p.target_debug_dir().join("foo-bar.exe").is_file());
+ assert!(p.target_debug_dir().join("foo_bar.pdb").is_file());
+ assert!(!p.target_debug_dir().join("c.pdb").exists());
+ assert!(!p.target_debug_dir().join("d.pdb").exists());
+}
+
+#[cargo_test]
+#[cfg(target_os = "linux")]
+fn uplift_dwp_of_bin_on_linux() {
+ let p = project()
+ .file("src/main.rs", "fn main() { panic!(); }")
+ .file("src/bin/b.rs", "fn main() { panic!(); }")
+ .file("src/bin/foo-bar.rs", "fn main() { panic!(); }")
+ .file("examples/c.rs", "fn main() { panic!(); }")
+ .file("tests/d.rs", "fn main() { panic!(); }")
+ .build();
+
+ p.cargo("build --bins --examples --tests")
+ .enable_split_debuginfo_packed()
+ .run();
+ assert!(p.target_debug_dir().join("foo.dwp").is_file());
+ assert!(p.target_debug_dir().join("b.dwp").is_file());
+ assert!(p.target_debug_dir().join("examples/c.dwp").exists());
+ assert!(p.target_debug_dir().join("foo-bar").is_file());
+ assert!(p.target_debug_dir().join("foo-bar.dwp").is_file());
+ assert!(!p.target_debug_dir().join("c.dwp").exists());
+ assert!(!p.target_debug_dir().join("d.dwp").exists());
+}
+
+// Ensure that `cargo build` chooses the correct profile for building
+// targets based on filters (assuming `--profile` is not specified).
+#[cargo_test]
+fn build_filter_infer_profile() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .file("tests/t1.rs", "")
+ .file("benches/b1.rs", "")
+ .file("examples/ex1.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]",
+ )
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \
+ --emit=[..]link[..]",
+ )
+ .run();
+
+ p.root().join("target").rm_rf();
+ p.cargo("build -v --test=t1")
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]-C debuginfo=2 [..]",
+ )
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name t1 tests/t1.rs [..]--emit=[..]link[..]\
+ -C debuginfo=2 [..]",
+ )
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \
+ --emit=[..]link[..]-C debuginfo=2 [..]",
+ )
+ .run();
+
+ p.root().join("target").rm_rf();
+ // Bench uses test profile without `--release`.
+ p.cargo("build -v --bench=b1")
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]-C debuginfo=2 [..]",
+ )
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name b1 benches/b1.rs [..]--emit=[..]link[..]\
+ -C debuginfo=2 [..]",
+ )
+ .with_stderr_does_not_contain("opt-level")
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \
+ --emit=[..]link[..]-C debuginfo=2 [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn targets_selected_default() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("build -v")
+ // Binaries.
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \
+ --emit=[..]link[..]",
+ )
+ // Benchmarks.
+ .with_stderr_does_not_contain(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link \
+ -C opt-level=3 --test [..]",
+ )
+ // Unit tests.
+ .with_stderr_does_not_contain(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\
+ -C debuginfo=2 --test [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn targets_selected_all() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("build -v --all-targets")
+ // Binaries.
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \
+ --emit=[..]link[..]",
+ )
+ // Unit tests.
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\
+ -C debuginfo=2 --test [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn all_targets_no_lib() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("build -v --all-targets")
+ // Binaries.
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \
+ --emit=[..]link[..]",
+ )
+ // Unit tests.
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\
+ -C debuginfo=2 --test [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_linkable_target() {
+ // Issue 3169: this is currently not an error as per discussion in PR #4797.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ [dependencies]
+ the_lib = { path = "the_lib" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "the_lib/Cargo.toml",
+ r#"
+ [package]
+ name = "the_lib"
+ version = "0.1.0"
+ [lib]
+ name = "the_lib"
+ crate-type = ["staticlib"]
+ "#,
+ )
+ .file("the_lib/src/lib.rs", "pub fn foo() {}")
+ .build();
+ p.cargo("build")
+ .with_stderr_contains(
+ "[WARNING] The package `the_lib` provides no linkable [..] \
+ while compiling `foo`. [..] in `the_lib`'s Cargo.toml. [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn avoid_dev_deps() {
+ Package::new("foo", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dev-dependencies]
+ baz = "1.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] no matching package named `baz` found
+location searched: registry `crates-io`
+required by package `bar v0.1.0 ([..]/foo)`
+",
+ )
+ .run();
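+ // With `-Z avoid-dev-deps`, dev-dependencies are not resolved for commands that
+ // do not need them, so the missing `baz` no longer breaks the build.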
+ p.cargo("build -Zavoid-dev-deps")
+ .masquerade_as_nightly_cargo(&["avoid-dev-deps"])
+ .run();
+}
+
+#[cargo_test]
+fn default_cargo_config_jobs() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ jobs = 1
+ "#,
+ )
+ .build();
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn good_cargo_config_jobs() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ jobs = 4
+ "#,
+ )
+ .build();
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn good_jobs() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("build --jobs 1").run();
+
+ p.cargo("build --jobs -1").run();
+}
+
+#[cargo_test]
+fn invalid_cargo_config_jobs() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ jobs = 0
+ "#,
+ )
+ .build();
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr_contains("error: jobs may not be 0")
+ .run();
+}
+
+#[cargo_test]
+fn invalid_jobs() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("build --jobs 0")
+ .with_status(101)
+ .with_stderr_contains("error: jobs may not be 0")
+ .run();
+
+ p.cargo("build --jobs over9000")
+ .with_status(1)
+ .with_stderr("error: Invalid value: could not parse `over9000` as a number")
+ .run();
+}
+
+#[cargo_test]
+fn target_filters_workspace() {
+ let ws = project()
+ .at("ws")
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_lib_manifest("a"))
+ .file("a/src/lib.rs", "")
+ .file("a/examples/ex1.rs", "fn main() {}")
+ .file("b/Cargo.toml", &basic_bin_manifest("b"))
+ .file("b/src/lib.rs", "")
+ .file("b/src/main.rs", "fn main() {}")
+ .build();
+
+ ws.cargo("build -v --example ex")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no example target named `ex`
+
+<tab>Did you mean `ex1`?",
+ )
+ .run();
+
+ ws.cargo("build -v --example 'ex??'")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no example target matches pattern `ex??`
+
+<tab>Did you mean `ex1`?",
+ )
+ .run();
+
+ ws.cargo("build -v --lib")
+ .with_stderr_contains("[RUNNING] `rustc [..]a/src/lib.rs[..]")
+ .with_stderr_contains("[RUNNING] `rustc [..]b/src/lib.rs[..]")
+ .run();
+
+ ws.cargo("build -v --example ex1")
+ .with_stderr_contains("[RUNNING] `rustc [..]a/examples/ex1.rs[..]")
+ .run();
+}
+
+#[cargo_test]
+fn target_filters_workspace_not_found() {
+ let ws = project()
+ .at("ws")
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_bin_manifest("a"))
+ .file("a/src/main.rs", "fn main() {}")
+ .file("b/Cargo.toml", &basic_bin_manifest("b"))
+ .file("b/src/main.rs", "fn main() {}")
+ .build();
+
+ ws.cargo("build -v --lib")
+ .with_status(101)
+ .with_stderr("[ERROR] no library targets found in packages: a, b")
+ .run();
+}
+
+#[cfg(unix)]
+#[cargo_test]
+fn signal_display() {
+ // Cause the compiler to crash with a signal.
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [dependencies]
+ pm = { path = "pm" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[macro_use]
+ extern crate pm;
+
+ #[derive(Foo)]
+ pub struct S;
+ "#,
+ )
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "pm/src/lib.rs",
+ r#"
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[proc_macro_derive(Foo)]
+ pub fn derive(_input: TokenStream) -> TokenStream {
+ std::process::abort()
+ }
+ "#,
+ )
+ .build();
+
+ foo.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] pm [..]
+[COMPILING] foo [..]
+[ERROR] could not compile `foo` [..]
+
+Caused by:
+ process didn't exit successfully: `rustc [..]` (signal: 6, SIGABRT: process abort signal)
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn tricky_pipelining() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;")
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ foo.cargo("build -p bar").run();
+ foo.cargo("build -p foo").run();
+}
+
+#[cargo_test]
+fn pipelining_works() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;")
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ foo.cargo("build")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[COMPILING] [..]
+[COMPILING] [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn pipelining_big_graph() {
+ // Create a crate graph of the form {a,b}{0..29}, where {a,b}(n) depend on {a,b}(n+1)
+ // Then have `foo`, a binary crate, depend on the whole thing.
+ let mut project = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [dependencies]
+ a1 = { path = "a1" }
+ b1 = { path = "b1" }
+ "#,
+ )
+ .file("src/main.rs", "fn main(){}");
+
+ for n in 0..30 {
+ for x in &["a", "b"] {
+ project = project
+ .file(
+ &format!("{x}{n}/Cargo.toml", x = x, n = n),
+ &format!(
+ r#"
+ [package]
+ name = "{x}{n}"
+ version = "0.1.0"
+ [dependencies]
+ a{np1} = {{ path = "../a{np1}" }}
+ b{np1} = {{ path = "../b{np1}" }}
+ "#,
+ x = x,
+ n = n,
+ np1 = n + 1
+ ),
+ )
+ .file(&format!("{x}{n}/src/lib.rs", x = x, n = n), "");
+ }
+ }
+
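+ // a30/b30 are the leaves of the chain; a30 intentionally fails to compile, and
+ // the error must surface even with pipelining across the deep graph.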
+ let foo = project
+ .file("a30/Cargo.toml", &basic_lib_manifest("a30"))
+ .file(
+ "a30/src/lib.rs",
+ r#"compile_error!("don't actually build me");"#,
+ )
+ .file("b30/Cargo.toml", &basic_lib_manifest("b30"))
+ .file("b30/src/lib.rs", "")
+ .build();
+ foo.cargo("build -p foo")
+ .with_status(101)
+ .with_stderr_contains("[ERROR] could not compile `a30`[..]")
+ .run();
+}
+
+#[cargo_test]
+fn forward_rustc_output() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = '2018'
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "bar::foo!();")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ extern crate proc_macro;
+ use proc_macro::*;
+
+ #[proc_macro]
+ pub fn foo(input: TokenStream) -> TokenStream {
+ println!("a");
+ println!("b");
+ println!("{{}}");
+ eprintln!("c");
+ eprintln!("d");
+ eprintln!("{{a"); // "malformed json"
+ input
+ }
+ "#,
+ )
+ .build();
+
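+ // Output printed by the proc-macro passes through rustc; cargo must forward it
+ // unchanged: stdout lines (including `{}`) to stdout, and stderr lines
+ // (including the malformed-JSON-looking `{a`) to stderr.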
+ foo.cargo("build")
+ .with_stdout("a\nb\n{}")
+ .with_stderr(
+ "\
+[COMPILING] [..]
+[COMPILING] [..]
+c
+d
+{a
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_lib_only() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("build --lib -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]-C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_with_no_lib() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --lib")
+ .with_status(101)
+ .with_stderr("[ERROR] no library targets found in package `foo`")
+ .run();
+}
+
+#[cargo_test]
+fn build_with_relative_cargo_home_path() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.0.1"
+ authors = ["wycats@example.com"]
+
+ [dependencies]
+
+ "test-dependency" = { path = "src/test_dependency" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("src/test_dependency/src/lib.rs", r#" "#)
+ .file(
+ "src/test_dependency/Cargo.toml",
+ &basic_manifest("test-dependency", "0.0.1"),
+ )
+ .build();
+
+ p.cargo("build").env("CARGO_HOME", "./cargo_home/").run();
+}
+
+#[cargo_test]
+fn user_specific_cfgs_are_filtered_out() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", r#"fn main() {}"#)
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ assert!(std::env::var_os("CARGO_CFG_PROC_MACRO").is_none());
+ assert!(std::env::var_os("CARGO_CFG_DEBUG_ASSERTIONS").is_none());
+ }
+ "#,
+ )
+ .build();
+
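+ // `debug_assertions` and `proc_macro` are filtered out of the CARGO_CFG_*
+ // variables seen by the build script, even when passed explicitly on the rustc
+ // command line (asserted in build.rs above).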
+ p.cargo("rustc -- --cfg debug_assertions --cfg proc_macro")
+ .run();
+ p.process(&p.bin("foo")).run();
+}
+
+#[cargo_test]
+fn close_output() {
+ // What happens when stdout or stderr is closed during a build.
+
+ // Server to know when rustc has spawned.
+ let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = listener.local_addr().unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [lib]
+ proc-macro = true
+
+ [[bin]]
+ name = "foobar"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ &r#"
+ use proc_macro::TokenStream;
+ use std::io::Read;
+
+ #[proc_macro]
+ pub fn repro(_input: TokenStream) -> TokenStream {
+ println!("hello stdout!");
+ eprintln!("hello stderr!");
+ // Tell the test we have started.
+ let mut socket = std::net::TcpStream::connect("__ADDR__").unwrap();
+ // Wait for the test to tell us to start printing.
+ let mut buf = [0];
+ drop(socket.read_exact(&mut buf));
+ let use_stderr = std::env::var("__CARGO_REPRO_STDERR").is_ok();
+ // Emit at least 1MB of data.
+ // Linux pipes can buffer up to 64KB.
+ // This test seems to be sensitive to having other threads
+ // calling fork. My hypothesis is that the stdout/stderr
+ // file descriptors are duplicated into the child process,
+ // and during the short window between fork and exec, the
+ // file descriptor is kept alive long enough for the
+ // build to finish. It's a half-baked theory, but this
+ // seems to prevent the spurious errors in CI.
+ // An alternative solution is to run this test in
+ // a single-threaded environment.
+ for i in 0..100000 {
+ if use_stderr {
+ eprintln!("0123456789{}", i);
+ } else {
+ println!("0123456789{}", i);
+ }
+ }
+ TokenStream::new()
+ }
+ "#
+ .replace("__ADDR__", &addr.to_string()),
+ )
+ .file(
+ "src/bin/foobar.rs",
+ r#"
+ foo::repro!();
+
+ fn main() {}
+ "#,
+ )
+ .build();
+
+ // The `stderr` flag selects which stream to forcefully close: stderr when true, stdout when false.
+ let spawn = |stderr: bool| {
+ let mut cmd = p.cargo("build").build_command();
+ cmd.stdout(Stdio::piped()).stderr(Stdio::piped());
+ if stderr {
+ cmd.env("__CARGO_REPRO_STDERR", "1");
+ }
+ let mut child = cmd.spawn().unwrap();
+ // Wait for proc macro to start.
+ let pm_conn = listener.accept().unwrap().0;
+ // Close stderr or stdout.
+ if stderr {
+ drop(child.stderr.take());
+ } else {
+ drop(child.stdout.take());
+ }
+ // Tell the proc-macro to continue.
+ drop(pm_conn);
+ // Read the output from the other channel.
+ let out: &mut dyn Read = if stderr {
+ child.stdout.as_mut().unwrap()
+ } else {
+ child.stderr.as_mut().unwrap()
+ };
+ let mut result = String::new();
+ out.read_to_string(&mut result).unwrap();
+ let status = child.wait().unwrap();
+ assert!(!status.success());
+ result
+ };
+
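+ // First close stdout while the proc-macro is printing; the build should fail,
+ // but the diagnostics on stderr must still arrive intact.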
+ let stderr = spawn(false);
+ compare::match_unordered(
+ "\
+[COMPILING] foo [..]
+hello stderr!
+[ERROR] [..]
+[WARNING] build failed, waiting for other jobs to finish...
+",
+ &stderr,
+ None,
+ )
+ .unwrap();
+
+ // Try again with stderr.
+ p.build_dir().rm_rf();
+ let stdout = spawn(true);
+ assert_eq!(stdout, "hello stdout!\n");
+}
+
+#[cargo_test]
+fn close_output_during_drain() {
+ // Test to close the output during the build phase (drain_the_queue).
+ // There was a bug where it would hang.
+
+ // Server to know when rustc has spawned.
+ let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = listener.local_addr().unwrap();
+
+ // Create a wrapper so the test can know when compiling has started.
+ let rustc_wrapper = {
+ let p = project()
+ .at("compiler")
+ .file("Cargo.toml", &basic_manifest("compiler", "1.0.0"))
+ .file(
+ "src/main.rs",
+ &r#"
+ use std::process::Command;
+ use std::env;
+ use std::io::Read;
+
+ fn main() {
+ // Only wait on the first dependency.
+ if matches!(env::var("CARGO_PKG_NAME").as_deref(), Ok("dep")) {
+ let mut socket = std::net::TcpStream::connect("__ADDR__").unwrap();
+ // Wait for the test to tell us to start printing.
+ let mut buf = [0];
+ drop(socket.read_exact(&mut buf));
+ }
+ let mut cmd = Command::new("rustc");
+ for arg in env::args_os().skip(1) {
+ cmd.arg(arg);
+ }
+ std::process::exit(cmd.status().unwrap().code().unwrap());
+ }
+ "#
+ .replace("__ADDR__", &addr.to_string()),
+ )
+ .build();
+ p.cargo("build").run();
+ p.bin("compiler")
+ };
+
+ Package::new("dep", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Spawn cargo, wait for the first rustc to start, and then close stderr.
+ let mut cmd = process(&cargo_exe())
+ .arg("check")
+ .cwd(p.root())
+ .env("RUSTC", rustc_wrapper)
+ .build_command();
+ cmd.stdout(Stdio::piped()).stderr(Stdio::piped());
+ let mut child = cmd.spawn().expect("cargo should spawn");
+ // Wait for the rustc wrapper to start.
+ let rustc_conn = listener.accept().unwrap().0;
+ // Close stderr to force an error.
+ drop(child.stderr.take());
+ // Tell the wrapper to continue.
+ drop(rustc_conn);
+ match child.wait() {
+ Ok(status) => assert!(!status.success()),
+ Err(e) => panic!("child wait failed: {}", e),
+ }
+}
+
+use cargo_test_support::registry::Dependency;
+
+#[cargo_test]
+fn reduced_reproduction_8249() {
+ // https://github.com/rust-lang/cargo/issues/8249
+ Package::new("a-src", "0.1.0").links("a").publish();
+ Package::new("a-src", "0.2.0").links("a").publish();
+
+ Package::new("b", "0.1.0")
+ .add_dep(Dependency::new("a-src", "0.1").optional(true))
+ .publish();
+ Package::new("b", "0.2.0")
+ .add_dep(Dependency::new("a-src", "0.2").optional(true))
+ .publish();
+
+ Package::new("c", "1.0.0")
+ .add_dep(&Dependency::new("b", "0.1.0"))
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ b = { version = "*", features = ["a-src"] }
+ a-src = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+ cargo_util::paths::append(&p.root().join("Cargo.toml"), b"c = \"*\"").unwrap();
+ p.cargo("check").run();
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn target_directory_backup_exclusion() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ // Newly created target/ should have CACHEDIR.TAG inside...
+ p.cargo("build").run();
+ let cachedir_tag = p.build_dir().join("CACHEDIR.TAG");
+ assert!(cachedir_tag.is_file());
+ assert!(fs::read_to_string(&cachedir_tag)
+ .unwrap()
+ .starts_with("Signature: 8a477f597d28d172789f06886806bc55"));
+ // ...but if target/ already exists CACHEDIR.TAG should not be created in it.
+ fs::remove_file(&cachedir_tag).unwrap();
+ p.cargo("build").run();
+ assert!(!&cachedir_tag.is_file());
+}
+
+#[cargo_test]
+fn simple_terminal_width() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() {
+ let _: () = 42;
+ }
+ "#,
+ )
+ .build();
+
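+ // The internal test-only env var below makes cargo believe the terminal is 20
+ // columns wide, which should reach rustc and rustdoc as `--diagnostic-width=20`.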
+ p.cargo("build -v")
+ .env("__CARGO_TEST_TTY_WIDTH_DO_NOT_USE_THIS", "20")
+ .with_status(101)
+ .with_stderr_contains("[RUNNING] `rustc [..]--diagnostic-width=20[..]")
+ .run();
+
+ p.cargo("doc -v")
+ .env("__CARGO_TEST_TTY_WIDTH_DO_NOT_USE_THIS", "20")
+ .with_stderr_contains("[RUNNING] `rustdoc [..]--diagnostic-width=20[..]")
+ .run();
+}
+
+#[cargo_test]
+fn build_script_o0_default() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v --release")
+ .with_stderr_does_not_contain("[..]build_script_build[..]opt-level[..]")
+ .run();
+}
+
+#[cargo_test]
+fn build_script_o0_default_even_with_release() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.release]
+ opt-level = 1
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v --release")
+ .with_stderr_does_not_contain("[..]build_script_build[..]opt-level[..]")
+ .run();
+}
+
+#[cargo_test]
+fn primary_package_env_var() {
+ // Test that CARGO_PRIMARY_PACKAGE is enabled only for "foo" and not for any dependency.
+
+ let is_primary_package = r#"
+ pub fn is_primary_package() -> bool {{
+ option_env!("CARGO_PRIMARY_PACKAGE").is_some()
+ }}
+ "#;
+
+ Package::new("qux", "0.1.0")
+ .file("src/lib.rs", is_primary_package)
+ .publish();
+
+ let baz = git::new("baz", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("src/lib.rs", is_primary_package)
+ });
+
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {{ path = "bar" }}
+ baz = {{ git = '{}' }}
+ qux = "0.1"
+ "#,
+ baz.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ &format!(
+ r#"
+ extern crate bar;
+ extern crate baz;
+ extern crate qux;
+
+ {}
+
+ #[test]
+ fn verify_primary_package() {{
+ assert!(!bar::is_primary_package());
+ assert!(!baz::is_primary_package());
+ assert!(!qux::is_primary_package());
+ assert!(is_primary_package());
+ }}
+ "#,
+ is_primary_package
+ ),
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", is_primary_package)
+ .build();
+
+ foo.cargo("test").run();
+}
+
+#[cargo_test]
+fn renamed_uplifted_artifact_remains_unmodified_after_rebuild() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").run();
+
+ let bin = p.bin("foo");
+ let renamed_bin = p.bin("foo-renamed");
+
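+ // Rename the uplifted binary, then rebuild with changed sources; the renamed
+ // copy must stay untouched while cargo uplifts a fresh `foo`.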
+ fs::rename(&bin, &renamed_bin).unwrap();
+
+ p.change_file("src/main.rs", "fn main() { eprintln!(\"hello, world\"); }");
+ p.cargo("build").run();
+
+ let not_the_same = !same_file::is_same_file(bin, renamed_bin).unwrap();
+ assert!(not_the_same, "renamed uplifted artifact must be unmodified");
+}
diff --git a/src/tools/cargo/tests/testsuite/build_plan.rs b/src/tools/cargo/tests/testsuite/build_plan.rs
new file mode 100644
index 000000000..647bc1234
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/build_plan.rs
@@ -0,0 +1,222 @@
+//! Tests for --build-plan feature.
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_bin_manifest, basic_manifest, main_file, project};
+
+#[cargo_test]
+fn cargo_build_plan_simple() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("build --build-plan -Zunstable-options")
+ .masquerade_as_nightly_cargo(&["build-plan"])
+ .with_json(
+ r#"
+ {
+ "inputs": [
+ "[..]/foo/Cargo.toml"
+ ],
+ "invocations": [
+ {
+ "args": "{...}",
+ "cwd": "[..]/cit/[..]/foo",
+ "deps": [],
+ "env": "{...}",
+ "kind": null,
+ "links": "{...}",
+ "outputs": "{...}",
+ "package_name": "foo",
+ "package_version": "0.5.0",
+ "program": "rustc",
+ "target_kind": ["bin"],
+ "compile_mode": "build"
+ }
+ ]
+ }
+ "#,
+ )
+ .run();
+ assert!(!p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn cargo_build_plan_single_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.5.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate bar;
+ pub fn foo() { bar::bar(); }
+
+ #[test]
+ fn test() { foo(); }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+ p.cargo("build --build-plan -Zunstable-options")
+ .masquerade_as_nightly_cargo(&["build-plan"])
+ .with_json(
+ r#"
+ {
+ "inputs": [
+ "[..]/foo/Cargo.toml",
+ "[..]/foo/bar/Cargo.toml"
+ ],
+ "invocations": [
+ {
+ "args": "{...}",
+ "cwd": "[..]/cit/[..]/foo",
+ "deps": [],
+ "env": "{...}",
+ "kind": null,
+ "links": "{...}",
+ "outputs": [
+ "[..]/foo/target/debug/deps/libbar-[..].rlib",
+ "[..]/foo/target/debug/deps/libbar-[..].rmeta"
+ ],
+ "package_name": "bar",
+ "package_version": "0.0.1",
+ "program": "rustc",
+ "target_kind": ["lib"],
+ "compile_mode": "build"
+ },
+ {
+ "args": "{...}",
+ "cwd": "[..]/cit/[..]/foo",
+ "deps": [0],
+ "env": "{...}",
+ "kind": null,
+ "links": "{...}",
+ "outputs": [
+ "[..]/foo/target/debug/deps/libfoo-[..].rlib",
+ "[..]/foo/target/debug/deps/libfoo-[..].rmeta"
+ ],
+ "package_name": "foo",
+ "package_version": "0.5.0",
+ "program": "rustc",
+ "target_kind": ["lib"],
+ "compile_mode": "build"
+ }
+ ]
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_build_plan_build_script() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() {}"#)
+ .file("build.rs", r#"fn main() {}"#)
+ .build();
+
+ p.cargo("build --build-plan -Zunstable-options")
+ .masquerade_as_nightly_cargo(&["build-plan"])
+ .with_json(
+ r#"
+ {
+ "inputs": [
+ "[..]/foo/Cargo.toml"
+ ],
+ "invocations": [
+ {
+ "args": "{...}",
+ "cwd": "[..]/cit/[..]/foo",
+ "deps": [],
+ "env": "{...}",
+ "kind": null,
+ "links": "{...}",
+ "outputs": "{...}",
+ "package_name": "foo",
+ "package_version": "0.5.0",
+ "program": "rustc",
+ "target_kind": ["custom-build"],
+ "compile_mode": "build"
+ },
+ {
+ "args": "{...}",
+ "cwd": "[..]/cit/[..]/foo",
+ "deps": [0],
+ "env": "{...}",
+ "kind": null,
+ "links": "{...}",
+ "outputs": [],
+ "package_name": "foo",
+ "package_version": "0.5.0",
+ "program": "[..]/build-script-build",
+ "target_kind": ["custom-build"],
+ "compile_mode": "run-custom-build"
+ },
+ {
+ "args": "{...}",
+ "cwd": "[..]/cit/[..]/foo",
+ "deps": [1],
+ "env": "{...}",
+ "kind": null,
+ "links": "{...}",
+ "outputs": "{...}",
+ "package_name": "foo",
+ "package_version": "0.5.0",
+ "program": "rustc",
+ "target_kind": ["bin"],
+ "compile_mode": "build"
+ }
+ ]
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_plan_with_dev_dep() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dev-dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build --build-plan -Zunstable-options")
+ .masquerade_as_nightly_cargo(&["build-plan"])
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/build_script.rs b/src/tools/cargo/tests/testsuite/build_script.rs
new file mode 100644
index 000000000..80a24960e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/build_script.rs
@@ -0,0 +1,5168 @@
+//! Tests for build.rs scripts.
+
+use cargo_test_support::compare::assert_match_exact;
+use cargo_test_support::install::cargo_home;
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::registry::Package;
+use cargo_test_support::tools;
+use cargo_test_support::{
+ basic_manifest, cargo_exe, cross_compile, is_coarse_mtime, project, project_in,
+};
+use cargo_test_support::{rustc_host, sleep_ms, slow_cpu_multiplier, symlink_supported};
+use cargo_util::paths::{self, remove_dir_all};
+use std::env;
+use std::fs;
+use std::io;
+use std::thread;
+
+#[cargo_test]
+fn custom_build_script_failed() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("build.rs", "fn main() { std::process::exit(101); }")
+ .build();
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]`
+[RUNNING] `[..]/build-script-build`
+[ERROR] failed to run custom build command for `foo v0.5.0 ([CWD])`
+
+Caused by:
+ process didn't exit successfully: `[..]/build-script-build` (exit [..]: 101)",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_script_failed_backtraces_message() {
+ // In this situation (no dependency sharing), debuginfo is turned off in
+ // `dev.build-override`. However, if an error occurs while running e.g. a build
+ // script and backtraces are enabled, a message explaining how to improve
+ // backtraces is also displayed.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("build.rs", "fn main() { std::process::exit(101); }")
+ .build();
+ p.cargo("build -v")
+ .env("RUST_BACKTRACE", "1")
+ .with_status(101)
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]`
+[RUNNING] `[..]/build-script-build`
+[ERROR] failed to run custom build command for `foo v0.5.0 ([CWD])`
+note: To improve backtraces for build dependencies, set the \
+CARGO_PROFILE_DEV_BUILD_OVERRIDE_DEBUG=true environment variable [..]
+
+Caused by:
+ process didn't exit successfully: `[..]/build-script-build` (exit [..]: 101)",
+ )
+ .run();
+
+ p.cargo("check -v")
+ .env("RUST_BACKTRACE", "1")
+ .with_status(101)
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `[..]/build-script-build`
+[ERROR] failed to run custom build command for `foo v0.5.0 ([CWD])`
+note: To improve backtraces for build dependencies, set the \
+CARGO_PROFILE_DEV_BUILD_OVERRIDE_DEBUG=true environment variable [..]
+
+Caused by:
+ process didn't exit successfully: `[..]/build-script-build` (exit [..]: 101)",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_script_failed_backtraces_message_with_debuginfo() {
+ // Same as `custom_build_script_failed_backtraces_message` above, but this time
+ // ensuring that the hint about requesting debuginfo to improve backtraces is not
+ // shown when debuginfo is already turned on.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("build.rs", "fn main() { std::process::exit(101); }")
+ .build();
+ p.cargo("build -v")
+ .env("RUST_BACKTRACE", "1")
+ .env("CARGO_PROFILE_DEV_BUILD_OVERRIDE_DEBUG", "true")
+ .with_status(101)
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]`
+[RUNNING] `[..]/build-script-build`
+[ERROR] failed to run custom build command for `foo v0.5.0 ([CWD])`
+
+Caused by:
+ process didn't exit successfully: `[..]/build-script-build` (exit [..]: 101)",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_env_vars() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [features]
+ bar_feat = ["bar/foo"]
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+
+ [features]
+ foo = []
+ "#,
+ )
+ .file("bar/src/lib.rs", "pub fn hello() {}");
+
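+ // Canonicalize the cargo and rustc paths so the generated build script can
+ // compare the `$CARGO` value against them exactly.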
+ let cargo = cargo_exe().canonicalize().unwrap();
+ let cargo = cargo.to_str().unwrap();
+ let rustc = paths::resolve_executable("rustc".as_ref())
+ .unwrap()
+ .canonicalize()
+ .unwrap();
+ let rustc = rustc.to_str().unwrap();
+ let file_content = format!(
+ r##"
+ use std::env;
+ use std::path::Path;
+
+ fn main() {{
+ let _target = env::var("TARGET").unwrap();
+ let _ncpus = env::var("NUM_JOBS").unwrap();
+ let _dir = env::var("CARGO_MANIFEST_DIR").unwrap();
+
+ let opt = env::var("OPT_LEVEL").unwrap();
+ assert_eq!(opt, "0");
+
+ let opt = env::var("PROFILE").unwrap();
+ assert_eq!(opt, "debug");
+
+ let debug = env::var("DEBUG").unwrap();
+ assert_eq!(debug, "true");
+
+ let out = env::var("OUT_DIR").unwrap();
+ assert!(out.starts_with(r"{0}"));
+ assert!(Path::new(&out).is_dir());
+
+ let _host = env::var("HOST").unwrap();
+
+ let _feat = env::var("CARGO_FEATURE_FOO").unwrap();
+
+ let cargo = env::var("CARGO").unwrap();
+ if env::var_os("CHECK_CARGO_IS_RUSTC").is_some() {{
+ assert_eq!(cargo, r#"{rustc}"#);
+ }} else {{
+ assert_eq!(cargo, r#"{cargo}"#);
+ }}
+
+ let rustc = env::var("RUSTC").unwrap();
+ assert_eq!(rustc, "rustc");
+
+ let rustdoc = env::var("RUSTDOC").unwrap();
+ assert_eq!(rustdoc, "rustdoc");
+
+ assert!(env::var("RUSTC_WRAPPER").is_err());
+ assert!(env::var("RUSTC_WORKSPACE_WRAPPER").is_err());
+
+ assert!(env::var("RUSTC_LINKER").is_err());
+
+ assert!(env::var("RUSTFLAGS").is_err());
+ let rustflags = env::var("CARGO_ENCODED_RUSTFLAGS").unwrap();
+ assert_eq!(rustflags, "");
+ }}
+ "##,
+ p.root()
+ .join("target")
+ .join("debug")
+ .join("build")
+ .display(),
+ );
+
+ let p = p.file("bar/build.rs", &file_content).build();
+
+ p.cargo("build --features bar_feat").run();
+ p.cargo("build --features bar_feat")
+ // we use rustc since $CARGO is only used if it points to a path that exists
+ .env("CHECK_CARGO_IS_RUSTC", "1")
+ .env(cargo::CARGO_ENV, rustc)
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_env_var_rustflags() {
+ let rustflags = "--cfg=special";
+ let rustflags_alt = "--cfg=notspecial";
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [build]
+ rustflags = ["{}"]
+ "#,
+ rustflags
+ ),
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
+ use std::env;
+
+ fn main() {{
+ // Static assertion that exactly one of the cfg paths is always taken:
+ // `x` is only initialized inside the cfg'd blocks, so compilation fails
+ // if neither (or both) of them is active.
+ assert!(env::var("RUSTFLAGS").is_err());
+ let x;
+ #[cfg(special)]
+ {{ assert_eq!(env::var("CARGO_ENCODED_RUSTFLAGS").unwrap(), "{}"); x = String::new(); }}
+ #[cfg(notspecial)]
+ {{ assert_eq!(env::var("CARGO_ENCODED_RUSTFLAGS").unwrap(), "{}"); x = String::new(); }}
+ let _ = x;
+ }}
+ "#,
+ rustflags, rustflags_alt,
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ // RUSTFLAGS overrides build.rustflags, so --cfg=special shouldn't be passed
+ p.cargo("check").env("RUSTFLAGS", rustflags_alt).run();
+}
+
+#[cargo_test]
+fn custom_build_env_var_encoded_rustflags() {
+ // NOTE: We use "-Clink-arg=-B nope" here rather than, say, "-A missing_docs", since for the
+ // latter it won't matter if the whitespace accidentally gets split, as rustc will do the right
+ // thing either way.
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["-Clink-arg=-B nope", "--cfg=foo"]
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+ fn main() {{
+ assert_eq!(env::var("CARGO_ENCODED_RUSTFLAGS").unwrap(), "-Clink-arg=-B nope\x1f--cfg=foo");
+                }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+}
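+
+// Note on the assertion above: CARGO_ENCODED_RUSTFLAGS joins the flags with the ASCII
+// 0x1f "unit separator" so that flags containing spaces (like "-Clink-arg=-B nope")
+// survive intact. A minimal sketch of splitting it back apart in a build script,
+// assuming only that documented separator:
+//
+//     let encoded = std::env::var("CARGO_ENCODED_RUSTFLAGS").unwrap_or_default();
+//     let flags: Vec<&str> = encoded.split('\u{1f}').filter(|f| !f.is_empty()).collect();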
+
+#[cargo_test]
+fn custom_build_env_var_rustc_wrapper() {
+ let wrapper = tools::echo_wrapper();
+ let p = project()
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+                fn main() {
+ assert_eq!(
+ env::var("RUSTC_WRAPPER").unwrap(),
+ env::var("CARGO_RUSTC_WRAPPER_CHECK").unwrap()
+ );
+                }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .env("CARGO_BUILD_RUSTC_WRAPPER", &wrapper)
+ .env("CARGO_RUSTC_WRAPPER_CHECK", &wrapper)
+ .run();
+}
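+
+// The env vars used above map onto cargo configuration: CARGO_BUILD_RUSTC_WRAPPER is the
+// environment form of the `build.rustc-wrapper` config key, and the resolved wrapper is
+// then exposed to build scripts as RUSTC_WRAPPER. A config-file sketch that should be
+// equivalent to the env var used in this test (wrapper path illustrative):
+//
+//     # .cargo/config.toml
+//     [build]
+//     rustc-wrapper = "/path/to/echo-wrapper"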
+
+#[cargo_test]
+fn custom_build_env_var_rustc_workspace_wrapper() {
+ let wrapper = tools::echo_wrapper();
+
+ // Workspace wrapper should be set for any crate we're operating directly on.
+ let p = project()
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+                fn main() {
+ assert_eq!(
+ env::var("RUSTC_WORKSPACE_WRAPPER").unwrap(),
+ env::var("CARGO_RUSTC_WORKSPACE_WRAPPER_CHECK").unwrap()
+ );
+                }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .env("CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER", &wrapper)
+ .env("CARGO_RUSTC_WORKSPACE_WRAPPER_CHECK", &wrapper)
+ .run();
+
+ // But should not be set for a crate from the registry, as then it's not in a workspace.
+ Package::new("bar", "0.1.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ links = "a"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+                fn main() {
+ assert!(env::var("RUSTC_WORKSPACE_WRAPPER").is_err());
+                }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .env("CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER", &wrapper)
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_env_var_rustc_linker() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let target = cross_compile::alternate();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}]
+ linker = "/path/to/linker"
+ "#,
+ target
+ ),
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+ fn main() {
+ assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker"));
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // no crate type set => linker never called => build succeeds if and
+ // only if build.rs succeeds, despite linker binary not existing.
+ p.cargo("build --target").arg(&target).run();
+}
+
+#[cargo_test]
+fn custom_build_env_var_rustc_linker_bad_host_target() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}]
+ linker = "/path/to/linker"
+ "#,
+ target
+ ),
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+    // With no --target passed, host == target, so the bad [target.<host>] linker also
+    // applies to the build script; compiling build.rs should therefore fail.
+ p.cargo("build --verbose")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/linker [..]`
+[ERROR] linker `[..]/path/to/linker` not found
+"
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_env_var_rustc_linker_host_target() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ target-applies-to-host = false
+ [target.{}]
+ linker = "/path/to/linker"
+ "#,
+ target
+ ),
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+ fn main() {
+ assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker"));
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // no crate type set => linker never called => build succeeds if and
+ // only if build.rs succeeds, despite linker binary not existing.
+ p.cargo("build -Z target-applies-to-host --target")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host"])
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_env_var_rustc_linker_host_target_env() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}]
+ linker = "/path/to/linker"
+ "#,
+ target
+ ),
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+ fn main() {
+ assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker"));
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // no crate type set => linker never called => build succeeds if and
+ // only if build.rs succeeds, despite linker binary not existing.
+ p.cargo("build -Z target-applies-to-host --target")
+ .env("CARGO_TARGET_APPLIES_TO_HOST", "false")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host"])
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_invalid_host_config_feature_flag() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}]
+ linker = "/path/to/linker"
+ "#,
+ target
+ ),
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+    // The build should fail because -Zhost-config was passed without -Ztarget-applies-to-host.
+ p.cargo("build -Z host-config --target")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["host-config"])
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: the -Zhost-config flag requires the -Ztarget-applies-to-host flag to be set
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_linker_host_target_with_bad_host_config() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [host]
+ linker = "/path/to/host/linker"
+ [target.{}]
+ linker = "/path/to/target/linker"
+ "#,
+ target
+ ),
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+ // build.rs should fail due to bad host linker being set
+ p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"])
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/host/linker [..]`
+[ERROR] linker `[..]/path/to/host/linker` not found
+"
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_linker_bad_host() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [host]
+ linker = "/path/to/host/linker"
+ [target.{}]
+ linker = "/path/to/target/linker"
+ "#,
+ target
+ ),
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+ // build.rs should fail due to bad host linker being set
+ p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"])
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/host/linker [..]`
+[ERROR] linker `[..]/path/to/host/linker` not found
+"
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_linker_bad_host_with_arch() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [host]
+ linker = "/path/to/host/linker"
+ [host.{}]
+ linker = "/path/to/host/arch/linker"
+ [target.{}]
+ linker = "/path/to/target/linker"
+ "#,
+ target, target
+ ),
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+ // build.rs should fail due to bad host linker being set
+ p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"])
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/host/arch/linker [..]`
+[ERROR] linker `[..]/path/to/host/arch/linker` not found
+"
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_env_var_rustc_linker_cross_arch_host() {
+ let target = rustc_host();
+ let cross_target = cross_compile::alternate();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [host.{}]
+ linker = "/path/to/host/arch/linker"
+ [target.{}]
+ linker = "/path/to/target/linker"
+ "#,
+ cross_target, target
+ ),
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+ fn main() {
+ assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/target/linker"));
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+    // build.rs itself should build fine since the [host.<cross arch>] entry does not match
+    // the actual host; the assertion succeeds because RUSTC_LINKER still carries the target linker.
+ p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"])
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_linker_bad_cross_arch_host() {
+ let target = rustc_host();
+ let cross_target = cross_compile::alternate();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [host]
+ linker = "/path/to/host/linker"
+ [host.{}]
+ linker = "/path/to/host/arch/linker"
+ [target.{}]
+ linker = "/path/to/target/linker"
+ "#,
+ cross_target, target
+ ),
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+ // build.rs should fail due to bad host linker being set
+ p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"])
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/host/linker [..]`
+[ERROR] linker `[..]/path/to/host/linker` not found
+"
+ )
+ .run();
+}
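+
+// The group of tests above exercises the nightly `-Ztarget-applies-to-host` and
+// `-Zhost-config` flags: with `target-applies-to-host = false`, `[target.<host triple>]`
+// settings no longer apply to host artifacts such as build scripts, and a `[host]` table
+// (or a more specific `[host.<triple>]` table, which takes precedence) supplies host-side
+// settings such as the linker instead. A config sketch of the layering, with an
+// illustrative triple and paths:
+//
+//     [host]                              # generic host settings
+//     linker = "/path/to/host/linker"
+//     [host.x86_64-unknown-linux-gnu]     # overrides [host] for that exact host triple
+//     linker = "/path/to/host/arch/linker"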
+
+#[cargo_test]
+fn custom_build_script_wrong_rustc_flags() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-flags=-aaa -bbb"); }"#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains(
+ "[ERROR] Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ([CWD])`: \
+ `-aaa -bbb`",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_script_rustc_flags() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.foo]
+ path = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .file(
+ "foo/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build --verbose")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build foo/build.rs [..]
+[RUNNING] `[..]build-script-build`
+[RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\
+ -L dependency=[CWD]/target/debug/deps \
+ -L /dummy/path1 -L /dummy/path2 -l nonexistinglib`
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar src/main.rs [..]\
+ -L dependency=[CWD]/target/debug/deps \
+ --extern foo=[..]libfoo-[..] \
+ -L /dummy/path1 -L /dummy/path2`
+[FINISHED] dev [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_script_rustc_flags_no_space() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.foo]
+ path = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .file(
+ "foo/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-flags=-lnonexistinglib -L/dummy/path1 -L/dummy/path2");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build --verbose")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build foo/build.rs [..]
+[RUNNING] `[..]build-script-build`
+[RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\
+ -L dependency=[CWD]/target/debug/deps \
+ -L /dummy/path1 -L /dummy/path2 -l nonexistinglib`
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar src/main.rs [..]\
+ -L dependency=[CWD]/target/debug/deps \
+ --extern foo=[..]libfoo-[..] \
+ -L /dummy/path1 -L /dummy/path2`
+[FINISHED] dev [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn links_no_build_cmd() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ links = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+ package `foo v0.5.0 ([CWD])` specifies that it links to `a` but does \
+not have a custom build script
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn links_duplicates() {
+    // This tests that duplicate `links` values are caught at resolve time.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ links = "a"
+ build = "build.rs"
+
+ [dependencies.a-sys]
+ path = "a-sys"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "")
+ .file(
+ "a-sys/Cargo.toml",
+ r#"
+ [package]
+ name = "a-sys"
+ version = "0.5.0"
+ authors = []
+ links = "a"
+ build = "build.rs"
+ "#,
+ )
+ .file("a-sys/src/lib.rs", "")
+ .file("a-sys/build.rs", "")
+ .build();
+
+ p.cargo("build").with_status(101)
+ .with_stderr("\
+error: failed to select a version for `a-sys`.
+ ... required by package `foo v0.5.0 ([..])`
+versions that meet the requirements `*` are: 0.5.0
+
+the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well:
+package `foo v0.5.0 ([..])`
+Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a-sys' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.
+
+failed to select a version for `a-sys` which could resolve this conflict
+").run();
+}
+
+#[cargo_test]
+fn links_duplicates_old_registry() {
+ // Test old links validator. See `validate_links`.
+ Package::new("bar", "0.1.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ links = "a"
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ links = "a"
+
+ [dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 ([..])
+[ERROR] multiple packages link to native library `a`, \
+ but a native library can be linked only once
+
+package `bar v0.1.0`
+ ... which satisfies dependency `bar = \"^0.1\"` (locked to 0.1.0) of package `foo v0.1.0 ([..]foo)`
+links to native library `a`
+
+package `foo v0.1.0 ([..]foo)`
+also links to native library `a`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn links_duplicates_deep_dependency() {
+    // This tests that duplicate `links` values are caught at resolve time.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ links = "a"
+ build = "build.rs"
+
+ [dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+
+ [dependencies.a-sys]
+ path = "a-sys"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file("a/build.rs", "")
+ .file(
+ "a/a-sys/Cargo.toml",
+ r#"
+ [package]
+ name = "a-sys"
+ version = "0.5.0"
+ authors = []
+ links = "a"
+ build = "build.rs"
+ "#,
+ )
+ .file("a/a-sys/src/lib.rs", "")
+ .file("a/a-sys/build.rs", "")
+ .build();
+
+ p.cargo("build").with_status(101)
+ .with_stderr("\
+error: failed to select a version for `a-sys`.
+ ... required by package `a v0.5.0 ([..])`
+ ... which satisfies path dependency `a` of package `foo v0.5.0 ([..])`
+versions that meet the requirements `*` are: 0.5.0
+
+the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well:
+package `foo v0.5.0 ([..])`
+Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a-sys' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.
+
+failed to select a version for `a-sys` which could resolve this conflict
+").run();
+}
+
+#[cargo_test]
+fn overrides_and_links() {
+ let target = rustc_host();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+
+ [dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ fn main() {
+ assert_eq!(env::var("DEP_FOO_FOO").ok().expect("FOO missing"),
+ "bar");
+ assert_eq!(env::var("DEP_FOO_BAR").ok().expect("BAR missing"),
+ "baz");
+ }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}.foo]
+ rustc-flags = "-L foo -L bar"
+ foo = "bar"
+ bar = "baz"
+ "#,
+ target
+ ),
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ links = "foo"
+ build = "build.rs"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file("a/build.rs", "not valid rust code")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[..]
+[..]
+[..]
+[..]
+[..]
+[RUNNING] `rustc --crate-name foo [..] -L foo -L bar`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
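+
+// Context for the test above: a `[target.<triple>.<links-name>]` config table overrides
+// the build script of the package whose manifest declares `links = "<links-name>"`. The
+// overridden script is never compiled or run (which is why `a/build.rs` can contain
+// invalid Rust here), and the table's keys stand in for the script's output:
+// rustc-flags/rustc-link-search/rustc-link-lib feed rustc, while other keys become
+// DEP_<LINKS>_<KEY> metadata for dependent build scripts, as asserted above. Sketch,
+// with an illustrative triple and values:
+//
+//     [target.x86_64-unknown-linux-gnu.foo]
+//     rustc-link-search = ["/opt/foo/lib"]   # replaces what foo's build.rs would print
+//     include = "/opt/foo/include"           # becomes DEP_FOO_INCLUDE downstream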
+
+#[cargo_test]
+fn unused_overrides() {
+ let target = rustc_host();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}.foo]
+ rustc-flags = "-L foo -L bar"
+ foo = "bar"
+ bar = "baz"
+ "#,
+ target
+ ),
+ )
+ .build();
+
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn links_passes_env_vars() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+
+ [dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ fn main() {
+ assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar");
+ assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz");
+ }
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ links = "foo"
+ build = "build.rs"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "a/build.rs",
+ r#"
+ use std::env;
+ fn main() {
+ let lib = env::var("CARGO_MANIFEST_LINKS").unwrap();
+ assert_eq!(lib, "foo");
+
+ println!("cargo:foo=bar");
+ println!("cargo:bar=baz");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v").run();
+}
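+
+// Mechanism exercised above: when a package declares `links = "foo"`, the plain
+// `cargo:KEY=VALUE` metadata lines its build script prints (those that aren't reserved
+// instructions like rustc-link-lib) are re-exported to the build scripts of packages that
+// directly depend on it as DEP_FOO_KEY, with the links name and key uppercased. A minimal
+// consuming sketch, assuming the "foo" links name from this test:
+//
+//     // build.rs of a dependent package
+//     fn main() {
+//         let bar = std::env::var("DEP_FOO_BAR").expect("set by foo's build script");
+//         println!("cargo:rustc-env=FOO_BAR={}", bar); // e.g. forward it to the crate
+//     }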
+
+#[cargo_test]
+fn only_rerun_build_script() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v").run();
+ p.root().move_into_the_past();
+
+ p.change_file("some-new-file", "");
+ p.root().move_into_the_past();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.5.0 ([CWD]): the precalculated components changed
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rebuild_continues_to_pass_env_vars() {
+ let a = project()
+ .at("a")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ links = "foo"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::time::Duration;
+ fn main() {
+ println!("cargo:foo=bar");
+ println!("cargo:bar=baz");
+ std::thread::sleep(Duration::from_millis(500));
+ }
+ "#,
+ )
+ .build();
+ a.root().move_into_the_past();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+
+ [dependencies.a]
+ path = '{}'
+ "#,
+ a.root().display()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ fn main() {
+ assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar");
+ assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v").run();
+ p.root().move_into_the_past();
+
+ p.change_file("some-new-file", "");
+ p.root().move_into_the_past();
+
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn testing_and_such() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+
+ println!("build");
+ p.cargo("build -v").run();
+ p.root().move_into_the_past();
+
+ p.change_file("src/lib.rs", "");
+ p.root().move_into_the_past();
+
+ println!("test");
+ p.cargo("test -vj1")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.5.0 ([CWD]): the precalculated components changed
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..]`
+[RUNNING] `rustc --crate-name foo [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/foo-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]--test [..]`",
+ )
+ .with_stdout_contains_n("running 0 tests", 2)
+ .run();
+
+ println!("doc");
+ p.cargo("doc -v")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.5.0 ([CWD])
+[RUNNING] `rustdoc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.change_file("src/main.rs", "fn main() {}");
+ println!("run");
+ p.cargo("run")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn propagation_of_l_flags() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ links = "bar"
+ build = "build.rs"
+
+ [dependencies.b]
+ path = "../b"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "a/build.rs",
+ r#"fn main() { println!("cargo:rustc-flags=-L bar"); }"#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+ links = "foo"
+ build = "build.rs"
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file("b/build.rs", "bad file")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}.foo]
+ rustc-flags = "-L foo"
+ "#,
+ target
+ ),
+ )
+ .build();
+
+ p.cargo("build -v -j1")
+ .with_stderr_contains(
+ "\
+[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]`
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name foo [..] -L bar -L foo`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn propagation_of_l_flags_new() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ links = "bar"
+ build = "build.rs"
+
+ [dependencies.b]
+ path = "../b"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "a/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-search=bar");
+ }
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+ links = "foo"
+ build = "build.rs"
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file("b/build.rs", "bad file")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}.foo]
+ rustc-link-search = ["foo"]
+ "#,
+ target
+ ),
+ )
+ .build();
+
+ p.cargo("build -v -j1")
+ .with_stderr_contains(
+ "\
+[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]`
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name foo [..] -L bar -L foo`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_deps_simple() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ [build-dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ "
+ #[allow(unused_extern_crates)]
+ extern crate a;
+ fn main() {}
+ ",
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.5.0"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] a v0.5.0 ([CWD]/a)
+[RUNNING] `rustc --crate-name a [..]`
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] build.rs [..] --extern a=[..]`
+[RUNNING] `[..]/foo-[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_deps_not_for_normal() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ [build-dependencies.aaaaa]
+ path = "a"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate aaaaa;",
+ )
+ .file(
+ "build.rs",
+ "
+ #[allow(unused_extern_crates)]
+ extern crate aaaaa;
+ fn main() {}
+ ",
+ )
+ .file("a/Cargo.toml", &basic_manifest("aaaaa", "0.5.0"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v --target")
+ .arg(&target)
+ .with_status(101)
+ .with_stderr_contains("[..]can't find crate for `aaaaa`[..]")
+ .with_stderr_contains(
+ "\
+[ERROR] could not compile `foo` (lib) due to previous error
+
+Caused by:
+ process didn't exit successfully: [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_cmd_with_a_build_cmd() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+
+ [build-dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ "
+ #[allow(unused_extern_crates)]
+ extern crate a;
+ fn main() {}
+ ",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+
+ [build-dependencies.b]
+ path = "../b"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "a/build.rs",
+ "#[allow(unused_extern_crates)] extern crate b; fn main() {}",
+ )
+ .file("b/Cargo.toml", &basic_manifest("b", "0.5.0"))
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] b v0.5.0 ([CWD]/b)
+[RUNNING] `rustc --crate-name b [..]`
+[COMPILING] a v0.5.0 ([CWD]/a)
+[RUNNING] `rustc [..] a/build.rs [..] --extern b=[..]`
+[RUNNING] `[..]/a-[..]/build-script-build`
+[RUNNING] `rustc --crate-name a [..]lib.rs [..]--crate-type lib \
+ --emit=[..]link[..] \
+ -C metadata=[..] \
+ --out-dir [..]target/debug/deps \
+ -L [..]target/debug/deps`
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin \
+ --emit=[..]link[..]\
+ -C metadata=[..] --out-dir [..] \
+ -L [..]target/debug/deps \
+ --extern a=[..]liba[..].rlib`
+[RUNNING] `[..]/foo-[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..]lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]-C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L [..]target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn out_dir_is_preserved() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ use std::fs::File;
+ use std::path::Path;
+ fn main() {
+ let out = env::var("OUT_DIR").unwrap();
+ File::create(Path::new(&out).join("foo")).unwrap();
+ }
+ "#,
+ )
+ .build();
+
+ // Make the file
+ p.cargo("build -v").run();
+
+    // Change the build script to assert that the file is still there
+ p.change_file(
+ "build.rs",
+ r#"
+ use std::env;
+ use std::fs::File;
+ use std::path::Path;
+ fn main() {
+ let out = env::var("OUT_DIR").unwrap();
+ File::open(&Path::new(&out).join("foo")).unwrap();
+ }
+ "#,
+ );
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] foo [..]: the file `build.rs` has changed ([..])
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build [..]
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+    // Run a fresh build where the file should be preserved
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // One last time to make sure it's still there.
+ p.change_file("foo", "");
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] foo [..]: the precalculated components changed
+[COMPILING] foo [..]
+[RUNNING] `[..]build-script-build`
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn output_separate_lines() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-flags=-L foo");
+ println!("cargo:rustc-flags=-l static=foo");
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[..]/foo-[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo`
+[ERROR] could not find native static library [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn output_separate_lines_new() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-search=foo");
+ println!("cargo:rustc-link-lib=static=foo");
+ println!("cargo:rustc-link-lib=bar");
+ println!("cargo:rustc-link-search=bar");
+ }
+ "#,
+ )
+ .build();
+ // The order of the arguments passed to rustc is important.
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[..]/foo-[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..] -L foo -L bar -l static=foo -l bar`
+[ERROR] could not find native static library [..]
+",
+ )
+ .run();
+}
+
+#[cfg(not(windows))] // FIXME(#867)
+#[cargo_test]
+fn code_generation() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ include!(concat!(env!("OUT_DIR"), "/hello.rs"));
+
+ fn main() {
+ println!("{}", message());
+ }
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ use std::fs;
+ use std::path::PathBuf;
+
+ fn main() {
+ let dst = PathBuf::from(env::var("OUT_DIR").unwrap());
+ fs::write(dst.join("hello.rs"),
+ "
+ pub fn message() -> &'static str {
+ \"Hello, World!\"
+ }
+ ")
+ .unwrap();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo`",
+ )
+ .with_stdout("Hello, World!")
+ .run();
+
+ p.cargo("test").run();
+}
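+
+// The pattern above (write generated source into OUT_DIR from build.rs, then pull it in
+// with `include!(concat!(env!("OUT_DIR"), "/hello.rs"))`) is the supported way to do code
+// generation: OUT_DIR is the only directory a build script should write to, and the
+// include! splices the generated items into the crate at compile time.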
+
+#[cargo_test]
+fn release_with_build_script() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v --release").run();
+}
+
+#[cargo_test]
+fn build_script_only() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("build.rs", r#"fn main() {}"#)
+ .build();
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ no targets specified in the manifest
+ either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn shared_dep_with_a_build_script() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+
+ [dependencies.a]
+ path = "a"
+
+ [build-dependencies.b]
+ path = "b"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("a/build.rs", "fn main() {}")
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies.a]
+ path = "../a"
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn transitive_dep_host() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+
+ [build-dependencies.b]
+ path = "b"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ links = "foo"
+ build = "build.rs"
+ "#,
+ )
+ .file("a/build.rs", "fn main() {}")
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+
+ [lib]
+ name = "b"
+ plugin = true
+
+ [dependencies.a]
+ path = "../a"
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn test_a_lib_with_a_build_command() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ include!(concat!(env!("OUT_DIR"), "/foo.rs"));
+
+ /// ```
+ /// foo::bar();
+ /// ```
+ pub fn bar() {
+ assert_eq!(foo(), 1);
+ }
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ use std::fs;
+ use std::path::PathBuf;
+
+ fn main() {
+ let out = PathBuf::from(env::var("OUT_DIR").unwrap());
+ fs::write(out.join("foo.rs"), "fn foo() -> i32 { 1 }").unwrap();
+ }
+ "#,
+ )
+ .build();
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn test_dev_dep_build_script() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dev-dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("a/build.rs", "fn main() {}")
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn build_script_with_dynamic_native_dependency() {
+ let build = project()
+ .at("builder")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "builder"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "builder"
+ crate-type = ["dylib"]
+ "#,
+ )
+ .file("src/lib.rs", "#[no_mangle] pub extern fn foo() {}")
+ .build();
+
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+
+ [build-dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("build.rs", "extern crate bar; fn main() { bar::bar() }")
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"
+ use std::env;
+ use std::fs;
+ use std::path::PathBuf;
+
+ fn main() {
+ let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
+ let root = PathBuf::from(env::var("BUILDER_ROOT").unwrap());
+ let file = format!("{}builder{}",
+ env::consts::DLL_PREFIX,
+ env::consts::DLL_SUFFIX);
+ let src = root.join(&file);
+ let dst = out_dir.join(&file);
+ fs::copy(src, dst).unwrap();
+ if cfg!(target_env = "msvc") {
+ fs::copy(root.join("builder.dll.lib"),
+ out_dir.join("builder.dll.lib")).unwrap();
+ }
+ println!("cargo:rustc-link-search=native={}", out_dir.display());
+ }
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ pub fn bar() {
+ #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))]
+ #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))]
+ extern { fn foo(); }
+ unsafe { foo() }
+ }
+ "#,
+ )
+ .build();
+
+ build
+ .cargo("build -v")
+ .env("CARGO_LOG", "cargo::ops::cargo_rustc")
+ .run();
+
+ let root = build.root().join("target").join("debug");
+ foo.cargo("build -v")
+ .env("BUILDER_ROOT", root)
+ .env("CARGO_LOG", "cargo::ops::cargo_rustc")
+ .run();
+}
+
+#[cargo_test]
+fn profile_and_opt_level_set_correctly() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+ fn main() {
+ assert_eq!(env::var("OPT_LEVEL").unwrap(), "3");
+ assert_eq!(env::var("PROFILE").unwrap(), "release");
+ assert_eq!(env::var("DEBUG").unwrap(), "false");
+ }
+ "#,
+ )
+ .build();
+ p.cargo("bench").run();
+}
+
+#[cargo_test]
+fn profile_debug_0() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [profile.dev]
+ debug = 0
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+ fn main() {
+ assert_eq!(env::var("OPT_LEVEL").unwrap(), "0");
+ assert_eq!(env::var("PROFILE").unwrap(), "debug");
+ assert_eq!(env::var("DEBUG").unwrap(), "false");
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn build_script_with_lto() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+
+ [profile.dev]
+ lto = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn test_duplicate_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ build = "build.rs"
+
+ [dependencies.bar]
+ path = "bar"
+
+ [build-dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ extern crate bar;
+ fn main() { bar::do_nothing() }
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ extern crate bar;
+ fn main() { bar::do_nothing() }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn do_nothing() {}")
+ .build();
+
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn cfg_feedback() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "#[cfg(foo)] fn main() {}")
+ .file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#,
+ )
+ .build();
+ p.cargo("build -v").run();
+}
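+
+// `cargo:rustc-cfg=foo` above makes cargo pass `--cfg foo` when compiling this package's
+// own targets (it does not propagate to dependents), which is what lets the
+// `#[cfg(foo)] fn main()` in src/main.rs compile. A typical build.rs sketch that emits a
+// cfg conditionally (the probe condition here is illustrative):
+//
+//     fn main() {
+//         if std::env::var("CARGO_CFG_TARGET_OS").map_or(false, |v| v == "linux") {
+//             println!("cargo:rustc-cfg=foo");
+//         }
+//     }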
+
+#[cargo_test]
+fn cfg_override() {
+ let target = rustc_host();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ links = "a"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "#[cfg(foo)] fn main() {}")
+ .file("build.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}.a]
+ rustc-cfg = ["foo"]
+ "#,
+ target
+ ),
+ )
+ .build();
+
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn cfg_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ ///
+ /// ```
+ /// extern crate foo;
+ ///
+ /// fn main() {
+ /// foo::foo()
+ /// }
+ /// ```
+ ///
+ #[cfg(foo)]
+ pub fn foo() {}
+
+ #[cfg(foo)]
+ #[test]
+ fn test_foo() {
+ foo()
+ }
+ "#,
+ )
+ .file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}")
+ .build();
+ p.cargo("test -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] [..] build.rs [..]
+[RUNNING] `[..]/build-script-build`
+[RUNNING] [..] --cfg foo[..]
+[RUNNING] [..] --cfg foo[..]
+[RUNNING] [..] --cfg foo[..]
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/foo-[..][EXE]`
+[RUNNING] `[..]/test-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] [..] --cfg foo[..]",
+ )
+ .with_stdout_contains("test test_foo ... ok")
+ .with_stdout_contains("test test_bar ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 3)
+ .run();
+}
+
+#[cargo_test]
+fn cfg_doc() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#,
+ )
+ .file("src/lib.rs", "#[cfg(foo)] pub fn foo() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"fn main() { println!("cargo:rustc-cfg=bar"); }"#,
+ )
+ .file("bar/src/lib.rs", "#[cfg(bar)] pub fn bar() {}")
+ .build();
+ p.cargo("doc").run();
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/foo/fn.foo.html").is_file());
+ assert!(p.root().join("target/doc/bar/fn.bar.html").is_file());
+}
+
+#[cargo_test]
+fn cfg_override_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ links = "a"
+ "#,
+ )
+ .file("build.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}.a]
+ rustc-cfg = ["foo"]
+ "#,
+ rustc_host()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ ///
+ /// ```
+ /// extern crate foo;
+ ///
+ /// fn main() {
+ /// foo::foo()
+ /// }
+ /// ```
+ ///
+ #[cfg(foo)]
+ pub fn foo() {}
+
+ #[cfg(foo)]
+ #[test]
+ fn test_foo() {
+ foo()
+ }
+ "#,
+ )
+ .file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}")
+ .build();
+ p.cargo("test -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `[..]`
+[RUNNING] `[..]`
+[RUNNING] `[..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/foo-[..][EXE]`
+[RUNNING] `[..]/test-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] [..] --cfg foo[..]",
+ )
+ .with_stdout_contains("test test_foo ... ok")
+ .with_stdout_contains("test test_bar ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 3)
+ .run();
+}
+
+#[cargo_test]
+fn cfg_override_doc() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ links = "a"
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{target}.a]
+ rustc-cfg = ["foo"]
+ [target.{target}.b]
+ rustc-cfg = ["bar"]
+ "#,
+ target = rustc_host()
+ ),
+ )
+ .file("build.rs", "")
+ .file("src/lib.rs", "#[cfg(foo)] pub fn foo() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ links = "b"
+ "#,
+ )
+ .file("bar/build.rs", "")
+ .file("bar/src/lib.rs", "#[cfg(bar)] pub fn bar() {}")
+ .build();
+ p.cargo("doc").run();
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/foo/fn.foo.html").is_file());
+ assert!(p.root().join("target/doc/bar/fn.bar.html").is_file());
+}
+
+#[cargo_test]
+fn env_build() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ const FOO: &'static str = env!("FOO");
+ fn main() {
+ println!("{}", FOO);
+ }
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#,
+ )
+ .build();
+ p.cargo("build -v").run();
+ p.cargo("run -v").with_stdout("foo\n").run();
+}
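+
+// `cargo:rustc-env=FOO=foo` above sets FOO at compile time for this package's targets, so
+// `env!("FOO")` in main.rs resolves to "foo"; it does not affect the runtime environment
+// of the built binary. A common real-world sketch (names illustrative):
+//
+//     // build.rs
+//     fn main() {
+//         println!("cargo:rustc-env=BUILD_PROFILE={}", std::env::var("PROFILE").unwrap());
+//     }
+//     // src/main.rs could then read it with env!("BUILD_PROFILE").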
+
+#[cargo_test]
+fn env_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"pub const FOO: &'static str = env!("FOO"); "#,
+ )
+ .file(
+ "tests/test.rs",
+ r#"
+ extern crate foo;
+
+ #[test]
+ fn test_foo() {
+ assert_eq!("foo", foo::FOO);
+ }
+ "#,
+ )
+ .build();
+ p.cargo("test -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] [..] build.rs [..]
+[RUNNING] `[..]/build-script-build`
+[RUNNING] [..] --crate-name foo[..]
+[RUNNING] [..] --crate-name foo[..]
+[RUNNING] [..] --crate-name test[..]
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/foo-[..][EXE]`
+[RUNNING] `[..]/test-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] [..] --crate-name foo[..]",
+ )
+ .with_stdout_contains_n("running 0 tests", 2)
+ .with_stdout_contains("test test_foo ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn env_doc() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ const FOO: &'static str = env!("FOO");
+ fn main() {}
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#,
+ )
+ .build();
+ p.cargo("doc -v").run();
+}
+
+#[cargo_test]
+fn flags_go_into_tests() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ b = { path = "b" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("tests/foo.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+ [dependencies]
+ a = { path = "../a" }
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "a/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-search=test");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("test -v --test=foo")
+ .with_stderr(
+ "\
+[COMPILING] a v0.5.0 ([..]
+[RUNNING] `rustc [..] a/build.rs [..]`
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] a/src/lib.rs [..] -L test[..]`
+[COMPILING] b v0.5.0 ([..]
+[RUNNING] `rustc [..] b/src/lib.rs [..] -L test[..]`
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..] src/lib.rs [..] -L test[..]`
+[RUNNING] `rustc [..] tests/foo.rs [..] -L test[..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/foo-[..][EXE]`",
+ )
+ .with_stdout_contains("running 0 tests")
+ .run();
+
+ p.cargo("test -v -pb --lib")
+ .with_stderr(
+ "\
+[FRESH] a v0.5.0 ([..]
+[COMPILING] b v0.5.0 ([..]
+[RUNNING] `rustc [..] b/src/lib.rs [..] -L test[..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/b-[..][EXE]`",
+ )
+ .with_stdout_contains("running 0 tests")
+ .run();
+}
+
+#[cargo_test]
+fn diamond_passes_args_only_once() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ b = { path = "b" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("tests/foo.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ [dependencies]
+ b = { path = "../b" }
+ c = { path = "../c" }
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+ [dependencies]
+ c = { path = "../c" }
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file(
+ "c/Cargo.toml",
+ r#"
+ [package]
+ name = "c"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "c/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-search=native=test");
+ }
+ "#,
+ )
+ .file("c/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] c v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[RUNNING] `[..]`
+[RUNNING] `rustc [..]`
+[COMPILING] b v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[COMPILING] a v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `[..]rmeta -L native=test`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn adding_an_override_invalidates() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ links = "foo"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(".cargo/config", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-search=native=foo");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[RUNNING] `[..]`
+[RUNNING] `rustc [..] -L native=foo`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.change_file(
+ ".cargo/config",
+ &format!(
+ "
+ [target.{}.foo]
+ rustc-link-search = [\"native=bar\"]
+ ",
+ target
+ ),
+ );
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..] -L native=bar`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn changing_an_override_invalidates() {
+ let target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ links = "foo"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ "
+ [target.{}.foo]
+ rustc-link-search = [\"native=foo\"]
+ ",
+ target
+ ),
+ )
+ .file("build.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..] -L native=foo`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.change_file(
+ ".cargo/config",
+ &format!(
+ "
+ [target.{}.foo]
+ rustc-link-search = [\"native=bar\"]
+ ",
+ target
+ ),
+ );
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.5.0 ([..]): the precalculated components changed
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..] -L native=bar`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fresh_builds_possible_with_link_libs() {
+    // The bug is non-deterministic. Sometimes you can get a fresh build.
+ let target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ links = "nativefoo"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ "
+ [target.{}.nativefoo]
+ rustc-link-lib = [\"a\"]
+ rustc-link-search = [\"./b\"]
+ rustc-flags = \"-l z -L ./\"
+ ",
+ target
+ ),
+ )
+ .file("build.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fresh_builds_possible_with_multiple_metadata_overrides() {
+    // The bug is non-deterministic. Sometimes you can get a fresh build.
+ let target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ links = "foo"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ "
+ [target.{}.foo]
+ a = \"\"
+ b = \"\"
+ c = \"\"
+ d = \"\"
+ e = \"\"
+ ",
+ target
+ ),
+ )
+ .file("build.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build -v")
+ .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint=info")
+ .with_stderr(
+ "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn generate_good_d_files() {
+    // This guards against a regression where build.rs rerun-if-changed paths were not made
+    // absolute properly, which in turn interacted poorly with the dep-info-basedir setting
+    // and left other-crate-relative paths in the dep-info files.
+ let p = project()
+ .file(
+ "awoo/Cargo.toml",
+ r#"
+ [package]
+ name = "awoo"
+ version = "0.5.0"
+ build = "build.rs"
+ "#,
+ )
+ .file("awoo/src/lib.rs", "")
+ .file(
+ "awoo/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+ println!("cargo:rerun-if-changed=barkbarkbark");
+ }
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "meow"
+ version = "0.5.0"
+ [dependencies]
+ awoo = { path = "awoo" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v").run();
+
+ let dot_d_path = p.bin("meow").with_extension("d");
+ println!("*meow at* {:?}", dot_d_path);
+ let dot_d = fs::read_to_string(&dot_d_path).unwrap();
+
+ println!("*.d file content*: {}", &dot_d);
+
+ assert_match_exact(
+ "[..]/target/debug/meow[EXE]: [..]/awoo/barkbarkbark [..]/awoo/build.rs[..]",
+ &dot_d,
+ );
+
+ // paths relative to dependency roots should not be allowed
+ assert!(!dot_d
+ .split_whitespace()
+ .any(|v| v == "barkbarkbark" || v == "build.rs"));
+
+ p.change_file(
+ ".cargo/config.toml",
+ r#"
+ [build]
+ dep-info-basedir="."
+ "#,
+ );
+ p.cargo("build -v").run();
+
+ let dot_d = fs::read_to_string(&dot_d_path).unwrap();
+
+ println!("*.d file content with dep-info-basedir*: {}", &dot_d);
+
+ assert_match_exact(
+ "target/debug/meow[EXE]: awoo/barkbarkbark awoo/build.rs[..]",
+ &dot_d,
+ );
+
+ // paths relative to dependency roots should not be allowed
+ assert!(!dot_d
+ .split_whitespace()
+ .any(|v| v == "barkbarkbark" || v == "build.rs"));
+}
+
+#[cargo_test]
+fn generate_good_d_files_for_external_tools() {
+    // This tests a relative dep-info-basedir in the config that points
+    // outside the project root.
+ let p = project_in("rust_things")
+ .file(
+ "awoo/Cargo.toml",
+ r#"
+ [package]
+ name = "awoo"
+ version = "0.5.0"
+ build = "build.rs"
+ "#,
+ )
+ .file("awoo/src/lib.rs", "")
+ .file(
+ "awoo/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+ println!("cargo:rerun-if-changed=barkbarkbark");
+ }
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "meow"
+ version = "0.5.0"
+ [dependencies]
+ awoo = { path = "awoo" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [build]
+ dep-info-basedir="../.."
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v").run();
+
+ let dot_d_path = p.bin("meow").with_extension("d");
+ let dot_d = fs::read_to_string(&dot_d_path).unwrap();
+
+ println!("*.d file content with dep-info-basedir*: {}", &dot_d);
+
+ assert_match_exact(
+ concat!(
+ "rust_things/foo/target/debug/meow[EXE]:",
+ " rust_things/foo/awoo/barkbarkbark",
+ " rust_things/foo/awoo/build.rs",
+ " rust_things/foo/awoo/src/lib.rs",
+ " rust_things/foo/src/main.rs",
+ ),
+ &dot_d,
+ );
+}
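+
+// Both dep-info tests above rely on the `build.dep-info-basedir` config value: when it is
+// set, the paths cargo writes into the target's `.d` (dep-info) file are made relative to
+// that base directory, which is what external build systems consume. Sketch:
+//
+//     # .cargo/config.toml
+//     [build]
+//     dep-info-basedir = "."   # or a path above the workspace, as in the second test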
+
+#[cargo_test]
+fn rebuild_only_on_explicit_paths() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-changed=foo");
+ println!("cargo:rerun-if-changed=bar");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v").run();
+
+    // The files don't exist yet, so the build script should always rerun
+ println!("run without");
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.5.0 ([..]): the file `foo` is missing
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ sleep_ms(1000);
+ p.change_file("foo", "");
+ p.change_file("bar", "");
+ sleep_ms(1000); // make sure the to-be-created outfile has a timestamp distinct from the infiles
+
+ // Now they exist: run once to record the mtimes, after which it shouldn't run again.
+ println!("run with");
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.5.0 ([..]): the file `foo` has changed ([..])
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ println!("run with2");
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ sleep_ms(1000);
+
+ // random other files do not affect freshness
+ println!("run baz");
+ p.change_file("baz", "");
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ // but changing dependent files does
+ println!("run foo change");
+ p.change_file("foo", "");
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.5.0 ([..]): the file `foo` has changed ([..])
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ // .. as does deleting a file
+ println!("run bar delete");
+ fs::remove_file(p.root().join("bar")).unwrap();
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.5.0 ([..]): the file `bar` is missing
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doctest_receives_build_link_args() {
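+ // A dependency's build script emits `cargo:rustc-link-search`; the rustdoc
+ // invocation for doctests should receive the corresponding `-L` flag.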
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ links = "bar"
+ build = "build.rs"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "a/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-search=native=bar");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("test -v")
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..]--crate-name foo --test [..]-L native=bar[..]`",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn please_respect_the_dag() {
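+ // Both the root package and its dependency emit link search paths; rustc
+ // should receive them in dependency-graph order (`-L native=foo -L native=bar`).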
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+
+ [dependencies]
+ a = { path = 'a' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-search=native=foo");
+ }
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ links = "bar"
+ build = "build.rs"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "a/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-search=native=bar");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr_contains("[RUNNING] `rustc [..] -L native=foo -L native=bar[..]`")
+ .run();
+}
+
+#[cargo_test]
+fn non_utf8_output() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::io::prelude::*;
+
+ fn main() {
+ let mut out = std::io::stdout();
+ // print something that's not utf8
+ out.write_all(b"\xff\xff\n").unwrap();
+
+ // now print some cargo metadata that's utf8
+ println!("cargo:rustc-cfg=foo");
+
+ // now print more non-utf8
+ out.write_all(b"\xff\xff\n").unwrap();
+ }
+ "#,
+ )
+ .file("src/main.rs", "#[cfg(foo)] fn main() {}")
+ .build();
+
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn custom_target_dir() {
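+ // A dependency's build script should still run when `build.target-dir`
+ // points at a custom location.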
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ target-dir = 'test'
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("a/build.rs", "fn main() {}")
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn panic_abort_with_build_scripts() {
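+ // With `panic = 'abort'` in the release profile, build scripts and their
+ // build-dependencies should still build, and `cargo test --release` should
+ // not pass `panic=abort`.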
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [profile.release]
+ panic = 'abort'
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate a;",
+ )
+ .file("build.rs", "fn main() {}")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+
+ [build-dependencies]
+ b = { path = "../b" }
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "a/build.rs",
+ "#[allow(unused_extern_crates)] extern crate b; fn main() {}",
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v --release").run();
+
+ p.root().join("target").rm_rf();
+
+ p.cargo("test --release -v")
+ .with_stderr_does_not_contain("[..]panic=abort[..]")
+ .run();
+}
+
+#[cargo_test]
+fn warnings_emitted() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:warning=foo");
+ println!("cargo:warning=bar");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `rustc [..]`
+[RUNNING] `[..]`
+warning: foo
+warning: bar
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warnings_emitted_when_build_script_panics() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:warning=foo");
+ println!("cargo:warning=bar");
+ panic!();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stdout("")
+ .with_stderr_contains("warning: foo\nwarning: bar")
+ .run();
+}
+
+#[cargo_test]
+fn warnings_hidden_for_upstream() {
+ Package::new("bar", "0.1.0")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:warning=foo");
+ println!("cargo:warning=bar");
+ }
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 ([..])
+[COMPILING] bar v0.1.0
+[RUNNING] `rustc [..]`
+[RUNNING] `[..]`
+[RUNNING] `rustc [..]`
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warnings_printed_on_vv() {
+ Package::new("bar", "0.1.0")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:warning=foo");
+ println!("cargo:warning=bar");
+ }
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build -vv")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 ([..])
+[COMPILING] bar v0.1.0
+[RUNNING] `[..] rustc [..]`
+[RUNNING] `[..]`
+warning: foo
+warning: bar
+[RUNNING] `[..] rustc [..]`
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..] rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn output_shows_on_vv() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::io::prelude::*;
+
+ fn main() {
+ std::io::stderr().write_all(b"stderr\n").unwrap();
+ std::io::stdout().write_all(b"stdout\n").unwrap();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -vv")
+ .with_stdout("[foo 0.5.0] stdout")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..] rustc [..]`
+[RUNNING] `[..]`
+[foo 0.5.0] stderr
+[RUNNING] `[..] rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn links_with_dots() {
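+ // A package whose `links` value contains a dot can be overridden via
+ // `[target.<triple>.'a.b']` in config; the configured link search path
+ // should be passed to rustc.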
+ let target = rustc_host();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ links = "a.b"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-search=bar")
+ }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}.'a.b']
+ rustc-link-search = ["foo"]
+ "#,
+ target
+ ),
+ )
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..] [..] -L foo[..]`")
+ .run();
+}
+
+#[cargo_test]
+fn rustc_and_rustdoc_set_correctly() {
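+ // Build scripts should see plain `rustc` and `rustdoc` in the RUSTC and
+ // RUSTDOC environment variables.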
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+ fn main() {
+ assert_eq!(env::var("RUSTC").unwrap(), "rustc");
+ assert_eq!(env::var("RUSTDOC").unwrap(), "rustdoc");
+ }
+ "#,
+ )
+ .build();
+ p.cargo("bench").run();
+}
+
+#[cargo_test]
+fn cfg_env_vars_available() {
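+ // CARGO_CFG_* environment variables (here CARGO_CFG_TARGET_FAMILY) should
+ // be available to build scripts.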
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+
+ fn main() {
+ let fam = env::var("CARGO_CFG_TARGET_FAMILY").unwrap();
+ if cfg!(unix) {
+ assert_eq!(fam, "unix");
+ } else {
+ assert_eq!(fam, "windows");
+ }
+ }
+ "#,
+ )
+ .build();
+ p.cargo("bench").run();
+}
+
+#[cargo_test]
+fn switch_features_rerun() {
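+ // Toggling a feature should re-run the build script so its output reflects
+ // the active feature set.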
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+
+ [features]
+ foo = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ println!(include_str!(concat!(env!("OUT_DIR"), "/output")));
+ }
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ use std::fs;
+ use std::path::Path;
+
+ fn main() {
+ let out_dir = env::var_os("OUT_DIR").unwrap();
+ let output = Path::new(&out_dir).join("output");
+
+ if env::var_os("CARGO_FEATURE_FOO").is_some() {
+ fs::write(output, "foo").unwrap();
+ } else {
+ fs::write(output, "bar").unwrap();
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v --features=foo").run();
+ p.rename_run("foo", "with_foo").with_stdout("foo\n").run();
+ p.cargo("build -v").run();
+ p.rename_run("foo", "without_foo")
+ .with_stdout("bar\n")
+ .run();
+ p.cargo("build -v --features=foo").run();
+ p.rename_run("foo", "with_foo2").with_stdout("foo\n").run();
+}
+
+#[cargo_test]
+fn assume_build_script_when_build_rs_present() {
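+ // A build.rs at the package root is treated as a build script even without
+ // `build = "build.rs"` in the manifest.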
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if ! cfg!(foo) {
+ panic!("the build script was not run");
+ }
+ }
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-cfg=foo");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run -v").run();
+}
+
+#[cargo_test]
+fn if_build_set_to_false_dont_treat_build_rs_as_build_script() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = false
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(foo) {
+ panic!("the build script was run");
+ }
+ }
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-cfg=foo");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run -v").run();
+}
+
+#[cargo_test]
+fn deterministic_rustc_dependency_flags() {
+ // This bug is non-deterministic, hence the large number of dependencies,
+ // in the hope of a much higher chance of triggering it.
+
+ Package::new("dep1", "0.1.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "dep1"
+ version = "0.1.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-flags=-L native=test1");
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+ Package::new("dep2", "0.1.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "dep2"
+ version = "0.1.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-flags=-L native=test2");
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+ Package::new("dep3", "0.1.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "dep3"
+ version = "0.1.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-flags=-L native=test3");
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+ Package::new("dep4", "0.1.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "dep4"
+ version = "0.1.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-flags=-L native=test4");
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ dep1 = "*"
+ dep2 = "*"
+ dep3 = "*"
+ dep4 = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr_contains(
+ "\
+[RUNNING] `rustc --crate-name foo [..] -L native=test1 -L native=test2 \
+-L native=test3 -L native=test4`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn links_duplicates_with_cycle() {
+ // This tests that duplicate `links` values are caught at resolve time.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ links = "a"
+ build = "build.rs"
+
+ [dependencies.a]
+ path = "a"
+
+ [dev-dependencies]
+ b = { path = "b" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ links = "a"
+ build = "build.rs"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file("a/build.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = { path = ".." }
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").with_status(101)
+ .with_stderr("\
+error: failed to select a version for `a`.
+ ... required by package `foo v0.5.0 ([..])`
+versions that meet the requirements `*` are: 0.5.0
+
+the package `a` links to the native library `a`, but it conflicts with a previous package which links to `a` as well:
+package `foo v0.5.0 ([..])`
+Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.
+
+failed to select a version for `a` which could resolve this conflict
+").run();
+}
+
+#[cargo_test]
+fn rename_with_link_search_path() {
+ _rename_with_link_search_path(false);
+}
+
+#[cargo_test]
+#[cfg_attr(
+ target_os = "macos",
+ ignore = "don't have a cdylib cross target on macos"
+)]
+fn rename_with_link_search_path_cross() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ _rename_with_link_search_path(true);
+}
+
+fn _rename_with_link_search_path(cross: bool) {
+ let target_arg = if cross {
+ format!(" --target={}", cross_compile::alternate())
+ } else {
+ "".to_string()
+ };
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [lib]
+ crate-type = ["cdylib"]
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[no_mangle] pub extern fn cargo_test_foo() {}",
+ );
+ let p = p.build();
+
+ p.cargo(&format!("build{}", target_arg)).run();
+
+ let p2 = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ use std::fs;
+ use std::path::PathBuf;
+
+ fn main() {
+ // Move the `libfoo.so` from the root of our project into the
+ // build directory. This way Cargo should automatically manage
+ // `LD_LIBRARY_PATH` and such.
+ let root = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
+ let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+ let src = root.join(&file);
+
+ let dst_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+ let dst = dst_dir.join(&file);
+
+ fs::copy(&src, &dst).unwrap();
+ // handle windows, like below
+ drop(fs::copy(root.join("foo.dll.lib"), dst_dir.join("foo.dll.lib")));
+
+ println!("cargo:rerun-if-changed=build.rs");
+ if cfg!(target_env = "msvc") {
+ println!("cargo:rustc-link-lib=foo.dll");
+ } else {
+ println!("cargo:rustc-link-lib=foo");
+ }
+ println!("cargo:rustc-link-search=all={}",
+ dst.parent().unwrap().display());
+ }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ extern {
+ #[link_name = "cargo_test_foo"]
+ fn foo();
+ }
+
+ fn main() {
+ unsafe { foo(); }
+ }
+ "#,
+ );
+ let p2 = p2.build();
+
+ // Move the output `libfoo.so` into the directory of `p2`, and then delete
+ // the `p` project. On macOS, the `libfoo.dylib` artifact references the
+ // original path in `p` so we want to make sure that it can't find it (hence
+ // the deletion).
+ let root = if cross {
+ p.root()
+ .join("target")
+ .join(cross_compile::alternate())
+ .join("debug")
+ .join("deps")
+ } else {
+ p.root().join("target").join("debug").join("deps")
+ };
+ let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+ let src = root.join(&file);
+
+ let dst = p2.root().join(&file);
+
+ fs::copy(&src, &dst).unwrap();
+ // copy the import library for windows, if it exists
+ drop(fs::copy(
+ &root.join("foo.dll.lib"),
+ p2.root().join("foo.dll.lib"),
+ ));
+ remove_dir_all(p.root()).unwrap();
+
+ // Everything should work the first time
+ p2.cargo(&format!("run{}", target_arg)).run();
+
+ // Now rename the root directory and rerun `cargo run`. Not only should we
+ // not build anything but we also shouldn't crash.
+ let mut new = p2.root();
+ new.pop();
+ new.push("bar2");
+
+ // For whatever reason on Windows, right after we execute a binary it's very
+ // unlikely that we're able to successfully delete or rename it. It's not
+ // really clear why this is the case, or whether it's a bug in Cargo holding
+ // a handle open too long. To reduce the flakiness of this test, we retry the
+ // rename in a loop.
+ //
+ // For some more information see #5481 and rust-lang/rust#48775
+ let mut i = 0;
+ loop {
+ let error = match fs::rename(p2.root(), &new) {
+ Ok(()) => break,
+ Err(e) => e,
+ };
+ i += 1;
+ if !cfg!(windows) || error.kind() != io::ErrorKind::PermissionDenied || i > 10 {
+ panic!("failed to rename: {}", error);
+ }
+ println!("assuming {} is spurious, waiting to try again", error);
+ thread::sleep(slow_cpu_multiplier(100));
+ }
+
+ p2.cargo(&format!("run{}", target_arg))
+ .cwd(&new)
+ .with_stderr(
+ "\
+[FINISHED] [..]
+[RUNNING] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn optional_build_script_dep() {
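+ // An optional dependency listed in both [dependencies] and [build-dependencies]
+ // is only available to the build script when its feature is enabled.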
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar", optional = true }
+
+ [build-dependencies]
+ bar = { path = "bar", optional = true }
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ #[cfg(feature = "bar")]
+ extern crate bar;
+
+ fn main() {
+ #[cfg(feature = "bar")] {
+ println!("cargo:rustc-env=FOO={}", bar::bar());
+ return
+ }
+ println!("cargo:rustc-env=FOO=0");
+ }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(feature = "bar")]
+ extern crate bar;
+
+ fn main() {
+ println!("{}", env!("FOO"));
+ }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("bar/src/lib.rs", "pub fn bar() -> u32 { 1 }");
+ let p = p.build();
+
+ p.cargo("run").with_stdout("0\n").run();
+ p.cargo("run --features bar").with_stdout("1\n").run();
+}
+
+#[cargo_test]
+fn optional_build_dep_and_required_normal_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "./bar", optional = true }
+
+ [build-dependencies]
+ bar = { path = "./bar" }
+ "#,
+ )
+ .file("build.rs", "extern crate bar; fn main() { bar::bar(); }")
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(feature = "bar")]
+ extern crate bar;
+
+ fn main() {
+ #[cfg(feature = "bar")] {
+ println!("{}", bar::bar());
+ }
+ #[cfg(not(feature = "bar"))] {
+ println!("0");
+ }
+ }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("bar/src/lib.rs", "pub fn bar() -> u32 { 1 }");
+ let p = p.build();
+
+ p.cargo("run")
+ .with_stdout("0")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.5.0 ([..])
+[COMPILING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]foo[EXE]`",
+ )
+ .run();
+
+ p.cargo("run --all-features")
+ .with_stdout("1")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.5.0 ([..])
+[COMPILING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]foo[EXE]`",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn using_rerun_if_changed_does_not_rebuild() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn links_interrupted_can_restart() {
+ // Test for a `links` dependent build script getting canceled and then
+ // restarted. Steps:
+ // 1. Build to establish fingerprints.
+ // 2. Change something (an env var in this case) that triggers the
+ // dependent build script to run again. Kill the top-level build script
+ // while it is running (such as hitting Ctrl-C).
+ // 3. Run the build again, it should re-run the build script.
+ let bar = project()
+ .at("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+ links = "foo"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-env-changed=SOMEVAR");
+ }
+ "#,
+ )
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+
+ [dependencies.bar]
+ path = '{}'
+ "#,
+ bar.root().display()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ fn main() {
+ println!("cargo:rebuild-if-changed=build.rs");
+ if std::path::Path::new("abort").exists() {
+ panic!("Crash!");
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+ // Simulate the user hitting Ctrl-C during a build.
+ p.change_file("abort", "");
+ // Set SOMEVAR to trigger a rebuild.
+ p.cargo("build")
+ .env("SOMEVAR", "1")
+ .with_stderr_contains("[..]Crash![..]")
+ .with_status(101)
+ .run();
+ fs::remove_file(p.root().join("abort")).unwrap();
+ // Try again without aborting the script.
+ // ***This is currently broken, the script does not re-run.
+ p.cargo("build -v")
+ .env("SOMEVAR", "1")
+ .with_stderr_contains("[RUNNING] [..]/foo-[..]/build-script-build[..]")
+ .run();
+}
+
+#[cargo_test]
+fn dev_dep_with_links() {
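+ // A dev-dependency cycle (bar depends back on foo) where both packages use
+ // `links` should still pass `cargo check --tests`.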
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ links = "x"
+
+ [dev-dependencies]
+ bar = { path = "./bar" }
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ links = "y"
+
+ [dependencies]
+ foo = { path = ".." }
+ "#,
+ )
+ .file("bar/build.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "")
+ .build();
+ p.cargo("check --tests").run()
+}
+
+#[cargo_test]
+fn rerun_if_directory() {
+ if !symlink_supported() {
+ return;
+ }
+
+ // rerun-if-changed of a directory should rerun if any file in the directory changes.
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-changed=somedir");
+ }
+ "#,
+ )
+ .build();
+
+ let dirty = |dirty_line: &str, compile_build_script: bool| {
+ let mut dirty_line = dirty_line.to_string();
+
+ if !dirty_line.is_empty() {
+ dirty_line.push('\n');
+ }
+
+ let compile_build_script_line = if compile_build_script {
+ "[RUNNING] `rustc --crate-name build_script_build [..]\n"
+ } else {
+ ""
+ };
+
+ p.cargo("check -v")
+ .with_stderr(format!(
+ "\
+{dirty_line}\
+[COMPILING] foo [..]
+{compile_build_script_line}\
+[RUNNING] `[..]build-script-build[..]`
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] [..]",
+ ))
+ .run();
+ };
+
+ let fresh = || {
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+ };
+
+ // Start with a missing directory.
+ dirty("", true);
+ // Because the directory doesn't exist, it will trigger a rebuild every time.
+ // https://github.com/rust-lang/cargo/issues/6003
+ dirty(
+ "[DIRTY] foo v0.1.0 ([..]): the file `somedir` is missing",
+ false,
+ );
+
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+
+ // Empty directory.
+ fs::create_dir(p.root().join("somedir")).unwrap();
+ dirty(
+ "[DIRTY] foo v0.1.0 ([..]): the file `somedir` has changed ([..])",
+ false,
+ );
+ fresh();
+
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+
+ // Add a file.
+ p.change_file("somedir/foo", "");
+ p.change_file("somedir/bar", "");
+ dirty(
+ "[DIRTY] foo v0.1.0 ([..]): the file `somedir` has changed ([..])",
+ false,
+ );
+ fresh();
+
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+
+ // Add a symlink.
+ p.symlink("foo", "somedir/link");
+ dirty(
+ "[DIRTY] foo v0.1.0 ([..]): the file `somedir` has changed ([..])",
+ false,
+ );
+ fresh();
+
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+
+ // Move the symlink.
+ fs::remove_file(p.root().join("somedir/link")).unwrap();
+ p.symlink("bar", "somedir/link");
+ dirty(
+ "[DIRTY] foo v0.1.0 ([..]): the file `somedir` has changed ([..])",
+ false,
+ );
+ fresh();
+
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+
+ // Remove a file.
+ fs::remove_file(p.root().join("somedir/foo")).unwrap();
+ dirty(
+ "[DIRTY] foo v0.1.0 ([..]): the file `somedir` has changed ([..])",
+ false,
+ );
+ fresh();
+}
+
+#[cargo_test]
+fn rerun_if_published_directory() {
+ // The build script of a dependency contains a `rerun-if-changed` pointing to a directory.
+ Package::new("mylib-sys", "1.0.0")
+ .file("mylib/balrog.c", "")
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ // Changes to mylib/balrog.c will not trigger a rebuild
+ println!("cargo:rerun-if-changed=mylib");
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ mylib-sys = "1.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").run();
+
+ // Delete the registry src so the directories are recreated with the latest timestamp.
+ cargo_home().join("registry/src").rm_rf();
+
+ p.cargo("check --verbose")
+ .with_stderr(
+ "\
+[FRESH] mylib-sys v1.0.0
+[FRESH] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ // Upgrade of a package should still trigger a rebuild
+ Package::new("mylib-sys", "1.0.1")
+ .file("mylib/balrog.c", "")
+ .file("mylib/balrog.h", "")
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-changed=mylib");
+ }
+ "#,
+ )
+ .publish();
+ p.cargo("update").run();
+ p.cargo("fetch").run();
+
+ p.cargo("check -v")
+ .with_stderr(format!(
+ "\
+[COMPILING] mylib-sys [..]
+[RUNNING] `rustc --crate-name build_script_build [..]
+[RUNNING] `[..]build-script-build[..]`
+[RUNNING] `rustc --crate-name mylib_sys [..]
+[CHECKING] foo [..]
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] [..]",
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn test_with_dep_metadata() {
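+ // Metadata emitted by a dependency's build script (`cargo:foo=bar`) is
+ // exposed to the parent's build script as DEP_BAR_FOO.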
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ assert_eq!(std::env::var("DEP_BAR_FOO").unwrap(), "bar");
+ }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ links = 'bar'
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file(
+ "bar/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:foo=bar");
+ }
+ "#,
+ )
+ .build();
+ p.cargo("test --lib").run();
+}
+
+#[cargo_test]
+fn duplicate_script_with_extra_env() {
+ // Test where a build script is run twice and emits different rustc-env
+ // and rustc-cfg values. In this case, one run is for the host, the other
+ // for the target.
+ if !cross_compile::can_run_on_host() {
+ return;
+ }
+
+ let target = cross_compile::alternate();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "pm"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ pm = { path = "../pm" }
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ &r#"
+ //! ```rust
+ //! #[cfg(not(mycfg="{target}"))]
+ //! compile_error!{"expected mycfg set"}
+ //! assert_eq!(env!("CRATE_TARGET"), "{target}");
+ //! assert_eq!(std::env::var("CRATE_TARGET").unwrap(), "{target}");
+ //! ```
+
+ #[test]
+ fn check_target() {
+ #[cfg(not(mycfg="{target}"))]
+ compile_error!{"expected mycfg set"}
+ // Compile-time assertion.
+ assert_eq!(env!("CRATE_TARGET"), "{target}");
+ // Run-time assertion.
+ assert_eq!(std::env::var("CRATE_TARGET").unwrap(), "{target}");
+ }
+ "#
+ .replace("{target}", target),
+ )
+ .file(
+ "foo/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-env=CRATE_TARGET={}", std::env::var("TARGET").unwrap());
+ println!("cargo:rustc-cfg=mycfg=\"{}\"", std::env::var("TARGET").unwrap());
+ }
+ "#,
+ )
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+
+ [lib]
+ proc-macro = true
+ # This is just here to speed things up.
+ doctest = false
+
+ [dev-dependencies]
+ foo = { path = "../foo" }
+ "#,
+ )
+ .file("pm/src/lib.rs", "")
+ .build();
+
+ p.cargo("test --workspace --target")
+ .arg(&target)
+ .with_stdout_contains("test check_target ... ok")
+ .run();
+
+ if cargo_test_support::is_nightly() {
+ p.cargo("test --workspace -Z doctest-xcompile --doc --target")
+ .arg(&target)
+ .masquerade_as_nightly_cargo(&["doctest-xcompile"])
+ .with_stdout_contains("test src/lib.rs - (line 2) ... ok")
+ .run();
+ }
+}
+
+#[cargo_test]
+fn wrong_output() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:example");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+error: invalid output in build script of `foo v0.0.1 ([ROOT]/foo)`: `cargo:example`
+Expected a line with `cargo:key=value` with an `=` character, but none was found.
+See https://doc.rust-lang.org/cargo/reference/build-scripts.html#outputs-of-the-build-script \
+for more information about build script outputs.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_build_closes_stdin() {
+ // Ensure stdin is closed to prevent deadlock.
+ // See https://github.com/rust-lang/cargo/issues/11196
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"fn main() {
+ let mut line = String::new();
+ std::io::stdin().read_line(&mut line).unwrap();
+ }"#,
+ )
+ .build();
+ p.cargo("build").run();
+}
diff --git a/src/tools/cargo/tests/testsuite/build_script_env.rs b/src/tools/cargo/tests/testsuite/build_script_env.rs
new file mode 100644
index 000000000..bc87b7120
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/build_script_env.rs
@@ -0,0 +1,303 @@
+//! Tests for build.rs rerun-if-env-changed and rustc-env
+
+use cargo_test_support::basic_manifest;
+use cargo_test_support::project;
+use cargo_test_support::sleep_ms;
+
+#[cargo_test]
+fn rerun_if_env_changes() {
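+ // Setting, changing, or unsetting an environment variable named in
+ // rerun-if-env-changed triggers a rebuild; an unchanged value does not.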
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-env-changed=FOO");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("check")
+ .env("FOO", "bar")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("check")
+ .env("FOO", "baz")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("check")
+ .env("FOO", "baz")
+ .with_stderr("[FINISHED] [..]")
+ .run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rerun_if_env_or_file_changes() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-env-changed=FOO");
+ println!("cargo:rerun-if-changed=foo");
+ }
+ "#,
+ )
+ .file("foo", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("check")
+ .env("FOO", "bar")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("check")
+ .env("FOO", "bar")
+ .with_stderr("[FINISHED] [..]")
+ .run();
+ sleep_ms(1000);
+ p.change_file("foo", "");
+ p.cargo("check")
+ .env("FOO", "bar")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustc_bootstrap() {
+ let build_rs = r#"
+ fn main() {
+ println!("cargo:rustc-env=RUSTC_BOOTSTRAP=1");
+ }
+ "#;
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("has-dashes", "0.0.1"))
+ .file("src/lib.rs", "#![feature(rustc_attrs)]")
+ .file("build.rs", build_rs)
+ .build();
+ // RUSTC_BOOTSTRAP unset on stable should error
+ p.cargo("check")
+ .with_stderr_contains("error: Cannot set `RUSTC_BOOTSTRAP=1` [..]")
+ .with_stderr_contains(
+ "help: [..] set the environment variable `RUSTC_BOOTSTRAP=has_dashes` [..]",
+ )
+ .with_status(101)
+ .run();
+ // nightly should warn whether or not RUSTC_BOOTSTRAP is set
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["RUSTC_BOOTSTRAP"])
+ // NOTE: uses RUSTC_BOOTSTRAP so it will be propagated to rustc
+ // (this matters when tests are being run with a beta or stable cargo)
+ .env("RUSTC_BOOTSTRAP", "1")
+ .with_stderr_contains("warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]")
+ .run();
+ // RUSTC_BOOTSTRAP set to the name of the library should warn
+ p.cargo("check")
+ .env("RUSTC_BOOTSTRAP", "has_dashes")
+ .with_stderr_contains("warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]")
+ .run();
+ // RUSTC_BOOTSTRAP set to some random value should error
+ p.cargo("check")
+ .env("RUSTC_BOOTSTRAP", "bar")
+ .with_stderr_contains("error: Cannot set `RUSTC_BOOTSTRAP=1` [..]")
+ .with_stderr_contains(
+ "help: [..] set the environment variable `RUSTC_BOOTSTRAP=has_dashes` [..]",
+ )
+ .with_status(101)
+ .run();
+
+ // Tests for binaries instead of libraries
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.0.1"))
+ .file("src/main.rs", "#![feature(rustc_attrs)] fn main() {}")
+ .file("build.rs", build_rs)
+ .build();
+ // nightly should warn when there's no library whether or not RUSTC_BOOTSTRAP is set
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["RUSTC_BOOTSTRAP"])
+ // NOTE: uses RUSTC_BOOTSTRAP so it will be propagated to rustc
+ // (this matters when tests are being run with a beta or stable cargo)
+ .env("RUSTC_BOOTSTRAP", "1")
+ .with_stderr_contains("warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]")
+ .run();
+ // RUSTC_BOOTSTRAP conditionally set when there's no library should error (regardless of the value)
+ p.cargo("check")
+ .env("RUSTC_BOOTSTRAP", "foo")
+ .with_stderr_contains("error: Cannot set `RUSTC_BOOTSTRAP=1` [..]")
+ .with_stderr_contains("help: [..] set the environment variable `RUSTC_BOOTSTRAP=1` [..]")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+#[cfg(target_arch = "x86_64")]
+fn build_script_sees_cfg_target_feature() {
+ let build_rs = r#"
+ fn main() {
+ let cfg = std::env::var("CARGO_CFG_TARGET_FEATURE").unwrap();
+ eprintln!("CARGO_CFG_TARGET_FEATURE={cfg}");
+ }
+ "#;
+
+ let configs = [
+ r#"
+ [build]
+ rustflags = ["-Ctarget-feature=+sse4.1,+sse4.2"]
+ "#,
+ r#"
+ [target.'cfg(target_arch = "x86_64")']
+ rustflags = ["-Ctarget-feature=+sse4.1,+sse4.2"]
+ "#,
+ ];
+
+ for config in configs {
+ let p = project()
+ .file(".cargo/config.toml", config)
+ .file("src/lib.rs", r#""#)
+ .file("build.rs", build_rs)
+ .build();
+
+ p.cargo("check -vv")
+ .with_stderr_contains("[foo 0.0.1] CARGO_CFG_TARGET_FEATURE=[..]sse4.2[..]")
+ .with_stderr_contains("[..]-Ctarget-feature=[..]+sse4.2[..]")
+ .run();
+ }
+}
+
+/// In this test, the cfg is self-contradictory. There's no *right* answer as to
+/// what the value of `RUSTFLAGS` should be in this case. We chose to give a
+/// warning. However, no matter what we do, it's important that build scripts
+/// and rustc see a consistent picture.
+#[cargo_test]
+fn cfg_paradox() {
+ let build_rs = r#"
+ fn main() {
+ let cfg = std::env::var("CARGO_CFG_BERTRAND").is_ok();
+ eprintln!("cfg!(bertrand)={cfg}");
+ }
+ "#;
+
+ let config = r#"
+ [target.'cfg(not(bertrand))']
+ rustflags = ["--cfg=bertrand"]
+ "#;
+
+ let p = project()
+ .file(".cargo/config.toml", config)
+ .file("src/lib.rs", r#""#)
+ .file("build.rs", build_rs)
+ .build();
+
+ p.cargo("check -vv")
+ .with_stderr_contains("[WARNING] non-trivial mutual dependency between target-specific configuration and RUSTFLAGS")
+ .with_stderr_contains("[foo 0.0.1] cfg!(bertrand)=true")
+ .with_stderr_contains("[..]--cfg=bertrand[..]")
+ .run();
+}
+
+/// This test checks how Cargo handles rustc cfgs which are defined both with
+/// and without a value. The expected behavior is that the environment variable
+/// is going to contain all the values.
+///
+/// For example, this configuration:
+/// ```
+/// target_has_atomic
+/// target_has_atomic="16"
+/// target_has_atomic="32"
+/// target_has_atomic="64"
+/// target_has_atomic="8"
+/// target_has_atomic="ptr"
+/// ```
+///
+/// Should result in the following environment variable:
+///
+/// ```
+/// CARGO_CFG_TARGET_HAS_ATOMIC=16,32,64,8,ptr
+/// ```
+///
+/// On the other hand, configuration symbols without any value should result in
+/// an empty string.
+///
+/// For example, this configuration:
+///
+/// ```
+/// target_thread_local
+/// ```
+///
+/// Should result in the following environment variable:
+///
+/// ```
+/// CARGO_CFG_TARGET_THREAD_LOCAL=
+/// ```
+#[cargo_test(nightly, reason = "affected rustc cfg is unstable")]
+#[cfg(target_arch = "x86_64")]
+fn rustc_cfg_with_and_without_value() {
+ let build_rs = r#"
+ fn main() {
+ let cfg = std::env::var("CARGO_CFG_TARGET_HAS_ATOMIC");
+ eprintln!("CARGO_CFG_TARGET_HAS_ATOMIC={cfg:?}");
+ let cfg = std::env::var("CARGO_CFG_WINDOWS");
+ eprintln!("CARGO_CFG_WINDOWS={cfg:?}");
+ let cfg = std::env::var("CARGO_CFG_UNIX");
+ eprintln!("CARGO_CFG_UNIX={cfg:?}");
+ }
+ "#;
+ let p = project()
+ .file("src/lib.rs", r#""#)
+ .file("build.rs", build_rs)
+ .build();
+
+ let mut check = p.cargo("check -vv");
+ #[cfg(target_has_atomic = "64")]
+ check.with_stderr_contains("[foo 0.0.1] CARGO_CFG_TARGET_HAS_ATOMIC=Ok(\"[..]64[..]\")");
+ #[cfg(windows)]
+ check.with_stderr_contains("[foo 0.0.1] CARGO_CFG_WINDOWS=Ok(\"\")");
+ #[cfg(unix)]
+ check.with_stderr_contains("[foo 0.0.1] CARGO_CFG_UNIX=Ok(\"\")");
+ check.run();
+}
diff --git a/src/tools/cargo/tests/testsuite/build_script_extra_link_arg.rs b/src/tools/cargo/tests/testsuite/build_script_extra_link_arg.rs
new file mode 100644
index 000000000..ade262fec
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/build_script_extra_link_arg.rs
@@ -0,0 +1,376 @@
+//! Tests for additional link arguments.
+
+// NOTE: Many of these tests use `without_status()` when passing bogus flags
+// because MSVC link.exe just gives a warning on unknown flags (how helpful!),
+// and other linkers will return an error.
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project};
+
+#[cargo_test]
+fn build_script_extra_link_arg_bin() {
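+ // `cargo:rustc-link-arg-bins` should pass `-C link-arg` to binary targets.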
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-arg-bins=--this-is-a-bogus-flag");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v")
+ .without_status()
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo [..]-C link-arg=--this-is-a-bogus-flag[..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_script_extra_link_arg_bin_single() {
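+ // `cargo:rustc-link-arg-bin=NAME=ARG` applies only to the named binary,
+ // on top of `rustc-link-arg-bins` which applies to all binaries.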
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foobar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [[bin]]
+ name = "foo"
+ [[bin]]
+ name = "bar"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-arg-bins=--bogus-flag-all");
+ println!("cargo:rustc-link-arg-bin=foo=--bogus-flag-foo");
+ println!("cargo:rustc-link-arg-bin=bar=--bogus-flag-bar");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v")
+ .without_status()
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo [..]-C link-arg=--bogus-flag-all -C link-arg=--bogus-flag-foo[..]",
+ )
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name bar [..]-C link-arg=--bogus-flag-all -C link-arg=--bogus-flag-bar[..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_script_extra_link_arg() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-arg=--this-is-a-bogus-flag");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v")
+ .without_status()
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo [..]-C link-arg=--this-is-a-bogus-flag[..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn link_arg_missing_target() {
+ // Errors when a given target doesn't exist.
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-link-arg-cdylib=--bogus"); }"#,
+ )
+ .build();
+
+ // TODO: Uncomment this if cdylib restriction is re-added (see
+ // cdylib_link_arg_transitive below).
+ // p.cargo("check")
+ // .with_status(101)
+ // .with_stderr("\
+ // [COMPILING] foo [..]
+ // error: invalid instruction `cargo:rustc-link-arg-cdylib` from build script of `foo v0.0.1 ([ROOT]/foo)`
+ // The package foo v0.0.1 ([ROOT]/foo) does not have a cdylib target.
+ // ")
+ // .run();
+
+ p.change_file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-link-arg-bins=--bogus"); }"#,
+ );
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr("\
+[COMPILING] foo [..]
+error: invalid instruction `cargo:rustc-link-arg-bins` from build script of `foo v0.0.1 ([ROOT]/foo)`
+The package foo v0.0.1 ([ROOT]/foo) does not have a bin target.
+")
+ .run();
+
+ p.change_file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-link-arg-bin=abc=--bogus"); }"#,
+ );
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+error: invalid instruction `cargo:rustc-link-arg-bin` from build script of `foo v0.0.1 ([ROOT]/foo)`
+The package foo v0.0.1 ([ROOT]/foo) does not have a bin target with the name `abc`.
+",
+ )
+ .run();
+
+ p.change_file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-link-arg-bin=abc"); }"#,
+ );
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+error: invalid instruction `cargo:rustc-link-arg-bin=abc` from build script of `foo v0.0.1 ([ROOT]/foo)`
+The instruction should have the form cargo:rustc-link-arg-bin=BIN=ARG
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cdylib_link_arg_transitive() {
+ // There was an unintended regression in 1.50 where rustc-link-arg-cdylib
+ // arguments from dependencies were being applied in the parent package.
+ // Previously it was silently ignored.
+ // See https://github.com/rust-lang/cargo/issues/9562
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [lib]
+ crate-type = ["cdylib"]
+
+ [dependencies]
+ bar = {path="bar"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0"))
+ .file("bar/src/lib.rs", "")
+ .file(
+ "bar/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-arg-cdylib=--bogus");
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build -v")
+ .without_status()
+ .with_stderr_contains(
+ "\
+[COMPILING] bar v1.0.0 [..]
+[RUNNING] `rustc --crate-name build_script_build bar/build.rs [..]
+[RUNNING] `[..]build-script-build[..]
+warning: cargo:rustc-link-arg-cdylib was specified in the build script of bar v1.0.0 \
+([ROOT]/foo/bar), but that package does not contain a cdylib target
+
+Allowing this was an unintended change in the 1.50 release, and may become an error in \
+the future. For more information, see <https://github.com/rust-lang/cargo/issues/9562>.
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C link-arg=--bogus[..]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn link_arg_transitive_not_allowed() {
+ // Verify that transitive dependencies don't pass link args.
+ //
+ // Note that rustc-link-arg doesn't have any errors or warnings when it is
+ // unused. Perhaps that could be more aggressive, but it is difficult
+ // since it could be used for test binaries.
+ Package::new("bar", "1.0.0")
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-arg=--bogus");
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [lib]
+ crate-type = ["cdylib"]
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] [..]
+[DOWNLOADED] [..]
+[COMPILING] bar v1.0.0
+[RUNNING] `rustc --crate-name build_script_build [..]
+[RUNNING] `[..]/build-script-build[..]
+[RUNNING] `rustc --crate-name bar [..]
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]
+[FINISHED] dev [..]
+",
+ )
+ .with_stderr_does_not_contain("--bogus")
+ .run();
+}
+
+#[cargo_test]
+fn link_arg_with_doctest() {
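+ // `cargo:rustc-link-arg` should also be passed to rustdoc when building doctests.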
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ //! ```
+ //! let x = 5;
+ //! assert_eq!(x, 5);
+ //! ```
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-arg=--this-is-a-bogus-flag");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("test --doc -v")
+ .without_status()
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..]--crate-name foo [..]-C link-arg=--this-is-a-bogus-flag[..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_script_extra_link_arg_tests() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file("tests/test_foo.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-arg-tests=--this-is-a-bogus-flag");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("test -v")
+ .without_status()
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name test_foo [..]-C link-arg=--this-is-a-bogus-flag[..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_script_extra_link_arg_benches() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file("benches/bench_foo.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-arg-benches=--this-is-a-bogus-flag");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench -v")
+ .without_status()
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name bench_foo [..]-C link-arg=--this-is-a-bogus-flag[..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_script_extra_link_arg_examples() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file("examples/example_foo.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-link-arg-examples=--this-is-a-bogus-flag");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v --examples")
+ .without_status()
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name example_foo [..]-C link-arg=--this-is-a-bogus-flag[..]",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/cache_messages.rs b/src/tools/cargo/tests/testsuite/cache_messages.rs
new file mode 100644
index 000000000..b856ed152
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cache_messages.rs
@@ -0,0 +1,488 @@
+//! Tests for caching compiler diagnostics.
+
+use super::messages::raw_rustc_output;
+use cargo_test_support::tools;
+use cargo_test_support::{basic_manifest, is_coarse_mtime, project, registry::Package, sleep_ms};
+
+fn as_str(bytes: &[u8]) -> &str {
+ std::str::from_utf8(bytes).expect("valid utf-8")
+}
+
+#[cargo_test]
+fn simple() {
+ // A simple example that generates two warnings (unused functions).
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ fn a() {}
+ fn b() {}
+ ",
+ )
+ .build();
+
+ // Capture what rustc actually emits. This is done to avoid relying on the
+ // exact message formatting in rustc.
+ let rustc_output = raw_rustc_output(&p, "src/lib.rs", &[]);
+
+ // -q so the output is the same as rustc (no "Compiling" or "Finished").
+ let cargo_output1 = p
+ .cargo("check -q --color=never")
+ .exec_with_output()
+ .expect("cargo to run");
+ assert_eq!(rustc_output, as_str(&cargo_output1.stderr));
+ assert!(cargo_output1.stdout.is_empty());
+ // Check that the cached version is exactly the same.
+ let cargo_output2 = p
+ .cargo("check -q")
+ .exec_with_output()
+ .expect("cargo to run");
+ assert_eq!(rustc_output, as_str(&cargo_output2.stderr));
+ assert!(cargo_output2.stdout.is_empty());
+}
+
+// same as `simple`, except everything is using the short format
+#[cargo_test]
+fn simple_short() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ fn a() {}
+ fn b() {}
+ ",
+ )
+ .build();
+
+ let rustc_output = raw_rustc_output(&p, "src/lib.rs", &["--error-format=short"]);
+
+ let cargo_output1 = p
+ .cargo("check -q --color=never --message-format=short")
+ .exec_with_output()
+ .expect("cargo to run");
+ assert_eq!(rustc_output, as_str(&cargo_output1.stderr));
+ // assert!(cargo_output1.stdout.is_empty());
+ let cargo_output2 = p
+ .cargo("check -q --message-format=short")
+ .exec_with_output()
+ .expect("cargo to run");
+ println!("{}", String::from_utf8_lossy(&cargo_output2.stdout));
+ assert_eq!(rustc_output, as_str(&cargo_output2.stderr));
+ assert!(cargo_output2.stdout.is_empty());
+}
+
+#[cargo_test]
+fn color() {
+ // Check enabling/disabling color.
+ let p = project().file("src/lib.rs", "fn a() {}").build();
+
+ // Hack for issue in fwdansi 1.1. It is squashing multiple resets
+ // into a single reset.
+ // https://github.com/kennytm/fwdansi/issues/2
+ fn normalize(s: &str) -> String {
+ #[cfg(windows)]
+ return s.replace("\x1b[0m\x1b[0m", "\x1b[0m");
+ #[cfg(not(windows))]
+ return s.to_string();
+ }
+
+ let compare = |a, b| {
+ assert_eq!(normalize(a), normalize(b));
+ };
+
+ // Capture the original color output.
+ let rustc_color = raw_rustc_output(&p, "src/lib.rs", &["--color=always"]);
+ assert!(rustc_color.contains("\x1b["));
+
+ // Capture the original non-color output.
+ let rustc_nocolor = raw_rustc_output(&p, "src/lib.rs", &[]);
+ assert!(!rustc_nocolor.contains("\x1b["));
+
+ // First pass, non-cached, with color, should be the same.
+ let cargo_output1 = p
+ .cargo("check -q --color=always")
+ .exec_with_output()
+ .expect("cargo to run");
+ compare(&rustc_color, as_str(&cargo_output1.stderr));
+
+ // Replay cached, with color.
+ let cargo_output2 = p
+ .cargo("check -q --color=always")
+ .exec_with_output()
+ .expect("cargo to run");
+ compare(&rustc_color, as_str(&cargo_output2.stderr));
+
+ // Replay cached, no color.
+ let cargo_output_nocolor = p
+ .cargo("check -q --color=never")
+ .exec_with_output()
+ .expect("cargo to run");
+ compare(&rustc_nocolor, as_str(&cargo_output_nocolor.stderr));
+}
+
+#[cargo_test]
+fn cached_as_json() {
+ // Check that cached JSON output is the same.
+ let p = project().file("src/lib.rs", "fn a() {}").build();
+
+ // Grab the non-cached output, feature disabled.
+ // NOTE: When stabilizing, this will need to be redone.
+ let cargo_output = p
+ .cargo("check --message-format=json")
+ .exec_with_output()
+ .expect("cargo to run");
+ assert!(cargo_output.status.success());
+ let orig_cargo_out = as_str(&cargo_output.stdout);
+ assert!(orig_cargo_out.contains("compiler-message"));
+ p.cargo("clean").run();
+
+ // Check JSON output, not fresh.
+ let cargo_output1 = p
+ .cargo("check --message-format=json")
+ .exec_with_output()
+ .expect("cargo to run");
+ assert_eq!(as_str(&cargo_output1.stdout), orig_cargo_out);
+
+ // Check JSON output, fresh.
+ let cargo_output2 = p
+ .cargo("check --message-format=json")
+ .exec_with_output()
+ .expect("cargo to run");
+ // The only difference should be this field.
+ let fix_fresh = as_str(&cargo_output2.stdout).replace("\"fresh\":true", "\"fresh\":false");
+ assert_eq!(fix_fresh, orig_cargo_out);
+}
+
+#[cargo_test]
+fn clears_cache_after_fix() {
+ // Make sure the cache is invalidated when there is no output.
+ let p = project().file("src/lib.rs", "fn asdf() {}").build();
+ // Fill the cache.
+ p.cargo("check").with_stderr_contains("[..]asdf[..]").run();
+ let cpath = p
+ .glob("target/debug/.fingerprint/foo-*/output-*")
+ .next()
+ .unwrap()
+ .unwrap();
+ assert!(std::fs::read_to_string(cpath).unwrap().contains("asdf"));
+
+ // Fix it.
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+ p.change_file("src/lib.rs", "");
+
+ p.cargo("check")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[CHECKING] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ assert_eq!(
+ p.glob("target/debug/.fingerprint/foo-*/output-*").count(),
+ 0
+ );
+
+ // And again, check the cache is correct.
+ p.cargo("check")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustdoc() {
+ // Create a warning in rustdoc.
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ #![warn(missing_docs)]
+ pub fn f() {}
+ ",
+ )
+ .build();
+
+ let rustdoc_output = p
+ .cargo("doc -q --color=always")
+ .exec_with_output()
+ .expect("rustdoc to run");
+ assert!(rustdoc_output.status.success());
+ let rustdoc_stderr = as_str(&rustdoc_output.stderr);
+ assert!(rustdoc_stderr.contains("missing"));
+ assert!(rustdoc_stderr.contains("\x1b["));
+ assert_eq!(
+ p.glob("target/debug/.fingerprint/foo-*/output-*").count(),
+ 1
+ );
+
+ // Check the cached output.
+ let rustdoc_output = p
+ .cargo("doc -q --color=always")
+ .exec_with_output()
+ .expect("rustdoc to run");
+ assert_eq!(as_str(&rustdoc_output.stderr), rustdoc_stderr);
+}
+
+#[cargo_test]
+fn fix() {
+ // Make sure `fix` is not broken by caching.
+ let p = project().file("src/lib.rs", "pub fn try() {}").build();
+
+ p.cargo("fix --edition --allow-no-vcs").run();
+
+ assert_eq!(p.read_file("src/lib.rs"), "pub fn r#try() {}");
+}
+
+#[cargo_test]
+fn very_verbose() {
+ // Handle cap-lints in dependencies.
+ Package::new("bar", "1.0.0")
+ .file("src/lib.rs", "fn not_used() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check -vv")
+ .with_stderr_contains("[..]not_used[..]")
+ .run();
+
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+
+ p.cargo("check -vv")
+ .with_stderr_contains("[..]not_used[..]")
+ .run();
+}
+
+#[cargo_test]
+fn doesnt_create_extra_files() {
+ // Ensure it doesn't create `output` files when not needed.
+ Package::new("dep", "1.0.0")
+ .file("src/lib.rs", "fn unused() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").run();
+
+ assert_eq!(
+ p.glob("target/debug/.fingerprint/foo-*/output-*").count(),
+ 0
+ );
+ assert_eq!(
+ p.glob("target/debug/.fingerprint/dep-*/output-*").count(),
+ 0
+ );
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
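+ // Introduce a warning in the local crate; this time an output file should be cached.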
+ p.change_file("src/lib.rs", "fn unused() {}");
+ p.cargo("check").run();
+ assert_eq!(
+ p.glob("target/debug/.fingerprint/foo-*/output-*").count(),
+ 1
+ );
+}
+
+#[cargo_test]
+fn replay_non_json() {
+ // Handles non-JSON output: a stand-in rustc prints plain text to stderr, and
+ // cargo must cache and replay those lines verbatim.
+ let rustc = project()
+ .at("rustc")
+ .file("Cargo.toml", &basic_manifest("rustc_alt", "1.0.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ eprintln!("line 1");
+ eprintln!("line 2");
+ let r = std::process::Command::new("rustc")
+ .args(std::env::args_os().skip(1))
+ .status();
+ std::process::exit(r.unwrap().code().unwrap_or(2));
+ }
+ "#,
+ )
+ .build();
+ rustc.cargo("build").run();
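+ // Point RUSTC at the stand-in; its plain-text lines must be replayed in order on the cached run.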
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("check")
+ .env("RUSTC", rustc.bin("rustc_alt"))
+ .with_stderr(
+ "\
+[CHECKING] foo [..]
+line 1
+line 2
+[FINISHED] dev [..]
+",
+ )
+ .run();
+
+ p.cargo("check")
+ .env("RUSTC", rustc.bin("rustc_alt"))
+ .with_stderr(
+ "\
+line 1
+line 2
+[FINISHED] dev [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn caching_large_output() {
+ // Handles a large number of messages.
+ // This is an arbitrary amount that is greater than the 100 used in
+ // job_queue. This is here to check for deadlocks or any other problems.
+ const COUNT: usize = 250;
+ let rustc = project()
+ .at("rustc")
+ .file("Cargo.toml", &basic_manifest("rustc_alt", "1.0.0"))
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
+ fn main() {{
+ for i in 0..{} {{
+ eprintln!("{{{{\"message\": \"test message {{}}\", \"level\": \"warning\", \
+ \"spans\": [], \"children\": [], \"rendered\": \"test message {{}}\"}}}}",
+ i, i);
+ }}
+ let r = std::process::Command::new("rustc")
+ .args(std::env::args_os().skip(1))
+ .status();
+ std::process::exit(r.unwrap().code().unwrap_or(2));
+ }}
+ "#,
+ COUNT
+ ),
+ )
+ .build();
+
+ let mut expected = String::new();
+ for i in 0..COUNT {
+ expected.push_str(&format!("test message {}\n", i));
+ }
+
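+ // Build the stand-in rustc, then check; all 250 messages must appear both when
+ // freshly generated and when replayed from the cache.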
+ rustc.cargo("build").run();
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("check")
+ .env("RUSTC", rustc.bin("rustc_alt"))
+ .with_stderr(&format!(
+ "\
+[CHECKING] foo [..]
+{}warning: `foo` (lib) generated 250 warnings
+[FINISHED] dev [..]
+",
+ expected
+ ))
+ .run();
+
+ p.cargo("check")
+ .env("RUSTC", rustc.bin("rustc_alt"))
+ .with_stderr(&format!(
+ "\
+{}warning: `foo` (lib) generated 250 warnings
+[FINISHED] dev [..]
+",
+ expected
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn rustc_workspace_wrapper() {
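+ // Cached diagnostics should replay correctly when RUSTC_WORKSPACE_WRAPPER is involved.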
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "pub fn f() { assert!(true); }\n\
+ fn unused_func() {}",
+ )
+ .build();
+
+ p.cargo("check -v")
+ .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper())
+ .with_stderr_contains("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]")
+ .run();
+
+ // Check without a wrapper should rebuild
+ p.cargo("check -v")
+ .with_stderr_contains(
+ "\
+[CHECKING] foo [..]
+[RUNNING] `rustc[..]
+[WARNING] [..]unused_func[..]
+",
+ )
+ .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]")
+ .run();
+
+ // Again, reading from the cache.
+ p.cargo("check -v")
+ .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper())
+ .with_stderr_contains("[FRESH] foo [..]")
+ .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]")
+ .run();
+
+ // And `check` should also be fresh, reading from cache.
+ p.cargo("check -v")
+ .with_stderr_contains("[FRESH] foo [..]")
+ .with_stderr_contains("[WARNING] [..]unused_func[..]")
+ .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]")
+ .run();
+}
+
+#[cargo_test]
+fn wacky_hashless_fingerprint() {
+ // On Windows, executables don't have hashes. This checks for a bad
+ // assumption that caused bad caching.
+ let p = project()
+ .file("src/bin/a.rs", "fn main() { let unused = 1; }")
+ .file("src/bin/b.rs", "fn main() {}")
+ .build();
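+ // Bin `a` has a warning and bin `b` does not; `b` must never replay `a`'s cached warning.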
+ p.cargo("check --bin b")
+ .with_stderr_does_not_contain("[..]unused[..]")
+ .run();
+ p.cargo("check --bin a")
+ .with_stderr_contains("[..]unused[..]")
+ .run();
+ // This should not pick up the cache from `a`.
+ p.cargo("check --bin b")
+ .with_stderr_does_not_contain("[..]unused[..]")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add-basic.in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/add-basic.in/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add-basic.in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add-basic.in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/add-basic.in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add-basic.in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_basic/in b/src/tools/cargo/tests/testsuite/cargo_add/add_basic/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_basic/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_basic/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/add_basic/mod.rs
new file mode 100644
index 000000000..33889dffa
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_basic/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_basic/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/add_basic/out/Cargo.toml
new file mode 100644
index 000000000..5964c87be
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_basic/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_basic/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/add_basic/stderr.log
new file mode 100644
index 000000000..fd6b711e3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_basic/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_basic/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/add_basic/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_basic/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/in b/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/mod.rs
new file mode 100644
index 000000000..a9cc20575
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/out/Cargo.toml
new file mode 100644
index 000000000..ba8d7eabe
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = "99999.0.0"
+my-package2 = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/stderr.log
new file mode 100644
index 000000000..d0b4e73c1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to dependencies.
+ Adding my-package2 v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_multiple/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/in b/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/mod.rs
new file mode 100644
index 000000000..63605d8dc
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("linked_hash_map Inflector")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/out/Cargo.toml
new file mode 100644
index 000000000..3d0dec343
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+inflector = "0.11.4"
+linked-hash-map = "0.5.4"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/stderr.log
new file mode 100644
index 000000000..c7d451143
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/stderr.log
@@ -0,0 +1,18 @@
+ Updating `dummy-registry` index
+warning: translating `linked_hash_map` to `linked-hash-map`
+warning: translating `Inflector` to `inflector`
+ Adding linked-hash-map v0.5.4 to dependencies.
+ Features:
+ - clippy
+ - heapsize
+ - heapsize_impl
+ - nightly
+ - serde
+ - serde_impl
+ - serde_test
+ Adding inflector v0.11.4 to dependencies.
+ Features:
+ + heavyweight
+ + lazy_static
+ + regex
+ - unstable
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/add_normalized_name_external/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build/in b/src/tools/cargo/tests/testsuite/cargo_add/build/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/build/mod.rs
new file mode 100644
index 000000000..130ecfbb0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("--build my-build-package1 my-build-package2")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/build/out/Cargo.toml
new file mode 100644
index 000000000..cceb448ed
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[build-dependencies]
+my-build-package1 = "99999.0.0"
+my-build-package2 = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/build/stderr.log
new file mode 100644
index 000000000..b873c5a80
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-build-package1 v99999.0.0 to build-dependencies.
+ Adding my-build-package2 v99999.0.0 to build-dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/build/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/Cargo.toml
new file mode 100644
index 000000000..6a6ac823f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+exclude = ["dependency"]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["one", "two"], registry = "alternative" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/Cargo.toml
new file mode 100644
index 000000000..58b909cef
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
+
+[features]
+one = []
+two = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/mod.rs
new file mode 100644
index 000000000..b0bb2e03b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/mod.rs
@@ -0,0 +1,28 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_alt_registry;
+
+#[cargo_test]
+fn case() {
+ init_alt_registry();
+ let project =
+ Project::from_template("tests/testsuite/cargo_add/build_prefer_existing_version/in");
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("cargo-list-test-fixture-dependency --build")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path("tests/testsuite/cargo_add/build_prefer_existing_version/stdout.log")
+ .stderr_matches_path("tests/testsuite/cargo_add/build_prefer_existing_version/stderr.log");
+
+ assert_ui().subset_matches(
+ "tests/testsuite/cargo_add/build_prefer_existing_version/out",
+ &project_root,
+ );
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/out/Cargo.toml
new file mode 100644
index 000000000..123af6d22
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/out/Cargo.toml
@@ -0,0 +1,12 @@
+[workspace]
+exclude = ["dependency"]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["one", "two"], registry = "alternative" }
+
+[build-dependencies]
+cargo-list-test-fixture-dependency = { version = "0.0.0", path = "dependency", registry = "alternative" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/out/dependency/Cargo.toml
new file mode 100644
index 000000000..58b909cef
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/out/dependency/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
+
+[features]
+one = []
+two = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/stderr.log
new file mode 100644
index 000000000..554aa2ef3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/stderr.log
@@ -0,0 +1,4 @@
+ Adding cargo-list-test-fixture-dependency (local) to build-dependencies.
+ Features:
+ - one
+ - two
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/build_prefer_existing_version/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/in/Cargo.toml
new file mode 100644
index 000000000..e81a76b4b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+some-package = { package = "my-package1", version = "0.1.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/mod.rs
new file mode 100644
index 000000000..94309b3ab
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package2 --rename some-package")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/out/Cargo.toml
new file mode 100644
index 000000000..70cd31826
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+some-package = { package = "my-package2", version = "99999.0.0", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/stderr.log
new file mode 100644
index 000000000..674f62602
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package2 v99999.0.0 to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/in b/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/mod.rs
new file mode 100644
index 000000000..5dffac323
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("test_cyclic_features")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/out/Cargo.toml
new file mode 100644
index 000000000..27a5c31f8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+test_cyclic_features = "0.1.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/stderr.log
new file mode 100644
index 000000000..2d4a2db4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/stderr.log
@@ -0,0 +1,5 @@
+ Updating `dummy-registry` index
+ Adding test_cyclic_features v0.1.1 to dependencies.
+ Features:
+ + feature-one
+ + feature-two
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/cyclic_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/default_features/in b/src/tools/cargo/tests/testsuite/cargo_add/default_features/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/default_features/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/default_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/default_features/mod.rs
new file mode 100644
index 000000000..88bdd8065
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/default_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2@0.4.1 --default-features")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/default_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/default_features/out/Cargo.toml
new file mode 100644
index 000000000..c5e017892
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/default_features/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = "99999.0.0"
+my-package2 = "0.4.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/default_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/default_features/stderr.log
new file mode 100644
index 000000000..fb8d4903d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/default_features/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to dependencies.
+ Adding my-package2 v0.4.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/default_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/default_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/default_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/in/Cargo.toml
new file mode 100644
index 000000000..c0fc374ea
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = { version = "99999.0.0", default_features = false }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/mod.rs
new file mode 100644
index 000000000..10d4e4e98
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package")
+ .current_dir(&cwd)
+ .assert()
+ .failure()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/out/Cargo.toml
new file mode 100644
index 000000000..c0fc374ea
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = { version = "99999.0.0", default_features = false }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/stderr.log
new file mode 100644
index 000000000..46d99d15d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/stderr.log
@@ -0,0 +1 @@
+error: Use of `default_features` in `my-package` is unsupported, please switch to `default-features`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_default_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/in/Cargo.toml
new file mode 100644
index 000000000..a83d2c621
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/in/Cargo.toml
@@ -0,0 +1,11 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dev_dependencies]
+my-package = "99999.0.0"
+
+[build_dependencies]
+my-package = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/mod.rs
new file mode 100644
index 000000000..10d4e4e98
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package")
+ .current_dir(&cwd)
+ .assert()
+ .failure()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/out/Cargo.toml
new file mode 100644
index 000000000..a83d2c621
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/out/Cargo.toml
@@ -0,0 +1,11 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dev_dependencies]
+my-package = "99999.0.0"
+
+[build_dependencies]
+my-package = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/stderr.log
new file mode 100644
index 000000000..b3b9c10f9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/stderr.log
@@ -0,0 +1 @@
+error: Deprecated dependency sections are unsupported: dev_dependencies, build_dependencies
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/deprecated_section/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/Cargo.toml
new file mode 100644
index 000000000..b867edbbe
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "bar"
+version = "0.0.0" \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/mod.rs
new file mode 100644
index 000000000..065fb4f93
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["foo", "-p", "bar"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/primary/Cargo.toml
new file mode 100644
index 000000000..a5740941b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo.workspace = true
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/stderr.log
new file mode 100644
index 000000000..d2efcc0c0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/stderr.log
@@ -0,0 +1 @@
+ Adding foo (workspace) to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/Cargo.toml
new file mode 100644
index 000000000..b1d9b3995
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency", features = ["merge"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/Cargo.toml
new file mode 100644
index 000000000..f34d7a685
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "foo"
+version = "0.0.0"
+
+[features]
+default-base = []
+default-test-base = []
+default-merge-base = []
+default = ["default-base", "default-test-base", "default-merge-base"]
+test-base = []
+test = ["test-base", "default-test-base"]
+merge-base = []
+merge = ["merge-base", "default-merge-base"]
+unrelated = [] \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/Cargo.toml
new file mode 100644
index 000000000..b867edbbe
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "bar"
+version = "0.0.0" \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/mod.rs
new file mode 100644
index 000000000..11ab2b1bf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["foo", "-p", "bar", "--features", "test"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/Cargo.toml
new file mode 100644
index 000000000..b1d9b3995
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency", features = ["merge"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/dependency/Cargo.toml
new file mode 100644
index 000000000..f34d7a685
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/dependency/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "foo"
+version = "0.0.0"
+
+[features]
+default-base = []
+default-test-base = []
+default-merge-base = []
+default = ["default-base", "default-test-base", "default-merge-base"]
+test-base = []
+test = ["test-base", "default-test-base"]
+merge-base = []
+merge = ["merge-base", "default-merge-base"]
+unrelated = [] \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/primary/Cargo.toml
new file mode 100644
index 000000000..fb4a12619
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, features = ["test"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/stderr.log
new file mode 100644
index 000000000..02dde7a34
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/stderr.log
@@ -0,0 +1,10 @@
+ Adding foo (workspace) to dependencies.
+ Features as of v0.0.0:
+ + default-base
+ + default-merge-base
+ + default-test-base
+ + merge
+ + merge-base
+ + test
+ + test-base
+ - unrelated
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/Cargo.toml
new file mode 100644
index 000000000..b867edbbe
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "bar"
+version = "0.0.0" \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/mod.rs
new file mode 100644
index 000000000..7557b520d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["foo", "-p", "bar", "--optional"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/primary/Cargo.toml
new file mode 100644
index 000000000..6dd7fb6d6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/stderr.log
new file mode 100644
index 000000000..da03b11f7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/stderr.log
@@ -0,0 +1 @@
+ Adding foo (workspace) to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev/in b/src/tools/cargo/tests/testsuite/cargo_add/dev/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/dev/mod.rs
new file mode 100644
index 000000000..112e92285
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("--dev my-dev-package1 my-dev-package2")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/dev/out/Cargo.toml
new file mode 100644
index 000000000..28d9e81ce
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dev-dependencies]
+my-dev-package1 = "99999.0.0"
+my-dev-package2 = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/dev/stderr.log
new file mode 100644
index 000000000..f8e187ce9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-dev-package1 v99999.0.0 to dev-dependencies.
+ Adding my-dev-package2 v99999.0.0 to dev-dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/dev/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/in b/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/mod.rs
new file mode 100644
index 000000000..3f57c6b76
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --dev --build")
+ .current_dir(cwd)
+ .assert()
+ .code(1)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/stderr.log
new file mode 100644
index 000000000..69c520912
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/stderr.log
@@ -0,0 +1,7 @@
+error: the argument '--dev' cannot be used with '--build'
+
+Usage: cargo add [OPTIONS] <DEP>[@<VERSION>] ...
+ cargo add [OPTIONS] --path <PATH> ...
+ cargo add [OPTIONS] --git <URL> ...
+
+For more information, try '--help'.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_build_conflict/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/Cargo.toml
new file mode 100644
index 000000000..6a6ac823f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+exclude = ["dependency"]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["one", "two"], registry = "alternative" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/Cargo.toml
new file mode 100644
index 000000000..58b909cef
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
+
+[features]
+one = []
+two = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/mod.rs
new file mode 100644
index 000000000..1785ac820
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_alt_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_alt_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("cargo-list-test-fixture-dependency --dev")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/out/Cargo.toml
new file mode 100644
index 000000000..247f345cf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/out/Cargo.toml
@@ -0,0 +1,12 @@
+[workspace]
+exclude = ["dependency"]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["one", "two"], registry = "alternative" }
+
+[dev-dependencies]
+cargo-list-test-fixture-dependency = { path = "dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/out/dependency/Cargo.toml
new file mode 100644
index 000000000..58b909cef
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/out/dependency/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
+
+[features]
+one = []
+two = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/stderr.log
new file mode 100644
index 000000000..32f9a3e82
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/stderr.log
@@ -0,0 +1,4 @@
+ Adding cargo-list-test-fixture-dependency (local) to dev-dependencies.
+ Features as of v0.0.0:
+ - one
+ - two
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dev_prefer_existing_version/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dry_run/in b/src/tools/cargo/tests/testsuite/cargo_add/dry_run/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dry_run/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dry_run/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/dry_run/mod.rs
new file mode 100644
index 000000000..209d20873
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dry_run/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --dry-run")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dry_run/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/dry_run/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dry_run/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dry_run/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/dry_run/stderr.log
new file mode 100644
index 000000000..c80dba942
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dry_run/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package v99999.0.0 to dependencies.
+warning: aborting add due to dry run
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/dry_run/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/dry_run/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/dry_run/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features/in b/src/tools/cargo/tests/testsuite/cargo_add/features/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/features/mod.rs
new file mode 100644
index 000000000..5e4115390
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("your-face --features eyes")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/features/out/Cargo.toml
new file mode 100644
index 000000000..11419b203
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "99999.0.0", features = ["eyes"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/features/stderr.log
new file mode 100644
index 000000000..386f3db5a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features/stderr.log
@@ -0,0 +1,7 @@
+ Updating `dummy-registry` index
+ Adding your-face v99999.0.0 to dependencies.
+ Features:
+ + eyes
+ - ears
+ - mouth
+ - nose
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_empty/in b/src/tools/cargo/tests/testsuite/cargo_add/features_empty/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_empty/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_empty/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/features_empty/mod.rs
new file mode 100644
index 000000000..81dffc1ee
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_empty/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("your-face --features ''")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_empty/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/features_empty/out/Cargo.toml
new file mode 100644
index 000000000..79d735a12
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_empty/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_empty/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/features_empty/stderr.log
new file mode 100644
index 000000000..796b9601b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_empty/stderr.log
@@ -0,0 +1,7 @@
+ Updating `dummy-registry` index
+ Adding your-face v99999.0.0 to dependencies.
+ Features:
+ - ears
+ - eyes
+ - mouth
+ - nose
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_empty/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/features_empty/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_empty/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/in b/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/mod.rs
new file mode 100644
index 000000000..db47f860d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("your-face --features eyes --features nose")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/out/Cargo.toml
new file mode 100644
index 000000000..0060d24bc
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "99999.0.0", features = ["eyes", "nose"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/stderr.log
new file mode 100644
index 000000000..615459052
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/stderr.log
@@ -0,0 +1,7 @@
+ Updating `dummy-registry` index
+ Adding your-face v99999.0.0 to dependencies.
+ Features:
+ + eyes
+ + nose
+ - ears
+ - mouth
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_multiple_occurrences/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/in/Cargo.toml
new file mode 100644
index 000000000..11419b203
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "99999.0.0", features = ["eyes"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/mod.rs
new file mode 100644
index 000000000..ed99a3111
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("your-face")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/out/Cargo.toml
new file mode 100644
index 000000000..11419b203
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "99999.0.0", features = ["eyes"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/stderr.log
new file mode 100644
index 000000000..386f3db5a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/stderr.log
@@ -0,0 +1,7 @@
+ Updating `dummy-registry` index
+ Adding your-face v99999.0.0 to dependencies.
+ Features:
+ + eyes
+ - ears
+ - mouth
+ - nose
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_preserve/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/in b/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/mod.rs
new file mode 100644
index 000000000..2ef212e59
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("your-face --features eyes,nose")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/out/Cargo.toml
new file mode 100644
index 000000000..0060d24bc
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "99999.0.0", features = ["eyes", "nose"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/stderr.log
new file mode 100644
index 000000000..615459052
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/stderr.log
@@ -0,0 +1,7 @@
+ Updating `dummy-registry` index
+ Adding your-face v99999.0.0 to dependencies.
+ Features:
+ + eyes
+ + nose
+ - ears
+ - mouth
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_spaced_values/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/in b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/mod.rs
new file mode 100644
index 000000000..7fd8d9529
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("your-face --features noze")
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/stderr.log
new file mode 100644
index 000000000..58afcb66b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/stderr.log
@@ -0,0 +1,5 @@
+ Updating `dummy-registry` index
+ Adding your-face v99999.0.0 to dependencies.
+error: unrecognized feature for crate your-face: noze
+disabled features:
+ ears, eyes, mouth, nose
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/in b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/mod.rs
new file mode 100644
index 000000000..9f59a0353
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --features noze")
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/stderr.log
new file mode 100644
index 000000000..f1d046d53
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/stderr.log
@@ -0,0 +1,4 @@
+ Updating `dummy-registry` index
+ Adding my-package v99999.0.0 to dependencies.
+error: unrecognized feature for crate my-package: noze
+no features available for crate my-package
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/features_unknown_no_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git/in b/src/tools/cargo/tests/testsuite/cargo_add/git/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/git/mod.rs
new file mode 100644
index 000000000..bd82b3015
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git/mod.rs
@@ -0,0 +1,34 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let git_dep = cargo_test_support::git::new("git-package", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"),
+ )
+ .file("src/lib.rs", "")
+ });
+ let git_url = git_dep.url().to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["git-package", "--git", &git_url])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git/out/Cargo.toml
new file mode 100644
index 000000000..7f2d2f188
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+git-package = { git = "[ROOTURL]/git-package", version = "0.3.0" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/git/stderr.log
new file mode 100644
index 000000000..839d8bb32
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git/stderr.log
@@ -0,0 +1,3 @@
+ Updating git repository `[ROOTURL]/git-package`
+ Adding git-package (git) to dependencies.
+ Updating git repository `[ROOTURL]/git-package`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/git/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_branch/in b/src/tools/cargo/tests/testsuite/cargo_add/git_branch/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_branch/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_branch/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/git_branch/mod.rs
new file mode 100644
index 000000000..051564566
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_branch/mod.rs
@@ -0,0 +1,37 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let (git_dep, git_repo) = cargo_test_support::git::new_repo("git-package", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"),
+ )
+ .file("src/lib.rs", "")
+ });
+ let branch = "dev";
+ let find_head = || (git_repo.head().unwrap().peel_to_commit().unwrap());
+ git_repo.branch(branch, &find_head(), false).unwrap();
+ let git_url = git_dep.url().to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["git-package", "--git", &git_url, "--branch", branch])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_branch/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git_branch/out/Cargo.toml
new file mode 100644
index 000000000..2eb295581
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_branch/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+git-package = { git = "[ROOTURL]/git-package", branch = "dev", version = "0.3.0" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_branch/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/git_branch/stderr.log
new file mode 100644
index 000000000..839d8bb32
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_branch/stderr.log
@@ -0,0 +1,3 @@
+ Updating git repository `[ROOTURL]/git-package`
+ Adding git-package (git) to dependencies.
+ Updating git repository `[ROOTURL]/git-package`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_branch/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/git_branch/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_branch/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/in b/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/mod.rs
new file mode 100644
index 000000000..f123298ae
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/mod.rs
@@ -0,0 +1,29 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args([
+ "my-package@0.4.3",
+ "--git",
+ "https://github.com/dcjanus/invalid",
+ ])
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/stderr.log
new file mode 100644
index 000000000..207e0ded3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/stderr.log
@@ -0,0 +1 @@
+error: cannot specify a git URL (`https://github.com/dcjanus/invalid`) with a version (`0.4.3`).
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_conflicts_namever/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_dev/in b/src/tools/cargo/tests/testsuite/cargo_add/git_dev/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_dev/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_dev/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/git_dev/mod.rs
new file mode 100644
index 000000000..9e14a4007
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_dev/mod.rs
@@ -0,0 +1,34 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let git_dep = cargo_test_support::git::new("git-package", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"),
+ )
+ .file("src/lib.rs", "")
+ });
+ let git_url = git_dep.url().to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["git-package", "--git", &git_url, "--dev"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_dev/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git_dev/out/Cargo.toml
new file mode 100644
index 000000000..ceb131757
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_dev/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dev-dependencies]
+git-package = { git = "[ROOTURL]/git-package" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_dev/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/git_dev/stderr.log
new file mode 100644
index 000000000..8e53bb4be
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_dev/stderr.log
@@ -0,0 +1,3 @@
+ Updating git repository `[ROOTURL]/git-package`
+ Adding git-package (git) to dev-dependencies.
+ Updating git repository `[ROOTURL]/git-package`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_dev/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/git_dev/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_dev/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/in b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/mod.rs
new file mode 100644
index 000000000..52183adf4
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/mod.rs
@@ -0,0 +1,34 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let git_dep = cargo_test_support::git::new("git-package", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"),
+ )
+ .file("src/lib.rs", "")
+ });
+ let git_url = git_dep.url().to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["--git", &git_url])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/out/Cargo.toml
new file mode 100644
index 000000000..7f2d2f188
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+git-package = { git = "[ROOTURL]/git-package", version = "0.3.0" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/stderr.log
new file mode 100644
index 000000000..b5e8b1c9b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/stderr.log
@@ -0,0 +1,4 @@
+ Updating git repository `[ROOTURL]/git-package`
+ Updating git repository `[ROOTURL]/git-package`
+ Adding git-package (git) to dependencies.
+ Updating git repository `[ROOTURL]/git-package`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/in b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/mod.rs
new file mode 100644
index 000000000..a708a8ae7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/mod.rs
@@ -0,0 +1,74 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let git_dep = cargo_test_support::git::new("git-package", |project| {
+ project
+ .file(
+ "p1/Cargo.toml",
+ &cargo_test_support::basic_manifest("my-package1", "0.3.0+my-package1"),
+ )
+ .file("p1/src/lib.rs", "")
+ .file(
+ "p2/Cargo.toml",
+ &cargo_test_support::basic_manifest("my-package2", "0.3.0+my-package2"),
+ )
+ .file("p2/src/lib.rs", "")
+ .file(
+ "p3/Cargo.toml",
+ &cargo_test_support::basic_manifest("my-package3", "0.3.0+my-package2"),
+ )
+ .file("p3/src/lib.rs", "")
+ .file(
+ "p4/Cargo.toml",
+ &cargo_test_support::basic_manifest("my-package4", "0.3.0+my-package2"),
+ )
+ .file("p4/src/lib.rs", "")
+ .file(
+ "p5/Cargo.toml",
+ &cargo_test_support::basic_manifest("my-package5", "0.3.0+my-package2"),
+ )
+ .file("p5/src/lib.rs", "")
+ .file(
+ "p6/Cargo.toml",
+ &cargo_test_support::basic_manifest("my-package6", "0.3.0+my-package2"),
+ )
+ .file("p6/src/lib.rs", "")
+ .file(
+ "p7/Cargo.toml",
+ &cargo_test_support::basic_manifest("my-package7", "0.3.0+my-package2"),
+ )
+ .file("p7/src/lib.rs", "")
+ .file(
+ "p8/Cargo.toml",
+ &cargo_test_support::basic_manifest("my-package8", "0.3.0+my-package2"),
+ )
+ .file("p8/src/lib.rs", "")
+ .file(
+ "p9/Cargo.toml",
+ &cargo_test_support::basic_manifest("my-package9", "0.3.0+my-package2"),
+ )
+ .file("p9/src/lib.rs", "")
+ });
+ let git_url = git_dep.url().to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["--git", &git_url])
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/stderr.log
new file mode 100644
index 000000000..2e045db6f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/stderr.log
@@ -0,0 +1,5 @@
+ Updating git repository `[ROOTURL]/git-package`
+error: multiple packages found at `[ROOTURL]/git-package`:
+ my-package1, my-package2, my-package3, my-package4, my-package5, my-package6
+ my-package7, my-package8, my-package9
+To disambiguate, run `cargo add --git [ROOTURL]/git-package <package>`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_inferred_name_multiple/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/in b/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/mod.rs
new file mode 100644
index 000000000..39eb6e626
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/mod.rs
@@ -0,0 +1,39 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let git_dep = cargo_test_support::git::new("git-package", |project| {
+ project
+ .file(
+ "p1/Cargo.toml",
+ &cargo_test_support::basic_manifest("my-package1", "0.3.0+my-package1"),
+ )
+ .file("p1/src/lib.rs", "")
+ .file(
+ "p2/Cargo.toml",
+ &cargo_test_support::basic_manifest("my-package2", "0.3.0+my-package2"),
+ )
+ .file("p2/src/lib.rs", "")
+ });
+ let git_url = git_dep.url().to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["my-package1", "my-package2", "--git", &git_url])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/out/Cargo.toml
new file mode 100644
index 000000000..ba9d3c5ea
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { git = "[ROOTURL]/git-package", version = "0.3.0" }
+my-package2 = { git = "[ROOTURL]/git-package", version = "0.3.0" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/stderr.log
new file mode 100644
index 000000000..454f0c797
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/stderr.log
@@ -0,0 +1,4 @@
+ Updating git repository `[ROOTURL]/git-package`
+ Adding my-package1 (git) to dependencies.
+ Adding my-package2 (git) to dependencies.
+ Updating git repository `[ROOTURL]/git-package`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_multiple_names/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/in b/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/mod.rs
new file mode 100644
index 000000000..03d861856
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/mod.rs
@@ -0,0 +1,34 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let git_dep = cargo_test_support::git::new("git-package", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"),
+ )
+ .file("src/lib.rs", "")
+ });
+ let git_url = git_dep.url().to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["git_package", "--git", &git_url])
+ .current_dir(cwd)
+ .assert()
+ .failure() // Fuzzy searching for paths isn't supported at this time
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/stderr.log
new file mode 100644
index 000000000..fedf82861
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/stderr.log
@@ -0,0 +1,2 @@
+ Updating git repository `[ROOTURL]/git-package`
+error: the crate `git_package@[ROOTURL]/git-package` could not be found at `[ROOTURL]/git-package`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_normalized_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_registry/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/in/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_registry/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_registry/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/mod.rs
new file mode 100644
index 000000000..6bf6f8933
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/mod.rs
@@ -0,0 +1,40 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_alt_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_alt_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let git_dep = cargo_test_support::git::new("versioned-package", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &cargo_test_support::basic_manifest("versioned-package", "0.3.0+versioned-package"),
+ )
+ .file("src/lib.rs", "")
+ });
+ let git_url = git_dep.url().to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args([
+ "versioned-package",
+ "--git",
+ &git_url,
+ "--registry",
+ "alternative",
+ ])
+ .current_dir(cwd)
+ .assert()
+ .failure()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_registry/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/out/Cargo.toml
new file mode 100644
index 000000000..3773d1c80
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+versioned-package = { git = "[ROOTURL]/versioned-package", version = "0.3.0", registry = "alternative" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_registry/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/stderr.log
new file mode 100644
index 000000000..c554c7ec0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/stderr.log
@@ -0,0 +1,6 @@
+ Updating git repository `[ROOTURL]/versioned-package`
+ Adding versioned-package (git) to dependencies.
+error: failed to parse manifest at `[ROOT]/case/Cargo.toml`
+
+Caused by:
+ dependency (versioned-package) specification is ambiguous. Only one of `git` or `registry` is allowed.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_registry/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_registry/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_rev/in b/src/tools/cargo/tests/testsuite/cargo_add/git_rev/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_rev/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_rev/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/git_rev/mod.rs
new file mode 100644
index 000000000..612607203
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_rev/mod.rs
@@ -0,0 +1,36 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let (git_dep, git_repo) = cargo_test_support::git::new_repo("git-package", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"),
+ )
+ .file("src/lib.rs", "")
+ });
+ let find_head = || (git_repo.head().unwrap().peel_to_commit().unwrap());
+ let head = find_head().id().to_string();
+ let git_url = git_dep.url().to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["git-package", "--git", &git_url, "--rev", &head])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_rev/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git_rev/out/Cargo.toml
new file mode 100644
index 000000000..efc00a01a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_rev/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+git-package = { git = "[ROOTURL]/git-package", rev = "[..]", version = "0.3.0" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_rev/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/git_rev/stderr.log
new file mode 100644
index 000000000..839d8bb32
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_rev/stderr.log
@@ -0,0 +1,3 @@
+ Updating git repository `[ROOTURL]/git-package`
+ Adding git-package (git) to dependencies.
+ Updating git repository `[ROOTURL]/git-package`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_rev/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/git_rev/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_rev/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_tag/in b/src/tools/cargo/tests/testsuite/cargo_add/git_tag/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_tag/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_tag/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/git_tag/mod.rs
new file mode 100644
index 000000000..b355b1706
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_tag/mod.rs
@@ -0,0 +1,36 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let (git_dep, git_repo) = cargo_test_support::git::new_repo("git-package", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"),
+ )
+ .file("src/lib.rs", "")
+ });
+ let tag = "v1.0.0";
+ cargo_test_support::git::tag(&git_repo, tag);
+ let git_url = git_dep.url().to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["git-package", "--git", &git_url, "--tag", tag])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_tag/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/git_tag/out/Cargo.toml
new file mode 100644
index 000000000..233f26e65
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_tag/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+git-package = { git = "[ROOTURL]/git-package", tag = "v1.0.0", version = "0.3.0" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_tag/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/git_tag/stderr.log
new file mode 100644
index 000000000..839d8bb32
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_tag/stderr.log
@@ -0,0 +1,3 @@
+ Updating git repository `[ROOTURL]/git-package`
+ Adding git-package (git) to dependencies.
+ Updating git repository `[ROOTURL]/git-package`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/git_tag/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/git_tag/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/git_tag/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/in b/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/mod.rs
new file mode 100644
index 000000000..94533f979
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("prerelease_only")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/out/Cargo.toml
new file mode 100644
index 000000000..4a86322ad
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+prerelease_only = "0.2.0-alpha.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/stderr.log
new file mode 100644
index 000000000..0696d8f7b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding prerelease_only v0.2.0-alpha.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/infer_prerelease/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/in b/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/mod.rs
new file mode 100644
index 000000000..265a571bc
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --flag")
+ .current_dir(cwd)
+ .assert()
+ .code(1)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/stderr.log
new file mode 100644
index 000000000..96d067ed1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/stderr.log
@@ -0,0 +1,9 @@
+error: unexpected argument '--flag' found
+
+ tip: a similar argument exists: '--tag'
+
+Usage: cargo add [OPTIONS] <DEP>[@<VERSION>] ...
+ cargo add [OPTIONS] --path <PATH> ...
+ cargo add [OPTIONS] --git <URL> ...
+
+For more information, try '--help'.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_arg/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/in b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/mod.rs
new file mode 100644
index 000000000..705182f20
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/mod.rs
@@ -0,0 +1,28 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let git_url = url::Url::from_directory_path(cwd.join("does-not-exist"))
+ .unwrap()
+ .to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["fake-git", "--git", &git_url])
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/stderr.log
new file mode 100644
index 000000000..18656300b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/stderr.log
@@ -0,0 +1,12 @@
+ Updating git repository `[ROOTURL]/case/does-not-exist/`
+...
+error: failed to load source for dependency `fake-git`
+
+Caused by:
+ Unable to update [ROOTURL]/case/does-not-exist/
+
+Caused by:
+ failed to clone into: [ROOT]/home/.cargo/git/db/does-not-exist-[..]
+
+Caused by:
+...
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_external/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/in b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/mod.rs
new file mode 100644
index 000000000..0aff8c090
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/mod.rs
@@ -0,0 +1,34 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+ let git_dep = cargo_test_support::git::new("git-package", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"),
+ )
+ .file("src/lib.rs", "")
+ });
+ let git_url = git_dep.url().to_string();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["not-in-git", "--git", &git_url])
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/stderr.log
new file mode 100644
index 000000000..68fc4e49d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/stderr.log
@@ -0,0 +1,2 @@
+ Updating git repository `[ROOTURL]/git-package`
+error: the crate `not-in-git@[ROOTURL]/git-package` could not be found at `[ROOTURL]/git-package`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_git_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/Cargo.toml
new file mode 100644
index 000000000..afd30d446
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/Cargo.toml
new file mode 100644
index 000000000..dd275f440
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "bar"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/mod.rs
new file mode 100644
index 000000000..837293e5f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/mod.rs
@@ -0,0 +1,23 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["foo", "--default-features", "-p", "bar"])
+ .current_dir(cwd)
+ .assert()
+ .failure()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/Cargo.toml
new file mode 100644
index 000000000..afd30d446
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/primary/Cargo.toml
new file mode 100644
index 000000000..dd275f440
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "bar"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/stderr.log
new file mode 100644
index 000000000..85bd8da0a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/stderr.log
@@ -0,0 +1 @@
+error: cannot override workspace dependency with `--default-features`, either change `workspace.dependencies.foo.default-features` or define the dependency exclusively in the package's manifest
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_inherit_dependency/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/Cargo.toml
new file mode 100644
index 000000000..afd30d446
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary/Cargo.toml
new file mode 100644
index 000000000..a5740941b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo.workspace = true
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/mod.rs
new file mode 100644
index 000000000..837293e5f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/mod.rs
@@ -0,0 +1,23 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["foo", "--default-features", "-p", "bar"])
+ .current_dir(cwd)
+ .assert()
+ .failure()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/Cargo.toml
new file mode 100644
index 000000000..afd30d446
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/primary/Cargo.toml
new file mode 100644
index 000000000..a5740941b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo.workspace = true
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/stderr.log
new file mode 100644
index 000000000..85bd8da0a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/stderr.log
@@ -0,0 +1 @@
+error: cannot override workspace dependency with `--default-features`, either change `workspace.dependencies.foo.default-features` or define the dependency exclusively in the package's manifest
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/Cargo.toml
new file mode 100644
index 000000000..12c6ee5fe
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency", "dependency-alt"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt/Cargo.toml
new file mode 100644
index 000000000..bb6472901
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo-alt"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/Cargo.toml
new file mode 100644
index 000000000..dd275f440
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "bar"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/mod.rs
new file mode 100644
index 000000000..bee132560
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/mod.rs
@@ -0,0 +1,23 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["--rename", "foo", "foo-alt", "-p", "bar"])
+ .current_dir(cwd)
+ .assert()
+ .failure()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/Cargo.toml
new file mode 100644
index 000000000..12c6ee5fe
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency", "dependency-alt"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency-alt/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency-alt/Cargo.toml
new file mode 100644
index 000000000..bb6472901
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency-alt/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo-alt"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/primary/Cargo.toml
new file mode 100644
index 000000000..dd275f440
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "bar"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/stderr.log
new file mode 100644
index 000000000..35bcdb694
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/stderr.log
@@ -0,0 +1 @@
+error: cannot override workspace dependency with `--rename`, either change `workspace.dependencies.foo.package` or define the dependency exclusively in the package's manifest
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/in/Cargo.toml
new file mode 100644
index 000000000..94ee95994
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "manifest-invalid-test-fixture"
+version = "0.1.0"
+
+[invalid-section]
+key = invalid-value
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/mod.rs
new file mode 100644
index 000000000..e385cfc3f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package")
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/out/Cargo.toml
new file mode 100644
index 000000000..94ee95994
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "manifest-invalid-test-fixture"
+version = "0.1.0"
+
+[invalid-section]
+key = invalid-value
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/stderr.log
new file mode 100644
index 000000000..3dabde349
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/stderr.log
@@ -0,0 +1,12 @@
+error: failed to parse manifest at `[ROOT]/case/Cargo.toml`
+
+Caused by:
+ could not parse input as TOML
+
+Caused by:
+ TOML parse error at line 8, column 7
+ |
+ 8 | key = invalid-value
+ | ^
+ invalid string
+ expected `"`, `'`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_manifest/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/in b/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/mod.rs
new file mode 100644
index 000000000..16e041738
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("lets_hope_nobody_ever_publishes_this_crate")
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/stderr.log
new file mode 100644
index 000000000..5e574ceda
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+error: the crate `lets_hope_nobody_ever_publishes_this_crate` could not be found in registry index.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_name_external/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/in b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/mod.rs
new file mode 100644
index 000000000..0d26b552d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("cargo-list-test-fixture --path ./tests/fixtures/local")
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/stderr.log
new file mode 100644
index 000000000..f6c404330
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/stderr.log
@@ -0,0 +1,10 @@
+error: failed to load source for dependency `cargo-list-test-fixture`
+
+Caused by:
+ Unable to update [ROOT]/case/tests/fixtures/local
+
+Caused by:
+ failed to read `[ROOT]/case/tests/fixtures/local/Cargo.toml`
+
+Caused by:
+ [..]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/primary/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/primary/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/mod.rs
new file mode 100644
index 000000000..10d841475
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = project_root.join("primary");
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("not-at-path --path ../dependency")
+ .current_dir(&cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/out/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/out/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/out/primary/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/out/primary/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/stderr.log
new file mode 100644
index 000000000..b35ea8233
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/stderr.log
@@ -0,0 +1 @@
+error: the crate `not-at-path@[ROOT]/case/dependency` could not be found at `[ROOT]/case/dependency`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/in b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/mod.rs
new file mode 100644
index 000000000..a64190f44
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("cargo-list-test-fixture --path .")
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/stderr.log
new file mode 100644
index 000000000..62a25dbb4
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/stderr.log
@@ -0,0 +1,2 @@
+ Adding cargo-list-test-fixture (local) to dependencies.
+error: cannot add `cargo-list-test-fixture` as a dependency to itself
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_path_self/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/in b/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/mod.rs
new file mode 100644
index 000000000..da93c4eb8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --target ''")
+ .current_dir(cwd)
+ .assert()
+ .code(1)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/stderr.log
new file mode 100644
index 000000000..4b1a2c315
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/stderr.log
@@ -0,0 +1,3 @@
+error: a value is required for '--target <TARGET>' but none was supplied
+
+For more information, try '--help'.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_target_empty/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/in b/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/mod.rs
new file mode 100644
index 000000000..c3b4d1f97
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package@invalid-version-string")
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/stderr.log
new file mode 100644
index 000000000..64f908eac
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/stderr.log
@@ -0,0 +1,4 @@
+error: invalid version requirement `invalid-version-string`
+
+Caused by:
+ unexpected character 'i' while parsing major version number
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/invalid_vers/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features/in b/src/tools/cargo/tests/testsuite/cargo_add/list_features/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/list_features/mod.rs
new file mode 100644
index 000000000..e1e1b212f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["your-face"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features/out/Cargo.toml
new file mode 100644
index 000000000..79d735a12
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/list_features/stderr.log
new file mode 100644
index 000000000..796b9601b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features/stderr.log
@@ -0,0 +1,7 @@
+ Updating `dummy-registry` index
+ Adding your-face v99999.0.0 to dependencies.
+ Features:
+ - ears
+ - eyes
+ - mouth
+ - nose
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/list_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/Cargo.toml
new file mode 100644
index 000000000..299859e79
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency", "optional"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/dependency/Cargo.toml
new file mode 100644
index 000000000..18041a53f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/dependency/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "your-face"
+version = "0.1.3"
+
+[dependencies]
+my-package = "0.1.1"
+optional-dependency = { path = "../optional", optional = true }
+
+[features]
+default = ["mouth"]
+nose = []
+mouth = ["nose"]
+eyes = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/optional/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/optional/Cargo.toml
new file mode 100644
index 000000000..cb61a0514
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/optional/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "optional-dependency"
+version = "0.1.3"
+
+[dependencies]
+my-package = "0.1.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/optional/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/optional/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/optional/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/primary/Cargo.toml
new file mode 100644
index 000000000..5e20016d7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/mod.rs
new file mode 100644
index 000000000..22733b883
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = project_root.join("primary");
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("your-face --path ../dependency")
+ .current_dir(&cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/Cargo.toml
new file mode 100644
index 000000000..299859e79
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency", "optional"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/dependency/Cargo.toml
new file mode 100644
index 000000000..18041a53f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/dependency/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "your-face"
+version = "0.1.3"
+
+[dependencies]
+my-package = "0.1.1"
+optional-dependency = { path = "../optional", optional = true }
+
+[features]
+default = ["mouth"]
+nose = []
+mouth = ["nose"]
+eyes = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/primary/Cargo.toml
new file mode 100644
index 000000000..2461d0932
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "0.1.3", path = "../dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/stderr.log
new file mode 100644
index 000000000..af6747fe8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/stderr.log
@@ -0,0 +1,7 @@
+ Adding your-face (local) to dependencies.
+ Features:
+ + mouth
+ + nose
+ - eyes
+ - optional-dependency
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/Cargo.toml
new file mode 100644
index 000000000..299859e79
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency", "optional"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/Cargo.toml
new file mode 100644
index 000000000..18041a53f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "your-face"
+version = "0.1.3"
+
+[dependencies]
+my-package = "0.1.1"
+optional-dependency = { path = "../optional", optional = true }
+
+[features]
+default = ["mouth"]
+nose = []
+mouth = ["nose"]
+eyes = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/Cargo.toml
new file mode 100644
index 000000000..cb61a0514
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "optional-dependency"
+version = "0.1.3"
+
+[dependencies]
+my-package = "0.1.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/Cargo.toml
new file mode 100644
index 000000000..5e20016d7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/mod.rs
new file mode 100644
index 000000000..f520b2aca
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/mod.rs
@@ -0,0 +1,30 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = project_root.join("primary");
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args([
+ "your-face",
+ "--path",
+ "../dependency",
+ "--no-default-features",
+ ])
+ .current_dir(&cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/Cargo.toml
new file mode 100644
index 000000000..299859e79
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency", "optional"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/dependency/Cargo.toml
new file mode 100644
index 000000000..18041a53f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/dependency/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "your-face"
+version = "0.1.3"
+
+[dependencies]
+my-package = "0.1.1"
+optional-dependency = { path = "../optional", optional = true }
+
+[features]
+default = ["mouth"]
+nose = []
+mouth = ["nose"]
+eyes = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/primary/Cargo.toml
new file mode 100644
index 000000000..0b0400d51
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "0.1.3", path = "../dependency", default-features = false }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/stderr.log
new file mode 100644
index 000000000..7f47a220e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/stderr.log
@@ -0,0 +1,7 @@
+ Adding your-face (local) to dependencies.
+ Features:
+ - eyes
+ - mouth
+ - nose
+ - optional-dependency
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/list_features_path_no_default/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/in b/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/mod.rs
new file mode 100644
index 000000000..9e3e57fe5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --locked")
+ .current_dir(cwd)
+ .assert()
+ .failure()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/stderr.log
new file mode 100644
index 000000000..8af168373
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package v99999.0.0 to dependencies.
+error: the manifest file [ROOT]/case/Cargo.toml needs to be updated but --locked was passed to prevent this
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_changed/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/Cargo.lock b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/Cargo.lock
new file mode 100644
index 000000000..011b33592
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/Cargo.lock
@@ -0,0 +1,16 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+dependencies = [
+ "my-package",
+]
+
+[[package]]
+name = "my-package"
+version = "99999.0.0+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62c45acf9e11d2f97f5b318143219c0b4102eafef1c22a4b545b47104691d915"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/Cargo.toml
new file mode 100644
index 000000000..5964c87be
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/mod.rs
new file mode 100644
index 000000000..aba9918f7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --locked")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/out/Cargo.toml
new file mode 100644
index 000000000..5964c87be
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/stderr.log
new file mode 100644
index 000000000..fd6b711e3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/locked_unchanged/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/Cargo.lock b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/Cargo.lock
new file mode 100644
index 000000000..d9bcc988d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/Cargo.lock
@@ -0,0 +1,17 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+dependencies = [
+ "my-package",
+ "unrelateed-crate",
+]
+
+[[package]]
+name = "unrelateed-crate"
+version = "0.2.0+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "266de4849a570b5dfda5e8e082a2aff885e9d2d4965dae8f8b6c8535e1ec731f"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/Cargo.toml
new file mode 100644
index 000000000..95276d7c5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+unrelateed-crate = "0.2.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/mod.rs
new file mode 100644
index 000000000..33889dffa
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/out/Cargo.lock b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/out/Cargo.lock
new file mode 100644
index 000000000..4b5fb465f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/out/Cargo.lock
@@ -0,0 +1,23 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+dependencies = [
+ "my-package",
+ "unrelateed-crate",
+]
+
+[[package]]
+name = "my-package"
+version = "99999.0.0+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62c45acf9e11d2f97f5b318143219c0b4102eafef1c22a4b545b47104691d915"
+
+[[package]]
+name = "unrelateed-crate"
+version = "0.2.0+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "266de4849a570b5dfda5e8e082a2aff885e9d2d4965dae8f8b6c8535e1ec731f"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/out/Cargo.toml
new file mode 100644
index 000000000..3176a986a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = "99999.0.0"
+unrelateed-crate = "0.2.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/stderr.log
new file mode 100644
index 000000000..fd6b711e3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/lockfile_updated/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/Cargo.toml
new file mode 100644
index 000000000..57e1f3085
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/dependency/Cargo.toml
new file mode 100644
index 000000000..ca4f36d72
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/primary/Cargo.toml
new file mode 100644
index 000000000..5e20016d7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/mod.rs
new file mode 100644
index 000000000..008c2d33d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/mod.rs
@@ -0,0 +1,31 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args([
+ "--manifest-path",
+ "Cargo.toml",
+ "--package",
+ "cargo-list-test-fixture",
+ "cargo-list-test-fixture-dependency",
+ ])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/Cargo.toml
new file mode 100644
index 000000000..57e1f3085
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/dependency/Cargo.toml
new file mode 100644
index 000000000..ca4f36d72
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/primary/Cargo.toml
new file mode 100644
index 000000000..a693df54f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { version = "0.0.0", path = "../dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/stderr.log
new file mode 100644
index 000000000..8109d3cc5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/stderr.log
@@ -0,0 +1 @@
+ Adding cargo-list-test-fixture-dependency (local) to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/manifest_path_package/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/Cargo.toml
new file mode 100644
index 000000000..b1d9b3995
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency", features = ["merge"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/dependency/Cargo.toml
new file mode 100644
index 000000000..f34d7a685
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/dependency/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "foo"
+version = "0.0.0"
+
+[features]
+default-base = []
+default-test-base = []
+default-merge-base = []
+default = ["default-base", "default-test-base", "default-merge-base"]
+test-base = []
+test = ["test-base", "default-test-base"]
+merge-base = []
+merge = ["merge-base", "default-merge-base"]
+unrelated = [] \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/primary/Cargo.toml
new file mode 100644
index 000000000..a131c946d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, features = ["test"] } \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/mod.rs
new file mode 100644
index 000000000..161783282
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/mod.rs
@@ -0,0 +1,23 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["foo", "-p", "bar"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/Cargo.toml
new file mode 100644
index 000000000..b1d9b3995
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency", features = ["merge"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/dependency/Cargo.toml
new file mode 100644
index 000000000..f34d7a685
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/dependency/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "foo"
+version = "0.0.0"
+
+[features]
+default-base = []
+default-test-base = []
+default-merge-base = []
+default = ["default-base", "default-test-base", "default-merge-base"]
+test-base = []
+test = ["test-base", "default-test-base"]
+merge-base = []
+merge = ["merge-base", "default-merge-base"]
+unrelated = [] \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/primary/Cargo.toml
new file mode 100644
index 000000000..fb4a12619
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, features = ["test"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/stderr.log
new file mode 100644
index 000000000..02dde7a34
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/stderr.log
@@ -0,0 +1,10 @@
+ Adding foo (workspace) to dependencies.
+ Features as of v0.0.0:
+ + default-base
+ + default-merge-base
+ + default-test-base
+ + merge
+ + merge-base
+ + test
+ + test-base
+ - unrelated
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/merge_activated_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/mod.rs
new file mode 100644
index 000000000..ca58474d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/mod.rs
@@ -0,0 +1,203 @@
+mod add_basic;
+mod add_multiple;
+mod add_normalized_name_external;
+mod build;
+mod build_prefer_existing_version;
+mod change_rename_target;
+mod cyclic_features;
+mod default_features;
+mod deprecated_default_features;
+mod deprecated_section;
+mod detect_workspace_inherit;
+mod detect_workspace_inherit_features;
+mod detect_workspace_inherit_optional;
+mod dev;
+mod dev_build_conflict;
+mod dev_prefer_existing_version;
+mod dry_run;
+mod features;
+mod features_empty;
+mod features_multiple_occurrences;
+mod features_preserve;
+mod features_spaced_values;
+mod features_unknown;
+mod features_unknown_no_features;
+mod git;
+mod git_branch;
+mod git_conflicts_namever;
+mod git_dev;
+mod git_inferred_name;
+mod git_inferred_name_multiple;
+mod git_multiple_names;
+mod git_normalized_name;
+mod git_registry;
+mod git_rev;
+mod git_tag;
+mod infer_prerelease;
+mod invalid_arg;
+mod invalid_git_external;
+mod invalid_git_name;
+mod invalid_key_inherit_dependency;
+mod invalid_key_overwrite_inherit_dependency;
+mod invalid_key_rename_inherit_dependency;
+mod invalid_manifest;
+mod invalid_name_external;
+mod invalid_path;
+mod invalid_path_name;
+mod invalid_path_self;
+mod invalid_target_empty;
+mod invalid_vers;
+mod list_features;
+mod list_features_path;
+mod list_features_path_no_default;
+mod locked_changed;
+mod locked_unchanged;
+mod lockfile_updated;
+mod manifest_path_package;
+mod merge_activated_features;
+mod multiple_conflicts_with_features;
+mod multiple_conflicts_with_rename;
+mod namever;
+mod no_args;
+mod no_default_features;
+mod no_optional;
+mod offline_empty_cache;
+mod optional;
+mod overwrite_default_features;
+mod overwrite_default_features_with_no_default_features;
+mod overwrite_features;
+mod overwrite_git_with_path;
+mod overwrite_inherit_features_noop;
+mod overwrite_inherit_noop;
+mod overwrite_inherit_optional_noop;
+mod overwrite_inline_features;
+mod overwrite_name_dev_noop;
+mod overwrite_name_noop;
+mod overwrite_no_default_features;
+mod overwrite_no_default_features_with_default_features;
+mod overwrite_no_optional;
+mod overwrite_no_optional_with_optional;
+mod overwrite_optional;
+mod overwrite_optional_with_no_optional;
+mod overwrite_path_noop;
+mod overwrite_path_with_version;
+mod overwrite_preserves_inline_table;
+mod overwrite_rename_with_no_rename;
+mod overwrite_rename_with_rename;
+mod overwrite_rename_with_rename_noop;
+mod overwrite_version_with_git;
+mod overwrite_version_with_path;
+mod overwrite_with_rename;
+mod overwrite_workspace_dep;
+mod overwrite_workspace_dep_features;
+mod path;
+mod path_dev;
+mod path_inferred_name;
+mod path_inferred_name_conflicts_full_feature;
+mod path_normalized_name;
+mod preserve_sorted;
+mod preserve_unsorted;
+mod quiet;
+mod registry;
+mod rename;
+mod require_weak;
+mod sorted_table_with_dotted_item;
+mod target;
+mod target_cfg;
+mod unknown_inherited_feature;
+mod vers;
+mod workspace_name;
+mod workspace_path;
+mod workspace_path_dev;
+
+fn init_registry() {
+ cargo_test_support::registry::init();
+ add_registry_packages(false);
+}
+
+fn init_alt_registry() {
+ cargo_test_support::registry::alt_init();
+ add_registry_packages(true);
+}
+
+fn add_registry_packages(alt: bool) {
+ for name in [
+ "my-package",
+ "my-package1",
+ "my-package2",
+ "my-dev-package1",
+ "my-dev-package2",
+ "my-build-package1",
+ "my-build-package2",
+ "toml",
+ "versioned-package",
+ "cargo-list-test-fixture-dependency",
+ "unrelateed-crate",
+ ] {
+ cargo_test_support::registry::Package::new(name, "0.1.1+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "0.2.0+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "0.2.3+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "0.4.1+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "20.0.0+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "99999.0.0+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "99999.0.0-alpha.1+my-package")
+ .alternative(alt)
+ .publish();
+ }
+
+ cargo_test_support::registry::Package::new("prerelease_only", "0.2.0-alpha.1")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new("test_breaking", "0.2.0")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new("test_nonbreaking", "0.1.1")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new("test_cyclic_features", "0.1.1")
+ .alternative(alt)
+ .feature("default", &["feature-one", "feature-two"])
+ .feature("feature-one", &["feature-two"])
+ .feature("feature-two", &["feature-one"])
+ .publish();
+
+ // Normalization
+ cargo_test_support::registry::Package::new("linked-hash-map", "0.5.4")
+ .alternative(alt)
+ .feature("clippy", &[])
+ .feature("heapsize", &[])
+ .feature("heapsize_impl", &[])
+ .feature("nightly", &[])
+ .feature("serde", &[])
+ .feature("serde_impl", &[])
+ .feature("serde_test", &[])
+ .publish();
+ cargo_test_support::registry::Package::new("inflector", "0.11.4")
+ .alternative(alt)
+ .feature("default", &["heavyweight", "lazy_static", "regex"])
+ .feature("heavyweight", &[])
+ .feature("lazy_static", &[])
+ .feature("regex", &[])
+ .feature("unstable", &[])
+ .publish();
+
+ cargo_test_support::registry::Package::new("your-face", "99999.0.0+my-package")
+ .alternative(alt)
+ .feature("nose", &[])
+ .feature("mouth", &[])
+ .feature("eyes", &[])
+ .feature("ears", &[])
+ .publish();
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/in b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/mod.rs
new file mode 100644
index 000000000..10f824484
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 your-face --features nose")
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/stderr.log
new file mode 100644
index 000000000..72fd9fc9d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/stderr.log
@@ -0,0 +1 @@
+error: feature `nose` must be qualified by the dependency it's being activated for, like `my-package1/nose`, `your-face/nose`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/in b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/mod.rs
new file mode 100644
index 000000000..293ed3eea
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2 --rename renamed")
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/stderr.log
new file mode 100644
index 000000000..e83250e73
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/stderr.log
@@ -0,0 +1 @@
+error: cannot specify multiple crates with `--rename`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/multiple_conflicts_with_rename/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/namever/in b/src/tools/cargo/tests/testsuite/cargo_add/namever/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/namever/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/namever/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/namever/mod.rs
new file mode 100644
index 000000000..90fda1a9f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/namever/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1@>=0.1.1 my-package2@0.2.3 my-package")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/namever/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/namever/out/Cargo.toml
new file mode 100644
index 000000000..1704d3435
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/namever/out/Cargo.toml
@@ -0,0 +1,10 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = "99999.0.0"
+my-package1 = ">=0.1.1"
+my-package2 = "0.2.3"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/namever/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/namever/stderr.log
new file mode 100644
index 000000000..17be8f9d8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/namever/stderr.log
@@ -0,0 +1,4 @@
+ Updating `dummy-registry` index
+ Adding my-package1 >=0.1.1 to dependencies.
+ Adding my-package2 v0.2.3 to dependencies.
+ Adding my-package v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/namever/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/namever/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/namever/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_args/in b/src/tools/cargo/tests/testsuite/cargo_add/no_args/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_args/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_args/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/no_args/mod.rs
new file mode 100644
index 000000000..7eca17b56
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_args/mod.rs
@@ -0,0 +1,24 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .current_dir(cwd)
+ .assert()
+ .code(1)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_args/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/no_args/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_args/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_args/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/no_args/stderr.log
new file mode 100644
index 000000000..0274950a5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_args/stderr.log
@@ -0,0 +1,8 @@
+error: the following required arguments were not provided:
+ <DEP_ID|--path <PATH>|--git <URI>>
+
+Usage: cargo add [OPTIONS] <DEP>[@<VERSION>] ...
+ cargo add [OPTIONS] --path <PATH> ...
+ cargo add [OPTIONS] --git <URL> ...
+
+For more information, try '--help'.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_args/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/no_args/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_args/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/in b/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/mod.rs
new file mode 100644
index 000000000..e72ca3be2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2@0.4.1 --no-default-features")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/out/Cargo.toml
new file mode 100644
index 000000000..ddd02b1f0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", default-features = false }
+my-package2 = { version = "0.4.1", default-features = false }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/stderr.log
new file mode 100644
index 000000000..fb8d4903d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to dependencies.
+ Adding my-package2 v0.4.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_default_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_optional/in b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/mod.rs
new file mode 100644
index 000000000..fdb983b21
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2@0.4.1 --no-optional")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/out/Cargo.toml
new file mode 100644
index 000000000..c5e017892
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = "99999.0.0"
+my-package2 = "0.4.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/stderr.log
new file mode 100644
index 000000000..fb8d4903d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to dependencies.
+ Adding my-package2 v0.4.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_optional/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/in b/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/mod.rs
new file mode 100644
index 000000000..ae7485979
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("--offline my-package")
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/stderr.log
new file mode 100644
index 000000000..e0260b795
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/stderr.log
@@ -0,0 +1 @@
+error: the crate `my-package` could not be found in registry index.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/offline_empty_cache/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/optional/in b/src/tools/cargo/tests/testsuite/cargo_add/optional/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/optional/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/optional/mod.rs
new file mode 100644
index 000000000..94d1cbf34
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/optional/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2@0.4.1 --optional")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/optional/out/Cargo.toml
new file mode 100644
index 000000000..eda5445c5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/optional/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", optional = true }
+my-package2 = { version = "0.4.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/optional/stderr.log
new file mode 100644
index 000000000..8cf4812cf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/optional/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to optional dependencies.
+ Adding my-package2 v0.4.1 to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/optional/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/optional/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/optional/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/in/Cargo.toml
new file mode 100644
index 000000000..c5e017892
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = "99999.0.0"
+my-package2 = "0.4.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/mod.rs
new file mode 100644
index 000000000..88bdd8065
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2@0.4.1 --default-features")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/out/Cargo.toml
new file mode 100644
index 000000000..c5e017892
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = "99999.0.0"
+my-package2 = "0.4.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/stderr.log
new file mode 100644
index 000000000..fb8d4903d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to dependencies.
+ Adding my-package2 v0.4.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in/Cargo.toml
new file mode 100644
index 000000000..73f56a7a3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", default-features = true }
+my-package2 = { version = "0.4.1", default-features = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/mod.rs
new file mode 100644
index 000000000..e72ca3be2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2@0.4.1 --no-default-features")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/out/Cargo.toml
new file mode 100644
index 000000000..ddd02b1f0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", default-features = false }
+my-package2 = { version = "0.4.1", default-features = false }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/stderr.log
new file mode 100644
index 000000000..fb8d4903d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to dependencies.
+ Adding my-package2 v0.4.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/in/Cargo.toml
new file mode 100644
index 000000000..11419b203
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "99999.0.0", features = ["eyes"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/mod.rs
new file mode 100644
index 000000000..0b2ab18b8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("your-face --features nose")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/out/Cargo.toml
new file mode 100644
index 000000000..0060d24bc
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "99999.0.0", features = ["eyes", "nose"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/stderr.log
new file mode 100644
index 000000000..615459052
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/stderr.log
@@ -0,0 +1,7 @@
+ Updating `dummy-registry` index
+ Adding your-face v99999.0.0 to dependencies.
+ Features:
+ + eyes
+ + nose
+ - ears
+ - mouth
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/Cargo.toml
new file mode 100644
index 000000000..6cb4d6a7d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { git = "git://git.git", branch = "main", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/mod.rs
new file mode 100644
index 000000000..ab89e3a6d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = project_root.join("primary");
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("cargo-list-test-fixture-dependency --path ../dependency")
+ .current_dir(&cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/primary/Cargo.toml
new file mode 100644
index 000000000..ad1205481
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/primary/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { optional = true, path = "../dependency", version = "0.0.0" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/stderr.log
new file mode 100644
index 000000000..98abcfc99
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/stderr.log
@@ -0,0 +1 @@
+ Adding cargo-list-test-fixture-dependency (local) to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/Cargo.toml
new file mode 100644
index 000000000..bed932047
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "foo"
+version = "0.0.0"
+
+[features]
+test = [] \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/Cargo.toml
new file mode 100644
index 000000000..a131c946d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, features = ["test"] } \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/mod.rs
new file mode 100644
index 000000000..161783282
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/mod.rs
@@ -0,0 +1,23 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["foo", "-p", "bar"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/dependency/Cargo.toml
new file mode 100644
index 000000000..bed932047
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/dependency/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "foo"
+version = "0.0.0"
+
+[features]
+test = [] \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/primary/Cargo.toml
new file mode 100644
index 000000000..fb4a12619
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, features = ["test"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/stderr.log
new file mode 100644
index 000000000..3c7133bbc
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/stderr.log
@@ -0,0 +1,3 @@
+ Adding foo (workspace) to dependencies.
+ Features as of v0.0.0:
+ + test
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_features_noop/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/Cargo.toml
new file mode 100644
index 000000000..2ac789d55
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo.workspace = true \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/mod.rs
new file mode 100644
index 000000000..065fb4f93
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["foo", "-p", "bar"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/primary/Cargo.toml
new file mode 100644
index 000000000..a5740941b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo.workspace = true
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/stderr.log
new file mode 100644
index 000000000..d2efcc0c0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/stderr.log
@@ -0,0 +1 @@
+ Adding foo (workspace) to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_noop/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/Cargo.toml
new file mode 100644
index 000000000..228aef664
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, optional = true } \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/mod.rs
new file mode 100644
index 000000000..065fb4f93
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["foo", "-p", "bar"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/primary/Cargo.toml
new file mode 100644
index 000000000..6dd7fb6d6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/stderr.log
new file mode 100644
index 000000000..da03b11f7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/stderr.log
@@ -0,0 +1 @@
+ Adding foo (workspace) to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/in/Cargo.toml
new file mode 100644
index 000000000..11419b203
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "99999.0.0", features = ["eyes"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/mod.rs
new file mode 100644
index 000000000..356b4d788
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/mod.rs
@@ -0,0 +1,27 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line(
+ "unrelateed-crate your-face --features your-face/nose,your-face/mouth -Fyour-face/ears",
+ )
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/out/Cargo.toml
new file mode 100644
index 000000000..8e9579dc6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+unrelateed-crate = "99999.0.0"
+your-face = { version = "99999.0.0", features = ["eyes", "nose", "mouth", "ears"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/stderr.log
new file mode 100644
index 000000000..a686cba55
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/stderr.log
@@ -0,0 +1,8 @@
+ Updating `dummy-registry` index
+ Adding unrelateed-crate v99999.0.0 to dependencies.
+ Adding your-face v99999.0.0 to dependencies.
+ Features:
+ + ears
+ + eyes
+ + mouth
+ + nose
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inline_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/Cargo.toml
new file mode 100644
index 000000000..b69b5d38e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+exclude = ["dependency"]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dev-dependencies]
+your-face = { version = "0.0.0", path = "dependency", default-features = false, features = ["nose", "mouth"], registry = "alternative" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/Cargo.toml
new file mode 100644
index 000000000..8243797eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "your-face"
+version = "0.0.0"
+
+[features]
+mouth = []
+nose = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/mod.rs
new file mode 100644
index 000000000..b418c7809
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_alt_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_alt_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("your-face --dev")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/Cargo.toml
new file mode 100644
index 000000000..b69b5d38e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+exclude = ["dependency"]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dev-dependencies]
+your-face = { version = "0.0.0", path = "dependency", default-features = false, features = ["nose", "mouth"], registry = "alternative" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/dependency/Cargo.toml
new file mode 100644
index 000000000..8243797eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/dependency/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "your-face"
+version = "0.0.0"
+
+[features]
+mouth = []
+nose = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/stderr.log
new file mode 100644
index 000000000..2fe3f6a29
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/stderr.log
@@ -0,0 +1,4 @@
+ Adding your-face (local) to dev-dependencies.
+ Features:
+ + mouth
+ + nose
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_dev_noop/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/Cargo.toml
new file mode 100644
index 000000000..bbaf4f552
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+exclude = ["dependency"]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["nose", "mouth"], registry = "alternative" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/Cargo.toml
new file mode 100644
index 000000000..8243797eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "your-face"
+version = "0.0.0"
+
+[features]
+mouth = []
+nose = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/mod.rs
new file mode 100644
index 000000000..193c5880b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_alt_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_alt_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("your-face")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/Cargo.toml
new file mode 100644
index 000000000..bbaf4f552
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+exclude = ["dependency"]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["nose", "mouth"], registry = "alternative" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/dependency/Cargo.toml
new file mode 100644
index 000000000..8243797eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/dependency/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "your-face"
+version = "0.0.0"
+
+[features]
+mouth = []
+nose = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/stderr.log
new file mode 100644
index 000000000..2f0b90de0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/stderr.log
@@ -0,0 +1,4 @@
+ Adding your-face (local) to optional dependencies.
+ Features:
+ + mouth
+ + nose
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/in/Cargo.toml
new file mode 100644
index 000000000..c5e017892
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = "99999.0.0"
+my-package2 = "0.4.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/mod.rs
new file mode 100644
index 000000000..e72ca3be2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2@0.4.1 --no-default-features")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/out/Cargo.toml
new file mode 100644
index 000000000..ddd02b1f0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", default-features = false }
+my-package2 = { version = "0.4.1", default-features = false }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/stderr.log
new file mode 100644
index 000000000..fb8d4903d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to dependencies.
+ Adding my-package2 v0.4.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in/Cargo.toml
new file mode 100644
index 000000000..ddd02b1f0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", default-features = false }
+my-package2 = { version = "0.4.1", default-features = false }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/mod.rs
new file mode 100644
index 000000000..88bdd8065
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2@0.4.1 --default-features")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/out/Cargo.toml
new file mode 100644
index 000000000..b9e8985c6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0" }
+my-package2 = { version = "0.4.1" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/stderr.log
new file mode 100644
index 000000000..fb8d4903d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to dependencies.
+ Adding my-package2 v0.4.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/Cargo.toml
new file mode 100644
index 000000000..c5e017892
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = "99999.0.0"
+my-package2 = "0.4.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/mod.rs
new file mode 100644
index 000000000..fdb983b21
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2@0.4.1 --no-optional")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/out/Cargo.toml
new file mode 100644
index 000000000..c5e017892
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = "99999.0.0"
+my-package2 = "0.4.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stderr.log
new file mode 100644
index 000000000..fb8d4903d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to dependencies.
+ Adding my-package2 v0.4.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/Cargo.toml
new file mode 100644
index 000000000..8cd2616d4
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", optional = false }
+my-package2 = { version = "0.4.1", optional = false }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/mod.rs
new file mode 100644
index 000000000..94d1cbf34
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2@0.4.1 --optional")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/out/Cargo.toml
new file mode 100644
index 000000000..eda5445c5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", optional = true }
+my-package2 = { version = "0.4.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stderr.log
new file mode 100644
index 000000000..8cf4812cf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to optional dependencies.
+ Adding my-package2 v0.4.1 to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/Cargo.toml
new file mode 100644
index 000000000..c5e017892
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = "99999.0.0"
+my-package2 = "0.4.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/mod.rs
new file mode 100644
index 000000000..94d1cbf34
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = &project_root;
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .arg_line("my-package1 my-package2@0.4.1 --optional")
+        .current_dir(cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/out/Cargo.toml
new file mode 100644
index 000000000..eda5445c5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", optional = true }
+my-package2 = { version = "0.4.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stderr.log
new file mode 100644
index 000000000..8cf4812cf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to optional dependencies.
+ Adding my-package2 v0.4.1 to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/Cargo.toml
new file mode 100644
index 000000000..5ef953209
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/Cargo.toml
@@ -0,0 +1,13 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[features]
+default = ["your-face"]
+other = ["your-face/nose"]
+
+[dependencies]
+your-face = { version = "99999.0.0", optional = true }
+my-package2 = { version = "0.4.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/mod.rs
new file mode 100644
index 000000000..c34c293f9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = &project_root;
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .arg_line("your-face my-package2@0.4.1 --no-optional")
+        .current_dir(cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/out/Cargo.toml
new file mode 100644
index 000000000..bf6c52963
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/out/Cargo.toml
@@ -0,0 +1,13 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[features]
+default = []
+other = ["your-face/nose"]
+
+[dependencies]
+your-face = { version = "99999.0.0" }
+my-package2 = { version = "0.4.1" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stderr.log
new file mode 100644
index 000000000..5fe113e86
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stderr.log
@@ -0,0 +1,8 @@
+ Updating `dummy-registry` index
+ Adding your-face v99999.0.0 to dependencies.
+ Features:
+ - ears
+ - eyes
+ - mouth
+ - nose
+ Adding my-package2 v0.4.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/Cargo.toml
new file mode 100644
index 000000000..bbaf4f552
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+exclude = ["dependency"]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["nose", "mouth"], registry = "alternative" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/Cargo.toml
new file mode 100644
index 000000000..8243797eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "your-face"
+version = "0.0.0"
+
+[features]
+mouth = []
+nose = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/mod.rs
new file mode 100644
index 000000000..f04405a34
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_alt_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_alt_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = &project_root;
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .arg_line("your-face --path ./dependency")
+        .current_dir(cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/Cargo.toml
new file mode 100644
index 000000000..bbaf4f552
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+exclude = ["dependency"]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["nose", "mouth"], registry = "alternative" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/dependency/Cargo.toml
new file mode 100644
index 000000000..8243797eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/dependency/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "your-face"
+version = "0.0.0"
+
+[features]
+mouth = []
+nose = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/stderr.log
new file mode 100644
index 000000000..2f0b90de0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/stderr.log
@@ -0,0 +1,4 @@
+ Adding your-face (local) to optional dependencies.
+ Features:
+ + mouth
+ + nose
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/Cargo.toml
new file mode 100644
index 000000000..9d20b2240
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { optional = true, path = "../dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/mod.rs
new file mode 100644
index 000000000..32674e23d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = project_root.join("primary");
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .arg_line("cargo-list-test-fixture-dependency@20.0")
+        .current_dir(&cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/primary/Cargo.toml
new file mode 100644
index 000000000..a20f2095d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/primary/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { optional = true, version = "20.0" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/stderr.log
new file mode 100644
index 000000000..d0b3a4cf2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding cargo-list-test-fixture-dependency v20.0 to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/Cargo.toml
new file mode 100644
index 000000000..3dddbbd10
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face={version="99999.0.0",features=["eyes"]} # Hello world
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/mod.rs
new file mode 100644
index 000000000..0b2ab18b8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = &project_root;
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .arg_line("your-face --features nose")
+        .current_dir(cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/out/Cargo.toml
new file mode 100644
index 000000000..f204a895e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face={ version = "99999.0.0", features = ["eyes", "nose"] } # Hello world
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/stderr.log
new file mode 100644
index 000000000..615459052
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/stderr.log
@@ -0,0 +1,7 @@
+ Updating `dummy-registry` index
+ Adding your-face v99999.0.0 to dependencies.
+ Features:
+ + eyes
+ + nose
+ - ears
+ - mouth
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_preserves_inline_table/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/Cargo.toml
new file mode 100644
index 000000000..450229245
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+a1 = { package = "versioned-package", version = "0.1.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/mod.rs
new file mode 100644
index 000000000..a006c95fd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = &project_root;
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .arg_line("versioned-package")
+        .current_dir(cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/out/Cargo.toml
new file mode 100644
index 000000000..9951492da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+a1 = { package = "versioned-package", version = "0.1.1", optional = true }
+versioned-package = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/stderr.log
new file mode 100644
index 000000000..305b89f26
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding versioned-package v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/Cargo.toml
new file mode 100644
index 000000000..450229245
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+a1 = { package = "versioned-package", version = "0.1.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/mod.rs
new file mode 100644
index 000000000..e14282bc1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = &project_root;
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .arg_line("versioned-package --rename a2")
+        .current_dir(cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/out/Cargo.toml
new file mode 100644
index 000000000..790f6546c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+a1 = { package = "versioned-package", version = "0.1.1", optional = true }
+a2 = { version = "99999.0.0", package = "versioned-package" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/stderr.log
new file mode 100644
index 000000000..305b89f26
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding versioned-package v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/Cargo.toml
new file mode 100644
index 000000000..450229245
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+a1 = { package = "versioned-package", version = "0.1.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/mod.rs
new file mode 100644
index 000000000..c0ca2e552
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = &project_root;
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .arg_line("versioned-package --rename a1")
+        .current_dir(cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/out/Cargo.toml
new file mode 100644
index 000000000..450229245
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+a1 = { package = "versioned-package", version = "0.1.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/stderr.log
new file mode 100644
index 000000000..d69dc92cd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding versioned-package v0.1.1 to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/in/Cargo.toml
new file mode 100644
index 000000000..fe41f2a90
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+versioned-package = { version = "0.1.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/mod.rs
new file mode 100644
index 000000000..ce7a0acb0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/mod.rs
@@ -0,0 +1,34 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = &project_root;
+    let git_dep = cargo_test_support::git::new("versioned-package", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                &cargo_test_support::basic_manifest("versioned-package", "0.3.0+versioned-package"),
+            )
+            .file("src/lib.rs", "")
+    });
+    let git_url = git_dep.url().to_string();
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .args(["versioned-package", "--git", &git_url])
+        .current_dir(cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/out/Cargo.toml
new file mode 100644
index 000000000..260014024
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+versioned-package = { version = "0.3.0", optional = true, git = "[ROOTURL]/versioned-package" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/stderr.log
new file mode 100644
index 000000000..1b77cbe0e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/stderr.log
@@ -0,0 +1,3 @@
+ Updating git repository `[ROOTURL]/versioned-package`
+ Adding versioned-package (git) to optional dependencies.
+ Updating git repository `[ROOTURL]/versioned-package`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/Cargo.toml
new file mode 100644
index 000000000..063b89192
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { version = "0.1.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/mod.rs
new file mode 100644
index 000000000..ab89e3a6d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = project_root.join("primary");
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .arg_line("cargo-list-test-fixture-dependency --path ../dependency")
+        .current_dir(&cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/primary/Cargo.toml
new file mode 100644
index 000000000..07253670a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/primary/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { version = "0.0.0", optional = true, path = "../dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/stderr.log
new file mode 100644
index 000000000..98abcfc99
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/stderr.log
@@ -0,0 +1 @@
+ Adding cargo-list-test-fixture-dependency (local) to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/in/Cargo.toml
new file mode 100644
index 000000000..fe41f2a90
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/in/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+versioned-package = { version = "0.1.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/mod.rs
new file mode 100644
index 000000000..05cc2d109
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = &project_root;
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .arg_line("versioned-package --rename renamed")
+        .current_dir(cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/out/Cargo.toml
new file mode 100644
index 000000000..4b7485181
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+renamed = { version = "99999.0.0", package = "versioned-package" }
+versioned-package = { version = "0.1.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/stderr.log
new file mode 100644
index 000000000..305b89f26
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding versioned-package v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_with_rename/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/Cargo.toml
new file mode 100644
index 000000000..a80d49949
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency" }
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/Cargo.toml
new file mode 100644
index 000000000..2ac789d55
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo.workspace = true
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/mod.rs
new file mode 100644
index 000000000..87ed58f7f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = &project_root;
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .args(["foo", "--path", "./dependency", "-p", "bar"])
+        .current_dir(cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/Cargo.toml
new file mode 100644
index 000000000..a80d49949
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency" }
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/primary/Cargo.toml
new file mode 100644
index 000000000..da32f4ead
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { version = "0.0.0", path = "../dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/stderr.log
new file mode 100644
index 000000000..d1bc50757
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/stderr.log
@@ -0,0 +1 @@
+ Adding foo (local) to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/Cargo.toml
new file mode 100644
index 000000000..a80d49949
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency" }
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/Cargo.toml
new file mode 100644
index 000000000..ef9ec7701
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "foo"
+version = "0.0.0"
+
+[features]
+default-base = []
+default-test-base = []
+default-merge-base = []
+default = ["default-base", "default-test-base", "default-merge-base"]
+test-base = []
+test = ["test-base", "default-test-base"]
+merge-base = []
+merge = ["merge-base", "default-merge-base"]
+unrelated = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/Cargo.toml
new file mode 100644
index 000000000..a131c946d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, features = ["test"] }
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/mod.rs
new file mode 100644
index 000000000..87ed58f7f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = &project_root;
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .args(["foo", "--path", "./dependency", "-p", "bar"])
+        .current_dir(cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/Cargo.toml
new file mode 100644
index 000000000..a80d49949
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency" }
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/dependency/Cargo.toml
new file mode 100644
index 000000000..ef9ec7701
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/dependency/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "foo"
+version = "0.0.0"
+
+[features]
+default-base = []
+default-test-base = []
+default-merge-base = []
+default = ["default-base", "default-test-base", "default-merge-base"]
+test-base = []
+test = ["test-base", "default-test-base"]
+merge-base = []
+merge = ["merge-base", "default-merge-base"]
+unrelated = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/primary/Cargo.toml
new file mode 100644
index 000000000..6f95ded6f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { features = ["test"], path = "../dependency", version = "0.0.0" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/stderr.log
new file mode 100644
index 000000000..18ed7c2d8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/stderr.log
@@ -0,0 +1,10 @@
+ Adding foo (local) to dependencies.
+ Features:
+ + default-base
+ + default-merge-base
+ + default-test-base
+ + test
+ + test-base
+ - merge
+ - merge-base
+ - unrelated
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_workspace_dep_features/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path/in/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path/in/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/path/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path/in/primary/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path/in/primary/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/path/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/path/mod.rs
new file mode 100644
index 000000000..ab89e3a6d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+    init_registry();
+    let project = Project::from_template(curr_dir!().join("in"));
+    let project_root = project.root();
+    let cwd = project_root.join("primary");
+
+    snapbox::cmd::Command::cargo_ui()
+        .arg("add")
+        .arg_line("cargo-list-test-fixture-dependency --path ../dependency")
+        .current_dir(&cwd)
+        .assert()
+        .success()
+        .stdout_matches_path(curr_dir!().join("stdout.log"))
+        .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path/out/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path/out/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path/out/primary/Cargo.toml
new file mode 100644
index 000000000..93476d743
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path/out/primary/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { version = "0.0.0", path = "../dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/path/stderr.log
new file mode 100644
index 000000000..8109d3cc5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path/stderr.log
@@ -0,0 +1 @@
+ Adding cargo-list-test-fixture-dependency (local) to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/path/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/primary/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/primary/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_dev/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/mod.rs
new file mode 100644
index 000000000..4ae04c70a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = project_root.join("primary");
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("cargo-list-test-fixture-dependency --path ../dependency --dev")
+ .current_dir(&cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_dev/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/out/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/out/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_dev/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/out/primary/Cargo.toml
new file mode 100644
index 000000000..92be59dc6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/out/primary/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dev-dependencies]
+cargo-list-test-fixture-dependency = { path = "../dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_dev/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/stderr.log
new file mode 100644
index 000000000..d8093d6ae
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/stderr.log
@@ -0,0 +1 @@
+ Adding cargo-list-test-fixture-dependency (local) to dev-dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_dev/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_dev/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/primary/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/primary/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/mod.rs
new file mode 100644
index 000000000..ab89e3a6d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = project_root.join("primary");
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("cargo-list-test-fixture-dependency --path ../dependency")
+ .current_dir(&cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/out/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/out/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/out/primary/Cargo.toml
new file mode 100644
index 000000000..93476d743
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/out/primary/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { version = "0.0.0", path = "../dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/stderr.log
new file mode 100644
index 000000000..8109d3cc5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/stderr.log
@@ -0,0 +1 @@
+ Adding cargo-list-test-fixture-dependency (local) to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/Cargo.toml
new file mode 100644
index 000000000..299859e79
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency", "optional"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency/Cargo.toml
new file mode 100644
index 000000000..34157f411
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "your-face"
+version = "0.1.3"
+
+[dependencies]
+toml_edit = "0.1.5"
+atty = "0.2.13"
+optional-dependency = { path = "../optional", optional = true }
+
+[features]
+default = ["mouth"]
+nose = []
+mouth = ["nose"]
+eyes = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional/Cargo.toml
new file mode 100644
index 000000000..0216dba89
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "optional-dep"
+version = "0.1.3"
+
+[dependencies]
+toml_edit = "0.1.5"
+atty = "0.2.13"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary/Cargo.toml
new file mode 100644
index 000000000..5e20016d7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/mod.rs
new file mode 100644
index 000000000..eadd096aa
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = project_root.join("primary");
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("--path ../dependency --features your-face/nose")
+ .current_dir(&cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/Cargo.toml
new file mode 100644
index 000000000..299859e79
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency", "optional"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/dependency/Cargo.toml
new file mode 100644
index 000000000..34157f411
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/dependency/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "your-face"
+version = "0.1.3"
+
+[dependencies]
+toml_edit = "0.1.5"
+atty = "0.2.13"
+optional-dependency = { path = "../optional", optional = true }
+
+[features]
+default = ["mouth"]
+nose = []
+mouth = ["nose"]
+eyes = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/primary/Cargo.toml
new file mode 100644
index 000000000..5e20016d7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/stderr.log
new file mode 100644
index 000000000..791ca6008
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/stderr.log
@@ -0,0 +1 @@
+error: `your-face/nose` is unsupported when inferring the crate name, use `nose`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/primary/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/primary/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/mod.rs
new file mode 100644
index 000000000..754f2783f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = project_root.join("primary");
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("cargo_list_test_fixture_dependency --path ../dependency")
+ .current_dir(&cwd)
+ .assert()
+ .failure() // Fuzzy searching for paths isn't supported at this time
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/out/dependency/Cargo.toml
new file mode 100644
index 000000000..cbe244113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/out/dependency/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/out/primary/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/out/primary/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/stderr.log
new file mode 100644
index 000000000..59b35e3c4
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/stderr.log
@@ -0,0 +1 @@
+error: the crate `cargo_list_test_fixture_dependency@[ROOT]/case/dependency` could not be found at `[ROOT]/case/dependency`
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/path_normalized_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/in/Cargo.toml
new file mode 100644
index 000000000..550e41b07
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = "0.1.1"
+versioned-package = "0.1.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/mod.rs
new file mode 100644
index 000000000..4dfb06ed1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("toml")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/out/Cargo.toml
new file mode 100644
index 000000000..cacd510cc
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/out/Cargo.toml
@@ -0,0 +1,10 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = "0.1.1"
+toml = "99999.0.0"
+versioned-package = "0.1.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/stderr.log
new file mode 100644
index 000000000..7c83976f8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding toml v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_sorted/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/in/Cargo.toml
new file mode 100644
index 000000000..f803120a3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/in/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+versioned-package = "0.1.1"
+my-package = "0.1.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/mod.rs
new file mode 100644
index 000000000..4dfb06ed1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("toml")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/out/Cargo.toml
new file mode 100644
index 000000000..244a06ab9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/out/Cargo.toml
@@ -0,0 +1,10 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+versioned-package = "0.1.1"
+my-package = "0.1.1"
+toml = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/stderr.log
new file mode 100644
index 000000000..7c83976f8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding toml v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/preserve_unsorted/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/quiet/in b/src/tools/cargo/tests/testsuite/cargo_add/quiet/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/quiet/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/quiet/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/quiet/mod.rs
new file mode 100644
index 000000000..357843901
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/quiet/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("--quiet your-face")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/quiet/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/quiet/out/Cargo.toml
new file mode 100644
index 000000000..79d735a12
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/quiet/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+your-face = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/quiet/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/quiet/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/quiet/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/quiet/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/quiet/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/quiet/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/registry/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/registry/in/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/registry/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/registry/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/registry/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/registry/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/registry/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/registry/mod.rs
new file mode 100644
index 000000000..d5ba9ef28
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/registry/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_alt_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_alt_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2 --registry alternative")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/registry/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/registry/out/Cargo.toml
new file mode 100644
index 000000000..e856bee5d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/registry/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", registry = "alternative" }
+my-package2 = { version = "99999.0.0", registry = "alternative" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/registry/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/registry/stderr.log
new file mode 100644
index 000000000..437e780af
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/registry/stderr.log
@@ -0,0 +1,3 @@
+ Updating `alternative` index
+ Adding my-package1 v99999.0.0 to dependencies.
+ Adding my-package2 v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/registry/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/registry/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/registry/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/rename/in b/src/tools/cargo/tests/testsuite/cargo_add/rename/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/rename/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/rename/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/rename/mod.rs
new file mode 100644
index 000000000..3fefcccf3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/rename/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --rename renamed")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/rename/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/rename/out/Cargo.toml
new file mode 100644
index 000000000..ebcfbbd99
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/rename/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+renamed = { version = "99999.0.0", package = "my-package" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/rename/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/rename/stderr.log
new file mode 100644
index 000000000..fd6b711e3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/rename/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/rename/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/rename/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/rename/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/require_weak/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/in/Cargo.toml
new file mode 100644
index 000000000..54faf173a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/in/Cargo.toml
@@ -0,0 +1,11 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[features]
+eyes = ["your-face?/eyes"]
+
+[dependencies]
+your-face = { version = "99999.0.0", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/require_weak/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/require_weak/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/mod.rs
new file mode 100644
index 000000000..d99e4482a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("your-face --no-optional")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/require_weak/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/out/Cargo.toml
new file mode 100644
index 000000000..a0e4b9753
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/out/Cargo.toml
@@ -0,0 +1,11 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[features]
+eyes = ["your-face/eyes"]
+
+[dependencies]
+your-face = { version = "99999.0.0" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/require_weak/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/stderr.log
new file mode 100644
index 000000000..796b9601b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/stderr.log
@@ -0,0 +1,7 @@
+ Updating `dummy-registry` index
+ Adding your-face v99999.0.0 to dependencies.
+ Features:
+ - ears
+ - eyes
+ - mouth
+ - nose
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/require_weak/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/require_weak/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/in/Cargo.toml
new file mode 100644
index 000000000..19aa939d9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/in/Cargo.toml
@@ -0,0 +1,13 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+toml = "0.1.1"
+versioned-package = "0.1.1"
+
+[dependencies.my-build-package1]
+version = "0.1.1"
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/mod.rs
new file mode 100644
index 000000000..55e4c2281
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("unrelateed-crate")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/out/Cargo.toml
new file mode 100644
index 000000000..008ff4f62
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/out/Cargo.toml
@@ -0,0 +1,14 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+toml = "0.1.1"
+unrelateed-crate = "99999.0.0"
+versioned-package = "0.1.1"
+
+[dependencies.my-build-package1]
+version = "0.1.1"
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/stderr.log
new file mode 100644
index 000000000..be1db1c4d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding unrelateed-crate v99999.0.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/sorted_table_with_dotted_item/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/target/in b/src/tools/cargo/tests/testsuite/cargo_add/target/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/target/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/target/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/target/mod.rs
new file mode 100644
index 000000000..e263bad36
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/target/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2 --target i686-unknown-linux-gnu")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/target/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/target/out/Cargo.toml
new file mode 100644
index 000000000..9c96ede51
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/target/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[target.i686-unknown-linux-gnu.dependencies]
+my-package1 = "99999.0.0"
+my-package2 = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/target/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/target/stderr.log
new file mode 100644
index 000000000..3413bcc1b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/target/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to dependencies for target `i686-unknown-linux-gnu`.
+ Adding my-package2 v99999.0.0 to dependencies for target `i686-unknown-linux-gnu`.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/target/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/target/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/target/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/in b/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/mod.rs
new file mode 100644
index 000000000..43efe8e8d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 my-package2 --target cfg(unix)")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/out/Cargo.toml
new file mode 100644
index 000000000..212ec571b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[target."cfg(unix)".dependencies]
+my-package1 = "99999.0.0"
+my-package2 = "99999.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/stderr.log
new file mode 100644
index 000000000..e405c8dc1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/stderr.log
@@ -0,0 +1,3 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to dependencies for target `cfg(unix)`.
+ Adding my-package2 v99999.0.0 to dependencies for target `cfg(unix)`.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/target_cfg/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/Cargo.toml
new file mode 100644
index 000000000..b2a34c92e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency", features = ["not_recognized"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/Cargo.toml
new file mode 100644
index 000000000..9a7bc7f77
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "foo"
+version = "0.0.0"
+
+[features]
+default-base = []
+default-test-base = []
+default-merge-base = []
+long-feature-name-because-of-formatting-reasons = []
+default = [
+ "default-base",
+ "default-test-base",
+ "default-merge-base",
+ "long-feature-name-because-of-formatting-reasons",
+]
+test-base = []
+test = ["test-base", "default-test-base"]
+merge-base = []
+merge = ["merge-base", "default-merge-base"]
+unrelated = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/Cargo.toml
new file mode 100644
index 000000000..fb4a12619
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, features = ["test"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/mod.rs
new file mode 100644
index 000000000..8184dac8f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/mod.rs
@@ -0,0 +1,23 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["foo", "-p", "bar"])
+ .current_dir(cwd)
+ .assert()
+ .failure()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/Cargo.toml
new file mode 100644
index 000000000..b2a34c92e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency", features = ["not_recognized"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/dependency/Cargo.toml
new file mode 100644
index 000000000..9a7bc7f77
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/dependency/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "foo"
+version = "0.0.0"
+
+[features]
+default-base = []
+default-test-base = []
+default-merge-base = []
+long-feature-name-because-of-formatting-reasons = []
+default = [
+ "default-base",
+ "default-test-base",
+ "default-merge-base",
+ "long-feature-name-because-of-formatting-reasons",
+]
+test-base = []
+test = ["test-base", "default-test-base"]
+merge-base = []
+merge = ["merge-base", "default-merge-base"]
+unrelated = []
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/primary/Cargo.toml
new file mode 100644
index 000000000..fb4a12619
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, features = ["test"] }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/stderr.log
new file mode 100644
index 000000000..c5aee4dc1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/stderr.log
@@ -0,0 +1,7 @@
+ Adding foo (workspace) to dependencies.
+error: unrecognized feature for crate foo: not_recognized
+disabled features:
+ merge, merge-base, unrelated
+enabled features:
+ default-base, default-merge-base, default-test-base
+ long-feature-name-because-of-formatting-reasons, test, test-base
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/unknown_inherited_feature/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/vers/in b/src/tools/cargo/tests/testsuite/cargo_add/vers/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/vers/in
@@ -0,0 +1 @@
+../add-basic.in
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/vers/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/vers/mod.rs
new file mode 100644
index 000000000..fb78739e9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/vers/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package@>=0.1.1")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/vers/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/vers/out/Cargo.toml
new file mode 100644
index 000000000..c6ca3d67a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/vers/out/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = ">=0.1.1"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/vers/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/vers/stderr.log
new file mode 100644
index 000000000..7ef92d22e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/vers/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package >=0.1.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/vers/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/vers/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/vers/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/Cargo.toml
new file mode 100644
index 000000000..57e1f3085
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/dependency/Cargo.toml
new file mode 100644
index 000000000..ca4f36d72
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/primary/Cargo.toml
new file mode 100644
index 000000000..5e20016d7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/mod.rs
new file mode 100644
index 000000000..ccaf850f9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = project_root.join("primary");
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("cargo-list-test-fixture-dependency")
+ .current_dir(&cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/Cargo.toml
new file mode 100644
index 000000000..57e1f3085
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/dependency/Cargo.toml
new file mode 100644
index 000000000..ca4f36d72
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/primary/Cargo.toml
new file mode 100644
index 000000000..a693df54f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { version = "0.0.0", path = "../dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/stderr.log
new file mode 100644
index 000000000..8109d3cc5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/stderr.log
@@ -0,0 +1 @@
+ Adding cargo-list-test-fixture-dependency (local) to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/Cargo.toml
new file mode 100644
index 000000000..57e1f3085
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/dependency/Cargo.toml
new file mode 100644
index 000000000..ca4f36d72
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/primary/Cargo.toml
new file mode 100644
index 000000000..5e20016d7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/mod.rs
new file mode 100644
index 000000000..ab89e3a6d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = project_root.join("primary");
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("cargo-list-test-fixture-dependency --path ../dependency")
+ .current_dir(&cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/Cargo.toml
new file mode 100644
index 000000000..57e1f3085
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/dependency/Cargo.toml
new file mode 100644
index 000000000..ca4f36d72
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/primary/Cargo.toml
new file mode 100644
index 000000000..a693df54f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+cargo-list-test-fixture-dependency = { version = "0.0.0", path = "../dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/stderr.log
new file mode 100644
index 000000000..8109d3cc5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/stderr.log
@@ -0,0 +1 @@
+ Adding cargo-list-test-fixture-dependency (local) to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/Cargo.toml
new file mode 100644
index 000000000..57e1f3085
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/Cargo.toml
new file mode 100644
index 000000000..ca4f36d72
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/primary/Cargo.toml
new file mode 100644
index 000000000..5e20016d7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/primary/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/mod.rs
new file mode 100644
index 000000000..4ae04c70a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use crate::cargo_add::init_registry;
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = project_root.join("primary");
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("cargo-list-test-fixture-dependency --path ../dependency --dev")
+ .current_dir(&cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/Cargo.toml
new file mode 100644
index 000000000..57e1f3085
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["primary", "dependency"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/dependency/Cargo.toml
new file mode 100644
index 000000000..ca4f36d72
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "cargo-list-test-fixture-dependency"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/primary/Cargo.toml
new file mode 100644
index 000000000..8dfa5c218
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/out/primary/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dev-dependencies]
+cargo-list-test-fixture-dependency = { path = "../dependency" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/stderr.log
new file mode 100644
index 000000000..d8093d6ae
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/stderr.log
@@ -0,0 +1 @@
+ Adding cargo-list-test-fixture-dependency (local) to dev-dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/workspace_path_dev/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_alias_config.rs b/src/tools/cargo/tests/testsuite/cargo_alias_config.rs
new file mode 100644
index 000000000..fd4aec917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_alias_config.rs
@@ -0,0 +1,434 @@
+//! Tests for `[alias]` config command aliases.
+
+use std::env;
+
+use cargo_test_support::tools::echo_subcommand;
+use cargo_test_support::{basic_bin_manifest, project};
+
+#[cargo_test]
+fn alias_incorrect_config_type() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ b-cargo-test = 5
+ "#,
+ )
+ .build();
+
+ p.cargo("b-cargo-test -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] invalid configuration for key `alias.b-cargo-test`
+expected a list, but found a integer for [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn alias_malformed_config_string() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ b-cargo-test = `
+ "#,
+ )
+ .build();
+
+ p.cargo("b-cargo-test -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] could not load Cargo configuration
+
+Caused by:
+ could not parse TOML configuration in `[..]/config`
+
+Caused by:
+ [..]
+
+Caused by:
+ TOML parse error at line [..]
+ |
+ 3 | b-cargo-test = `
+ | ^
+ invalid string
+ expected `\"`, `'`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn alias_malformed_config_list() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ b-cargo-test = [1, 2]
+ "#,
+ )
+ .build();
+
+ p.cargo("b-cargo-test -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] could not load Cargo configuration
+
+Caused by:
+ failed to load TOML configuration from `[..]/config`
+
+Caused by:
+ [..] `alias`
+
+Caused by:
+ [..] `b-cargo-test`
+
+Caused by:
+ expected string but found integer in list
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn alias_config() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ b-cargo-test = "build"
+ "#,
+ )
+ .build();
+
+ p.cargo("b-cargo-test -v")
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.5.0 [..]
+[RUNNING] `rustc --crate-name foo [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dependent_alias() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ b-cargo-test = "build"
+ a-cargo-test = ["b-cargo-test", "-v"]
+ "#,
+ )
+ .build();
+
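+ // Aliases may reference other aliases: `a-cargo-test` expands through `b-cargo-test` into `build -v`.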
+ p.cargo("a-cargo-test")
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.5.0 [..]
+[RUNNING] `rustc --crate-name foo [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn builtin_alias_shadowing_external_subcommand() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .executable("cargo-t", "")
+ .build();
+
+ let mut paths: Vec<_> = env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect();
+ paths.push(p.root());
+ let path = env::join_paths(paths).unwrap();
+
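+ // The built-in `t` alias (`cargo test`) takes precedence over the external `cargo-t` found on PATH.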
+ p.cargo("t")
+ .env("PATH", &path)
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 [..]
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] unittests src/main.rs [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn alias_shadowing_external_subcommand() {
+ let echo = echo_subcommand();
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ echo = "build"
+ "#,
+ )
+ .build();
+
+ let mut paths: Vec<_> = env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect();
+ paths.push(echo.target_debug_dir());
+ let path = env::join_paths(paths).unwrap();
+
+ p.cargo("echo")
+ .env("PATH", &path)
+ .with_stderr("\
+[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]/cargo-echo/target/debug/cargo-echo[EXE]`
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #10049 <https://github.com/rust-lang/cargo/issues/10049>.
+[COMPILING] foo v0.5.0 [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn default_args_alias() {
+ let echo = echo_subcommand();
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ echo = "echo --flag1 --flag2"
+ test-1 = "echo"
+ build = "build --verbose"
+ "#,
+ )
+ .build();
+
+ let mut paths: Vec<_> = env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect();
+ paths.push(echo.target_debug_dir());
+ let path = env::join_paths(paths).unwrap();
+
+ p.cargo("echo")
+ .env("PATH", &path)
+ .with_status(101)
+ .with_stderr("\
+[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]/cargo-echo/target/debug/cargo-echo[EXE]`
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #10049 <https://github.com/rust-lang/cargo/issues/10049>.
+error: alias echo has unresolvable recursive definition: echo -> echo
+",
+ )
+ .run();
+
+ p.cargo("test-1")
+ .env("PATH", &path)
+ .with_status(101)
+ .with_stderr("\
+[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]/cargo-echo/target/debug/cargo-echo[EXE]`
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #10049 <https://github.com/rust-lang/cargo/issues/10049>.
+error: alias test-1 has unresolvable recursive definition: test-1 -> echo -> echo
+",
+ )
+ .run();
+
+ // Built-in commands are not expanded by alias rules.
+ p.cargo("build")
+ .with_stderr(
+ "\
+[WARNING] user-defined alias `build` is ignored, because it is shadowed by a built-in command
+[COMPILING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn corecursive_alias() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ test-1 = "test-2 --flag1"
+ test-2 = "test-3 --flag2"
+ test-3 = "test-1 --flag3"
+ "#,
+ )
+ .build();
+
+ p.cargo("test-1")
+ .with_status(101)
+ .with_stderr(
+ "error: alias test-1 has unresolvable recursive definition: test-1 -> test-2 -> test-3 -> test-1",
+ )
+ .run();
+
+ p.cargo("test-2")
+ .with_status(101)
+ .with_stderr(
+ "error: alias test-2 has unresolvable recursive definition: test-2 -> test-3 -> test-1 -> test-2",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn alias_list_test() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ b-cargo-test = ["build", "--release"]
+ "#,
+ )
+ .build();
+
+ p.cargo("b-cargo-test -v")
+ .with_stderr_contains("[COMPILING] foo v0.5.0 [..]")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name [..]")
+ .run();
+}
+
+#[cargo_test]
+fn alias_with_flags_config() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ b-cargo-test = "build --release"
+ "#,
+ )
+ .build();
+
+ p.cargo("b-cargo-test -v")
+ .with_stderr_contains("[COMPILING] foo v0.5.0 [..]")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]")
+ .run();
+}
+
+#[cargo_test]
+fn alias_cannot_shadow_builtin_command() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ build = "fetch"
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[WARNING] user-defined alias `build` is ignored, because it is shadowed by a built-in command
+[COMPILING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn alias_override_builtin_alias() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [alias]
+ b = "run"
+ "#,
+ )
+ .build();
+
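+ // A user-defined alias may override a built-in alias such as `b`, so `cargo b` runs the binary here instead of building it.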
+ p.cargo("b")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn builtin_alias_takes_options() {
+ // #6381
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "examples/ex1.rs",
+ r#"fn main() { println!("{}", std::env::args().skip(1).next().unwrap()) }"#,
+ )
+ .build();
+
+ p.cargo("r --example ex1 -- asdf").with_stdout("asdf").run();
+}
+
+#[cargo_test]
+fn global_options_with_alias() {
+ // Check that global options are passed through.
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("-v c")
+ .with_stderr(
+ "\
+[CHECKING] foo [..]
+[RUNNING] `rustc [..]
+[FINISHED] dev [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn weird_check() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("-- check --invalid_argument -some-other-argument")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] trailing arguments after built-in command `check` are unsupported: `--invalid_argument -some-other-argument`
+
+To pass the arguments to the subcommand, remove `--`
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_command.rs b/src/tools/cargo/tests/testsuite/cargo_command.rs
new file mode 100644
index 000000000..62869387f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_command.rs
@@ -0,0 +1,535 @@
+//! Tests for custom cargo commands and other global command features.
+
+use std::env;
+use std::fs;
+use std::io::Read;
+use std::path::{Path, PathBuf};
+use std::process::Stdio;
+use std::str;
+
+use cargo_test_support::basic_manifest;
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::registry::Package;
+use cargo_test_support::tools::echo_subcommand;
+use cargo_test_support::{
+ basic_bin_manifest, cargo_exe, cargo_process, paths, project, project_in_home,
+};
+use cargo_util::paths::join_paths;
+
+fn path() -> Vec<PathBuf> {
+ env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect()
+}
+
+#[cargo_test]
+fn list_commands_with_descriptions() {
+ let p = project().build();
+ p.cargo("--list")
+ .with_stdout_contains(
+ " build Compile a local package and all of its dependencies",
+ )
+ // Assert that `read-manifest` prints the right one-line description followed by another
+ // command, indented.
+ .with_stdout_contains(
+ " read-manifest Print a JSON representation of a Cargo.toml manifest.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn list_builtin_aliases_with_descriptions() {
+ let p = project().build();
+ p.cargo("--list")
+ .with_stdout_contains(" b alias: build")
+ .with_stdout_contains(" c alias: check")
+ .with_stdout_contains(" r alias: run")
+ .with_stdout_contains(" t alias: test")
+ .run();
+}
+
+#[cargo_test]
+fn list_custom_aliases_with_descriptions() {
+ let p = project_in_home("proj")
+ .file(
+ &paths::home().join(".cargo").join("config"),
+ r#"
+ [alias]
+ myaliasstr = "foo --bar"
+ myaliasvec = ["foo", "--bar"]
+ "#,
+ )
+ .build();
+
+ p.cargo("--list")
+ .with_stdout_contains(" myaliasstr alias: foo --bar")
+ .with_stdout_contains(" myaliasvec alias: foo --bar")
+ .run();
+}
+
+#[cargo_test]
+fn list_dedupe() {
+ let p = project()
+ .executable(Path::new("path-test-1").join("cargo-dupe"), "")
+ .executable(Path::new("path-test-2").join("cargo-dupe"), "")
+ .build();
+
+ let mut path = path();
+ path.push(p.root().join("path-test-1"));
+ path.push(p.root().join("path-test-2"));
+ let path = env::join_paths(path.iter()).unwrap();
+
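+ // The same external subcommand found in multiple PATH directories should only be listed once.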
+ p.cargo("--list")
+ .env("PATH", &path)
+ .with_stdout_contains_n(" dupe", 1)
+ .run();
+}
+
+#[cargo_test]
+fn list_command_looks_at_path() {
+ let proj = project()
+ .executable(Path::new("path-test").join("cargo-1"), "")
+ .build();
+
+ let mut path = path();
+ path.push(proj.root().join("path-test"));
+ let path = env::join_paths(path.iter()).unwrap();
+ let output = cargo_process("-v --list")
+ .env("PATH", &path)
+ .exec_with_output()
+ .unwrap();
+ let output = str::from_utf8(&output.stdout).unwrap();
+ assert!(
+ output.contains("\n 1 "),
+ "missing 1: {}",
+ output
+ );
+}
+
+#[cfg(windows)]
+#[cargo_test]
+fn list_command_looks_at_path_case_mismatch() {
+ let proj = project()
+ .executable(Path::new("path-test").join("cargo-1"), "")
+ .build();
+
+ let mut path = path();
+ path.push(proj.root().join("path-test"));
+ let path = env::join_paths(path.iter()).unwrap();
+
+ // See issue #11814: Environment variable names are case-insensitive on Windows.
+ // We need to check that having "Path" instead of "PATH" is okay.
+ let output = cargo_process("-v --list")
+ .env("Path", &path)
+ .env_remove("PATH")
+ .exec_with_output()
+ .unwrap();
+ let output = str::from_utf8(&output.stdout).unwrap();
+ assert!(
+ output.contains("\n 1 "),
+ "missing 1: {}",
+ output
+ );
+}
+
+#[cargo_test]
+fn list_command_handles_known_external_commands() {
+ let p = project()
+ .executable(Path::new("path-test").join("cargo-fmt"), "")
+ .build();
+
+ let fmt_desc = " fmt Formats all bin and lib files of the current crate using rustfmt.";
+
+ // Without path - fmt isn't there
+ p.cargo("--list")
+ .env("PATH", "")
+ .with_stdout_does_not_contain(fmt_desc)
+ .run();
+
+ // With path - fmt is there with known description
+ let mut path = path();
+ path.push(p.root().join("path-test"));
+ let path = env::join_paths(path.iter()).unwrap();
+
+ p.cargo("--list")
+ .env("PATH", &path)
+ .with_stdout_contains(fmt_desc)
+ .run();
+}
+
+#[cargo_test]
+fn list_command_resolves_symlinks() {
+ let proj = project()
+ .symlink(cargo_exe(), Path::new("path-test").join("cargo-2"))
+ .build();
+
+ let mut path = path();
+ path.push(proj.root().join("path-test"));
+ let path = env::join_paths(path.iter()).unwrap();
+ let output = cargo_process("-v --list")
+ .env("PATH", &path)
+ .exec_with_output()
+ .unwrap();
+ let output = str::from_utf8(&output.stdout).unwrap();
+ assert!(
+ output.contains("\n 2 "),
+ "missing 2: {}",
+ output
+ );
+}
+
+#[cargo_test]
+fn find_closest_capital_c_to_c() {
+ cargo_process("C")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: no such command: `C`
+
+<tab>Did you mean `c`?
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn find_closest_capital_b_to_b() {
+ cargo_process("B")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: no such command: `B`
+
+<tab>Did you mean `b`?
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn find_closest_biuld_to_build() {
+ cargo_process("biuld")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: no such command: `biuld`
+
+<tab>Did you mean `build`?
+",
+ )
+ .run();
+
+ // But, if we actually have `biuld`, it must work!
+ // https://github.com/rust-lang/cargo/issues/5201
+ Package::new("cargo-biuld", "1.0.0")
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ println!("Similar, but not identical to, build");
+ }
+ "#,
+ )
+ .publish();
+
+ cargo_process("install cargo-biuld").run();
+ cargo_process("biuld")
+ .with_stdout("Similar, but not identical to, build\n")
+ .run();
+ cargo_process("--list")
+ .with_stdout_contains(
+ " build Compile a local package and all of its dependencies\n",
+ )
+ .with_stdout_contains(" biuld\n")
+ .run();
+}
+
+#[cargo_test]
+fn find_closest_alias() {
+ let root = paths::root();
+ let my_home = root.join("my_home");
+ fs::create_dir(&my_home).unwrap();
+ fs::write(
+ &my_home.join("config"),
+ r#"
+ [alias]
+ myalias = "build"
+ "#,
+ )
+ .unwrap();
+
+ cargo_process("myalais")
+ .env("CARGO_HOME", &my_home)
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: no such command: `myalais`
+
+<tab>Did you mean `myalias`?
+",
+ )
+ .run();
+
+ // But, if no alias is defined, it must not suggest one!
+ cargo_process("myalais")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: no such command: `myalais`
+",
+ )
+ .with_stderr_does_not_contain(
+ "\
+<tab>Did you mean `myalias`?
+",
+ )
+ .run();
+}
+
+// If a subcommand is more than an edit distance of 3 away, we don't make a suggestion.
+#[cargo_test]
+fn find_closest_dont_correct_nonsense() {
+ cargo_process("there-is-no-way-that-there-is-a-command-close-to-this")
+ .cwd(&paths::root())
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no such command: `there-is-no-way-that-there-is-a-command-close-to-this`
+
+<tab>View all installed commands with `cargo --list`",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn displays_subcommand_on_error() {
+ cargo_process("invalid-command")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no such command: `invalid-command`
+
+<tab>View all installed commands with `cargo --list`",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn override_cargo_home() {
+ let root = paths::root();
+ let my_home = root.join("my_home");
+ fs::create_dir(&my_home).unwrap();
+ fs::write(
+ &my_home.join("config"),
+ r#"
+ [cargo-new]
+ vcs = "none"
+ "#,
+ )
+ .unwrap();
+
+ cargo_process("new foo").env("CARGO_HOME", &my_home).run();
+
+ assert!(!paths::root().join("foo/.git").is_dir());
+
+ cargo_process("new foo2").run();
+
+ assert!(paths::root().join("foo2/.git").is_dir());
+}
+
+#[cargo_test]
+fn cargo_subcommand_env() {
+ let src = format!(
+ r#"
+ use std::env;
+
+ fn main() {{
+ println!("{{}}", env::var("{}").unwrap());
+ }}
+ "#,
+ cargo::CARGO_ENV
+ );
+
+ let p = project()
+ .at("cargo-envtest")
+ .file("Cargo.toml", &basic_bin_manifest("cargo-envtest"))
+ .file("src/main.rs", &src)
+ .build();
+
+ let target_dir = p.target_debug_dir();
+
+ p.cargo("build").run();
+ assert!(p.bin("cargo-envtest").is_file());
+
+ let cargo = cargo_exe().canonicalize().unwrap();
+ let mut path = path();
+ path.push(target_dir.clone());
+ let path = env::join_paths(path.iter()).unwrap();
+
+ cargo_process("envtest")
+ .env("PATH", &path)
+ .with_stdout(cargo.to_str().unwrap())
+ .run();
+
+ // Check that subcommands inherit an overridden $CARGO
+ let envtest_bin = target_dir
+ .join("cargo-envtest")
+ .with_extension(std::env::consts::EXE_EXTENSION)
+ .canonicalize()
+ .unwrap();
+ let envtest_bin = envtest_bin.to_str().unwrap();
+ cargo_process("envtest")
+ .env("PATH", &path)
+ .env(cargo::CARGO_ENV, &envtest_bin)
+ .with_stdout(envtest_bin)
+ .run();
+}
+
+#[cargo_test]
+fn cargo_cmd_bins_vs_explicit_path() {
+ // Set up `cargo-foo` binary in two places: inside `$HOME/.cargo/bin` and outside of it
+ //
+ // Return paths to both places
+ fn set_up_cargo_foo() -> (PathBuf, PathBuf) {
+ let p = project()
+ .at("cargo-foo")
+ .file("Cargo.toml", &basic_manifest("cargo-foo", "1.0.0"))
+ .file(
+ "src/bin/cargo-foo.rs",
+ r#"fn main() { println!("INSIDE"); }"#,
+ )
+ .file(
+ "src/bin/cargo-foo2.rs",
+ r#"fn main() { println!("OUTSIDE"); }"#,
+ )
+ .build();
+ p.cargo("build").run();
+ let cargo_bin_dir = paths::home().join(".cargo/bin");
+ cargo_bin_dir.mkdir_p();
+ let root_bin_dir = paths::root().join("bin");
+ root_bin_dir.mkdir_p();
+ let exe_name = format!("cargo-foo{}", env::consts::EXE_SUFFIX);
+ fs::rename(p.bin("cargo-foo"), cargo_bin_dir.join(&exe_name)).unwrap();
+ fs::rename(p.bin("cargo-foo2"), root_bin_dir.join(&exe_name)).unwrap();
+
+ (root_bin_dir, cargo_bin_dir)
+ }
+
+ let (outside_dir, inside_dir) = set_up_cargo_foo();
+
+ // If `$CARGO_HOME/bin` is not in `$PATH`, prefer it over anything found in `$PATH`.
+ //
+ // This is the historical behavior we don't want to break.
+ cargo_process("foo").with_stdout_contains("INSIDE").run();
+
+ // When `$CARGO_HOME/bin` is in the `$PATH`
+ // use only `$PATH` so the user-defined ordering is respected.
+ {
+ cargo_process("foo")
+ .env(
+ "PATH",
+ join_paths(&[&inside_dir, &outside_dir], "PATH").unwrap(),
+ )
+ .with_stdout_contains("INSIDE")
+ .run();
+
+ cargo_process("foo")
+ // Note: trailing slash
+ .env(
+ "PATH",
+ join_paths(&[inside_dir.join(""), outside_dir.join("")], "PATH").unwrap(),
+ )
+ .with_stdout_contains("INSIDE")
+ .run();
+
+ cargo_process("foo")
+ .env(
+ "PATH",
+ join_paths(&[&outside_dir, &inside_dir], "PATH").unwrap(),
+ )
+ .with_stdout_contains("OUTSIDE")
+ .run();
+
+ cargo_process("foo")
+ // Note: trailing slash
+ .env(
+ "PATH",
+ join_paths(&[outside_dir.join(""), inside_dir.join("")], "PATH").unwrap(),
+ )
+ .with_stdout_contains("OUTSIDE")
+ .run();
+ }
+}
+
+#[cargo_test]
+fn cargo_subcommand_args() {
+ let p = echo_subcommand();
+ let cargo_foo_bin = p.bin("cargo-echo");
+ assert!(cargo_foo_bin.is_file());
+
+ let mut path = path();
+ path.push(p.target_debug_dir());
+ let path = env::join_paths(path.iter()).unwrap();
+
+ cargo_process("echo bar -v --help")
+ .env("PATH", &path)
+ .with_stdout("echo bar -v --help")
+ .run();
+}
+
+#[cargo_test]
+fn explain() {
+ cargo_process("--explain E0001")
+ .with_stdout_contains(
+ "This error suggests that the expression arm corresponding to the noted pattern",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn closed_output_ok() {
+ // Checks that closed output doesn't cause an error.
+ let mut p = cargo_process("--list").build_command();
+ p.stdout(Stdio::piped()).stderr(Stdio::piped());
+ let mut child = p.spawn().unwrap();
+ // Close stdout
+ drop(child.stdout.take());
+ // Read stderr
+ let mut s = String::new();
+ child
+ .stderr
+ .as_mut()
+ .unwrap()
+ .read_to_string(&mut s)
+ .unwrap();
+ let status = child.wait().unwrap();
+ assert!(status.success());
+ assert!(s.is_empty(), "{}", s);
+}
+
+#[cargo_test]
+fn subcommand_leading_plus_output_contains() {
+ cargo_process("+nightly")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: no such command: `+nightly`
+
+<tab>Cargo does not handle `+toolchain` directives.
+<tab>Did you mean to invoke `cargo` through `rustup` instead?",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn full_did_you_mean() {
+ cargo_process("bluid")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: no such command: `bluid`
+
+<tab>Did you mean `build`?
+
+<tab>View all installed commands with `cargo --list`",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_config.rs b/src/tools/cargo/tests/testsuite/cargo_config.rs
new file mode 100644
index 000000000..e367f8e06
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_config.rs
@@ -0,0 +1,520 @@
+//! Tests for the `cargo config` command.
+
+use super::config::write_config_at;
+use cargo_test_support::paths;
+use std::fs;
+use std::path::PathBuf;
+
+fn cargo_process(s: &str) -> cargo_test_support::Execs {
+ let mut p = cargo_test_support::cargo_process(s);
+ // Clear out some of the environment added by the default cargo_process so
+ // the tests don't need to deal with it.
+ p.env_remove("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO")
+ .env_remove("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO")
+ .env_remove("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO")
+ .env_remove("CARGO_PROFILE_BENCH_SPLIT_DEBUGINFO")
+ .env_remove("CARGO_INCREMENTAL");
+ p
+}
+
+#[cargo_test]
+fn gated() {
+ cargo_process("config get")
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_status(101)
+ .with_stderr("\
+error: the `cargo config` command is unstable, pass `-Z unstable-options` to enable it
+See https://github.com/rust-lang/cargo/issues/9301 for more information about the `cargo config` command.
+")
+ .run();
+}
+
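+// Writes a config in the test home directory plus a nested config under `foo/.cargo`, returning the nested `.cargo` directory.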
+fn common_setup() -> PathBuf {
+ write_config_at(
+ paths::home().join(".cargo/config.toml"),
+ "
+ [alias]
+ foo = \"abc --xyz\"
+ [build]
+ jobs = 99
+ rustflags = [\"--flag-global\"]
+ [profile.dev]
+ opt-level = 3
+ [profile.dev.package.foo]
+ opt-level = 1
+ [target.'cfg(target_os = \"linux\")']
+ runner = \"runme\"
+
+ # How unknown keys are handled.
+ [extra-table]
+ somekey = \"somevalue\"
+ ",
+ );
+ let sub_folder = paths::root().join("foo/.cargo");
+ write_config_at(
+ sub_folder.join("config.toml"),
+ "
+ [alias]
+ sub-example = [\"sub\", \"example\"]
+ [build]
+ rustflags = [\"--flag-directory\"]
+ ",
+ );
+ sub_folder
+}
+
+#[cargo_test]
+fn get_toml() {
+ // Notes:
+ // - The "extra-table" is shown without a warning. I'm not sure how that
+ // should be handled, since displaying warnings could cause problems
+ // with ingesting the output.
+ // - Environment variables aren't loaded. :(
+ let sub_folder = common_setup();
+ cargo_process("config get -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .env("CARGO_ALIAS_BAR", "cat dog")
+ .env("CARGO_BUILD_JOBS", "100")
+ // The weird forward slash in the linux line is due to testsuite normalization.
+ .with_stdout(
+ "\
+alias.foo = \"abc --xyz\"
+alias.sub-example = [\"sub\", \"example\"]
+build.jobs = 99
+build.rustflags = [\"--flag-directory\", \"--flag-global\"]
+extra-table.somekey = \"somevalue\"
+profile.dev.opt-level = 3
+profile.dev.package.foo.opt-level = 1
+target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\"
+# The following environment variables may affect the loaded values.
+# CARGO_ALIAS_BAR=[..]cat dog[..]
+# CARGO_BUILD_JOBS=100
+# CARGO_HOME=[ROOT]/home/.cargo
+",
+ )
+ .with_stderr("")
+ .run();
+
+ // Env keys work if they are specific.
+ cargo_process("config get build.jobs -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .env("CARGO_BUILD_JOBS", "100")
+ .with_stdout("build.jobs = 100")
+ .with_stderr("")
+ .run();
+
+ // Array value.
+ cargo_process("config get build.rustflags -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_stdout("build.rustflags = [\"--flag-directory\", \"--flag-global\"]")
+ .with_stderr("")
+ .run();
+
+ // Sub-table
+ cargo_process("config get profile -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_stdout(
+ "\
+profile.dev.opt-level = 3
+profile.dev.package.foo.opt-level = 1
+",
+ )
+ .with_stderr("")
+ .run();
+
+ // Specific profile entry.
+ cargo_process("config get profile.dev.opt-level -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_stdout("profile.dev.opt-level = 3")
+ .with_stderr("")
+ .run();
+
+ // A key that isn't set.
+ cargo_process("config get build.rustc -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_status(101)
+ .with_stdout("")
+ .with_stderr("error: config value `build.rustc` is not set")
+ .run();
+
+ // A key that is not part of Cargo's config schema.
+ cargo_process("config get not.set -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_status(101)
+ .with_stdout("")
+ .with_stderr("error: config value `not.set` is not set")
+ .run();
+}
+
+#[cargo_test]
+fn get_json() {
+ // Notes:
+ // - This does not show env vars at all. :(
+ let all_json = r#"
+ {
+ "alias": {
+ "foo": "abc --xyz",
+ "sub-example": [
+ "sub",
+ "example"
+ ]
+ },
+ "build": {
+ "jobs": 99,
+ "rustflags": [
+ "--flag-directory",
+ "--flag-global"
+ ]
+ },
+ "extra-table": {
+ "somekey": "somevalue"
+ },
+ "profile": {
+ "dev": {
+ "opt-level": 3,
+ "package": {
+ "foo": {
+ "opt-level": 1
+ }
+ }
+ }
+ },
+ "target": {
+ "cfg(target_os = \"linux\")": {
+ "runner": "runme"
+ }
+ }
+ }
+ "#;
+ let sub_folder = common_setup();
+ cargo_process("config get --format=json -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .env("CARGO_ALIAS_BAR", "cat dog")
+ .env("CARGO_BUILD_JOBS", "100")
+ .with_json(all_json)
+ .with_stderr(
+ "\
+note: The following environment variables may affect the loaded values.
+CARGO_ALIAS_BAR=[..]cat dog[..]
+CARGO_BUILD_JOBS=100
+CARGO_HOME=[ROOT]/home/.cargo
+",
+ )
+ .run();
+
+ // json-value is the same for the entire root table
+ cargo_process("config get --format=json-value -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_json(all_json)
+ .with_stderr(
+ "\
+note: The following environment variables may affect the loaded values.
+CARGO_HOME=[ROOT]/home/.cargo
+",
+ )
+ .run();
+
+ cargo_process("config get --format=json build.jobs -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_json(
+ r#"
+ {"build": {"jobs": 99}}
+ "#,
+ )
+ .with_stderr("")
+ .run();
+
+ cargo_process("config get --format=json-value build.jobs -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_stdout("99")
+ .with_stderr("")
+ .run();
+}
+
+#[cargo_test]
+fn show_origin_toml() {
+ let sub_folder = common_setup();
+ cargo_process("config get --show-origin -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_stdout(
+ "\
+alias.foo = \"abc --xyz\" # [ROOT]/home/.cargo/config.toml
+alias.sub-example = [
+ \"sub\", # [ROOT]/foo/.cargo/config.toml
+ \"example\", # [ROOT]/foo/.cargo/config.toml
+]
+build.jobs = 99 # [ROOT]/home/.cargo/config.toml
+build.rustflags = [
+ \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml
+ \"--flag-global\", # [ROOT]/home/.cargo/config.toml
+]
+extra-table.somekey = \"somevalue\" # [ROOT]/home/.cargo/config.toml
+profile.dev.opt-level = 3 # [ROOT]/home/.cargo/config.toml
+profile.dev.package.foo.opt-level = 1 # [ROOT]/home/.cargo/config.toml
+target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\" # [ROOT]/home/.cargo/config.toml
+# The following environment variables may affect the loaded values.
+# CARGO_HOME=[ROOT]/home/.cargo
+",
+ )
+ .with_stderr("")
+ .run();
+
+ cargo_process("config get --show-origin build.rustflags -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .env("CARGO_BUILD_RUSTFLAGS", "env1 env2")
+ .with_stdout(
+ "\
+build.rustflags = [
+ \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml
+ \"--flag-global\", # [ROOT]/home/.cargo/config.toml
+ \"env1\", # environment variable `CARGO_BUILD_RUSTFLAGS`
+ \"env2\", # environment variable `CARGO_BUILD_RUSTFLAGS`
+]
+",
+ )
+ .with_stderr("")
+ .run();
+}
+
+#[cargo_test]
+fn show_origin_toml_cli() {
+ let sub_folder = common_setup();
+ cargo_process("config get --show-origin build.jobs -Zunstable-options --config build.jobs=123")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .env("CARGO_BUILD_JOBS", "1")
+ .with_stdout("build.jobs = 123 # --config cli option")
+ .with_stderr("")
+ .run();
+
+ cargo_process("config get --show-origin build.rustflags -Zunstable-options --config")
+ .arg("build.rustflags=[\"cli1\",\"cli2\"]")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .env("CARGO_BUILD_RUSTFLAGS", "env1 env2")
+ .with_stdout(
+ "\
+build.rustflags = [
+ \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml
+ \"--flag-global\", # [ROOT]/home/.cargo/config.toml
+ \"cli1\", # --config cli option
+ \"cli2\", # --config cli option
+ \"env1\", # environment variable `CARGO_BUILD_RUSTFLAGS`
+ \"env2\", # environment variable `CARGO_BUILD_RUSTFLAGS`
+]
+",
+ )
+ .with_stderr("")
+ .run();
+}
+
+#[cargo_test]
+fn show_origin_json() {
+ let sub_folder = common_setup();
+ cargo_process("config get --show-origin --format=json -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_status(101)
+ .with_stderr("error: the `json` format does not support --show-origin, try the `toml` format instead")
+ .run();
+}
+
+#[cargo_test]
+fn unmerged_toml() {
+ let sub_folder = common_setup();
+ cargo_process("config get --merged=no -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .env("CARGO_ALIAS_BAR", "cat dog")
+ .env("CARGO_BUILD_JOBS", "100")
+ .with_stdout(
+ "\
+# Environment variables
+# CARGO=[..]
+# CARGO_ALIAS_BAR=[..]cat dog[..]
+# CARGO_BUILD_JOBS=100
+# CARGO_HOME=[ROOT]/home/.cargo
+
+# [ROOT]/foo/.cargo/config.toml
+alias.sub-example = [\"sub\", \"example\"]
+build.rustflags = [\"--flag-directory\"]
+
+# [ROOT]/home/.cargo/config.toml
+alias.foo = \"abc --xyz\"
+build.jobs = 99
+build.rustflags = [\"--flag-global\"]
+extra-table.somekey = \"somevalue\"
+profile.dev.opt-level = 3
+profile.dev.package.foo.opt-level = 1
+target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\"
+
+",
+ )
+ .with_stderr("")
+ .run();
+
+ cargo_process("config get --merged=no build.rustflags -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .env("CARGO_BUILD_RUSTFLAGS", "env1 env2")
+ .with_stdout(
+ "\
+# Environment variables
+# CARGO_BUILD_RUSTFLAGS=[..]env1 env2[..]
+
+# [ROOT]/foo/.cargo/config.toml
+build.rustflags = [\"--flag-directory\"]
+
+# [ROOT]/home/.cargo/config.toml
+build.rustflags = [\"--flag-global\"]
+
+",
+ )
+ .with_stderr("")
+ .run();
+
+ cargo_process("config get --merged=no does.not.exist -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_stdout("")
+ .with_stderr("")
+ .run();
+
+ cargo_process("config get --merged=no build.rustflags.extra -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_status(101)
+ .with_stderr(
+ "error: expected table for configuration key `build.rustflags`, \
+ but found array in [ROOT]/foo/.cargo/config.toml",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn unmerged_toml_cli() {
+ let sub_folder = common_setup();
+ cargo_process("config get --merged=no build.rustflags -Zunstable-options --config")
+ .arg("build.rustflags=[\"cli1\",\"cli2\"]")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .env("CARGO_BUILD_RUSTFLAGS", "env1 env2")
+ .with_stdout(
+ "\
+# --config cli option
+build.rustflags = [\"cli1\", \"cli2\"]
+
+# Environment variables
+# CARGO_BUILD_RUSTFLAGS=[..]env1 env2[..]
+
+# [ROOT]/foo/.cargo/config.toml
+build.rustflags = [\"--flag-directory\"]
+
+# [ROOT]/home/.cargo/config.toml
+build.rustflags = [\"--flag-global\"]
+
+",
+ )
+ .with_stderr("")
+ .run();
+}
+
+#[cargo_test]
+fn unmerged_json() {
+ let sub_folder = common_setup();
+ cargo_process("config get --merged=no --format=json -Zunstable-options")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config"])
+ .with_status(101)
+ .with_stderr(
+ "error: the `json` format does not support --merged=no, try the `toml` format instead",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn includes() {
+ let sub_folder = common_setup();
+ fs::write(
+ sub_folder.join("config.toml"),
+ "
+ include = 'other.toml'
+ [build]
+ rustflags = [\"--flag-directory\"]
+ ",
+ )
+ .unwrap();
+ fs::write(
+ sub_folder.join("other.toml"),
+ "
+ [build]
+ rustflags = [\"--flag-other\"]
+ ",
+ )
+ .unwrap();
+
+ cargo_process("config get build.rustflags -Zunstable-options -Zconfig-include")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config", "config-include"])
+ .with_stdout(r#"build.rustflags = ["--flag-other", "--flag-directory", "--flag-global"]"#)
+ .with_stderr("")
+ .run();
+
+ cargo_process("config get build.rustflags --show-origin -Zunstable-options -Zconfig-include")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config", "config-include"])
+ .with_stdout(
+ "\
+build.rustflags = [
+ \"--flag-other\", # [ROOT]/foo/.cargo/other.toml
+ \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml
+ \"--flag-global\", # [ROOT]/home/.cargo/config.toml
+]
+",
+ )
+ .with_stderr("")
+ .run();
+
+ cargo_process("config get --merged=no -Zunstable-options -Zconfig-include")
+ .cwd(&sub_folder.parent().unwrap())
+ .masquerade_as_nightly_cargo(&["cargo-config", "config-include"])
+ .with_stdout(
+ "\
+# Environment variables
+# CARGO=[..]
+# CARGO_HOME=[ROOT]/home/.cargo
+
+# [ROOT]/foo/.cargo/other.toml
+build.rustflags = [\"--flag-other\"]
+
+# [ROOT]/foo/.cargo/config.toml
+build.rustflags = [\"--flag-directory\"]
+include = \"other.toml\"
+
+# [ROOT]/home/.cargo/config.toml
+alias.foo = \"abc --xyz\"
+build.jobs = 99
+build.rustflags = [\"--flag-global\"]
+extra-table.somekey = \"somevalue\"
+profile.dev.opt-level = 3
+profile.dev.package.foo.opt-level = 1
+target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\"
+
+",
+ )
+ .with_stderr("")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_env_config.rs b/src/tools/cargo/tests/testsuite/cargo_env_config.rs
new file mode 100644
index 000000000..d80c38d0e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_env_config.rs
@@ -0,0 +1,181 @@
+//! Tests for `[env]` config.
+
+use cargo_test_support::{basic_bin_manifest, project};
+
+#[cargo_test]
+fn env_basic() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ use std::env;
+ fn main() {
+ println!( "compile-time:{}", env!("ENV_TEST_1233") );
+ println!( "run-time:{}", env::var("ENV_TEST_1233").unwrap());
+ }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [env]
+ ENV_TEST_1233 = "Hello"
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .with_stdout_contains("compile-time:Hello")
+ .with_stdout_contains("run-time:Hello")
+ .run();
+}
+
+#[cargo_test]
+fn env_invalid() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [env]
+ ENV_TEST_BOOL = false
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]could not load config key `env.ENV_TEST_BOOL`")
+ .run();
+}
+
+#[cargo_test]
+fn env_no_cargo_home() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [env]
+ CARGO_HOME = "/"
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]setting the `CARGO_HOME` environment variable is not supported in the `[env]` configuration table")
+ .run();
+}
+
+#[cargo_test]
+fn env_force() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ use std::env;
+ fn main() {
+ println!( "ENV_TEST_FORCED:{}", env!("ENV_TEST_FORCED") );
+ println!( "ENV_TEST_UNFORCED:{}", env!("ENV_TEST_UNFORCED") );
+ println!( "ENV_TEST_UNFORCED_DEFAULT:{}", env!("ENV_TEST_UNFORCED_DEFAULT") );
+ }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [env]
+ ENV_TEST_UNFORCED_DEFAULT = "from-config"
+ ENV_TEST_UNFORCED = { value = "from-config", force = false }
+ ENV_TEST_FORCED = { value = "from-config", force = true }
+ "#,
+ )
+ .build();
+
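+ // `force = true` lets the config value override a variable already set in the environment; unforced entries defer to the existing value.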
+ p.cargo("run")
+ .env("ENV_TEST_FORCED", "from-env")
+ .env("ENV_TEST_UNFORCED", "from-env")
+ .env("ENV_TEST_UNFORCED_DEFAULT", "from-env")
+ .with_stdout_contains("ENV_TEST_FORCED:from-config")
+ .with_stdout_contains("ENV_TEST_UNFORCED:from-env")
+ .with_stdout_contains("ENV_TEST_UNFORCED_DEFAULT:from-env")
+ .run();
+}
+
+#[cargo_test]
+fn env_relative() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo2"))
+ .file(
+ "src/main.rs",
+ r#"
+ use std::env;
+ use std::path::Path;
+ fn main() {
+ println!( "ENV_TEST_REGULAR:{}", env!("ENV_TEST_REGULAR") );
+ println!( "ENV_TEST_REGULAR_DEFAULT:{}", env!("ENV_TEST_REGULAR_DEFAULT") );
+ println!( "ENV_TEST_RELATIVE:{}", env!("ENV_TEST_RELATIVE") );
+
+ assert!( Path::new(env!("ENV_TEST_RELATIVE")).is_absolute() );
+ assert!( !Path::new(env!("ENV_TEST_REGULAR")).is_absolute() );
+ assert!( !Path::new(env!("ENV_TEST_REGULAR_DEFAULT")).is_absolute() );
+ }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [env]
+ ENV_TEST_REGULAR = { value = "Cargo.toml", relative = false }
+ ENV_TEST_REGULAR_DEFAULT = "Cargo.toml"
+ ENV_TEST_RELATIVE = { value = "Cargo.toml", relative = true }
+ "#,
+ )
+ .build();
+
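+ // `relative = true` marks the value as a config-relative path, so it is resolved to an absolute path before being set.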
+ p.cargo("run").run();
+}
+
+#[cargo_test]
+fn env_no_override() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("unchanged"))
+ .file(
+ "src/main.rs",
+ r#"
+ use std::env;
+ fn main() {
+ println!( "CARGO_PKG_NAME:{}", env!("CARGO_PKG_NAME") );
+ }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [env]
+ CARGO_PKG_NAME = { value = "from-config", force = true }
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .with_stdout_contains("CARGO_PKG_NAME:unchanged")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_features.rs b/src/tools/cargo/tests/testsuite/cargo_features.rs
new file mode 100644
index 000000000..6e5531431
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_features.rs
@@ -0,0 +1,714 @@
+//! Tests for `cargo-features` definitions.
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{project, registry};
+
+#[cargo_test]
+fn feature_required() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ im-a-teapot = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["test-dummy-unstable"])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ the `im-a-teapot` manifest key is unstable and may not work properly in England
+
+Caused by:
+ feature `test-dummy-unstable` is required
+
+ The package requires the Cargo feature called `test-dummy-unstable`, \
+ but that feature is not stabilized in this version of Cargo (1.[..]).
+ Consider adding `cargo-features = [\"test-dummy-unstable\"]` to the top of Cargo.toml \
+ (above the [package] table) to tell Cargo you are opting in to use this unstable feature.
+ See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html for more information \
+ about the status of this feature.
+",
+ )
+ .run();
+
+ // Same, but stable.
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ the `im-a-teapot` manifest key is unstable and may not work properly in England
+
+Caused by:
+ feature `test-dummy-unstable` is required
+
+ The package requires the Cargo feature called `test-dummy-unstable`, \
+ but that feature is not stabilized in this version of Cargo (1.[..]).
+ Consider trying a newer version of Cargo (this may require the nightly release).
+ See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \
+ for more information about the status of this feature.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn feature_required_dependency() {
+ // The feature has been stabilized by a future version of Cargo, and
+ // someone published something that uses it, but this version of Cargo has not
+ // yet stabilized it. Don't suggest editing Cargo.toml, since published
+ // packages shouldn't be edited.
+ Package::new("bar", "1.0.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ im-a-teapot = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["test-dummy-unstable"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] [..]
+[DOWNLOADED] bar v1.0.0 [..]
+error: failed to download replaced source registry `crates-io`
+
+Caused by:
+ failed to parse manifest at `[..]/bar-1.0.0/Cargo.toml`
+
+Caused by:
+ the `im-a-teapot` manifest key is unstable and may not work properly in England
+
+Caused by:
+ feature `test-dummy-unstable` is required
+
+ The package requires the Cargo feature called `test-dummy-unstable`, \
+ but that feature is not stabilized in this version of Cargo (1.[..]).
+ Consider trying a more recent nightly release.
+ See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \
+ for more information about the status of this feature.
+",
+ )
+ .run();
+
+ // Same, but stable.
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to download `bar v1.0.0`
+
+Caused by:
+ unable to get packages from source
+
+Caused by:
+ failed to download replaced source registry `crates-io`
+
+Caused by:
+ failed to parse manifest at `[..]/bar-1.0.0/Cargo.toml`
+
+Caused by:
+ the `im-a-teapot` manifest key is unstable and may not work properly in England
+
+Caused by:
+ feature `test-dummy-unstable` is required
+
+ The package requires the Cargo feature called `test-dummy-unstable`, \
+ but that feature is not stabilized in this version of Cargo (1.[..]).
+ Consider trying a newer version of Cargo (this may require the nightly release).
+ See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \
+ for more information about the status of this feature.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn unknown_feature() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["foo"]
+
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ unknown cargo feature `foo`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn stable_feature_warns() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["test-dummy-stable"]
+
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .with_stderr(
+ "\
+warning: the cargo feature `test-dummy-stable` has been stabilized in the 1.0 \
+release and is no longer necessary to be listed in the manifest
+ See https://doc.rust-lang.org/[..]cargo/ for more information about using this feature.
+[CHECKING] a [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zallow-features is unstable")]
+fn allow_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["test-dummy-unstable"]
+
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ im-a-teapot = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("-Zallow-features=test-dummy-unstable check")
+ .masquerade_as_nightly_cargo(&["allow-features", "test-dummy-unstable"])
+ .with_stderr(
+ "\
+[CHECKING] a [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("-Zallow-features=test-dummy-unstable,print-im-a-teapot -Zprint-im-a-teapot check")
+ .masquerade_as_nightly_cargo(&[
+ "allow-features",
+ "test-dummy-unstable",
+ "print-im-a-teapot",
+ ])
+ .with_stdout("im-a-teapot = true")
+ .run();
+
+ p.cargo("-Zallow-features=test-dummy-unstable -Zprint-im-a-teapot check")
+ .masquerade_as_nightly_cargo(&[
+ "allow-features",
+ "test-dummy-unstable",
+ "print-im-a-teapot",
+ ])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: the feature `print-im-a-teapot` is not in the list of allowed features: [test-dummy-unstable]
+",
+ )
+ .run();
+
+ p.cargo("-Zallow-features= check")
+ .masquerade_as_nightly_cargo(&["allow-features", "test-dummy-unstable"])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ the feature `test-dummy-unstable` is not in the list of allowed features: []
+",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zallow-features is unstable")]
+fn allow_features_to_rustc() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(test_2018_feature)]
+ "#,
+ )
+ .build();
+
+ p.cargo("-Zallow-features= check")
+ .masquerade_as_nightly_cargo(&["allow-features"])
+ .with_status(101)
+ .with_stderr_contains("[..]E0725[..]")
+ .run();
+
+ p.cargo("-Zallow-features=test_2018_feature check")
+ .masquerade_as_nightly_cargo(&["allow-features"])
+ .with_stderr(
+ "\
+[CHECKING] a [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zallow-features is unstable")]
+fn allow_features_in_cfg() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["test-dummy-unstable"]
+
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ im-a-teapot = true
+ "#,
+ )
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [unstable]
+ allow-features = ["test-dummy-unstable", "print-im-a-teapot"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&[
+ "allow-features",
+ "test-dummy-unstable",
+ "print-im-a-teapot",
+ ])
+ .with_stderr(
+ "\
+[CHECKING] a [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("-Zprint-im-a-teapot check")
+ .masquerade_as_nightly_cargo(&[
+ "allow-features",
+ "test-dummy-unstable",
+ "print-im-a-teapot",
+ ])
+ .with_stdout("im-a-teapot = true")
+ .with_stderr("[FINISHED] [..]")
+ .run();
+
+ p.cargo("-Zunstable-options check")
+ .masquerade_as_nightly_cargo(&["allow-features", "test-dummy-unstable", "print-im-a-teapot"])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: the feature `unstable-options` is not in the list of allowed features: [print-im-a-teapot, test-dummy-unstable]
+",
+ )
+ .run();
+
+ // -Zallow-features overrides .cargo/config
+ p.cargo("-Zallow-features=test-dummy-unstable -Zprint-im-a-teapot check")
+ .masquerade_as_nightly_cargo(&[
+ "allow-features",
+ "test-dummy-unstable",
+ "print-im-a-teapot",
+ ])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: the feature `print-im-a-teapot` is not in the list of allowed features: [test-dummy-unstable]
+",
+ )
+ .run();
+
+ p.cargo("-Zallow-features= check")
+ .masquerade_as_nightly_cargo(&[
+ "allow-features",
+ "test-dummy-unstable",
+ "print-im-a-teapot",
+ ])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ the feature `test-dummy-unstable` is not in the list of allowed features: []
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn nightly_feature_requires_nightly() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["test-dummy-unstable"]
+
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ im-a-teapot = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["test-dummy-unstable"])
+ .with_stderr(
+ "\
+[CHECKING] a [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \
+ but this is the `stable` channel
+ See [..]
+ See https://doc.rust-lang.org/[..]cargo/reference/unstable.html for more \
+ information about using this feature.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn nightly_feature_requires_nightly_in_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ cargo-features = ["test-dummy-unstable"]
+
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ im-a-teapot = true
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["test-dummy-unstable"])
+ .with_stderr(
+ "\
+[CHECKING] a [..]
+[CHECKING] b [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to get `a` as a dependency of package `b v0.0.1 ([..])`
+
+Caused by:
+ failed to load source for dependency `a`
+
+Caused by:
+ Unable to update [..]
+
+Caused by:
+ failed to parse manifest at `[..]`
+
+Caused by:
+ the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \
+ but this is the `stable` channel
+ See [..]
+ See https://doc.rust-lang.org/[..]cargo/reference/unstable.html for more \
+ information about using this feature.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cant_publish() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["test-dummy-unstable"]
+
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ im-a-teapot = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["test-dummy-unstable"])
+ .with_stderr(
+ "\
+[CHECKING] a [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \
+ but this is the `stable` channel
+ See [..]
+ See https://doc.rust-lang.org/[..]cargo/reference/unstable.html for more \
+ information about using this feature.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn z_flags_rejected() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["test-dummy-unstable"]
+
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ im-a-teapot = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check -Zprint-im-a-teapot")
+ .with_status(101)
+ .with_stderr(
+ "error: the `-Z` flag is only accepted on the nightly \
+ channel of Cargo, but this is the `stable` channel\n\
+ See [..]",
+ )
+ .run();
+
+ p.cargo("check -Zarg")
+ .masquerade_as_nightly_cargo(&["test-dummy-unstable"])
+ .with_status(101)
+ .with_stderr("error: unknown `-Z` flag specified: arg")
+ .run();
+
+ p.cargo("check -Zprint-im-a-teapot")
+ .masquerade_as_nightly_cargo(&["test-dummy-unstable"])
+ .with_stdout("im-a-teapot = true\n")
+ .with_stderr(
+ "\
+[CHECKING] a [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_allowed() {
+ let registry = registry::RegistryBuilder::new()
+ .http_api()
+ .http_index()
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["test-dummy-unstable"]
+
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .masquerade_as_nightly_cargo(&["test-dummy-unstable"])
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[WARNING] [..]
+[..]
+[PACKAGING] a v0.0.1 [..]
+[VERIFYING] a v0.0.1 [..]
+[COMPILING] a v0.0.1 [..]
+[FINISHED] [..]
+[PACKAGED] [..]
+[UPLOADING] a v0.0.1 [..]
+[UPLOADED] a v0.0.1 to registry `crates-io`
+note: Waiting for `a v0.0.1` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] a v0.0.1 at registry `crates-io`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn wrong_position() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ cargo-features = ["test-dummy-unstable"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["test-dummy-unstable"])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at [..]
+
+Caused by:
+ cargo-features = [\"test-dummy-unstable\"] was found in the wrong location: it \
+ should be set at the top of Cargo.toml before any tables
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn z_stabilized() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("check -Z cache-messages")
+ .masquerade_as_nightly_cargo(&["always_nightly"])
+ .with_stderr(
+ "\
+warning: flag `-Z cache-messages` has been stabilized in the 1.40 release, \
+ and is no longer necessary
+ Message caching is now always enabled.
+
+[CHECKING] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("check -Z offline")
+ .masquerade_as_nightly_cargo(&["always_nightly"])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: flag `-Z offline` has been stabilized in the 1.36 release
+ Offline mode is now available via the --offline CLI option
+
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/in b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/mod.rs
new file mode 100644
index 000000000..59a2333d6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["clippy"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/out/Cargo.toml
new file mode 100644
index 000000000..09a9ee86e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/out/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log
new file mode 100644
index 000000000..dd71023a8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log
@@ -0,0 +1,2 @@
+ Removing clippy from dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/build/in b/src/tools/cargo/tests/testsuite/cargo_remove/build/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/build/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/build/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/build/mod.rs
new file mode 100644
index 000000000..f4c9dcb94
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/build/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--build", "semver"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/build/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/build/out/Cargo.toml
new file mode 100644
index 000000000..babdc0a99
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/build/out/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/build/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/build/stderr.log
new file mode 100644
index 000000000..f037ebe28
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/build/stderr.log
@@ -0,0 +1,2 @@
+ Removing semver from build-dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/build/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/build/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/build/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dev/in b/src/tools/cargo/tests/testsuite/cargo_remove/dev/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dev/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dev/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/dev/mod.rs
new file mode 100644
index 000000000..7d61fa954
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dev/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--dev", "regex"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dev/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/dev/out/Cargo.toml
new file mode 100644
index 000000000..40744a566
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dev/out/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dev/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/dev/stderr.log
new file mode 100644
index 000000000..c629b26b1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dev/stderr.log
@@ -0,0 +1,2 @@
+ Removing regex from dev-dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dev/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/dev/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dev/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/in b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/mod.rs
new file mode 100644
index 000000000..dca189315
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["semver", "--dry-run"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/out/Cargo.toml
new file mode 100644
index 000000000..340f06cda
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/out/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/out/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/out/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/stderr.log
new file mode 100644
index 000000000..8b118911c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/stderr.log
@@ -0,0 +1,2 @@
+ Removing semver from dependencies
+warning: aborting remove due to dry run
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dry_run/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/mod.rs
new file mode 100644
index 000000000..2c1d592fb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/mod.rs
@@ -0,0 +1,72 @@
+use cargo_test_support::basic_manifest;
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::git;
+use cargo_test_support::project;
+use cargo_test_support::CargoCommand;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+
+ let git_project1 = git::new("bar1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ })
+ .url();
+
+ let git_project2 = git::new("bar2", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ })
+ .url();
+
+ let in_project = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ "[workspace]\n\
+ members = [ \"my-member\" ]\n\
+ \n\
+ [package]\n\
+ name = \"my-project\"\n\
+ version = \"0.1.0\"\n\
+ \n\
+ [dependencies]\n\
+ bar = {{ git = \"{git_project1}\" }}\n\
+ \n\
+ [patch.\"{git_project1}\"]\n\
+ bar = {{ git = \"{git_project2}\" }}\n\
+ \n\
+ [patch.crates-io]\n\
+ bar = {{ git = \"{git_project2}\" }}\n",
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "my-member/Cargo.toml",
+ "[package]\n\
+ name = \"my-member\"\n\
+ version = \"0.1.0\"\n\
+ \n\
+ [dependencies]\n\
+ bar = \"0.1.0\"\n",
+ )
+ .file("my-member/src/lib.rs", "")
+ .build();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["bar"])
+ .current_dir(&in_project.root())
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &in_project.root());
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/Cargo.toml
new file mode 100644
index 000000000..2d8c22115
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/Cargo.toml
@@ -0,0 +1,9 @@
+[workspace]
+members = [ "my-member" ]
+
+[package]
+name = "my-project"
+version = "0.1.0"
+
+[patch.crates-io]
+bar = { git = "[ROOTURL]/bar2" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stderr.log
new file mode 100644
index 000000000..1dd2e7757
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stderr.log
@@ -0,0 +1,3 @@
+ Removing bar from dependencies
+ Updating git repository `[ROOTURL]/bar2`
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/in/Cargo.toml
new file mode 100644
index 000000000..d781ad5a5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/in/Cargo.toml
@@ -0,0 +1,36 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+toml = "0.2.3"
+docopt = "0.6"
+
+[features]
+std = ["serde/std", "semver/std"]
+
+[profile.dev.package.docopt]
+opt-level = 3
+
+[profile.dev.package."toml@0.1.0"]
+opt-level = 3
+
+[profile.release.package.toml]
+opt-level = 1
+overflow-checks = false
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/in/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/in/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/mod.rs
new file mode 100644
index 000000000..7047c92e2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["toml"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/out/Cargo.toml
new file mode 100644
index 000000000..21b43fe68
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/out/Cargo.toml
@@ -0,0 +1,32 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+toml = "0.2.3"
+docopt = "0.6"
+
+[features]
+std = ["serde/std", "semver/std"]
+
+[profile.dev.package.docopt]
+opt-level = 3
+
+[profile.release.package.toml]
+opt-level = 1
+overflow-checks = false
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stderr.log
new file mode 100644
index 000000000..0e2e38f26
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stderr.log
@@ -0,0 +1,2 @@
+ Removing toml from dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/Cargo.toml
new file mode 100644
index 000000000..48242c2d3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = [ "my-package" ]
+
+[replace]
+"toml:0.1.0" = { path = "../toml" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/my-package/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/my-package/Cargo.toml
new file mode 100644
index 000000000..bee343a8b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/my-package/Cargo.toml
@@ -0,0 +1,26 @@
+[package]
+name = "my-package"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+toml = "0.2.3"
+docopt = "0.6"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/my-package/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/my-package/src/main.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/in/my-package/src/main.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/mod.rs
new file mode 100644
index 000000000..717adef3e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--package", "my-package", "toml"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/Cargo.toml
new file mode 100644
index 000000000..83a6a04d0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = [ "my-package" ]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/my-package/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/my-package/Cargo.toml
new file mode 100644
index 000000000..36ddf7a04
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/my-package/Cargo.toml
@@ -0,0 +1,25 @@
+[package]
+name = "my-package"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+toml = "0.2.3"
+docopt = "0.6"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/my-package/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/my-package/src/main.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/out/my-package/src/main.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stderr.log
new file mode 100644
index 000000000..0e2e38f26
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stderr.log
@@ -0,0 +1,2 @@
+ Removing toml from dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/in b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/mod.rs
new file mode 100644
index 000000000..eac3c8b46
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["foo", "--flag"])
+ .current_dir(cwd)
+ .assert()
+ .code(1)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/out/Cargo.toml
new file mode 100644
index 000000000..340f06cda
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/out/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/stderr.log
new file mode 100644
index 000000000..ac5f3cfd1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/stderr.log
@@ -0,0 +1,7 @@
+error: unexpected argument '--flag' found
+
+ tip: to pass '--flag' as a value, use '-- --flag'
+
+Usage: cargo[EXE] remove <DEP_ID>...
+
+For more information, try '--help'.
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_arg/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/in b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/mod.rs
new file mode 100644
index 000000000..c4dbeae91
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["invalid_dependency_name"])
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/out/Cargo.toml
new file mode 100644
index 000000000..340f06cda
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/out/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/stderr.log
new file mode 100644
index 000000000..eea124d65
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/stderr.log
@@ -0,0 +1,2 @@
+ Removing invalid_dependency_name from dependencies
+error: the dependency `invalid_dependency_name` could not be found in `dependencies`.
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_dep/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/in b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/in
new file mode 120000
index 000000000..e2165e8cb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/in
@@ -0,0 +1 @@
+../remove-package.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/mod.rs
new file mode 100644
index 000000000..bff09882e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["docopt", "--package", "dep-c"])
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/Cargo.toml
new file mode 100644
index 000000000..733857113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = [
+ "dep-a",
+ "dep-b"
+]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-a/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-a/Cargo.toml
new file mode 100644
index 000000000..7e87ce314
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-a/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "dep-a"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-a/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-a/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-a/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-b/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-b/Cargo.toml
new file mode 100644
index 000000000..37d2d3ddf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-b/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "dep-b"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-b/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-b/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/out/dep-b/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/stderr.log
new file mode 100644
index 000000000..683512ca0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/stderr.log
@@ -0,0 +1 @@
+error: package(s) `dep-c` not found in workspace `[ROOT]/case`
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/in b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/in
new file mode 120000
index 000000000..e2165e8cb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/in
@@ -0,0 +1 @@
+../remove-package.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/mod.rs
new file mode 100644
index 000000000..5093d5d2d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["docopt"])
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/Cargo.toml
new file mode 100644
index 000000000..733857113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = [
+ "dep-a",
+ "dep-b"
+]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/Cargo.toml
new file mode 100644
index 000000000..7e87ce314
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "dep-a"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/Cargo.toml
new file mode 100644
index 000000000..37d2d3ddf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "dep-b"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/stderr.log
new file mode 100644
index 000000000..8a03c9e5b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/stderr.log
@@ -0,0 +1,2 @@
+error: `cargo remove` could not determine which package to modify. Use the `--package` option to specify a package.
+available packages: dep-a, dep-b
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_package_multiple/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/in b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/mod.rs
new file mode 100644
index 000000000..80d42be1d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--build", "docopt"])
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/out/Cargo.toml
new file mode 100644
index 000000000..340f06cda
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/out/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/stderr.log
new file mode 100644
index 000000000..fff5ff00a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/stderr.log
@@ -0,0 +1,2 @@
+ Removing docopt from build-dependencies
+error: the dependency `docopt` could not be found in `build-dependencies`.
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/in b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/mod.rs
new file mode 100644
index 000000000..7be8fd628
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--dev", "semver", "regex"])
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/out/Cargo.toml
new file mode 100644
index 000000000..340f06cda
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/out/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/stderr.log
new file mode 100644
index 000000000..1926f9577
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/stderr.log
@@ -0,0 +1,2 @@
+ Removing semver from dev-dependencies
+error: the dependency `semver` could not be found in `dev-dependencies`.
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_section_dep/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/in b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/in
new file mode 120000
index 000000000..d5742d038
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/in
@@ -0,0 +1 @@
+../remove-target.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/mod.rs
new file mode 100644
index 000000000..34deb6cb8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--target", "powerpc-unknown-linux-gnu", "dbus"])
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/out/Cargo.toml
new file mode 100644
index 000000000..14747c70b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/out/Cargo.toml
@@ -0,0 +1,33 @@
+[package]
+name = "cargo-remove-target-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[target.x86_64-unknown-freebsd.build-dependencies]
+semver = "0.1.0"
+
+[target.x86_64-unknown-linux-gnu.build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[target.x86_64-unknown-linux-gnu.dependencies]
+dbus = "0.6.2"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[target.x86_64-unknown-linux-gnu.dev-dependencies]
+ncurses = "20.0"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/stderr.log
new file mode 100644
index 000000000..5075b80b7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/stderr.log
@@ -0,0 +1,2 @@
+ Removing dbus from dependencies for target `powerpc-unknown-linux-gnu`
+error: the dependency `dbus` could not be found in `target.powerpc-unknown-linux-gnu.dependencies`.
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/in b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/in
new file mode 120000
index 000000000..d5742d038
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/in
@@ -0,0 +1 @@
+../remove-target.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/mod.rs
new file mode 100644
index 000000000..e04418fa8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--target", "x86_64-unknown-linux-gnu", "toml"])
+ .current_dir(cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/out/Cargo.toml
new file mode 100644
index 000000000..14747c70b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/out/Cargo.toml
@@ -0,0 +1,33 @@
+[package]
+name = "cargo-remove-target-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[target.x86_64-unknown-freebsd.build-dependencies]
+semver = "0.1.0"
+
+[target.x86_64-unknown-linux-gnu.build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[target.x86_64-unknown-linux-gnu.dependencies]
+dbus = "0.6.2"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[target.x86_64-unknown-linux-gnu.dev-dependencies]
+ncurses = "20.0"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/stderr.log
new file mode 100644
index 000000000..54bfe085f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/stderr.log
@@ -0,0 +1,2 @@
+ Removing toml from dependencies for target `x86_64-unknown-linux-gnu`
+error: the dependency `toml` could not be found in `target.x86_64-unknown-linux-gnu.dependencies`.
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/invalid_target_dep/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/mod.rs
new file mode 100644
index 000000000..fd8b4a233
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/mod.rs
@@ -0,0 +1,88 @@
+mod avoid_empty_tables;
+mod build;
+mod dev;
+mod dry_run;
+mod gc_patch;
+mod gc_profile;
+mod gc_replace;
+mod invalid_arg;
+mod invalid_dep;
+mod invalid_package;
+mod invalid_package_multiple;
+mod invalid_section;
+mod invalid_section_dep;
+mod invalid_target;
+mod invalid_target_dep;
+mod multiple_deps;
+mod multiple_dev;
+mod no_arg;
+mod offline;
+mod optional_dep_feature;
+mod optional_feature;
+mod package;
+mod remove_basic;
+mod target;
+mod target_build;
+mod target_dev;
+mod update_lock_file;
+mod workspace;
+mod workspace_non_virtual;
+mod workspace_preserved;
+
+fn init_registry() {
+ cargo_test_support::registry::init();
+ add_registry_packages(false);
+}
+
+fn add_registry_packages(alt: bool) {
+ for name in [
+ "clippy",
+ "dbus",
+ "docopt",
+ "ncurses",
+ "pad",
+ "regex",
+ "rustc-serialize",
+ "toml",
+ ] {
+ cargo_test_support::registry::Package::new(name, "0.1.1+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "0.2.0+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "0.2.3+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "0.4.1+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "0.6.2+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "0.9.9+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "1.0.90+my-package")
+ .alternative(alt)
+ .publish();
+ cargo_test_support::registry::Package::new(name, "20.0.0+my-package")
+ .alternative(alt)
+ .publish();
+ }
+
+ for name in ["semver", "serde"] {
+ cargo_test_support::registry::Package::new(name, "0.1.1")
+ .alternative(alt)
+ .feature("std", &[])
+ .publish();
+ cargo_test_support::registry::Package::new(name, "0.9.0")
+ .alternative(alt)
+ .feature("std", &[])
+ .publish();
+ cargo_test_support::registry::Package::new(name, "1.0.90")
+ .alternative(alt)
+ .feature("std", &[])
+ .publish();
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/in b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/mod.rs
new file mode 100644
index 000000000..35922b738
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["docopt", "semver"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/out/Cargo.toml
new file mode 100644
index 000000000..53cde0829
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/out/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+rustc-serialize = "0.4"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stderr.log
new file mode 100644
index 000000000..1eb59aca1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stderr.log
@@ -0,0 +1,3 @@
+ Removing docopt from dependencies
+ Removing semver from dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/in b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/mod.rs
new file mode 100644
index 000000000..5eac7e2f8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--dev", "regex", "serde"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/out/Cargo.toml
new file mode 100644
index 000000000..d961b2bb1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/out/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[features]
+std = ["semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stderr.log
new file mode 100644
index 000000000..a3042dcc3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stderr.log
@@ -0,0 +1,3 @@
+ Removing regex from dev-dependencies
+ Removing serde from dev-dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/in b/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/mod.rs
new file mode 100644
index 000000000..d0c66f9b0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/mod.rs
@@ -0,0 +1,24 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .current_dir(cwd)
+ .assert()
+ .code(1)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/out/Cargo.toml
new file mode 100644
index 000000000..340f06cda
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/out/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/stderr.log
new file mode 100644
index 000000000..54fa9f424
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/stderr.log
@@ -0,0 +1,6 @@
+error: the following required arguments were not provided:
+ <DEP_ID>...
+
+Usage: cargo[EXE] remove <DEP_ID>...
+
+For more information, try '--help'.
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/no_arg/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/offline/in b/src/tools/cargo/tests/testsuite/cargo_remove/offline/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/offline/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/offline/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/offline/mod.rs
new file mode 100644
index 000000000..d03463927
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/offline/mod.rs
@@ -0,0 +1,32 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ // run the metadata command to populate the cache
+ snapbox::cmd::Command::cargo_ui()
+ .arg("metadata")
+ .current_dir(cwd)
+ .assert()
+ .success();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["docopt", "--offline"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/offline/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/offline/out/Cargo.toml
new file mode 100644
index 000000000..b8628eed1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/offline/out/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/offline/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/offline/stderr.log
new file mode 100644
index 000000000..7083976b1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/offline/stderr.log
@@ -0,0 +1 @@
+ Removing docopt from dependencies
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/offline/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/offline/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/offline/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/in b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/mod.rs
new file mode 100644
index 000000000..cae736b34
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--dev", "serde"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/out/Cargo.toml
new file mode 100644
index 000000000..63112d334
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/out/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+
+[features]
+std = ["semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log
new file mode 100644
index 000000000..72c9f9217
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log
@@ -0,0 +1,2 @@
+ Removing serde from dev-dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/in b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/mod.rs
new file mode 100644
index 000000000..af54226bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["semver"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/out/Cargo.toml
new file mode 100644
index 000000000..9ac0b1b32
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/out/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stderr.log
new file mode 100644
index 000000000..2dc546fa7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stderr.log
@@ -0,0 +1,2 @@
+ Removing semver from dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/package/in b/src/tools/cargo/tests/testsuite/cargo_remove/package/in
new file mode 120000
index 000000000..e2165e8cb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/package/in
@@ -0,0 +1 @@
+../remove-package.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/package/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/package/mod.rs
new file mode 100644
index 000000000..2714f3197
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/package/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["docopt", "--package", "dep-a"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/package/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/package/out/Cargo.toml
new file mode 100644
index 000000000..733857113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/package/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = [
+ "dep-a",
+ "dep-b"
+]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-a/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-a/Cargo.toml
new file mode 100644
index 000000000..5f2bfe6fb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-a/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "dep-a"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-a/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-a/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-a/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-b/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-b/Cargo.toml
new file mode 100644
index 000000000..37d2d3ddf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-b/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "dep-b"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-b/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-b/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/package/out/dep-b/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/package/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/package/stderr.log
new file mode 100644
index 000000000..231026f2b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/package/stderr.log
@@ -0,0 +1,2 @@
+ Removing docopt from dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/package/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/package/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/package/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove-basic.in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/remove-basic.in/Cargo.toml
new file mode 100644
index 000000000..340f06cda
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove-basic.in/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove-basic.in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/remove-basic.in/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove-basic.in/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/Cargo.toml
new file mode 100644
index 000000000..733857113
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = [
+ "dep-a",
+ "dep-b"
+]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-a/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-a/Cargo.toml
new file mode 100644
index 000000000..7e87ce314
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-a/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "dep-a"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-a/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-a/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-a/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-b/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-b/Cargo.toml
new file mode 100644
index 000000000..37d2d3ddf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-b/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "dep-b"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-b/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-b/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove-package.in/dep-b/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove-target.in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/remove-target.in/Cargo.toml
new file mode 100644
index 000000000..14747c70b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove-target.in/Cargo.toml
@@ -0,0 +1,33 @@
+[package]
+name = "cargo-remove-target-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[target.x86_64-unknown-freebsd.build-dependencies]
+semver = "0.1.0"
+
+[target.x86_64-unknown-linux-gnu.build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[target.x86_64-unknown-linux-gnu.dependencies]
+dbus = "0.6.2"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[target.x86_64-unknown-linux-gnu.dev-dependencies]
+ncurses = "20.0"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/in b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/in
new file mode 120000
index 000000000..7fd0ba5eb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/in
@@ -0,0 +1 @@
+../remove-basic.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/mod.rs
new file mode 100644
index 000000000..53381e6bc
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["docopt"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/out/Cargo.toml
new file mode 100644
index 000000000..b8628eed1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/out/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stderr.log
new file mode 100644
index 000000000..231026f2b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stderr.log
@@ -0,0 +1,2 @@
+ Removing docopt from dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target/in b/src/tools/cargo/tests/testsuite/cargo_remove/target/in
new file mode 120000
index 000000000..d5742d038
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target/in
@@ -0,0 +1 @@
+../remove-target.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/target/mod.rs
new file mode 100644
index 000000000..1447c753d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--target", "x86_64-unknown-linux-gnu", "dbus"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/target/out/Cargo.toml
new file mode 100644
index 000000000..e29fbbd00
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target/out/Cargo.toml
@@ -0,0 +1,30 @@
+[package]
+name = "cargo-remove-target-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[target.x86_64-unknown-freebsd.build-dependencies]
+semver = "0.1.0"
+
+[target.x86_64-unknown-linux-gnu.build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[target.x86_64-unknown-linux-gnu.dev-dependencies]
+ncurses = "20.0"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/target/stderr.log
new file mode 100644
index 000000000..810abd994
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target/stderr.log
@@ -0,0 +1,2 @@
+ Removing dbus from dependencies for target `x86_64-unknown-linux-gnu`
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/target/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_build/in b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/in
new file mode 120000
index 000000000..d5742d038
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/in
@@ -0,0 +1 @@
+../remove-target.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_build/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/mod.rs
new file mode 100644
index 000000000..11afbbf8f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--build", "--target", "x86_64-unknown-linux-gnu", "semver"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_build/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/out/Cargo.toml
new file mode 100644
index 000000000..7353c7a89
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/out/Cargo.toml
@@ -0,0 +1,30 @@
+[package]
+name = "cargo-remove-target-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[target.x86_64-unknown-freebsd.build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[target.x86_64-unknown-linux-gnu.dependencies]
+dbus = "0.6.2"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[target.x86_64-unknown-linux-gnu.dev-dependencies]
+ncurses = "20.0"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_build/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/stderr.log
new file mode 100644
index 000000000..b06f8f319
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/stderr.log
@@ -0,0 +1,2 @@
+ Removing semver from build-dependencies for target `x86_64-unknown-linux-gnu`
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_build/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/in b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/in
new file mode 120000
index 000000000..d5742d038
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/in
@@ -0,0 +1 @@
+../remove-target.in/
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/mod.rs
new file mode 100644
index 000000000..d303c2b85
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--dev", "--target", "x86_64-unknown-linux-gnu", "ncurses"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/out/Cargo.toml
new file mode 100644
index 000000000..a477b3d55
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/out/Cargo.toml
@@ -0,0 +1,30 @@
+[package]
+name = "cargo-remove-target-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[target.x86_64-unknown-freebsd.build-dependencies]
+semver = "0.1.0"
+
+[target.x86_64-unknown-linux-gnu.build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[target.x86_64-unknown-linux-gnu.dependencies]
+dbus = "0.6.2"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stderr.log
new file mode 100644
index 000000000..68553a3bd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stderr.log
@@ -0,0 +1,2 @@
+ Removing ncurses from dev-dependencies for target `x86_64-unknown-linux-gnu`
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock
new file mode 100644
index 000000000..06c2052d5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock
@@ -0,0 +1,58 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+dependencies = [
+ "clippy",
+ "docopt",
+ "regex",
+ "rustc-serialize",
+ "semver",
+ "serde",
+ "toml",
+]
+
+[[package]]
+name = "clippy"
+version = "0.4.1+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47ced0eda54e9ddc6063f0e1d0164493cd16c84c6b6a0329a536967c44e205f7"
+
+[[package]]
+name = "docopt"
+version = "0.6.2+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b600540c4fafb27bf6e6961f0f1e6f547c9d6126ce581ab3a92f878c8e2c9a2c"
+
+[[package]]
+name = "regex"
+version = "0.1.1+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84949cb53285a6c481d0133065a7b669871acfd9e20f273f4ce1283c309775d5"
+
+[[package]]
+name = "rustc-serialize"
+version = "0.4.1+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31162e7d23a085553c42dee375787b451a481275473f7779c4a63bcc267a24fd"
+
+[[package]]
+name = "semver"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3031434e07edc922bf1b8262f075fac1522694f17b1ee7ad314c4cabd5d2723f"
+
+[[package]]
+name = "serde"
+version = "1.0.90"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75d9264696ebbf5315a6b068e9910c4df9274365afac2d88abf66525df660218"
+
+[[package]]
+name = "toml"
+version = "0.1.1+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0f6c7804525ce0a968ef270e55a516cf4bdcf1fea0b09d130e0aa34a66745b3"
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.toml
new file mode 100644
index 000000000..340f06cda
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/src/main.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/src/main.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/mod.rs
new file mode 100644
index 000000000..be5bc87f5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["rustc-serialize"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock
new file mode 100644
index 000000000..bd8c90f46
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock
@@ -0,0 +1,51 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+dependencies = [
+ "clippy",
+ "docopt",
+ "regex",
+ "semver",
+ "serde",
+ "toml",
+]
+
+[[package]]
+name = "clippy"
+version = "0.4.1+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47ced0eda54e9ddc6063f0e1d0164493cd16c84c6b6a0329a536967c44e205f7"
+
+[[package]]
+name = "docopt"
+version = "0.6.2+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b600540c4fafb27bf6e6961f0f1e6f547c9d6126ce581ab3a92f878c8e2c9a2c"
+
+[[package]]
+name = "regex"
+version = "0.1.1+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84949cb53285a6c481d0133065a7b669871acfd9e20f273f4ce1283c309775d5"
+
+[[package]]
+name = "semver"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3031434e07edc922bf1b8262f075fac1522694f17b1ee7ad314c4cabd5d2723f"
+
+[[package]]
+name = "serde"
+version = "1.0.90"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75d9264696ebbf5315a6b068e9910c4df9274365afac2d88abf66525df660218"
+
+[[package]]
+name = "toml"
+version = "0.1.1+my-package"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0f6c7804525ce0a968ef270e55a516cf4bdcf1fea0b09d130e0aa34a66745b3"
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.toml
new file mode 100644
index 000000000..5e7d7f0a0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "cargo-remove-test-fixture"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = "0.1.0"
+
+[dependencies]
+docopt = "0.6"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/src/main.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/src/main.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stderr.log
new file mode 100644
index 000000000..164f8f4b9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stderr.log
@@ -0,0 +1,2 @@
+ Removing rustc-serialize from dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/Cargo.toml
new file mode 100644
index 000000000..fd5e80a8b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = [ "my-package" ]
+
+[workspace.dependencies]
+semver = "0.1.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/my-package/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/my-package/Cargo.toml
new file mode 100644
index 000000000..6690d593b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/my-package/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "my-package"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = { workspace = true }
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/my-package/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/my-package/src/main.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/in/my-package/src/main.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/mod.rs
new file mode 100644
index 000000000..225fbec00
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--package", "my-package", "--build", "semver"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/Cargo.toml
new file mode 100644
index 000000000..83a6a04d0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/Cargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = [ "my-package" ]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/my-package/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/my-package/Cargo.toml
new file mode 100644
index 000000000..402780535
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/my-package/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "my-package"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/my-package/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/my-package/src/main.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/out/my-package/src/main.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/stderr.log
new file mode 100644
index 000000000..f037ebe28
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/stderr.log
@@ -0,0 +1,2 @@
+ Removing semver from build-dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/Cargo.toml
new file mode 100644
index 000000000..dbac8ab44
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/Cargo.toml
@@ -0,0 +1,30 @@
+[workspace]
+members = [ "my-member" ]
+
+[workspace.dependencies]
+semver = "0.1.0"
+
+[package]
+name = "my-package"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = { workspace = true }
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/my-member/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/my-member/Cargo.toml
new file mode 100644
index 000000000..bb78904ff
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/my-member/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "my-member"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/my-member/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/my-member/src/main.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/in/my-member/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/mod.rs
new file mode 100644
index 000000000..225fbec00
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--package", "my-package", "--build", "semver"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/Cargo.toml
new file mode 100644
index 000000000..9a3261484
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/Cargo.toml
@@ -0,0 +1,24 @@
+[workspace]
+members = [ "my-member" ]
+
+[package]
+name = "my-package"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/my-member/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/my-member/Cargo.toml
new file mode 100644
index 000000000..bb78904ff
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/my-member/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "my-member"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/my-member/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/my-member/src/main.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/out/my-member/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stderr.log
new file mode 100644
index 000000000..f037ebe28
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stderr.log
@@ -0,0 +1,2 @@
+ Removing semver from build-dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/Cargo.toml
new file mode 100644
index 000000000..f1992ac88
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = [ "my-package", "my-other-package" ]
+
+[workspace.dependencies]
+semver = "0.1.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-other-package/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-other-package/Cargo.toml
new file mode 100644
index 000000000..d65972868
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-other-package/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "my-other-package"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+semver = { workspace = true }
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-other-package/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-other-package/src/main.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-other-package/src/main.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-package/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-package/Cargo.toml
new file mode 100644
index 000000000..6690d593b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-package/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "my-package"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[build-dependencies]
+semver = { workspace = true }
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-package/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-package/src/main.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/in/my-package/src/main.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/mod.rs
new file mode 100644
index 000000000..225fbec00
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/mod.rs
@@ -0,0 +1,25 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+use crate::cargo_remove::init_registry;
+
+#[cargo_test]
+fn case() {
+ init_registry();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .args(["--package", "my-package", "--build", "semver"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/Cargo.toml
new file mode 100644
index 000000000..f1992ac88
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = [ "my-package", "my-other-package" ]
+
+[workspace.dependencies]
+semver = "0.1.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-other-package/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-other-package/Cargo.toml
new file mode 100644
index 000000000..d65972868
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-other-package/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "my-other-package"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+semver = { workspace = true }
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-other-package/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-other-package/src/main.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-other-package/src/main.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-package/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-package/Cargo.toml
new file mode 100644
index 000000000..402780535
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-package/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "my-package"
+version = "0.1.0"
+
+[[bin]]
+name = "main"
+path = "src/main.rs"
+
+[dependencies]
+docopt = "0.6"
+rustc-serialize = "0.4"
+semver = "0.1"
+toml = "0.1"
+clippy = "0.4"
+
+[dev-dependencies]
+regex = "0.1.1"
+serde = "1.0.90"
+
+[features]
+std = ["serde/std", "semver/std"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-package/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-package/src/main.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/out/my-package/src/main.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stderr.log
new file mode 100644
index 000000000..f037ebe28
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stderr.log
@@ -0,0 +1,2 @@
+ Removing semver from build-dependencies
+ Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_targets.rs b/src/tools/cargo/tests/testsuite/cargo_targets.rs
new file mode 100644
index 000000000..fcf293019
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_targets.rs
@@ -0,0 +1,68 @@
+//! Tests specifically related to target handling (lib, bins, examples, tests, benches).
+
+use cargo_test_support::project;
+
+#[cargo_test]
+fn warn_unmatched_target_filters() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [lib]
+ test = false
+ bench = false
+ "#,
+ )
+ .file("src/lib.rs", r#"fn main() {}"#)
+ .build();
+
+ p.cargo("check --tests --bins --examples --benches")
+ .with_stderr(
+ "\
+[WARNING] Target filters `bins`, `tests`, `examples`, `benches` specified, \
+but no targets matched. This is a no-op
+[FINISHED][..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn reserved_windows_target_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [[bin]]
+ name = "con"
+ path = "src/main.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ if cfg!(windows) {
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] binary target `con` is a reserved Windows filename, \
+this target will not work on Windows platforms
+[CHECKING] foo[..]
+[FINISHED][..]
+",
+ )
+ .run();
+ } else {
+ p.cargo("check")
+ .with_stderr("[CHECKING] foo[..]\n[FINISHED][..]")
+ .run();
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/cfg.rs b/src/tools/cargo/tests/testsuite/cfg.rs
new file mode 100644
index 000000000..dcce65402
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cfg.rs
@@ -0,0 +1,515 @@
+//! Tests for cfg() expressions.
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::rustc_host;
+use cargo_test_support::{basic_manifest, project};
+
+#[cargo_test]
+fn cfg_easy() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [target.'cfg(unix)'.dependencies]
+ b = { path = 'b' }
+ [target."cfg(windows)".dependencies]
+ b = { path = 'b' }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate b;")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/lib.rs", "")
+ .build();
+ p.cargo("check -v").run();
+}
+
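+// A dependency gated on the other platform family must not be checked.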
+#[cargo_test]
+fn dont_include() {
+ let other_family = if cfg!(unix) { "windows" } else { "unix" };
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [target.'cfg({})'.dependencies]
+ b = {{ path = 'b' }}
+ "#,
+ other_family
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] a v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn works_through_the_registry() {
+ Package::new("baz", "0.1.0").publish();
+ Package::new("bar", "0.1.0")
+ .target_dep("baz", "0.1.0", "cfg(unix)")
+ .target_dep("baz", "0.1.0", "cfg(windows)")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate bar;",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[DOWNLOADED] [..]
+[CHECKING] baz v0.1.0
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn ignore_version_from_other_platform() {
+ let this_family = if cfg!(unix) { "unix" } else { "windows" };
+ let other_family = if cfg!(unix) { "windows" } else { "unix" };
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.2.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [target.'cfg({})'.dependencies]
+ bar = "0.1.0"
+
+ [target.'cfg({})'.dependencies]
+ bar = "0.2.0"
+ "#,
+ this_family, other_family
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate bar;",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_target_spec() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [target.'cfg(4)'.dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ failed to parse `4` as a cfg expression: unexpected character `4` in cfg, [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_target_spec2() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [target.'cfg(bar =)'.dependencies]
+ baz = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ failed to parse `bar =` as a cfg expression: expected a string, but cfg expression ended
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn multiple_match_ok() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [target.'cfg(unix)'.dependencies]
+ b = {{ path = 'b' }}
+ [target.'cfg(target_family = "unix")'.dependencies]
+ b = {{ path = 'b' }}
+ [target."cfg(windows)".dependencies]
+ b = {{ path = 'b' }}
+ [target.'cfg(target_family = "windows")'.dependencies]
+ b = {{ path = 'b' }}
+ [target."cfg(any(windows, unix))".dependencies]
+ b = {{ path = 'b' }}
+
+ [target.{}.dependencies]
+ b = {{ path = 'b' }}
+ "#,
+ rustc_host()
+ ),
+ )
+ .file("src/lib.rs", "extern crate b;")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/lib.rs", "")
+ .build();
+ p.cargo("check -v").run();
+}
+
+#[cargo_test]
+fn any_ok() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [target."cfg(any(windows, unix))".dependencies]
+ b = { path = 'b' }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate b;")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/lib.rs", "")
+ .build();
+ p.cargo("check -v").run();
+}
+
+// https://github.com/rust-lang/cargo/issues/5313
+#[cargo_test]
+#[cfg(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu"))]
+fn cfg_looks_at_rustflags_for_target() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [target.'cfg(with_b)'.dependencies]
+ b = { path = 'b' }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(with_b)]
+ extern crate b;
+
+ fn main() { b::foo(); }
+ "#,
+ )
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ p.cargo("check --target x86_64-unknown-linux-gnu")
+ .env("RUSTFLAGS", "--cfg with_b")
+ .run();
+}
+
+#[cargo_test]
+fn bad_cfg_discovery() {
+    // Check the error messages shown when parsing `rustc -vV` and `rustc --print=*` output fails.
+ //
+ // This is a `rustc` replacement which behaves differently based on an
+ // environment variable.
+ let p = project()
+ .at("compiler")
+ .file("Cargo.toml", &basic_manifest("compiler", "0.1.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn run_rustc() -> String {
+ let mut cmd = std::process::Command::new("rustc");
+ for arg in std::env::args_os().skip(1) {
+ cmd.arg(arg);
+ }
+ String::from_utf8(cmd.output().unwrap().stdout).unwrap()
+ }
+
+ fn main() {
+ let mode = std::env::var("FUNKY_MODE").unwrap();
+ if mode == "bad-version" {
+ println!("foo");
+ return;
+ }
+ if std::env::args_os().any(|a| a == "-vV") {
+ print!("{}", run_rustc());
+ return;
+ }
+ if mode == "no-crate-types" {
+ return;
+ }
+ if mode == "bad-crate-type" {
+ println!("foo");
+ return;
+ }
+ let output = run_rustc();
+ let mut lines = output.lines();
+ let sysroot = loop {
+ let line = lines.next().unwrap();
+ if line.contains("___") {
+ println!("{}", line);
+ } else {
+ break line;
+ }
+ };
+ if mode == "no-sysroot" {
+ return;
+ }
+ println!("{}", sysroot);
+
+ if mode == "no-split-debuginfo" {
+ return;
+ }
+ loop {
+ let line = lines.next().unwrap();
+ if line == "___" {
+ println!("\n{line}");
+ break;
+ } else {
+                            // As the number of split-debuginfo options varies,
+                            // concatenate them into one line.
+ print!("{line},");
+ }
+ };
+
+ if mode != "bad-cfg" {
+ panic!("unexpected");
+ }
+ println!("123");
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build").run();
+ let funky_rustc = p.bin("compiler");
+
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("check")
+ .env("RUSTC", &funky_rustc)
+ .env("FUNKY_MODE", "bad-version")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] `rustc -vV` didn't have a line for `host:`, got:
+foo
+
+",
+ )
+ .run();
+
+ p.cargo("check")
+ .env("RUSTC", &funky_rustc)
+ .env("FUNKY_MODE", "no-crate-types")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] malformed output when learning about crate-type bin information
+command was: `[..]compiler[..] --crate-name ___ [..]`
+(no output received)
+",
+ )
+ .run();
+
+ p.cargo("check")
+ .env("RUSTC", &funky_rustc)
+ .env("FUNKY_MODE", "no-sysroot")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] output of --print=sysroot missing when learning about target-specific information from rustc
+command was: `[..]compiler[..]--crate-type [..]`
+
+--- stdout
+[..]___[..]
+[..]___[..]
+[..]___[..]
+[..]___[..]
+[..]___[..]
+[..]___[..]
+
+",
+ )
+ .run();
+
+ p.cargo("check")
+ .env("RUSTC", &funky_rustc)
+ .env("FUNKY_MODE", "no-split-debuginfo")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] output of --print=split-debuginfo missing when learning about target-specific information from rustc
+command was: `[..]compiler[..]--crate-type [..]`
+
+--- stdout
+[..]___[..]
+[..]___[..]
+[..]___[..]
+[..]___[..]
+[..]___[..]
+[..]___[..]
+[..]
+
+",
+ )
+ .run();
+
+ p.cargo("check")
+ .env("RUSTC", &funky_rustc)
+ .env("FUNKY_MODE", "bad-cfg")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse the cfg from `rustc --print=cfg`, got:
+[..]___[..]
+[..]___[..]
+[..]___[..]
+[..]___[..]
+[..]___[..]
+[..]___[..]
+[..]
+[..],[..]
+___
+123
+
+
+Caused by:
+ failed to parse `123` as a cfg expression: unexpected character `1` in cfg, \
+ expected parens, a comma, an identifier, or a string
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn exclusive_dep_kinds() {
+ // Checks for a bug where the same package with different cfg expressions
+ // was not being filtered correctly.
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [target.'cfg(abc)'.dependencies]
+ bar = "1.0"
+
+ [target.'cfg(not(abc))'.build-dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "extern crate bar; fn main() {}")
+ .build();
+
+ p.cargo("check").run();
+ p.change_file("src/lib.rs", "extern crate bar;");
+ p.cargo("check")
+ .with_status(101)
+ // can't find crate for `bar`
+ .with_stderr_contains("[..]E0463[..]")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/check.rs b/src/tools/cargo/tests/testsuite/check.rs
new file mode 100644
index 000000000..bbcf750fb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/check.rs
@@ -0,0 +1,1521 @@
+//! Tests for the `cargo check` command.
+
+use std::fmt::{self, Write};
+
+use crate::messages::raw_rustc_output;
+use cargo_test_support::install::exe;
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::registry::Package;
+use cargo_test_support::tools;
+use cargo_test_support::{basic_bin_manifest, basic_manifest, git, project};
+
+#[cargo_test]
+fn check_success() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate bar; fn main() { ::bar::baz(); }",
+ )
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ foo.cargo("check").run();
+}
+
+#[cargo_test]
+fn check_fail() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate bar; fn main() { ::bar::baz(42); }",
+ )
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ foo.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]this function takes 0[..]")
+ .run();
+}
+
+#[cargo_test]
+fn custom_derive() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[macro_use]
+ extern crate bar;
+
+ trait B {
+ fn b(&self);
+ }
+
+ #[derive(B)]
+ struct A;
+
+ fn main() {
+ let a = A;
+ a.b();
+ }
+ "#,
+ )
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate proc_macro;
+
+ use proc_macro::TokenStream;
+
+ #[proc_macro_derive(B)]
+ pub fn derive(_input: TokenStream) -> TokenStream {
+ format!("impl B for A {{ fn b(&self) {{}} }}").parse().unwrap()
+ }
+ "#,
+ )
+ .build();
+
+ foo.cargo("check").run();
+}
+
+#[cargo_test]
+fn check_build() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate bar; fn main() { ::bar::baz(); }",
+ )
+ .build();
+
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ foo.cargo("check").run();
+ foo.cargo("build").run();
+}
+
+#[cargo_test]
+fn build_check() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate bar; fn main() { ::bar::baz(); }",
+ )
+ .build();
+
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ foo.cargo("build -v").run();
+ foo.cargo("check -v").run();
+}
+
+// Checks that when a project has both a lib and a bin, the lib is only checked,
+// not built.
+#[cargo_test]
+fn issue_3418() {
+ let foo = project()
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ foo.cargo("check -v")
+ .with_stderr_contains("[..] --emit=[..]metadata [..]")
+ .run();
+}
+
+// Regression test for odd behavior caused by a crate being both built and
+// checked, in this case with a proc macro involved as well.
+#[cargo_test]
+fn issue_3419() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ rustc-serialize = "*"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate rustc_serialize;
+
+ use rustc_serialize::Decodable;
+
+ pub fn take<T: Decodable>() {}
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ extern crate rustc_serialize;
+
+ extern crate foo;
+
+ #[derive(RustcDecodable)]
+ pub struct Foo;
+
+ fn main() {
+ foo::take::<Foo>();
+ }
+ "#,
+ )
+ .build();
+
+ Package::new("rustc-serialize", "1.0.0")
+ .file(
+ "src/lib.rs",
+ r#"
+ pub trait Decodable: Sized {
+ fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error>;
+ }
+ pub trait Decoder {
+ type Error;
+ fn read_struct<T, F>(&mut self, s_name: &str, len: usize, f: F)
+ -> Result<T, Self::Error>
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+ }
+ "#,
+ )
+ .publish();
+
+ p.cargo("check").run();
+}
+
+// `check` on a dylib should have a different metadata hash than `build`.
+#[cargo_test]
+fn dylib_check_preserves_build_cache() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [lib]
+ crate-type = ["dylib"]
+
+ [dependencies]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[..]Compiling foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("check").run();
+
+ p.cargo("build")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+}
+
+// test `cargo rustc --profile check`
+#[cargo_test]
+fn rustc_check() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate bar; fn main() { ::bar::baz(); }",
+ )
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ foo.cargo("rustc --profile check -- --emit=metadata").run();
+
+ // Verify compatible usage of --profile with --release, issue #7488
+ foo.cargo("rustc --profile check --release -- --emit=metadata")
+ .run();
+ foo.cargo("rustc --profile test --release -- --emit=metadata")
+ .run();
+}
+
+#[cargo_test]
+fn rustc_check_err() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate bar; fn main() { ::bar::qux(); }",
+ )
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ foo.cargo("rustc --profile check -- --emit=metadata")
+ .with_status(101)
+ .with_stderr_contains("[CHECKING] bar [..]")
+ .with_stderr_contains("[CHECKING] foo [..]")
+ .with_stderr_contains("[..]cannot find function `qux` in [..] `bar`")
+ .run();
+}
+
+#[cargo_test]
+fn check_all() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [workspace]
+ [dependencies]
+ b = { path = "b" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("examples/a.rs", "fn main() {}")
+ .file("tests/a.rs", "")
+ .file("src/lib.rs", "")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/main.rs", "fn main() {}")
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("check --workspace -v")
+ .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]")
+ .with_stderr_contains("[..] --crate-name foo src/main.rs [..]")
+ .with_stderr_contains("[..] --crate-name b b/src/lib.rs [..]")
+ .with_stderr_contains("[..] --crate-name b b/src/main.rs [..]")
+ .run();
+}
+
+#[cargo_test]
+fn check_all_exclude() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }")
+ .build();
+
+ p.cargo("check --workspace --exclude baz")
+ .with_stderr_does_not_contain("[CHECKING] baz v0.1.0 [..]")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn check_all_exclude_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }")
+ .build();
+
+ p.cargo("check --workspace --exclude '*z'")
+ .with_stderr_does_not_contain("[CHECKING] baz v0.1.0 [..]")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn check_virtual_all_implied() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check -v")
+ .with_stderr_contains("[..] --crate-name bar bar/src/lib.rs [..]")
+ .with_stderr_contains("[..] --crate-name baz baz/src/lib.rs [..]")
+ .run();
+}
+
+#[cargo_test]
+fn check_virtual_manifest_one_project() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }")
+ .build();
+
+ p.cargo("check -p bar")
+ .with_stderr_does_not_contain("[CHECKING] baz v0.1.0 [..]")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn check_virtual_manifest_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check -p '*z'")
+ .with_stderr_does_not_contain("[CHECKING] bar v0.1.0 [..]")
+ .with_stderr(
+ "\
+[CHECKING] baz v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn exclude_warns_on_non_existing_package() {
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("check --workspace --exclude bar")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[WARNING] excluded package(s) `bar` not found in workspace `[CWD]`
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
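+// With no target selection flags, only the lib and bin targets are checked.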
+#[cargo_test]
+fn targets_selected_default() {
+ let foo = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", "pub fn smth() {}")
+ .file("examples/example1.rs", "fn main() {}")
+ .file("tests/test2.rs", "#[test] fn t() {}")
+ .file("benches/bench3.rs", "")
+ .build();
+
+ foo.cargo("check -v")
+ .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]")
+ .with_stderr_contains("[..] --crate-name foo src/main.rs [..]")
+ .with_stderr_does_not_contain("[..] --crate-name example1 examples/example1.rs [..]")
+ .with_stderr_does_not_contain("[..] --crate-name test2 tests/test2.rs [..]")
+ .with_stderr_does_not_contain("[..] --crate-name bench3 benches/bench3.rs [..]")
+ .run();
+}
+
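+// With `--all-targets`, examples, tests, and benches are checked as well.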
+#[cargo_test]
+fn targets_selected_all() {
+ let foo = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", "pub fn smth() {}")
+ .file("examples/example1.rs", "fn main() {}")
+ .file("tests/test2.rs", "#[test] fn t() {}")
+ .file("benches/bench3.rs", "")
+ .build();
+
+ foo.cargo("check --all-targets -v")
+ .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]")
+ .with_stderr_contains("[..] --crate-name foo src/main.rs [..]")
+ .with_stderr_contains("[..] --crate-name example1 examples/example1.rs [..]")
+ .with_stderr_contains("[..] --crate-name test2 tests/test2.rs [..]")
+ .with_stderr_contains("[..] --crate-name bench3 benches/bench3.rs [..]")
+ .run();
+}
+
+#[cargo_test]
+fn check_unit_test_profile() {
+ let foo = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(test)]
+ mod tests {
+ #[test]
+ fn it_works() {
+ badtext
+ }
+ }
+ "#,
+ )
+ .build();
+
+ foo.cargo("check").run();
+ foo.cargo("check --profile test")
+ .with_status(101)
+ .with_stderr_contains("[..]badtext[..]")
+ .run();
+}
+
+// Verify what is checked with various command-line filters.
+#[cargo_test]
+fn check_filters() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ fn unused_normal_lib() {}
+ #[cfg(test)]
+ mod tests {
+ fn unused_unit_lib() {}
+ }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {}
+ fn unused_normal_bin() {}
+ #[cfg(test)]
+ mod tests {
+ fn unused_unit_bin() {}
+ }
+ "#,
+ )
+ .file(
+ "tests/t1.rs",
+ r#"
+ fn unused_normal_t1() {}
+ #[cfg(test)]
+ mod tests {
+ fn unused_unit_t1() {}
+ }
+ "#,
+ )
+ .file(
+ "examples/ex1.rs",
+ r#"
+ fn main() {}
+ fn unused_normal_ex1() {}
+ #[cfg(test)]
+ mod tests {
+ fn unused_unit_ex1() {}
+ }
+ "#,
+ )
+ .file(
+ "benches/b1.rs",
+ r#"
+ fn unused_normal_b1() {}
+ #[cfg(test)]
+ mod tests {
+ fn unused_unit_b1() {}
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr_contains("[..]unused_normal_lib[..]")
+ .with_stderr_contains("[..]unused_normal_bin[..]")
+ .with_stderr_does_not_contain("[..]unused_normal_t1[..]")
+ .with_stderr_does_not_contain("[..]unused_normal_ex1[..]")
+ .with_stderr_does_not_contain("[..]unused_normal_b1[..]")
+ .with_stderr_does_not_contain("[..]unused_unit_[..]")
+ .run();
+ p.root().join("target").rm_rf();
+ p.cargo("check --tests -v")
+ .with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --test [..]")
+ .with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --crate-type lib [..]")
+ .with_stderr_contains("[..] --crate-name foo src/main.rs [..] --test [..]")
+ .with_stderr_contains("[..]unused_unit_lib[..]")
+ .with_stderr_contains("[..]unused_unit_bin[..]")
+ .with_stderr_contains("[..]unused_normal_lib[..]")
+ .with_stderr_contains("[..]unused_normal_bin[..]")
+ .with_stderr_contains("[..]unused_unit_t1[..]")
+ .with_stderr_does_not_contain("[..]unused_normal_ex1[..]")
+ .with_stderr_does_not_contain("[..]unused_unit_ex1[..]")
+ .with_stderr_does_not_contain("[..]unused_normal_b1[..]")
+ .with_stderr_does_not_contain("[..]unused_unit_b1[..]")
+ .with_stderr_does_not_contain("[..]--crate-type bin[..]")
+ .run();
+ p.root().join("target").rm_rf();
+ p.cargo("check --test t1 -v")
+ .with_stderr_contains("[..]unused_normal_lib[..]")
+ .with_stderr_contains("[..]unused_unit_t1[..]")
+ .with_stderr_does_not_contain("[..]unused_unit_lib[..]")
+ .with_stderr_does_not_contain("[..]unused_normal_bin[..]")
+ .with_stderr_does_not_contain("[..]unused_unit_bin[..]")
+ .with_stderr_does_not_contain("[..]unused_normal_ex1[..]")
+ .with_stderr_does_not_contain("[..]unused_normal_b1[..]")
+ .with_stderr_does_not_contain("[..]unused_unit_ex1[..]")
+ .with_stderr_does_not_contain("[..]unused_unit_b1[..]")
+ .run();
+ p.root().join("target").rm_rf();
+ p.cargo("check --all-targets -v")
+ .with_stderr_contains("[..]unused_normal_lib[..]")
+ .with_stderr_contains("[..]unused_normal_bin[..]")
+ .with_stderr_contains("[..]unused_normal_t1[..]")
+ .with_stderr_contains("[..]unused_normal_ex1[..]")
+ .with_stderr_contains("[..]unused_normal_b1[..]")
+ .with_stderr_contains("[..]unused_unit_b1[..]")
+ .with_stderr_contains("[..]unused_unit_t1[..]")
+ .with_stderr_contains("[..]unused_unit_lib[..]")
+ .with_stderr_contains("[..]unused_unit_bin[..]")
+ .with_stderr_does_not_contain("[..]unused_unit_ex1[..]")
+ .run();
+}
+
+#[cargo_test]
+fn check_artifacts() {
+ // Verify which artifacts are created when running check (#4059).
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .file("tests/t1.rs", "")
+ .file("examples/ex1.rs", "fn main() {}")
+ .file("benches/b1.rs", "")
+ .build();
+
+ p.cargo("check").run();
+ assert!(!p.root().join("target/debug/libfoo.rmeta").is_file());
+ assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+ assert!(!p.root().join("target/debug").join(exe("foo")).is_file());
+ assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 2);
+
+ p.root().join("target").rm_rf();
+ p.cargo("check --lib").run();
+ assert!(!p.root().join("target/debug/libfoo.rmeta").is_file());
+ assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+ assert!(!p.root().join("target/debug").join(exe("foo")).is_file());
+ assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1);
+
+ p.root().join("target").rm_rf();
+ p.cargo("check --bin foo").run();
+ assert!(!p.root().join("target/debug/libfoo.rmeta").is_file());
+ assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+ assert!(!p.root().join("target/debug").join(exe("foo")).is_file());
+ assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 2);
+
+ p.root().join("target").rm_rf();
+ p.cargo("check --test t1").run();
+ assert!(!p.root().join("target/debug/libfoo.rmeta").is_file());
+ assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+ assert!(!p.root().join("target/debug").join(exe("foo")).is_file());
+ assert_eq!(p.glob("target/debug/t1-*").count(), 0);
+ assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1);
+ assert_eq!(p.glob("target/debug/deps/libt1-*.rmeta").count(), 1);
+
+ p.root().join("target").rm_rf();
+ p.cargo("check --example ex1").run();
+ assert!(!p.root().join("target/debug/libfoo.rmeta").is_file());
+ assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+ assert!(!p
+ .root()
+ .join("target/debug/examples")
+ .join(exe("ex1"))
+ .is_file());
+ assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1);
+ assert_eq!(p.glob("target/debug/examples/libex1-*.rmeta").count(), 1);
+
+ p.root().join("target").rm_rf();
+ p.cargo("check --bench b1").run();
+ assert!(!p.root().join("target/debug/libfoo.rmeta").is_file());
+ assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+ assert!(!p.root().join("target/debug").join(exe("foo")).is_file());
+ assert_eq!(p.glob("target/debug/b1-*").count(), 0);
+ assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1);
+ assert_eq!(p.glob("target/debug/deps/libb1-*.rmeta").count(), 1);
+}
+
+#[cargo_test]
+fn short_message_format() {
+ let foo = project()
+ .file("src/lib.rs", "fn foo() { let _x: bool = 'a'; }")
+ .build();
+ foo.cargo("check --message-format=short")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+src/lib.rs:1:27: error[E0308]: mismatched types
+error: could not compile `foo` (lib) due to previous error
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn proc_macro() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "demo"
+ version = "0.0.1"
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate proc_macro;
+
+ use proc_macro::TokenStream;
+
+ #[proc_macro_derive(Foo)]
+ pub fn demo(_input: TokenStream) -> TokenStream {
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[macro_use]
+ extern crate demo;
+
+ #[derive(Foo)]
+ struct A;
+
+ fn main() {}
+ "#,
+ )
+ .build();
+ p.cargo("check -v").env("CARGO_LOG", "cargo=trace").run();
+}
+
+#[cargo_test]
+fn check_keep_going() {
+ let foo = project()
+ .file("src/bin/one.rs", "compile_error!(\"ONE\"); fn main() {}")
+ .file("src/bin/two.rs", "compile_error!(\"TWO\"); fn main() {}")
+ .build();
+
+ // Due to -j1, without --keep-going only one of the two bins would be built.
+ foo.cargo("check -j1 --keep-going -Zunstable-options")
+ .masquerade_as_nightly_cargo(&["keep-going"])
+ .with_status(101)
+ .with_stderr_contains("error: ONE")
+ .with_stderr_contains("error: TWO")
+ .run();
+}
+
+#[cargo_test]
+fn does_not_use_empty_rustc_wrapper() {
+ // An empty RUSTC_WRAPPER environment variable won't be used.
+ // The env var will also override the config, essentially unsetting it.
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [build]
+ rustc-wrapper = "do-not-execute-me"
+ "#,
+ )
+ .build();
+ p.cargo("check").env("RUSTC_WRAPPER", "").run();
+}
+
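+// Likewise, an empty RUSTC_WORKSPACE_WRAPPER environment variable is ignored.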
+#[cargo_test]
+fn does_not_use_empty_rustc_workspace_wrapper() {
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("check").env("RUSTC_WORKSPACE_WRAPPER", "").run();
+}
+
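+// Generate a macro recursion deep enough to hit the recursion limit and verify
+// the resulting diagnostic is forwarded via `--message-format=json`.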
+#[cargo_test]
+fn error_from_deep_recursion() -> Result<(), fmt::Error> {
+ let mut big_macro = String::new();
+ writeln!(big_macro, "macro_rules! m {{")?;
+ for i in 0..130 {
+ writeln!(big_macro, "({}) => {{ m!({}); }};", i, i + 1)?;
+ }
+ writeln!(big_macro, "}}")?;
+ writeln!(big_macro, "m!(0);")?;
+
+ let p = project().file("src/lib.rs", &big_macro).build();
+ p.cargo("check --message-format=json")
+ .with_status(101)
+ .with_stdout_contains(
+ "[..]\"message\":\"recursion limit reached while expanding [..]`m[..]`\"[..]",
+ )
+ .run();
+
+ Ok(())
+}
+
+#[cargo_test]
+fn rustc_workspace_wrapper_affects_all_workspace_members() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check")
+ .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper())
+ .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]")
+ .with_stderr_contains("WRAPPER CALLED: rustc --crate-name baz [..]")
+ .run();
+}
+
+#[cargo_test]
+fn rustc_workspace_wrapper_includes_path_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+
+ [dependencies]
+ baz = { path = "baz" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check --workspace")
+ .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper())
+ .with_stderr_contains("WRAPPER CALLED: rustc --crate-name foo [..]")
+ .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]")
+ .with_stderr_contains("WRAPPER CALLED: rustc --crate-name baz [..]")
+ .run();
+}
+
+#[cargo_test]
+fn rustc_workspace_wrapper_respects_primary_units() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check -p bar")
+ .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper())
+ .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]")
+ .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name baz [..]")
+ .run();
+}
+
+#[cargo_test]
+fn rustc_workspace_wrapper_excludes_published_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+
+ [dependencies]
+ baz = "1.0.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ Package::new("baz", "1.0.0").publish();
+
+ p.cargo("check --workspace -v")
+ .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper())
+ .with_stderr_contains("WRAPPER CALLED: rustc --crate-name foo [..]")
+ .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]")
+ .with_stderr_contains("[CHECKING] baz [..]")
+ .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name baz [..]")
+ .run();
+}
+
+#[cargo_test]
+fn warn_manifest_package_and_project() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] manifest at `[CWD]` contains both `project` and `package`, this could become a hard error in the future
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn git_manifest_package_and_project() {
+ let p = project();
+ let git_project = git::new("bar", |p| {
+ p.file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+
+ [project]
+ name = "bar"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ });
+
+ let p = p
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies.bar]
+ version = "0.0.1"
+ git = '{}'
+
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `[..]`
+[CHECKING] bar v0.0.1 ([..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warn_manifest_with_project() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] manifest at `[CWD]` contains `[project]` instead of `[package]`, this could become a hard error in the future
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn git_manifest_with_project() {
+ let p = project();
+ let git_project = git::new("bar", |p| {
+ p.file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "bar"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ });
+
+ let p = p
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies.bar]
+ version = "0.0.1"
+ git = '{}'
+
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `[..]`
+[CHECKING] bar v0.0.1 ([..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn check_fixable_warning() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "use std::io;")
+ .build();
+
+ foo.cargo("check")
+ .with_stderr_contains("[..] (run `cargo fix --lib -p foo` to apply 1 suggestion)")
+ .run();
+}
+
+#[cargo_test]
+fn check_fixable_test_warning() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "\
+mod tests {
+ #[test]
+ fn t1() {
+ use std::io;
+ }
+}
+ ",
+ )
+ .build();
+
+ foo.cargo("check --all-targets")
+ .with_stderr_contains("[..] (run `cargo fix --lib -p foo --tests` to apply 1 suggestion)")
+ .run();
+ foo.cargo("fix --lib -p foo --tests --allow-no-vcs").run();
+ assert!(!foo.read_file("src/lib.rs").contains("use std::io;"));
+}
+
+#[cargo_test]
+fn check_fixable_error_no_fix() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "use std::io;\n#[derive(Debug(x))]\nstruct Foo;",
+ )
+ .build();
+
+ let rustc_message = raw_rustc_output(&foo, "src/lib.rs", &[]);
+ let expected_output = format!(
+ "\
+[CHECKING] foo v0.0.1 ([..])
+{}\
+[WARNING] `foo` (lib) generated 1 warning
+[ERROR] could not compile `foo` (lib) due to previous error; 1 warning emitted
+",
+ rustc_message
+ );
+ foo.cargo("check")
+ .with_status(101)
+ .with_stderr(expected_output)
+ .run();
+}
+
+#[cargo_test]
+fn check_fixable_warning_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "bar"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("foo/src/lib.rs", "use std::io;")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+
+ [dependencies]
+ foo = { path = "../foo" }
+ "#,
+ )
+ .file("bar/src/lib.rs", "use std::io;")
+ .build();
+
+ p.cargo("check")
+ .with_stderr_contains("[..] (run `cargo fix --lib -p foo` to apply 1 suggestion)")
+ .with_stderr_contains("[..] (run `cargo fix --lib -p bar` to apply 1 suggestion)")
+ .run();
+}
+
+#[cargo_test]
+fn check_fixable_example() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn hello() -> &'static str {
+ "hello"
+ }
+
+ pub fn main() {
+ println!("{}", hello())
+ }
+ "#,
+ )
+ .file("examples/ex1.rs", "use std::fmt; fn main() {}")
+ .build();
+ p.cargo("check --all-targets")
+ .with_stderr_contains("[..] (run `cargo fix --example \"ex1\"` to apply 1 suggestion)")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn check_fixable_bench() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+
+ fn hello() -> &'static str {
+ "hello"
+ }
+
+ pub fn main() {
+ println!("{}", hello())
+ }
+
+ #[bench]
+ fn bench_hello(_b: &mut test::Bencher) {
+ use std::io;
+ assert_eq!(hello(), "hello")
+ }
+ "#,
+ )
+ .file(
+ "benches/bench.rs",
+ "
+ #![feature(test)]
+ extern crate test;
+
+ #[bench]
+ fn bench(_b: &mut test::Bencher) { use std::fmt; }
+ ",
+ )
+ .build();
+ p.cargo("check --all-targets")
+ .with_stderr_contains("[..] (run `cargo fix --bench \"bench\"` to apply 1 suggestion)")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn check_fixable_mixed() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+
+ fn hello() -> &'static str {
+ "hello"
+ }
+
+ pub fn main() {
+ println!("{}", hello())
+ }
+
+ #[bench]
+ fn bench_hello(_b: &mut test::Bencher) {
+ use std::io;
+ assert_eq!(hello(), "hello")
+ }
+ #[test]
+ fn t1() {
+ use std::fmt;
+ }
+ "#,
+ )
+ .file("examples/ex1.rs", "use std::fmt; fn main() {}")
+ .file(
+ "benches/bench.rs",
+ "
+ #![feature(test)]
+ extern crate test;
+
+ #[bench]
+ fn bench(_b: &mut test::Bencher) { use std::fmt; }
+ ",
+ )
+ .build();
+ p.cargo("check --all-targets")
+ .with_stderr_contains("[..] (run `cargo fix --bin \"foo\" --tests` to apply 2 suggestions)")
+ .with_stderr_contains("[..] (run `cargo fix --example \"ex1\"` to apply 1 suggestion)")
+ .with_stderr_contains("[..] (run `cargo fix --bench \"bench\"` to apply 1 suggestion)")
+ .run();
+}
+
+#[cargo_test]
+fn check_fixable_warning_for_clippy() {
+    // A wrapper around `rustc` used in place of the real `clippy-driver`.
+ let clippy_driver = project()
+ .at(cargo_test_support::paths::global_root().join("clippy-driver"))
+ .file("Cargo.toml", &basic_manifest("clippy-driver", "0.0.1"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let mut args = std::env::args_os();
+ let _me = args.next().unwrap();
+ let rustc = args.next().unwrap();
+ let status = std::process::Command::new(rustc).args(args).status().unwrap();
+ std::process::exit(status.code().unwrap_or(1));
+ }
+ "#,
+ )
+ .build();
+ clippy_driver.cargo("build").run();
+
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+        // We don't want to show a `clippy`-specific warning since we are
+        // using a `rustc` wrapper in place of `clippy`.
+ .file("src/lib.rs", "use std::io;")
+ .build();
+
+ foo.cargo("check")
+ // We can't use `clippy` so we use a `rustc` workspace wrapper instead
+ .env(
+ "RUSTC_WORKSPACE_WRAPPER",
+ clippy_driver.bin("clippy-driver"),
+ )
+ .with_stderr_contains("[..] (run `cargo clippy --fix --lib -p foo` to apply 1 suggestion)")
+ .run();
+}
+
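+// Unknown keys in dependency tables should produce `unused manifest key`
+// warnings for every dependency kind, including target-specific tables.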
+#[cargo_test]
+fn check_unused_manifest_keys() {
+ Package::new("dep", "0.1.0").publish();
+ Package::new("foo", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+
+ [dependencies]
+ dep = { version = "0.1.0", wxz = "wxz" }
+ foo = { version = "0.1.0", abc = "abc" }
+
+ [dev-dependencies]
+ foo = { version = "0.1.0", wxz = "wxz" }
+
+ [build-dependencies]
+ foo = { version = "0.1.0", wxz = "wxz" }
+
+ [target.'cfg(windows)'.dependencies]
+ foo = { version = "0.1.0", wxz = "wxz" }
+
+ [target.x86_64-pc-windows-gnu.dev-dependencies]
+ foo = { version = "0.1.0", wxz = "wxz" }
+
+ [target.bar.build-dependencies]
+ foo = { version = "0.1.0", wxz = "wxz" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] unused manifest key: dependencies.dep.wxz
+[WARNING] unused manifest key: dependencies.foo.abc
+[WARNING] unused manifest key: dev-dependencies.foo.wxz
+[WARNING] unused manifest key: build-dependencies.foo.wxz
+[WARNING] unused manifest key: target.bar.build-dependencies.foo.wxz
+[WARNING] unused manifest key: target.cfg(windows).dependencies.foo.wxz
+[WARNING] unused manifest key: target.x86_64-pc-windows-gnu.dev-dependencies.foo.wxz
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.1.0 ([..])
+[DOWNLOADED] dep v0.1.0 ([..])
+[CHECKING] [..]
+[CHECKING] [..]
+[CHECKING] bar v0.2.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/check_cfg.rs b/src/tools/cargo/tests/testsuite/check_cfg.rs
new file mode 100644
index 000000000..c35da637d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/check_cfg.rs
@@ -0,0 +1,588 @@
+//! Tests for -Zcheck-cfg.
+
+use cargo_test_support::{basic_manifest, project};
+
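+// Builds the expected `[RUNNING] [..] --check-cfg [..]` stderr pattern for a
+// given tool (`rustc`/`rustdoc`), check-cfg kind, and optional values,
+// accounting for the different argument quoting used on Windows.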
+macro_rules! x {
+ ($tool:tt => $what:tt $(of $who:tt)?) => {{
+ #[cfg(windows)]
+ {
+ concat!("[RUNNING] [..]", $tool, "[..] --check-cfg ",
+ $what, '(', $($who,)* ')', "[..]")
+ }
+ #[cfg(not(windows))]
+ {
+ concat!("[RUNNING] [..]", $tool, "[..] --check-cfg '",
+ $what, '(', $($who,)* ')', "'", "[..]")
+ }
+ }};
+ ($tool:tt => $what:tt of $who:tt with $($values:tt)*) => {{
+ #[cfg(windows)]
+ {
+ concat!("[RUNNING] [..]", $tool, "[..] --check-cfg \"",
+ $what, '(', $who, $(", ", "/\"", $values, "/\"",)* ")", '"', "[..]")
+ }
+ #[cfg(not(windows))]
+ {
+ concat!("[RUNNING] [..]", $tool, "[..] --check-cfg '",
+ $what, '(', $who, $(", ", "\"", $values, "\"",)* ")", "'", "[..]")
+ }
+ }};
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ f_a = []
+ f_b = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=features")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn features_with_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar/" }
+
+ [features]
+ f_a = []
+ f_b = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[allow(dead_code)] fn bar() {}")
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=features")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "values" of "feature"))
+ .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn features_with_opt_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar/", optional = true }
+
+ [features]
+ default = ["bar"]
+ f_a = []
+ f_b = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[allow(dead_code)] fn bar() {}")
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=features")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "values" of "feature"))
+ .with_stderr_contains(x!("rustc" => "values" of "feature" with "bar" "default" "f_a" "f_b"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn features_with_namespaced_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar/", optional = true }
+
+ [features]
+ f_a = ["dep:bar"]
+ f_b = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[allow(dead_code)] fn bar() {}")
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=features")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn well_known_names() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=names")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "names"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn well_known_values() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=values")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "values"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn cli_all_options() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ f_a = []
+ f_b = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=features,names,values")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "names"))
+ .with_stderr_contains(x!("rustc" => "values"))
+ .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn features_with_cargo_check() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ f_a = []
+ f_b = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=features")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn well_known_names_with_check() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=names")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "names"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn well_known_values_with_check() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=values")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "values"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn features_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ f_a = []
+ f_b = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("test -v -Zcheck-cfg=features")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn features_doctest() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ default = ["f_a"]
+ f_a = []
+ f_b = []
+ "#,
+ )
+ .file("src/lib.rs", "#[allow(dead_code)] fn foo() {}")
+ .build();
+
+ p.cargo("test -v --doc -Zcheck-cfg=features")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "values" of "feature" with "default" "f_a" "f_b"))
+ .with_stderr_contains(x!("rustdoc" => "values" of "feature" with "default" "f_a" "f_b"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn well_known_names_test() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("test -v -Zcheck-cfg=names")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "names"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn well_known_values_test() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("test -v -Zcheck-cfg=values")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "values"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn well_known_names_doctest() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/lib.rs", "#[allow(dead_code)] fn foo() {}")
+ .build();
+
+ p.cargo("test -v --doc -Zcheck-cfg=names")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "names"))
+ .with_stderr_contains(x!("rustdoc" => "names"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn well_known_values_doctest() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/lib.rs", "#[allow(dead_code)] fn foo() {}")
+ .build();
+
+ p.cargo("test -v --doc -Zcheck-cfg=values")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "values"))
+ .with_stderr_contains(x!("rustdoc" => "values"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn features_doc() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ default = ["f_a"]
+ f_a = []
+ f_b = []
+ "#,
+ )
+ .file("src/lib.rs", "#[allow(dead_code)] fn foo() {}")
+ .build();
+
+ p.cargo("doc -v -Zcheck-cfg=features")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustdoc" => "values" of "feature" with "default" "f_a" "f_b"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn build_script_feedback() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-check-cfg=names(foo)"); }"#,
+ )
+ .build();
+
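+ // The `cargo:rustc-check-cfg=names(foo)` directive printed by build.rs is the
+ // input that `-Zcheck-cfg=output` consumes; the assertion below checks that
+ // Cargo forwards it to the `rustc` invocation as a `--check-cfg` argument.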
+ p.cargo("check -v -Zcheck-cfg=output")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "names" of "foo"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn build_script_doc() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"fn main() { println!("cargo:rustc-check-cfg=names(foo)"); }"#,
+ )
+ .build();
+ p.cargo("doc -v -Zcheck-cfg=output")
+ .with_stderr_does_not_contain("rustc [..] --check-cfg [..]")
+ .with_stderr_contains(x!("rustdoc" => "names" of "foo"))
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[..]/build-script-build`
+[DOCUMENTING] foo [..]
+[RUNNING] `rustdoc [..] src/main.rs [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn build_script_override() {
+ let target = cargo_test_support::rustc_host();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ links = "a"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("build.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}.a]
+ rustc-check-cfg = ["names(foo)"]
+ "#,
+ target
+ ),
+ )
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=output")
+ .with_stderr_contains(x!("rustc" => "names" of "foo"))
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn build_script_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"fn main() {
+ println!("cargo:rustc-check-cfg=names(foo)");
+ println!("cargo:rustc-cfg=foo");
+ }"#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ ///
+ /// ```
+ /// extern crate foo;
+ ///
+ /// fn main() {
+ /// foo::foo()
+ /// }
+ /// ```
+ ///
+ #[cfg(foo)]
+ pub fn foo() {}
+
+ #[cfg(foo)]
+ #[test]
+ fn test_foo() {
+ foo()
+ }
+ "#,
+ )
+ .file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}")
+ .build();
+
+ p.cargo("test -v -Zcheck-cfg=output")
+ .with_stderr_contains(x!("rustc" => "names" of "foo"))
+ .with_stderr_contains(x!("rustdoc" => "names" of "foo"))
+ .with_stdout_contains("test test_foo ... ok")
+ .with_stdout_contains("test test_bar ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 3)
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn config_valid() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ f_a = []
+ f_b = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [unstable]
+ check-cfg = ["features", "names", "values"]
+ "#,
+ )
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg=features,names,values")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "names"))
+ .with_stderr_contains(x!("rustc" => "values"))
+ .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b"))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn config_invalid() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [unstable]
+ check-cfg = ["va"]
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains("error: unstable check-cfg only takes `features`, `names`, `values` or `output` as valid inputs")
+ .with_status(101)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/clean.rs b/src/tools/cargo/tests/testsuite/clean.rs
new file mode 100644
index 000000000..e0885fd26
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/clean.rs
@@ -0,0 +1,675 @@
+//! Tests for the `cargo clean` command.
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{
+ basic_bin_manifest, basic_manifest, git, main_file, project, project_in, rustc_host,
+};
+use glob::GlobError;
+use std::env;
+use std::path::{Path, PathBuf};
+
+#[cargo_test]
+fn cargo_clean_simple() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.build_dir().is_dir());
+
+ p.cargo("clean").run();
+ assert!(!p.build_dir().is_dir());
+}
+
+#[cargo_test]
+fn different_dir() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .file("src/bar/a.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.build_dir().is_dir());
+
+ p.cargo("clean").cwd("src").with_stdout("").run();
+ assert!(!p.build_dir().is_dir());
+}
+
+#[cargo_test]
+fn clean_multiple_packages() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.d1]
+ path = "d1"
+ [dependencies.d2]
+ path = "d2"
+
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .file("d1/Cargo.toml", &basic_bin_manifest("d1"))
+ .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+ .file("d2/Cargo.toml", &basic_bin_manifest("d2"))
+ .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }")
+ .build();
+
+ p.cargo("build -p d1 -p d2 -p foo").run();
+
+ let d1_path = &p
+ .build_dir()
+ .join("debug")
+ .join(format!("d1{}", env::consts::EXE_SUFFIX));
+ let d2_path = &p
+ .build_dir()
+ .join("debug")
+ .join(format!("d2{}", env::consts::EXE_SUFFIX));
+
+ assert!(p.bin("foo").is_file());
+ assert!(d1_path.is_file());
+ assert!(d2_path.is_file());
+
+ p.cargo("clean -p d1 -p d2")
+ .cwd("src")
+ .with_stdout("")
+ .run();
+ assert!(p.bin("foo").is_file());
+ assert!(!d1_path.is_file());
+ assert!(!d2_path.is_file());
+}
+
+#[cargo_test]
+fn clean_multiple_packages_in_glob_char_path() {
+ let p = project_in("[d1]")
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+ let foo_path = &p.build_dir().join("debug").join("deps");
+
+ #[cfg(not(target_env = "msvc"))]
+ let file_glob = "foo-*";
+
+ #[cfg(target_env = "msvc")]
+ let file_glob = "foo.pdb";
+
+ // Assert that build artifacts are produced
+ p.cargo("build").run();
+ assert_ne!(get_build_artifacts(foo_path, file_glob).len(), 0);
+
+ // Assert that build artifacts are destroyed
+ p.cargo("clean -p foo").run();
+ assert_eq!(get_build_artifacts(foo_path, file_glob).len(), 0);
+}
+
+fn get_build_artifacts(path: &PathBuf, file_glob: &str) -> Vec<Result<PathBuf, GlobError>> {
+ let pattern = path.to_str().expect("expected utf-8 path");
+ let pattern = glob::Pattern::escape(pattern);
+
+ let path = PathBuf::from(pattern).join(file_glob);
+ let path = path.to_str().expect("expected utf-8 path");
+ glob::glob(path)
+ .expect("expected glob to run")
+ .into_iter()
+ .collect::<Vec<Result<PathBuf, GlobError>>>()
+}
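+// `get_build_artifacts` escapes the directory portion because
+// `clean_multiple_packages_in_glob_char_path` builds inside a directory
+// literally named `[d1]`. As a sketch (hypothetical path, assuming the `glob`
+// crate's bracket-based escaping), `Pattern::escape` rewrites
+// `.../[d1]/foo/target/debug/deps` to `.../[[]d1[]]/foo/target/debug/deps`
+// before `foo-*` is appended, so the brackets match literally rather than as a
+// character class.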
+
+#[cargo_test]
+fn clean_p_only_cleans_specified_package() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = [
+ "foo",
+ "foo_core",
+ "foo-base",
+ ]
+ "#,
+ )
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "//! foo")
+ .file("foo_core/Cargo.toml", &basic_manifest("foo_core", "0.1.0"))
+ .file("foo_core/src/lib.rs", "//! foo_core")
+ .file("foo-base/Cargo.toml", &basic_manifest("foo-base", "0.1.0"))
+ .file("foo-base/src/lib.rs", "//! foo-base")
+ .build();
+
+ let fingerprint_path = &p.build_dir().join("debug").join(".fingerprint");
+
+ p.cargo("build -p foo -p foo_core -p foo-base").run();
+
+ let mut fingerprint_names = get_fingerprints_without_hashes(fingerprint_path);
+
+ // Artifacts present for all after building
+ assert!(fingerprint_names.iter().any(|e| e == "foo"));
+ let num_foo_core_artifacts = fingerprint_names
+ .iter()
+ .filter(|&e| e == "foo_core")
+ .count();
+ assert_ne!(num_foo_core_artifacts, 0);
+ let num_foo_base_artifacts = fingerprint_names
+ .iter()
+ .filter(|&e| e == "foo-base")
+ .count();
+ assert_ne!(num_foo_base_artifacts, 0);
+
+ p.cargo("clean -p foo").run();
+
+ fingerprint_names = get_fingerprints_without_hashes(fingerprint_path);
+
+ // Cleaning `foo` leaves artifacts for the others
+ assert!(!fingerprint_names.iter().any(|e| e == "foo"));
+ assert_eq!(
+ fingerprint_names
+ .iter()
+ .filter(|&e| e == "foo_core")
+ .count(),
+ num_foo_core_artifacts,
+ );
+ assert_eq!(
+ fingerprint_names
+ .iter()
+ .filter(|&e| e == "foo-base")
+ .count(),
+ num_foo_base_artifacts,
+ );
+}
+
+fn get_fingerprints_without_hashes(fingerprint_path: &Path) -> Vec<String> {
+ std::fs::read_dir(fingerprint_path)
+ .expect("Build dir should be readable")
+ .filter_map(|entry| entry.ok())
+ .map(|entry| {
+ let name = entry.file_name();
+ let name = name
+ .into_string()
+ .expect("fingerprint name should be UTF-8");
+ name.rsplit_once('-')
+ .expect("Name should contain at least one hyphen")
+ .0
+ .to_owned()
+ })
+ .collect()
+}
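+// Sketch of what `get_fingerprints_without_hashes` returns (hypothetical hash):
+// a fingerprint directory entry such as `foo_core-0123456789abcdef` is split on
+// its last hyphen and only the `foo_core` prefix is kept, so the assertions
+// above compare per-package entry counts independently of the metadata hash.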
+
+#[cargo_test]
+fn clean_release() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("build --release").run();
+
+ p.cargo("clean -p foo").run();
+ p.cargo("build --release").with_stdout("").run();
+
+ p.cargo("clean -p foo --release").run();
+ p.cargo("build --release")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build").run();
+
+ p.cargo("clean").arg("--release").run();
+ assert!(p.build_dir().is_dir());
+ assert!(p.build_dir().join("debug").is_dir());
+ assert!(!p.build_dir().join("release").is_dir());
+}
+
+#[cargo_test]
+fn clean_doc() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc").run();
+
+ let doc_path = &p.build_dir().join("doc");
+
+ assert!(doc_path.is_dir());
+
+ p.cargo("clean --doc").run();
+
+ assert!(!doc_path.is_dir());
+ assert!(p.build_dir().is_dir());
+}
+
+#[cargo_test]
+fn build_script() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"
+ use std::path::PathBuf;
+ use std::env;
+
+ fn main() {
+ let out = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+ if env::var("FIRST").is_ok() {
+ std::fs::File::create(out.join("out")).unwrap();
+ } else {
+ assert!(!out.join("out").exists());
+ }
+ }
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").env("FIRST", "1").run();
+ p.cargo("clean -p foo").run();
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[..]build-script-build`
+[RUNNING] `rustc [..] src/main.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn clean_git() {
+ let git = git::new("dep", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ dep = {{ git = '{}' }}
+ "#,
+ git.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("clean -p dep").with_stdout("").run();
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn registry() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.1.0").publish();
+
+ p.cargo("build").run();
+ p.cargo("clean -p bar").with_stdout("").run();
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn clean_verbose() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.1.0").publish();
+
+ p.cargo("build").run();
+ let mut expected = String::from(
+ "\
+[REMOVING] [..]target/debug/.fingerprint/bar[..]
+[REMOVING] [..]target/debug/deps/libbar[..].rlib
+[REMOVING] [..]target/debug/deps/bar-[..].d
+[REMOVING] [..]target/debug/deps/libbar[..].rmeta
+",
+ );
+ if cfg!(target_os = "macos") {
+ // Since Rust 1.69, split-debuginfo=unpacked also emits unpacked debuginfo (`.o` files) for rlibs.
+ for obj in p.glob("target/debug/deps/bar-*.o") {
+ expected.push_str(&format!("[REMOVING] [..]{}", obj.unwrap().display()));
+ }
+ }
+ p.cargo("clean -p bar --verbose")
+ .with_stderr_unordered(&expected)
+ .run();
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn clean_remove_rlib_rmeta() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.target_debug_dir().join("libfoo.rlib").exists());
+ let rmeta = p.glob("target/debug/deps/*.rmeta").next().unwrap().unwrap();
+ assert!(rmeta.exists());
+ p.cargo("clean -p foo").run();
+ assert!(!p.target_debug_dir().join("libfoo.rlib").exists());
+ assert!(!rmeta.exists());
+}
+
+#[cargo_test]
+fn package_cleans_all_the_things() {
+ // -p cleans everything
+ // Use dashes everywhere to make sure dash/underscore stuff is handled.
+ for crate_type in &["rlib", "dylib", "cdylib", "staticlib", "proc-macro"] {
+ // Try each crate type individually since the behavior changes when
+ // they are combined.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo-bar"
+ version = "0.1.0"
+
+ [lib]
+ crate-type = ["{}"]
+ "#,
+ crate_type
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("build").run();
+ p.cargo("clean -p foo-bar").run();
+ assert_all_clean(&p.build_dir());
+ }
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo-bar"
+ version = "0.1.0"
+ edition = "2018"
+
+ [lib]
+ crate-type = ["rlib", "dylib", "staticlib"]
+
+ [[example]]
+ name = "foo-ex-rlib"
+ crate-type = ["rlib"]
+ test = true
+
+ [[example]]
+ name = "foo-ex-cdylib"
+ crate-type = ["cdylib"]
+ test = true
+
+ [[example]]
+ name = "foo-ex-bin"
+ test = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/lib/some-main.rs", "fn main() {}")
+ .file("src/bin/other-main.rs", "fn main() {}")
+ .file("examples/foo-ex-rlib.rs", "")
+ .file("examples/foo-ex-cdylib.rs", "")
+ .file("examples/foo-ex-bin.rs", "fn main() {}")
+ .file("tests/foo-test.rs", "")
+ .file("benches/foo-bench.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --all-targets")
+ .env("CARGO_INCREMENTAL", "1")
+ .run();
+ p.cargo("test --all-targets")
+ .env("CARGO_INCREMENTAL", "1")
+ .run();
+ p.cargo("check --all-targets")
+ .env("CARGO_INCREMENTAL", "1")
+ .run();
+ p.cargo("clean -p foo-bar").run();
+ assert_all_clean(&p.build_dir());
+
+ // Try some targets.
+ p.cargo("build --all-targets --target")
+ .arg(rustc_host())
+ .run();
+ p.cargo("clean -p foo-bar --target").arg(rustc_host()).run();
+ assert_all_clean(&p.build_dir());
+}
+
+// Ensures that all files for the package have been deleted.
+#[track_caller]
+fn assert_all_clean(build_dir: &Path) {
+ let walker = walkdir::WalkDir::new(build_dir).into_iter();
+ for entry in walker.filter_entry(|e| {
+ let path = e.path();
+ // This is a known limitation: clean can't differentiate between
+ // build scripts from different packages.
+ !(path
+ .file_name()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ .starts_with("build_script_build")
+ && path
+ .parent()
+ .unwrap()
+ .file_name()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ == "incremental")
+ }) {
+ let entry = entry.unwrap();
+ let path = entry.path();
+ if let ".rustc_info.json" | ".cargo-lock" | "CACHEDIR.TAG" =
+ path.file_name().unwrap().to_str().unwrap()
+ {
+ continue;
+ }
+ if path.is_symlink() || path.is_file() {
+ panic!("{:?} was not cleaned", path);
+ }
+ }
+}
+
+#[cargo_test]
+fn clean_spec_multiple() {
+ // clean -p foo where foo matches multiple versions
+ Package::new("bar", "1.0.0").publish();
+ Package::new("bar", "2.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar1 = {version="1.0", package="bar"}
+ bar2 = {version="2.0", package="bar"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+
+ // Check suggestion for bad pkgid.
+ p.cargo("clean -p baz")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: package ID specification `baz` did not match any packages
+
+<tab>Did you mean `bar`?
+",
+ )
+ .run();
+
+ p.cargo("clean -p bar:1.0.0")
+ .with_stderr(
+ "warning: version qualifier in `-p bar:1.0.0` is ignored, \
+ cleaning all versions of `bar` found",
+ )
+ .run();
+ let mut walker = walkdir::WalkDir::new(p.build_dir())
+ .into_iter()
+ .filter_map(|e| e.ok())
+ .filter(|e| {
+ let n = e.file_name().to_str().unwrap();
+ n.starts_with("bar") || n.starts_with("libbar")
+ });
+ if let Some(e) = walker.next() {
+ panic!("{:?} was not cleaned", e.path());
+ }
+}
+
+#[cargo_test]
+fn clean_spec_reserved() {
+ // Clean when a target (like a test) has a reserved name. In this case,
+ // make sure `clean -p` doesn't delete the reserved directory `build` when
+ // there is a test named `build`.
+ Package::new("bar", "1.0.0")
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("tests/build.rs", "")
+ .build();
+
+ p.cargo("build --all-targets").run();
+ assert!(p.target_debug_dir().join("build").is_dir());
+ let build_test = p.glob("target/debug/deps/build-*").next().unwrap().unwrap();
+ assert!(build_test.exists());
+ // Tests are never "uplifted".
+ assert!(p.glob("target/debug/build-*").next().is_none());
+
+ p.cargo("clean -p foo").run();
+ // Should not delete this.
+ assert!(p.target_debug_dir().join("build").is_dir());
+
+ // This should not rebuild bar.
+ p.cargo("build -v --all-targets")
+ .with_stderr(
+ "\
+[FRESH] bar v1.0.0
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] `rustc [..]
+[RUNNING] `rustc [..]
+[RUNNING] `rustc [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/collisions.rs b/src/tools/cargo/tests/testsuite/collisions.rs
new file mode 100644
index 000000000..77e05dd9c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/collisions.rs
@@ -0,0 +1,550 @@
+//! Tests for when multiple artifacts have the same output filename.
+//! See https://github.com/rust-lang/cargo/issues/6313 for more details.
+//! Ideally these should never happen, but I don't think we'll ever be able to
+//! prevent all collisions.
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_manifest, cross_compile, project};
+use std::env;
+
+#[cargo_test]
+fn collision_dylib() {
+ // Path dependencies don't include metadata hash in filename for dylibs.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "1.0.0"
+
+ [lib]
+ crate-type = ["dylib"]
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "1.0.0"
+
+ [lib]
+ crate-type = ["dylib"]
+ name = "a"
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+
+ // `j=1` is required because on Windows you'll get an error due to
+ // two processes writing to the file at the same time.
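+ // The expected file name below is built from `std::env::consts::DLL_PREFIX`
+ // and `DLL_SUFFIX`, so the colliding artifact is `liba.so` on Linux,
+ // `liba.dylib` on macOS, and `a.dll` on Windows.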
+ p.cargo("build -j=1")
+ .with_stderr_contains(&format!("\
+[WARNING] output filename collision.
+The lib target `a` in package `b v1.0.0 ([..]/foo/b)` has the same output filename as the lib target `a` in package `a v1.0.0 ([..]/foo/a)`.
+Colliding filename is: [..]/foo/target/debug/deps/{}a{}
+The targets should have unique names.
+Consider changing their names to be unique or compiling them separately.
+This may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>.
+", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX))
+ .run();
+}
+
+#[cargo_test]
+fn collision_example() {
+ // Examples in a workspace can easily collide.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "1.0.0"))
+ .file("a/examples/ex1.rs", "fn main() {}")
+ .file("b/Cargo.toml", &basic_manifest("b", "1.0.0"))
+ .file("b/examples/ex1.rs", "fn main() {}")
+ .build();
+
+ // `j=1` is required because on Windows you'll get an error due to
+ // two processes writing to the file at the same time.
+ p.cargo("build --examples -j=1")
+ .with_stderr_contains("\
+[WARNING] output filename collision.
+The example target `ex1` in package `b v1.0.0 ([..]/foo/b)` has the same output filename as the example target `ex1` in package `a v1.0.0 ([..]/foo/a)`.
+Colliding filename is: [..]/foo/target/debug/examples/ex1[EXE]
+The targets should have unique names.
+Consider changing their names to be unique or compiling them separately.
+This may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>.
+")
+ .run();
+}
+
+#[cargo_test]
+// See https://github.com/rust-lang/cargo/issues/7493
+#[cfg_attr(
+ any(target_env = "msvc", target_vendor = "apple"),
+ ignore = "--out-dir and examples are currently broken on MSVC and apple"
+)]
+fn collision_export() {
+ // `--out-dir` combines some things which can cause conflicts.
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("examples/foo.rs", "fn main() {}")
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ // -j1 to avoid issues with two processes writing to the same file at the
+ // same time.
+ p.cargo("build -j1 --out-dir=out -Z unstable-options --bins --examples")
+ .masquerade_as_nightly_cargo(&["out-dir"])
+ .with_stderr_contains("\
+[WARNING] `--out-dir` filename collision.
+The example target `foo` in package `foo v1.0.0 ([..]/foo)` has the same output filename as the bin target `foo` in package `foo v1.0.0 ([..]/foo)`.
+Colliding filename is: [..]/foo/out/foo[EXE]
+The exported filenames should be unique.
+Consider changing their names to be unique or compiling them separately.
+This may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>.
+")
+ .run();
+}
+
+#[cargo_test]
+fn collision_doc() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ foo2 = { path = "foo2" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "foo2/Cargo.toml",
+ r#"
+ [package]
+ name = "foo2"
+ version = "0.1.0"
+
+ [lib]
+ name = "foo"
+ "#,
+ )
+ .file("foo2/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc -j=1")
+ .with_stderr_contains(
+ "\
+[WARNING] output filename collision.
+The lib target `foo` in package `foo2 v0.1.0 ([..]/foo/foo2)` has the same output \
+filename as the lib target `foo` in package `foo v0.1.0 ([..]/foo)`.
+Colliding filename is: [..]/foo/target/doc/foo/index.html
+The targets should have unique names.
+This is a known bug where multiple crates with the same name use
+the same path; see <https://github.com/rust-lang/cargo/issues/6313>.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn collision_doc_multiple_versions() {
+ // Multiple versions of the same package.
+ Package::new("old-dep", "1.0.0").publish();
+ Package::new("bar", "1.0.0").dep("old-dep", "1.0").publish();
+ // Note that this removes "old-dep". Just checking what happens when there
+ // are orphans.
+ Package::new("bar", "2.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ bar2 = { package="bar", version="2.0" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Should only document bar 2.0, should not document old-dep.
+ p.cargo("doc")
+ .with_stderr_unordered(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v2.0.0 [..]
+[DOWNLOADED] bar v1.0.0 [..]
+[DOWNLOADED] old-dep v1.0.0 [..]
+[CHECKING] old-dep v1.0.0
+[CHECKING] bar v2.0.0
+[CHECKING] bar v1.0.0
+[DOCUMENTING] bar v2.0.0
+[FINISHED] [..]
+[DOCUMENTING] foo v0.1.0 [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn collision_doc_host_target_feature_split() {
+ // Same dependency built twice due to different features.
+ //
+ // foo v0.1.0
+ // ├── common v1.0.0
+ // │ └── common-dep v1.0.0
+ // └── pm v0.1.0 (proc-macro)
+ // └── common v1.0.0
+ // └── common-dep v1.0.0
+ // [build-dependencies]
+ // └── common-dep v1.0.0
+ //
+ // Here `common` and `common-dep` are built twice. `common-dep` has
+ // different features for host versus target.
+ Package::new("common-dep", "1.0.0")
+ .feature("bdep-feat", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ /// Some doc
+ pub fn f() {}
+
+ /// Another doc
+ #[cfg(feature = "bdep-feat")]
+ pub fn bdep_func() {}
+ "#,
+ )
+ .publish();
+ Package::new("common", "1.0.0")
+ .dep("common-dep", "1.0")
+ .file(
+ "src/lib.rs",
+ r#"
+ /// Some doc
+ pub fn f() {}
+ "#,
+ )
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "2"
+
+ [dependencies]
+ pm = { path = "pm" }
+ common = "1.0"
+
+ [build-dependencies]
+ common-dep = { version = "1.0", features = ["bdep-feat"] }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ /// Some doc
+ pub fn f() {}
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+ edition = "2018"
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ common = "1.0"
+ "#,
+ )
+ .file(
+ "pm/src/lib.rs",
+ r#"
+ use proc_macro::TokenStream;
+
+ /// Some doc
+ #[proc_macro]
+ pub fn pm(_input: TokenStream) -> TokenStream {
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ .build();
+
+ // No warnings, no duplicates, common and common-dep only documented once.
+ p.cargo("doc")
+ // Cannot check full output due to https://github.com/rust-lang/cargo/issues/9076
+ .with_stderr_does_not_contain("[WARNING][..]")
+ .run();
+
+ assert!(p.build_dir().join("doc/common_dep/fn.f.html").exists());
+ assert!(!p
+ .build_dir()
+ .join("doc/common_dep/fn.bdep_func.html")
+ .exists());
+ assert!(p.build_dir().join("doc/common/fn.f.html").exists());
+ assert!(p.build_dir().join("doc/pm/macro.pm.html").exists());
+ assert!(p.build_dir().join("doc/foo/fn.f.html").exists());
+}
+
+#[cargo_test]
+fn collision_doc_profile_split() {
+ // Same dependency built twice due to different profile settings.
+ Package::new("common", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ pm = { path = "pm" }
+ common = "1.0"
+
+ [profile.dev]
+ opt-level = 2
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+
+ [dependencies]
+ common = "1.0"
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file("pm/src/lib.rs", "")
+ .build();
+
+ // Just to verify that common is normally built twice.
+ // This is unordered because in rare cases `pm` may start
+ // building in between the two `common` builds.
+ p.cargo("build -v")
+ .with_stderr_unordered(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] common v1.0.0 [..]
+[COMPILING] common v1.0.0
+[RUNNING] `rustc --crate-name common [..]
+[RUNNING] `rustc --crate-name common [..]
+[COMPILING] pm v0.1.0 [..]
+[RUNNING] `rustc --crate-name pm [..]
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // Should only document common once, no warnings.
+ p.cargo("doc")
+ .with_stderr_unordered(
+ "\
+[CHECKING] common v1.0.0
+[DOCUMENTING] common v1.0.0
+[DOCUMENTING] pm v0.1.0 [..]
+[DOCUMENTING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn collision_doc_sources() {
+ // Different sources with the same package.
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ bar2 = { path = "bar", package = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc -j=1")
+ .with_stderr_unordered(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.0 [..]
+[WARNING] output filename collision.
+The lib target `bar` in package `bar v1.0.0` has the same output filename as \
+the lib target `bar` in package `bar v1.0.0 ([..]/foo/bar)`.
+Colliding filename is: [..]/foo/target/doc/bar/index.html
+The targets should have unique names.
+This is a known bug where multiple crates with the same name use
+the same path; see <https://github.com/rust-lang/cargo/issues/6313>.
+[CHECKING] bar v1.0.0 [..]
+[DOCUMENTING] bar v1.0.0 [..]
+[DOCUMENTING] bar v1.0.0
+[CHECKING] bar v1.0.0
+[DOCUMENTING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn collision_doc_target() {
+ // collision in doc with --target, doesn't fail due to orphans
+ if cross_compile::disabled() {
+ return;
+ }
+
+ Package::new("orphaned", "1.0.0").publish();
+ Package::new("bar", "1.0.0")
+ .dep("orphaned", "1.0")
+ .publish();
+ Package::new("bar", "2.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar2 = { version = "2.0", package="bar" }
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("doc --target")
+ .arg(cross_compile::alternate())
+ .with_stderr_unordered(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] orphaned v1.0.0 [..]
+[DOWNLOADED] bar v2.0.0 [..]
+[DOWNLOADED] bar v1.0.0 [..]
+[CHECKING] orphaned v1.0.0
+[DOCUMENTING] bar v2.0.0
+[CHECKING] bar v2.0.0
+[CHECKING] bar v1.0.0
+[DOCUMENTING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn collision_with_root() {
+ // Check for a doc collision between a root package and a dependency.
+ // In this case, `foo-macro` comes from both the workspace and crates.io.
+ // This checks that the duplicate correction code doesn't choke on this
+ // by removing the root unit.
+ Package::new("foo-macro", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["abc", "foo-macro"]
+ "#,
+ )
+ .file(
+ "abc/Cargo.toml",
+ r#"
+ [package]
+ name = "abc"
+ version = "1.0.0"
+
+ [dependencies]
+ foo-macro = "1.0"
+ "#,
+ )
+ .file("abc/src/lib.rs", "")
+ .file(
+ "foo-macro/Cargo.toml",
+ r#"
+ [package]
+ name = "foo-macro"
+ version = "1.0.0"
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ abc = {path="../abc"}
+ "#,
+ )
+ .file("foo-macro/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc -j=1")
+ .with_stderr_unordered("\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo-macro v1.0.0 [..]
+warning: output filename collision.
+The lib target `foo-macro` in package `foo-macro v1.0.0` has the same output filename as the lib target `foo-macro` in package `foo-macro v1.0.0 [..]`.
+Colliding filename is: [CWD]/target/doc/foo_macro/index.html
+The targets should have unique names.
+This is a known bug where multiple crates with the same name use
+the same path; see <https://github.com/rust-lang/cargo/issues/6313>.
+[CHECKING] foo-macro v1.0.0
+[DOCUMENTING] foo-macro v1.0.0
+[CHECKING] abc v1.0.0 [..]
+[DOCUMENTING] foo-macro v1.0.0 [..]
+[DOCUMENTING] abc v1.0.0 [..]
+[FINISHED] [..]
+")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/concurrent.rs b/src/tools/cargo/tests/testsuite/concurrent.rs
new file mode 100644
index 000000000..fe4ecfc42
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/concurrent.rs
@@ -0,0 +1,507 @@
+//! Tests for running multiple `cargo` processes at the same time.
+
+use std::fs;
+use std::net::TcpListener;
+use std::process::Stdio;
+use std::sync::mpsc::channel;
+use std::thread;
+use std::{env, str};
+
+use cargo_test_support::cargo_process;
+use cargo_test_support::git;
+use cargo_test_support::install::{assert_has_installed_exe, cargo_home};
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_manifest, execs, project, slow_cpu_multiplier};
+
+fn pkg(name: &str, vers: &str) {
+ Package::new(name, vers)
+ .file("src/main.rs", "fn main() {{}}")
+ .publish();
+}
+
+#[cargo_test]
+fn multiple_installs() {
+ let p = project()
+ .no_manifest()
+ .file("a/Cargo.toml", &basic_manifest("foo", "0.0.0"))
+ .file("a/src/main.rs", "fn main() {}")
+ .file("b/Cargo.toml", &basic_manifest("bar", "0.0.0"))
+ .file("b/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ let mut a = p.cargo("install").cwd("a").build_command();
+ let mut b = p.cargo("install").cwd("b").build_command();
+
+ a.stdout(Stdio::piped()).stderr(Stdio::piped());
+ b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+ let a = a.spawn().unwrap();
+ let b = b.spawn().unwrap();
+ let a = thread::spawn(move || a.wait_with_output().unwrap());
+ let b = b.wait_with_output().unwrap();
+ let a = a.join().unwrap();
+
+ execs().run_output(&a);
+ execs().run_output(&b);
+
+ assert_has_installed_exe(cargo_home(), "foo");
+ assert_has_installed_exe(cargo_home(), "bar");
+}
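+
+// A note on the spawn/join pattern used throughout this file: both cargo
+// processes are spawned before either is waited on, and one `wait_with_output`
+// happens on a helper thread so the main thread can drain the other child at
+// the same time, presumably to keep either process from stalling on a full
+// stdout/stderr pipe while the other is being read.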
+
+#[cargo_test]
+fn concurrent_installs() {
+ const LOCKED_BUILD: &str = "waiting for file lock on build directory";
+
+ pkg("foo", "0.0.1");
+ pkg("bar", "0.0.1");
+
+ let mut a = cargo_process("install foo").build_command();
+ let mut b = cargo_process("install bar").build_command();
+
+ a.stdout(Stdio::piped()).stderr(Stdio::piped());
+ b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+ let a = a.spawn().unwrap();
+ let b = b.spawn().unwrap();
+ let a = thread::spawn(move || a.wait_with_output().unwrap());
+ let b = b.wait_with_output().unwrap();
+ let a = a.join().unwrap();
+
+ assert!(!str::from_utf8(&a.stderr).unwrap().contains(LOCKED_BUILD));
+ assert!(!str::from_utf8(&b.stderr).unwrap().contains(LOCKED_BUILD));
+
+ execs().run_output(&a);
+ execs().run_output(&b);
+
+ assert_has_installed_exe(cargo_home(), "foo");
+ assert_has_installed_exe(cargo_home(), "bar");
+}
+
+#[cargo_test]
+fn one_install_should_be_bad() {
+ let p = project()
+ .no_manifest()
+ .file("a/Cargo.toml", &basic_manifest("foo", "0.0.0"))
+ .file("a/src/main.rs", "fn main() {}")
+ .file("b/Cargo.toml", &basic_manifest("foo", "0.0.0"))
+ .file("b/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ let mut a = p.cargo("install").cwd("a").build_command();
+ let mut b = p.cargo("install").cwd("b").build_command();
+
+ a.stdout(Stdio::piped()).stderr(Stdio::piped());
+ b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+ let a = a.spawn().unwrap();
+ let b = b.spawn().unwrap();
+ let a = thread::spawn(move || a.wait_with_output().unwrap());
+ let b = b.wait_with_output().unwrap();
+ let a = a.join().unwrap();
+
+ execs().run_output(&a);
+ execs().run_output(&b);
+
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn multiple_registry_fetches() {
+ let mut pkg = Package::new("bar", "1.0.2");
+ for i in 0..10 {
+ let name = format!("foo{}", i);
+ Package::new(&name, "1.0.0").publish();
+ pkg.dep(&name, "*");
+ }
+ pkg.publish();
+
+ let p = project()
+ .no_manifest()
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("a/src/main.rs", "fn main() {}")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ authors = []
+ version = "0.0.0"
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("b/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ let mut a = p.cargo("build").cwd("a").build_command();
+ let mut b = p.cargo("build").cwd("b").build_command();
+
+ a.stdout(Stdio::piped()).stderr(Stdio::piped());
+ b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+ let a = a.spawn().unwrap();
+ let b = b.spawn().unwrap();
+ let a = thread::spawn(move || a.wait_with_output().unwrap());
+ let b = b.wait_with_output().unwrap();
+ let a = a.join().unwrap();
+
+ execs().run_output(&a);
+ execs().run_output(&b);
+
+ let suffix = env::consts::EXE_SUFFIX;
+ assert!(p
+ .root()
+ .join("a/target/debug")
+ .join(format!("foo{}", suffix))
+ .is_file());
+ assert!(p
+ .root()
+ .join("b/target/debug")
+ .join(format!("bar{}", suffix))
+ .is_file());
+}
+
+#[cargo_test]
+fn git_same_repo_different_tags() {
+ let a = git::new("dep", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep", "0.5.0"))
+ .file("src/lib.rs", "pub fn tag1() {}")
+ });
+
+ let repo = git2::Repository::open(&a.root()).unwrap();
+ git::tag(&repo, "tag1");
+
+ a.change_file("src/lib.rs", "pub fn tag2() {}");
+ git::add(&repo);
+ git::commit(&repo);
+ git::tag(&repo, "tag2");
+
+ let p = project()
+ .no_manifest()
+ .file(
+ "a/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+
+ [dependencies]
+ dep = {{ git = '{}', tag = 'tag1' }}
+ "#,
+ a.url()
+ ),
+ )
+ .file(
+ "a/src/main.rs",
+ "extern crate dep; fn main() { dep::tag1(); }",
+ )
+ .file(
+ "b/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "bar"
+ authors = []
+ version = "0.0.0"
+
+ [dependencies]
+ dep = {{ git = '{}', tag = 'tag2' }}
+ "#,
+ a.url()
+ ),
+ )
+ .file(
+ "b/src/main.rs",
+ "extern crate dep; fn main() { dep::tag2(); }",
+ );
+ let p = p.build();
+
+ let mut a = p.cargo("build -v").cwd("a").build_command();
+ let mut b = p.cargo("build -v").cwd("b").build_command();
+
+ a.stdout(Stdio::piped()).stderr(Stdio::piped());
+ b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+ let a = a.spawn().unwrap();
+ let b = b.spawn().unwrap();
+ let a = thread::spawn(move || a.wait_with_output().unwrap());
+ let b = b.wait_with_output().unwrap();
+ let a = a.join().unwrap();
+
+ execs().run_output(&a);
+ execs().run_output(&b);
+}
+
+#[cargo_test]
+fn git_same_branch_different_revs() {
+ let a = git::new("dep", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep", "0.5.0"))
+ .file("src/lib.rs", "pub fn f1() {}")
+ });
+
+ let p = project()
+ .no_manifest()
+ .file(
+ "a/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+
+ [dependencies]
+ dep = {{ git = '{}' }}
+ "#,
+ a.url()
+ ),
+ )
+ .file(
+ "a/src/main.rs",
+ "extern crate dep; fn main() { dep::f1(); }",
+ )
+ .file(
+ "b/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "bar"
+ authors = []
+ version = "0.0.0"
+
+ [dependencies]
+ dep = {{ git = '{}' }}
+ "#,
+ a.url()
+ ),
+ )
+ .file(
+ "b/src/main.rs",
+ "extern crate dep; fn main() { dep::f2(); }",
+ );
+ let p = p.build();
+
+ // Generate a Cargo.lock pointing at the current rev, then clear out the
+ // target directory
+ p.cargo("build").cwd("a").run();
+ fs::remove_dir_all(p.root().join("a/target")).unwrap();
+
+ // Make a new commit on the master branch
+ let repo = git2::Repository::open(&a.root()).unwrap();
+ a.change_file("src/lib.rs", "pub fn f2() {}");
+ git::add(&repo);
+ git::commit(&repo);
+
+ // Now run both builds in parallel. The build of `b` should pick up the
+ // newest commit while the build of `a` should use the locked old commit.
+ let mut a = p.cargo("build").cwd("a").build_command();
+ let mut b = p.cargo("build").cwd("b").build_command();
+
+ a.stdout(Stdio::piped()).stderr(Stdio::piped());
+ b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+ let a = a.spawn().unwrap();
+ let b = b.spawn().unwrap();
+ let a = thread::spawn(move || a.wait_with_output().unwrap());
+ let b = b.wait_with_output().unwrap();
+ let a = a.join().unwrap();
+
+ execs().run_output(&a);
+ execs().run_output(&b);
+}
+
+#[cargo_test]
+fn same_project() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", "");
+ let p = p.build();
+
+ let mut a = p.cargo("build").build_command();
+ let mut b = p.cargo("build").build_command();
+
+ a.stdout(Stdio::piped()).stderr(Stdio::piped());
+ b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+ let a = a.spawn().unwrap();
+ let b = b.spawn().unwrap();
+ let a = thread::spawn(move || a.wait_with_output().unwrap());
+ let b = b.wait_with_output().unwrap();
+ let a = a.join().unwrap();
+
+ execs().run_output(&a);
+ execs().run_output(&b);
+}
+
+// Make sure that if Cargo dies while holding a lock, the lock is released and
+// the next Cargo to come along takes over cleanly.
+#[cargo_test]
+fn killing_cargo_releases_the_lock() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ r#"
+ use std::net::TcpStream;
+
+ fn main() {
+ if std::env::var("A").is_ok() {
+ TcpStream::connect(&std::env::var("ADDR").unwrap()[..])
+ .unwrap();
+ std::thread::sleep(std::time::Duration::new(10, 0));
+ }
+ }
+ "#,
+ );
+ let p = p.build();
+
+ // Our build script connects to our local TCP socket to tell us it has
+ // started; that is how we know `a` will be holding the lock when we kill it.
+ let l = TcpListener::bind("127.0.0.1:0").unwrap();
+ let mut a = p.cargo("build").build_command();
+ let mut b = p.cargo("build").build_command();
+ a.stdout(Stdio::piped()).stderr(Stdio::piped());
+ b.stdout(Stdio::piped()).stderr(Stdio::piped());
+ a.env("ADDR", l.local_addr().unwrap().to_string())
+ .env("A", "a");
+ b.env("ADDR", l.local_addr().unwrap().to_string())
+ .env_remove("A");
+
+ // Spawn `a`, wait for it to get to the build script (at which point the
+ // lock is held), then kill it.
+ let mut a = a.spawn().unwrap();
+ l.accept().unwrap();
+ a.kill().unwrap();
+
+ // Spawn `b`, then collect the output of `a`/`b` the same way the tests
+ // above do.
+ let b = b.spawn().unwrap();
+ let a = thread::spawn(move || a.wait_with_output().unwrap());
+ let b = b.wait_with_output().unwrap();
+ let a = a.join().unwrap();
+
+ // We killed `a`, so it shouldn't succeed, but `b` should have succeeded.
+ assert!(!a.status.success());
+ execs().run_output(&b);
+}
+
+#[cargo_test]
+fn debug_release_ok() {
+ let p = project().file("src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("build").run();
+ fs::remove_dir_all(p.root().join("target")).unwrap();
+
+ let mut a = p.cargo("build").build_command();
+ let mut b = p.cargo("build --release").build_command();
+ a.stdout(Stdio::piped()).stderr(Stdio::piped());
+ b.stdout(Stdio::piped()).stderr(Stdio::piped());
+ let a = a.spawn().unwrap();
+ let b = b.spawn().unwrap();
+ let a = thread::spawn(move || a.wait_with_output().unwrap());
+ let b = b.wait_with_output().unwrap();
+ let a = a.join().unwrap();
+
+ execs()
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run_output(&a);
+ execs()
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] release [optimized] target(s) in [..]
+",
+ )
+ .run_output(&b);
+}
+
+#[cargo_test]
+fn no_deadlock_with_git_dependencies() {
+ let dep1 = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+
+ let dep2 = git::new("dep2", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep2", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+
+ [dependencies]
+ dep1 = {{ git = '{}' }}
+ dep2 = {{ git = '{}' }}
+ "#,
+ dep1.url(),
+ dep2.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() { }");
+ let p = p.build();
+
+ let n_concurrent_builds = 5;
+
+ let (tx, rx) = channel();
+ for _ in 0..n_concurrent_builds {
+ let cmd = p
+ .cargo("build")
+ .build_command()
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn();
+ let tx = tx.clone();
+ thread::spawn(move || {
+ let result = cmd.unwrap().wait_with_output().unwrap();
+ tx.send(result).unwrap()
+ });
+ }
+
+ for _ in 0..n_concurrent_builds {
+ let result = rx.recv_timeout(slow_cpu_multiplier(30)).expect("Deadlock!");
+ execs().run_output(&result);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/config.rs b/src/tools/cargo/tests/testsuite/config.rs
new file mode 100644
index 000000000..92e1f4264
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/config.rs
@@ -0,0 +1,1596 @@
+//! Tests for config settings.
+
+use cargo::core::{PackageIdSpec, Shell};
+use cargo::util::config::{self, Config, Definition, SslVersionConfig, StringList};
+use cargo::util::interning::InternedString;
+use cargo::util::toml::{self as cargo_toml, VecStringOrBool as VSOB};
+use cargo::CargoResult;
+use cargo_test_support::compare;
+use cargo_test_support::{panic_error, paths, project, symlink_supported, t};
+use serde::Deserialize;
+use std::borrow::Borrow;
+use std::collections::{BTreeMap, HashMap};
+use std::fs;
+use std::io;
+use std::os;
+use std::path::{Path, PathBuf};
+
+/// Helper for constructing a `Config` object.
+pub struct ConfigBuilder {
+ env: HashMap<String, String>,
+ unstable: Vec<String>,
+ config_args: Vec<String>,
+ cwd: Option<PathBuf>,
+ enable_nightly_features: bool,
+}
+
+impl ConfigBuilder {
+ pub fn new() -> ConfigBuilder {
+ ConfigBuilder {
+ env: HashMap::new(),
+ unstable: Vec::new(),
+ config_args: Vec::new(),
+ cwd: None,
+ enable_nightly_features: false,
+ }
+ }
+
+ /// Passes a `-Z` flag.
+ pub fn unstable_flag(&mut self, s: impl Into<String>) -> &mut Self {
+ self.unstable.push(s.into());
+ self
+ }
+
+ /// Sets an environment variable.
+ pub fn env(&mut self, key: impl Into<String>, val: impl Into<String>) -> &mut Self {
+ self.env.insert(key.into(), val.into());
+ self
+ }
+
+ /// Unconditionally enable nightly features, even on stable channels.
+ pub fn nightly_features_allowed(&mut self, allowed: bool) -> &mut Self {
+ self.enable_nightly_features = allowed;
+ self
+ }
+
+ /// Passes a `--config` flag.
+ pub fn config_arg(&mut self, arg: impl Into<String>) -> &mut Self {
+ self.config_args.push(arg.into());
+ self
+ }
+
+ /// Sets the current working directory where config files will be loaded.
+ pub fn cwd(&mut self, path: impl AsRef<Path>) -> &mut Self {
+ self.cwd = Some(paths::root().join(path.as_ref()));
+ self
+ }
+
+ /// Creates the `Config`.
+ pub fn build(&self) -> Config {
+ self.build_err().unwrap()
+ }
+
+ /// Creates the `Config`, returning a Result.
+ pub fn build_err(&self) -> CargoResult<Config> {
+ let output = Box::new(fs::File::create(paths::root().join("shell.out")).unwrap());
+ let shell = Shell::from_write(output);
+ let cwd = self.cwd.clone().unwrap_or_else(|| paths::root());
+ let homedir = paths::home();
+ let mut config = Config::new(shell, cwd, homedir);
+ config.nightly_features_allowed = self.enable_nightly_features || !self.unstable.is_empty();
+ config.set_env(self.env.clone());
+ config.set_search_stop_path(paths::root());
+ config.configure(
+ 0,
+ false,
+ None,
+ false,
+ false,
+ false,
+ &None,
+ &self.unstable,
+ &self.config_args,
+ )?;
+ Ok(config)
+ }
+}
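+
+// Typical usage in the tests below, shown here for orientation: override a
+// value through the environment with
+// `ConfigBuilder::new().env("CARGO_S_F1", "456").build()`, or exercise the
+// error path with `ConfigBuilder::new().config_arg("a = ['a']").build_err()`.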
+
+fn new_config() -> Config {
+ ConfigBuilder::new().build()
+}
+
+/// Read the output from Config.
+pub fn read_output(config: Config) -> String {
+ drop(config); // Paranoid about flushing the file.
+ let path = paths::root().join("shell.out");
+ fs::read_to_string(path).unwrap()
+}
+
+#[cargo_test]
+fn read_env_vars_for_config() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ fn main() {
+ assert_eq!(env::var("NUM_JOBS").unwrap(), "100");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check").env("CARGO_BUILD_JOBS", "100").run();
+}
+
+pub fn write_config(config: &str) {
+ write_config_at(paths::root().join(".cargo/config"), config);
+}
+
+pub fn write_config_at(path: impl AsRef<Path>, contents: &str) {
+ let path = paths::root().join(path.as_ref());
+ fs::create_dir_all(path.parent().unwrap()).unwrap();
+ fs::write(path, contents).unwrap();
+}
+
+pub fn write_config_toml(config: &str) {
+ write_config_at(paths::root().join(".cargo/config.toml"), config);
+}
+
+#[cfg(unix)]
+fn symlink_file(target: &Path, link: &Path) -> io::Result<()> {
+ os::unix::fs::symlink(target, link)
+}
+
+#[cfg(windows)]
+fn symlink_file(target: &Path, link: &Path) -> io::Result<()> {
+ os::windows::fs::symlink_file(target, link)
+}
+
+fn symlink_config_to_config_toml() {
+ let toml_path = paths::root().join(".cargo/config.toml");
+ let symlink_path = paths::root().join(".cargo/config");
+ t!(symlink_file(&toml_path, &symlink_path));
+}
+
+#[track_caller]
+pub fn assert_error<E: Borrow<anyhow::Error>>(error: E, msgs: &str) {
+ let causes = error
+ .borrow()
+ .chain()
+ .enumerate()
+ .map(|(i, e)| {
+ if i == 0 {
+ e.to_string()
+ } else {
+ format!("Caused by:\n {}", e)
+ }
+ })
+ .collect::<Vec<_>>()
+ .join("\n\n");
+ assert_match(msgs, &causes);
+}
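+// Sketch of the shape `assert_error` produces (hypothetical messages): the
+// first error in the chain is printed as-is and each later cause is prefixed
+// with `Caused by:`, separated by blank lines, matching the expected strings
+// spelled out at the call sites below.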
+
+#[track_caller]
+pub fn assert_match(expected: &str, actual: &str) {
+ if let Err(e) = compare::match_exact(expected, actual, "output", "", None) {
+ panic_error("", e);
+ }
+}
+
+#[cargo_test]
+fn get_config() {
+ write_config(
+ "\
+[S]
+f1 = 123
+",
+ );
+
+ let config = new_config();
+
+ #[derive(Debug, Deserialize, Eq, PartialEq)]
+ struct S {
+ f1: Option<i64>,
+ }
+ let s: S = config.get("S").unwrap();
+ assert_eq!(s, S { f1: Some(123) });
+ let config = ConfigBuilder::new().env("CARGO_S_F1", "456").build();
+ let s: S = config.get("S").unwrap();
+ assert_eq!(s, S { f1: Some(456) });
+}
+
+#[cfg(windows)]
+#[cargo_test]
+fn environment_variable_casing() {
+ // Issue #11814: Environment variable names are case-insensitive on Windows.
+ let config = ConfigBuilder::new()
+ .env("Path", "abc")
+ .env("Two-Words", "abc")
+ .env("two_words", "def")
+ .build();
+
+ let var = config.get_env("PATH").unwrap();
+ assert_eq!(var, String::from("abc"));
+
+ let var = config.get_env("path").unwrap();
+ assert_eq!(var, String::from("abc"));
+
+ let var = config.get_env("TWO-WORDS").unwrap();
+ assert_eq!(var, String::from("abc"));
+
+ // Make sure that we can still distinguish between dashes and underscores
+ // in variable names.
+ let var = config.get_env("Two_Words").unwrap();
+ assert_eq!(var, String::from("def"));
+}
+
+#[cargo_test]
+fn config_works_with_extension() {
+ write_config_toml(
+ "\
+[foo]
+f1 = 1
+",
+ );
+
+ let config = new_config();
+
+ assert_eq!(config.get::<Option<i32>>("foo.f1").unwrap(), Some(1));
+}
+
+#[cargo_test]
+fn config_ambiguous_filename_symlink_doesnt_warn() {
+ // Windows requires special permissions to create symlinks.
+ // If we don't have permission, just skip this test.
+ if !symlink_supported() {
+ return;
+ };
+
+ write_config_toml(
+ "\
+[foo]
+f1 = 1
+",
+ );
+
+ symlink_config_to_config_toml();
+
+ let config = new_config();
+
+ assert_eq!(config.get::<Option<i32>>("foo.f1").unwrap(), Some(1));
+
+ // It should NOT have warned for the symlink.
+ let output = read_output(config);
+ assert_eq!(output, "");
+}
+
+#[cargo_test]
+fn config_ambiguous_filename() {
+ write_config(
+ "\
+[foo]
+f1 = 1
+",
+ );
+
+ write_config_toml(
+ "\
+[foo]
+f1 = 2
+",
+ );
+
+ let config = new_config();
+
+ // It should use the value from the one without the extension for
+ // backwards compatibility.
+ assert_eq!(config.get::<Option<i32>>("foo.f1").unwrap(), Some(1));
+
+ // But it also should have warned.
+ let output = read_output(config);
+ let expected = "\
+warning: Both `[..]/.cargo/config` and `[..]/.cargo/config.toml` exist. Using `[..]/.cargo/config`
+";
+ assert_match(expected, &output);
+}
+
+#[cargo_test]
+fn config_unused_fields() {
+ write_config(
+ "\
+[S]
+unused = 456
+",
+ );
+
+ let config = ConfigBuilder::new()
+ .env("CARGO_S_UNUSED2", "1")
+ .env("CARGO_S2_UNUSED", "2")
+ .build();
+
+ #[derive(Debug, Deserialize, Eq, PartialEq)]
+ struct S {
+ f1: Option<i64>,
+ }
+ // This prints a warning (verified below).
+ let s: S = config.get("S").unwrap();
+ assert_eq!(s, S { f1: None });
+ // This does not print anything; we cannot easily or reliably warn for
+ // environment variables.
+ let s: S = config.get("S2").unwrap();
+ assert_eq!(s, S { f1: None });
+
+ // Verify the warnings.
+ let output = read_output(config);
+ let expected = "\
+warning: unused config key `S.unused` in `[..]/.cargo/config`
+";
+ assert_match(expected, &output);
+}
+
+#[cargo_test]
+fn config_load_toml_profile() {
+ write_config(
+ "\
+[profile.dev]
+opt-level = 's'
+lto = true
+codegen-units=4
+debug = true
+debug-assertions = true
+rpath = true
+panic = 'abort'
+overflow-checks = true
+incremental = true
+
+[profile.dev.build-override]
+opt-level = 1
+
+[profile.dev.package.bar]
+codegen-units = 9
+
+[profile.no-lto]
+inherits = 'dev'
+dir-name = 'without-lto'
+lto = false
+",
+ );
+
+ let config = ConfigBuilder::new()
+ .unstable_flag("advanced-env")
+ .env("CARGO_PROFILE_DEV_CODEGEN_UNITS", "5")
+ .env("CARGO_PROFILE_DEV_BUILD_OVERRIDE_CODEGEN_UNITS", "11")
+ .env("CARGO_PROFILE_DEV_PACKAGE_env_CODEGEN_UNITS", "13")
+ .env("CARGO_PROFILE_DEV_PACKAGE_bar_OPT_LEVEL", "2")
+ .build();
+
+ // TODO: don't use the actual `TomlProfile`.
+ let p: cargo_toml::TomlProfile = config.get("profile.dev").unwrap();
+ let mut packages = BTreeMap::new();
+ let key =
+ cargo_toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("bar").unwrap());
+ let o_profile = cargo_toml::TomlProfile {
+ opt_level: Some(cargo_toml::TomlOptLevel("2".to_string())),
+ codegen_units: Some(9),
+ ..Default::default()
+ };
+ packages.insert(key, o_profile);
+ let key =
+ cargo_toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("env").unwrap());
+ let o_profile = cargo_toml::TomlProfile {
+ codegen_units: Some(13),
+ ..Default::default()
+ };
+ packages.insert(key, o_profile);
+
+ assert_eq!(
+ p,
+ cargo_toml::TomlProfile {
+ opt_level: Some(cargo_toml::TomlOptLevel("s".to_string())),
+ lto: Some(cargo_toml::StringOrBool::Bool(true)),
+ codegen_units: Some(5),
+ debug: Some(cargo_toml::U32OrBool::Bool(true)),
+ debug_assertions: Some(true),
+ rpath: Some(true),
+ panic: Some("abort".to_string()),
+ overflow_checks: Some(true),
+ incremental: Some(true),
+ package: Some(packages),
+ build_override: Some(Box::new(cargo_toml::TomlProfile {
+ opt_level: Some(cargo_toml::TomlOptLevel("1".to_string())),
+ codegen_units: Some(11),
+ ..Default::default()
+ })),
+ ..Default::default()
+ }
+ );
+
+ let p: cargo_toml::TomlProfile = config.get("profile.no-lto").unwrap();
+ assert_eq!(
+ p,
+ cargo_toml::TomlProfile {
+ lto: Some(cargo_toml::StringOrBool::Bool(false)),
+ dir_name: Some(InternedString::new("without-lto")),
+ inherits: Some(InternedString::new("dev")),
+ ..Default::default()
+ }
+ );
+}
+
+#[cargo_test]
+fn profile_env_var_prefix() {
+ // Check for a bug with collision on DEBUG vs DEBUG_ASSERTIONS.
+ let config = ConfigBuilder::new()
+ .env("CARGO_PROFILE_DEV_DEBUG_ASSERTIONS", "false")
+ .build();
+ let p: cargo_toml::TomlProfile = config.get("profile.dev").unwrap();
+ assert_eq!(p.debug_assertions, Some(false));
+ assert_eq!(p.debug, None);
+
+ let config = ConfigBuilder::new()
+ .env("CARGO_PROFILE_DEV_DEBUG", "1")
+ .build();
+ let p: cargo_toml::TomlProfile = config.get("profile.dev").unwrap();
+ assert_eq!(p.debug_assertions, None);
+ assert_eq!(p.debug, Some(cargo_toml::U32OrBool::U32(1)));
+
+ let config = ConfigBuilder::new()
+ .env("CARGO_PROFILE_DEV_DEBUG_ASSERTIONS", "false")
+ .env("CARGO_PROFILE_DEV_DEBUG", "1")
+ .build();
+ let p: cargo_toml::TomlProfile = config.get("profile.dev").unwrap();
+ assert_eq!(p.debug_assertions, Some(false));
+ assert_eq!(p.debug, Some(cargo_toml::U32OrBool::U32(1)));
+}
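+
+// Rough sketch of the mapping exercised above (illustrative, not an exhaustive
+// spec): config environment variables use the `CARGO_` prefix with dots and
+// dashes written as underscores, so the two variables here correspond to
+//
+//     CARGO_PROFILE_DEV_DEBUG_ASSERTIONS  ->  profile.dev.debug-assertions
+//     CARGO_PROFILE_DEV_DEBUG             ->  profile.dev.debug
+//
+// The assertions check that the longer key is not swallowed by the shorter
+// prefix when both are set.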
+
+#[cargo_test]
+fn config_deserialize_any() {
+ // Some tests to exercise deserialize_any for deserializers that need to
+ // be told the format.
+ write_config(
+ "\
+a = true
+b = ['b']
+c = ['c']
+",
+ );
+
+ // advanced-env
+ let config = ConfigBuilder::new()
+ .unstable_flag("advanced-env")
+ .env("CARGO_ENVB", "false")
+ .env("CARGO_C", "['d']")
+ .env("CARGO_ENVL", "['a', 'b']")
+ .build();
+ assert_eq!(config.get::<VSOB>("a").unwrap(), VSOB::Bool(true));
+ assert_eq!(
+ config.get::<VSOB>("b").unwrap(),
+ VSOB::VecString(vec!["b".to_string()])
+ );
+ assert_eq!(
+ config.get::<VSOB>("c").unwrap(),
+ VSOB::VecString(vec!["c".to_string(), "d".to_string()])
+ );
+ assert_eq!(config.get::<VSOB>("envb").unwrap(), VSOB::Bool(false));
+ assert_eq!(
+ config.get::<VSOB>("envl").unwrap(),
+ VSOB::VecString(vec!["a".to_string(), "b".to_string()])
+ );
+
+ // Demonstrate where merging logic isn't very smart. This could be improved.
+ let config = ConfigBuilder::new().env("CARGO_A", "x y").build();
+ assert_error(
+ config.get::<VSOB>("a").unwrap_err(),
+ "\
+error in environment variable `CARGO_A`: could not load config key `a`
+
+Caused by:
+ invalid type: string \"x y\", expected a boolean or vector of strings",
+ );
+
+ // Normal env.
+ let config = ConfigBuilder::new()
+ .unstable_flag("advanced-env")
+ .env("CARGO_B", "d e")
+ .env("CARGO_C", "f g")
+ .build();
+ assert_eq!(
+ config.get::<VSOB>("b").unwrap(),
+ VSOB::VecString(vec!["b".to_string(), "d".to_string(), "e".to_string()])
+ );
+ assert_eq!(
+ config.get::<VSOB>("c").unwrap(),
+ VSOB::VecString(vec!["c".to_string(), "f".to_string(), "g".to_string()])
+ );
+
+ // config-cli
+ // This test demonstrates that ConfigValue::merge isn't very smart.
+ // It would be nice if it were smarter.
+ let config = ConfigBuilder::new().config_arg("a = ['a']").build_err();
+ assert_error(
+ config.unwrap_err(),
+ "\
+failed to merge --config key `a` into `[..]/.cargo/config`
+
+Caused by:
+ failed to merge config value from `--config cli option` into `[..]/.cargo/config`: \
+expected boolean, but found array",
+ );
+
+ // config-cli and advanced-env
+ let config = ConfigBuilder::new()
+ .unstable_flag("advanced-env")
+ .config_arg("b=['clib']")
+ .config_arg("c=['clic']")
+ .env("CARGO_B", "env1 env2")
+ .env("CARGO_C", "['e1', 'e2']")
+ .build();
+ assert_eq!(
+ config.get::<VSOB>("b").unwrap(),
+ VSOB::VecString(vec![
+ "b".to_string(),
+ "clib".to_string(),
+ "env1".to_string(),
+ "env2".to_string()
+ ])
+ );
+ assert_eq!(
+ config.get::<VSOB>("c").unwrap(),
+ VSOB::VecString(vec![
+ "c".to_string(),
+ "clic".to_string(),
+ "e1".to_string(),
+ "e2".to_string()
+ ])
+ );
+}
+
+#[cargo_test]
+fn config_toml_errors() {
+ write_config(
+ "\
+[profile.dev]
+opt-level = 'foo'
+",
+ );
+
+ let config = new_config();
+
+ assert_error(
+ config
+ .get::<cargo_toml::TomlProfile>("profile.dev")
+ .unwrap_err(),
+ "\
+error in [..]/.cargo/config: could not load config key `profile.dev.opt-level`
+
+Caused by:
+ must be `0`, `1`, `2`, `3`, `s` or `z`, but found the string: \"foo\"",
+ );
+
+ let config = ConfigBuilder::new()
+ .env("CARGO_PROFILE_DEV_OPT_LEVEL", "asdf")
+ .build();
+
+ assert_error(
+ config.get::<cargo_toml::TomlProfile>("profile.dev").unwrap_err(),
+ "\
+error in environment variable `CARGO_PROFILE_DEV_OPT_LEVEL`: could not load config key `profile.dev.opt-level`
+
+Caused by:
+ must be `0`, `1`, `2`, `3`, `s` or `z`, but found the string: \"asdf\"",
+ );
+}
+
+#[cargo_test]
+fn load_nested() {
+ write_config(
+ "\
+[nest.foo]
+f1 = 1
+f2 = 2
+[nest.bar]
+asdf = 3
+",
+ );
+
+ let config = ConfigBuilder::new()
+ .unstable_flag("advanced-env")
+ .env("CARGO_NEST_foo_f2", "3")
+ .env("CARGO_NESTE_foo_f1", "1")
+ .env("CARGO_NESTE_foo_f2", "3")
+ .env("CARGO_NESTE_bar_asdf", "3")
+ .build();
+
+ type Nested = HashMap<String, HashMap<String, u8>>;
+
+ let n: Nested = config.get("nest").unwrap();
+ let mut expected = HashMap::new();
+ let mut foo = HashMap::new();
+ foo.insert("f1".to_string(), 1);
+ foo.insert("f2".to_string(), 3);
+ expected.insert("foo".to_string(), foo);
+ let mut bar = HashMap::new();
+ bar.insert("asdf".to_string(), 3);
+ expected.insert("bar".to_string(), bar);
+ assert_eq!(n, expected);
+
+ let n: Nested = config.get("neste").unwrap();
+ assert_eq!(n, expected);
+}
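+
+// Note on the variable names above: with `-Z advanced-env`, the lowercase
+// segments (`foo`, `bar`, `f1`, ...) act as literal nested table keys, which is
+// how `CARGO_NEST_foo_f2` overrides `nest.foo.f2` from the file and the
+// `CARGO_NESTE_*` variables define a `neste` table that exists only in the
+// environment. This is a reading of the assertions above, not a general
+// guarantee.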
+
+#[cargo_test]
+fn get_errors() {
+ write_config(
+ "\
+[S]
+f1 = 123
+f2 = 'asdf'
+big = 123456789
+",
+ );
+
+ let config = ConfigBuilder::new()
+ .env("CARGO_E_S", "asdf")
+ .env("CARGO_E_BIG", "123456789")
+ .build();
+ assert_error(
+ config.get::<i64>("foo").unwrap_err(),
+ "missing config key `foo`",
+ );
+ assert_error(
+ config.get::<i64>("foo.bar").unwrap_err(),
+ "missing config key `foo.bar`",
+ );
+ assert_error(
+ config.get::<i64>("S.f2").unwrap_err(),
+ "error in [..]/.cargo/config: `S.f2` expected an integer, but found a string",
+ );
+ assert_error(
+ config.get::<u8>("S.big").unwrap_err(),
+ "\
+error in [..].cargo/config: could not load config key `S.big`
+
+Caused by:
+ invalid value: integer `123456789`, expected u8",
+ );
+
+ // Environment variable type errors.
+ assert_error(
+ config.get::<i64>("e.s").unwrap_err(),
+ "error in environment variable `CARGO_E_S`: invalid digit found in string",
+ );
+ assert_error(
+ config.get::<i8>("e.big").unwrap_err(),
+ "\
+error in environment variable `CARGO_E_BIG`: could not load config key `e.big`
+
+Caused by:
+ invalid value: integer `123456789`, expected i8",
+ );
+
+ #[derive(Debug, Deserialize)]
+ #[allow(dead_code)]
+ struct S {
+ f1: i64,
+ f2: String,
+ f3: i64,
+ big: i64,
+ }
+ assert_error(config.get::<S>("S").unwrap_err(), "missing field `f3`");
+}
+
+#[cargo_test]
+fn config_get_option() {
+ write_config(
+ "\
+[foo]
+f1 = 1
+",
+ );
+
+ let config = ConfigBuilder::new().env("CARGO_BAR_ASDF", "3").build();
+
+ assert_eq!(config.get::<Option<i32>>("a").unwrap(), None);
+ assert_eq!(config.get::<Option<i32>>("a.b").unwrap(), None);
+ assert_eq!(config.get::<Option<i32>>("foo.f1").unwrap(), Some(1));
+ assert_eq!(config.get::<Option<i32>>("bar.asdf").unwrap(), Some(3));
+ assert_eq!(config.get::<Option<i32>>("bar.zzzz").unwrap(), None);
+}
+
+#[cargo_test]
+fn config_bad_toml() {
+ write_config("asdf");
+ let config = new_config();
+ assert_error(
+ config.get::<i32>("foo").unwrap_err(),
+ "\
+could not load Cargo configuration
+
+Caused by:
+ could not parse TOML configuration in `[..]/.cargo/config`
+
+Caused by:
+ could not parse input as TOML
+
+Caused by:
+ TOML parse error at line 1, column 5
+ |
+1 | asdf
+ | ^
+expected `.`, `=`",
+ );
+}
+
+#[cargo_test]
+fn config_get_list() {
+ write_config(
+ "\
+l1 = []
+l2 = ['one', 'two']
+l3 = 123
+l4 = ['one', 'two']
+
+[nested]
+l = ['x']
+
+[nested2]
+l = ['y']
+
+[nested-empty]
+",
+ );
+
+ type L = Vec<String>;
+
+ let config = ConfigBuilder::new()
+ .unstable_flag("advanced-env")
+ .env("CARGO_L4", "['three', 'four']")
+ .env("CARGO_L5", "['a']")
+ .env("CARGO_ENV_EMPTY", "[]")
+ .env("CARGO_ENV_BLANK", "")
+ .env("CARGO_ENV_NUM", "1")
+ .env("CARGO_ENV_NUM_LIST", "[1]")
+ .env("CARGO_ENV_TEXT", "asdf")
+ .env("CARGO_LEPAIR", "['a', 'b']")
+ .env("CARGO_NESTED2_L", "['z']")
+ .env("CARGO_NESTEDE_L", "['env']")
+ .env("CARGO_BAD_ENV", "[zzz]")
+ .build();
+
+ assert_eq!(config.get::<L>("unset").unwrap(), vec![] as Vec<String>);
+ assert_eq!(config.get::<L>("l1").unwrap(), vec![] as Vec<String>);
+ assert_eq!(config.get::<L>("l2").unwrap(), vec!["one", "two"]);
+ assert_error(
+ config.get::<L>("l3").unwrap_err(),
+ "\
+invalid configuration for key `l3`
+expected a list, but found a integer for `l3` in [..]/.cargo/config",
+ );
+ assert_eq!(
+ config.get::<L>("l4").unwrap(),
+ vec!["one", "two", "three", "four"]
+ );
+ assert_eq!(config.get::<L>("l5").unwrap(), vec!["a"]);
+ assert_eq!(config.get::<L>("env-empty").unwrap(), vec![] as Vec<String>);
+ assert_eq!(config.get::<L>("env-blank").unwrap(), vec![] as Vec<String>);
+ assert_eq!(config.get::<L>("env-num").unwrap(), vec!["1".to_string()]);
+ assert_error(
+ config.get::<L>("env-num-list").unwrap_err(),
+ "error in environment variable `CARGO_ENV_NUM_LIST`: \
+ expected string, found integer",
+ );
+ assert_eq!(
+ config.get::<L>("env-text").unwrap(),
+ vec!["asdf".to_string()]
+ );
+ // "invalid number" here isn't the best error, but I think it's just toml.rs.
+ assert_error(
+ config.get::<L>("bad-env").unwrap_err(),
+ "\
+error in environment variable `CARGO_BAD_ENV`: could not parse TOML list: TOML parse error at line 1, column 2
+ |
+1 | [zzz]
+ | ^
+invalid array
+expected `]`
+",
+ );
+
+ // Try some other sequence-like types.
+ assert_eq!(
+ config
+ .get::<(String, String, String, String)>("l4")
+ .unwrap(),
+ (
+ "one".to_string(),
+ "two".to_string(),
+ "three".to_string(),
+ "four".to_string()
+ )
+ );
+ assert_eq!(config.get::<(String,)>("l5").unwrap(), ("a".to_string(),));
+
+ // Tuple struct
+ #[derive(Debug, Deserialize, Eq, PartialEq)]
+ struct TupS(String, String);
+ assert_eq!(
+ config.get::<TupS>("lepair").unwrap(),
+ TupS("a".to_string(), "b".to_string())
+ );
+
+ // Nested with an option.
+ #[derive(Debug, Deserialize, Eq, PartialEq)]
+ struct S {
+ l: Option<Vec<String>>,
+ }
+ assert_eq!(config.get::<S>("nested-empty").unwrap(), S { l: None });
+ assert_eq!(
+ config.get::<S>("nested").unwrap(),
+ S {
+ l: Some(vec!["x".to_string()]),
+ }
+ );
+ assert_eq!(
+ config.get::<S>("nested2").unwrap(),
+ S {
+ l: Some(vec!["y".to_string(), "z".to_string()]),
+ }
+ );
+ assert_eq!(
+ config.get::<S>("nestede").unwrap(),
+ S {
+ l: Some(vec!["env".to_string()]),
+ }
+ );
+}
+
+#[cargo_test]
+fn config_get_other_types() {
+ write_config(
+ "\
+ns = 123
+ns2 = 456
+",
+ );
+
+ let config = ConfigBuilder::new()
+ .env("CARGO_NSE", "987")
+ .env("CARGO_NS2", "654")
+ .build();
+
+ #[derive(Debug, Deserialize, Eq, PartialEq)]
+ #[serde(transparent)]
+ struct NewS(i32);
+ assert_eq!(config.get::<NewS>("ns").unwrap(), NewS(123));
+ assert_eq!(config.get::<NewS>("ns2").unwrap(), NewS(654));
+ assert_eq!(config.get::<NewS>("nse").unwrap(), NewS(987));
+ assert_error(
+ config.get::<NewS>("unset").unwrap_err(),
+ "missing config key `unset`",
+ );
+}
+
+#[cargo_test]
+fn config_relative_path() {
+ write_config(&format!(
+ "\
+p1 = 'foo/bar'
+p2 = '../abc'
+p3 = 'b/c'
+abs = '{}'
+",
+ paths::home().display(),
+ ));
+
+ let config = ConfigBuilder::new()
+ .env("CARGO_EPATH", "a/b")
+ .env("CARGO_P3", "d/e")
+ .build();
+
+ assert_eq!(
+ config
+ .get::<config::ConfigRelativePath>("p1")
+ .unwrap()
+ .resolve_path(&config),
+ paths::root().join("foo/bar")
+ );
+ assert_eq!(
+ config
+ .get::<config::ConfigRelativePath>("p2")
+ .unwrap()
+ .resolve_path(&config),
+ paths::root().join("../abc")
+ );
+ assert_eq!(
+ config
+ .get::<config::ConfigRelativePath>("p3")
+ .unwrap()
+ .resolve_path(&config),
+ paths::root().join("d/e")
+ );
+ assert_eq!(
+ config
+ .get::<config::ConfigRelativePath>("abs")
+ .unwrap()
+ .resolve_path(&config),
+ paths::home()
+ );
+ assert_eq!(
+ config
+ .get::<config::ConfigRelativePath>("epath")
+ .unwrap()
+ .resolve_path(&config),
+ paths::root().join("a/b")
+ );
+}
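+
+// Reading of the assertions above: relative paths from the config file resolve
+// against the directory containing `.cargo` (here `paths::root()`), absolute
+// paths are kept as-is, and the env-var paths also end up rooted at
+// `paths::root()` in this setup. Inferred from the expected values, not from
+// separate documentation.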
+
+#[cargo_test]
+fn config_get_integers() {
+ write_config(
+ "\
+npos = 123456789
+nneg = -123456789
+i64max = 9223372036854775807
+",
+ );
+
+ let config = ConfigBuilder::new()
+ .env("CARGO_EPOS", "123456789")
+ .env("CARGO_ENEG", "-1")
+ .env("CARGO_EI64MAX", "9223372036854775807")
+ .build();
+
+ assert_eq!(
+ config.get::<u64>("i64max").unwrap(),
+ 9_223_372_036_854_775_807
+ );
+ assert_eq!(
+ config.get::<i64>("i64max").unwrap(),
+ 9_223_372_036_854_775_807
+ );
+ assert_eq!(
+ config.get::<u64>("ei64max").unwrap(),
+ 9_223_372_036_854_775_807
+ );
+ assert_eq!(
+ config.get::<i64>("ei64max").unwrap(),
+ 9_223_372_036_854_775_807
+ );
+
+ assert_error(
+ config.get::<u32>("nneg").unwrap_err(),
+ "\
+error in [..].cargo/config: could not load config key `nneg`
+
+Caused by:
+ invalid value: integer `-123456789`, expected u32",
+ );
+ assert_error(
+ config.get::<u32>("eneg").unwrap_err(),
+ "\
+error in environment variable `CARGO_ENEG`: could not load config key `eneg`
+
+Caused by:
+ invalid value: integer `-1`, expected u32",
+ );
+ assert_error(
+ config.get::<i8>("npos").unwrap_err(),
+ "\
+error in [..].cargo/config: could not load config key `npos`
+
+Caused by:
+ invalid value: integer `123456789`, expected i8",
+ );
+ assert_error(
+ config.get::<i8>("epos").unwrap_err(),
+ "\
+error in environment variable `CARGO_EPOS`: could not load config key `epos`
+
+Caused by:
+ invalid value: integer `123456789`, expected i8",
+ );
+}
+
+#[cargo_test]
+fn config_get_ssl_version_missing() {
+ write_config(
+ "\
+[http]
+hello = 'world'
+",
+ );
+
+ let config = new_config();
+
+ assert!(config
+ .get::<Option<SslVersionConfig>>("http.ssl-version")
+ .unwrap()
+ .is_none());
+}
+
+#[cargo_test]
+fn config_get_ssl_version_single() {
+ write_config(
+ "\
+[http]
+ssl-version = 'tlsv1.2'
+",
+ );
+
+ let config = new_config();
+
+ let a = config
+ .get::<Option<SslVersionConfig>>("http.ssl-version")
+ .unwrap()
+ .unwrap();
+ match a {
+ SslVersionConfig::Single(v) => assert_eq!(&v, "tlsv1.2"),
+ SslVersionConfig::Range(_) => panic!("Did not expect ssl version min/max."),
+ };
+}
+
+#[cargo_test]
+fn config_get_ssl_version_min_max() {
+ write_config(
+ "\
+[http]
+ssl-version.min = 'tlsv1.2'
+ssl-version.max = 'tlsv1.3'
+",
+ );
+
+ let config = new_config();
+
+ let a = config
+ .get::<Option<SslVersionConfig>>("http.ssl-version")
+ .unwrap()
+ .unwrap();
+ match a {
+ SslVersionConfig::Single(_) => panic!("Did not expect exact ssl version."),
+ SslVersionConfig::Range(range) => {
+ assert_eq!(range.min, Some(String::from("tlsv1.2")));
+ assert_eq!(range.max, Some(String::from("tlsv1.3")));
+ }
+ };
+}
+
+#[cargo_test]
+fn config_get_ssl_version_both_forms_configured() {
+ // This is not allowed.
+ write_config(
+ "\
+[http]
+ssl-version = 'tlsv1.1'
+ssl-version.min = 'tlsv1.2'
+ssl-version.max = 'tlsv1.3'
+",
+ );
+
+ let config = new_config();
+
+ assert_error(
+ config
+ .get::<SslVersionConfig>("http.ssl-version")
+ .unwrap_err(),
+ "\
+could not load Cargo configuration
+
+Caused by:
+ could not parse TOML configuration in `[..]/.cargo/config`
+
+Caused by:
+ could not parse input as TOML
+
+Caused by:
+ TOML parse error at line 3, column 1
+ |
+3 | ssl-version.min = 'tlsv1.2'
+ | ^
+dotted key `ssl-version` attempted to extend non-table type (string)
+",
+ );
+}
+
+#[cargo_test]
+/// Assert that unstable options can be configured with the `unstable` table in
+/// cargo config files
+fn unstable_table_notation() {
+ write_config(
+ "\
+[unstable]
+print-im-a-teapot = true
+",
+ );
+ let config = ConfigBuilder::new().nightly_features_allowed(true).build();
+ assert_eq!(config.cli_unstable().print_im_a_teapot, true);
+}
+
+#[cargo_test]
+/// Assert that dotted notation works for configuring unstable options
+fn unstable_dotted_notation() {
+ write_config(
+ "\
+unstable.print-im-a-teapot = true
+",
+ );
+ let config = ConfigBuilder::new().nightly_features_allowed(true).build();
+ assert_eq!(config.cli_unstable().print_im_a_teapot, true);
+}
+
+#[cargo_test]
+/// Assert that -Z flags on the CLI take precedence over those from config
+fn unstable_cli_precedence() {
+ write_config(
+ "\
+unstable.print-im-a-teapot = true
+",
+ );
+ let config = ConfigBuilder::new().nightly_features_allowed(true).build();
+ assert_eq!(config.cli_unstable().print_im_a_teapot, true);
+
+ let config = ConfigBuilder::new()
+ .unstable_flag("print-im-a-teapot=no")
+ .build();
+ assert_eq!(config.cli_unstable().print_im_a_teapot, false);
+}
+
+#[cargo_test]
+/// Assert that attempting to set a nonexistent unstable flag via config is
+/// ignored on stable
+fn unstable_invalid_flag_ignored_on_stable() {
+ write_config(
+ "\
+unstable.an-invalid-flag = 'yes'
+",
+ );
+ assert!(ConfigBuilder::new().build_err().is_ok());
+}
+
+#[cargo_test]
+/// Assert that unstable options set in the `unstable` table of cargo config
+/// files are ignored when nightly features are not allowed (stable channel)
+fn unstable_flags_ignored_on_stable() {
+ write_config(
+ "\
+[unstable]
+print-im-a-teapot = true
+",
+ );
+ // Enforce stable channel even when testing on nightly.
+ let config = ConfigBuilder::new().nightly_features_allowed(false).build();
+ assert_eq!(config.cli_unstable().print_im_a_teapot, false);
+}
+
+#[cargo_test]
+fn table_merge_failure() {
+ // Config::merge fails when corresponding entries in two config files have mismatched types.
+ write_config_at(
+ "foo/.cargo/config",
+ "
+ [table]
+ key = ['foo']
+ ",
+ );
+ write_config_at(
+ ".cargo/config",
+ "
+ [table]
+ key = 'bar'
+ ",
+ );
+
+ #[derive(Debug, Deserialize)]
+ #[allow(dead_code)]
+ struct Table {
+ key: StringList,
+ }
+ let config = ConfigBuilder::new().cwd("foo").build();
+ assert_error(
+ config.get::<Table>("table").unwrap_err(),
+ "\
+could not load Cargo configuration
+
+Caused by:
+ failed to merge configuration at `[..]/.cargo/config`
+
+Caused by:
+ failed to merge key `table` between [..]/foo/.cargo/config and [..]/.cargo/config
+
+Caused by:
+ failed to merge key `key` between [..]/foo/.cargo/config and [..]/.cargo/config
+
+Caused by:
+ failed to merge config value from `[..]/.cargo/config` into `[..]/foo/.cargo/config`: \
+ expected array, but found string",
+ );
+}
+
+#[cargo_test]
+fn non_string_in_array() {
+ // Currently only strings are supported.
+ write_config("foo = [1, 2, 3]");
+ let config = new_config();
+ assert_error(
+ config.get::<Vec<i32>>("foo").unwrap_err(),
+ "\
+could not load Cargo configuration
+
+Caused by:
+ failed to load TOML configuration from `[..]/.cargo/config`
+
+Caused by:
+ failed to parse key `foo`
+
+Caused by:
+ expected string but found integer in list",
+ );
+}
+
+#[cargo_test]
+fn struct_with_opt_inner_struct() {
+ // Struct with a key that is an Option of another struct.
+ // Check that it can be defined with an environment variable.
+ #[derive(Deserialize)]
+ struct Inner {
+ value: Option<i32>,
+ }
+ #[derive(Deserialize)]
+ struct Foo {
+ inner: Option<Inner>,
+ }
+ let config = ConfigBuilder::new()
+ .env("CARGO_FOO_INNER_VALUE", "12")
+ .build();
+ let f: Foo = config.get("foo").unwrap();
+ assert_eq!(f.inner.unwrap().value.unwrap(), 12);
+}
+
+#[cargo_test]
+fn struct_with_default_inner_struct() {
+ // Struct with serde defaults.
+ // Check that it can be defined with an environment variable.
+ #[derive(Deserialize, Default)]
+ #[serde(default)]
+ struct Inner {
+ value: i32,
+ }
+ #[derive(Deserialize, Default)]
+ #[serde(default)]
+ struct Foo {
+ inner: Inner,
+ }
+ let config = ConfigBuilder::new()
+ .env("CARGO_FOO_INNER_VALUE", "12")
+ .build();
+ let f: Foo = config.get("foo").unwrap();
+ assert_eq!(f.inner.value, 12);
+}
+
+#[cargo_test]
+fn overlapping_env_config() {
+ // Issue where one key is a prefix of another.
+ #[derive(Deserialize)]
+ #[serde(rename_all = "kebab-case")]
+ struct Ambig {
+ debug: Option<u32>,
+ debug_assertions: Option<bool>,
+ }
+ let config = ConfigBuilder::new()
+ .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true")
+ .build();
+
+ let s: Ambig = config.get("ambig").unwrap();
+ assert_eq!(s.debug_assertions, Some(true));
+ assert_eq!(s.debug, None);
+
+ let config = ConfigBuilder::new().env("CARGO_AMBIG_DEBUG", "0").build();
+ let s: Ambig = config.get("ambig").unwrap();
+ assert_eq!(s.debug_assertions, None);
+ assert_eq!(s.debug, Some(0));
+
+ let config = ConfigBuilder::new()
+ .env("CARGO_AMBIG_DEBUG", "1")
+ .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true")
+ .build();
+ let s: Ambig = config.get("ambig").unwrap();
+ assert_eq!(s.debug_assertions, Some(true));
+ assert_eq!(s.debug, Some(1));
+}
+
+#[cargo_test]
+fn overlapping_env_with_defaults_errors_out() {
+ // Issue where one key is a prefix of another.
+ // This is a limitation of mapping environment variables onto a hierarchy.
+ // Check that we error out when we hit ambiguity in this way, rather than
+ // the more-surprising defaulting through.
+ // If, in the future, we can handle this more correctly, feel free to delete
+ // this test.
+ #[derive(Deserialize, Default)]
+ #[serde(default, rename_all = "kebab-case")]
+ struct Ambig {
+ debug: u32,
+ debug_assertions: bool,
+ }
+ let config = ConfigBuilder::new()
+ .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true")
+ .build();
+ let err = config.get::<Ambig>("ambig").err().unwrap();
+ assert!(format!("{}", err).contains("missing config key `ambig.debug`"));
+
+ let config = ConfigBuilder::new().env("CARGO_AMBIG_DEBUG", "5").build();
+ let s: Ambig = config.get("ambig").unwrap();
+ assert_eq!(s.debug_assertions, bool::default());
+ assert_eq!(s.debug, 5);
+
+ let config = ConfigBuilder::new()
+ .env("CARGO_AMBIG_DEBUG", "1")
+ .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true")
+ .build();
+ let s: Ambig = config.get("ambig").unwrap();
+ assert_eq!(s.debug_assertions, true);
+ assert_eq!(s.debug, 1);
+}
+
+#[cargo_test]
+fn struct_with_overlapping_inner_struct_and_defaults() {
+ // Struct with serde defaults.
+ // Check that it can be defined with an environment variable.
+ #[derive(Deserialize, Default)]
+ #[serde(default)]
+ struct Inner {
+ value: i32,
+ }
+
+ // Containing struct with a field (`inn`) whose name is a prefix of `inner`.
+ //
+ // This is a limitation of mapping environment variables onto a hierarchy.
+ // Check that we error out when we hit ambiguity in this way, rather than
+ // the more-surprising defaulting through.
+ // If, in the future, we can handle this more correctly, feel free to delete
+ // this case.
+ #[derive(Deserialize, Default)]
+ #[serde(default)]
+ struct PrefixContainer {
+ inn: bool,
+ inner: Inner,
+ }
+ let config = ConfigBuilder::new()
+ .env("CARGO_PREFIXCONTAINER_INNER_VALUE", "12")
+ .build();
+ let err = config
+ .get::<PrefixContainer>("prefixcontainer")
+ .err()
+ .unwrap();
+ assert!(format!("{}", err).contains("missing config key `prefixcontainer.inn`"));
+ let config = ConfigBuilder::new()
+ .env("CARGO_PREFIXCONTAINER_INNER_VALUE", "12")
+ .env("CARGO_PREFIXCONTAINER_INN", "true")
+ .build();
+ let f: PrefixContainer = config.get("prefixcontainer").unwrap();
+ assert_eq!(f.inner.value, 12);
+ assert_eq!(f.inn, true);
+
+ // Containing struct where the `inner` field's name is a prefix of another
+ // field's name (`inner_field`).
+ //
+ // This is a limitation of mapping environment variables onto a hierarchy.
+ // Check that we error out when we hit ambiguity in this way, rather than
+ // the more-surprising defaulting through.
+ // If, in the future, we can handle this more correctly, feel free to delete
+ // this case.
+ #[derive(Deserialize, Default)]
+ #[serde(default)]
+ struct InversePrefixContainer {
+ inner_field: bool,
+ inner: Inner,
+ }
+ let config = ConfigBuilder::new()
+ .env("CARGO_INVERSEPREFIXCONTAINER_INNER_VALUE", "12")
+ .build();
+ let f: InversePrefixContainer = config.get("inverseprefixcontainer").unwrap();
+ assert_eq!(f.inner_field, bool::default());
+ assert_eq!(f.inner.value, 12);
+}
+
+#[cargo_test]
+fn string_list_tricky_env() {
+ // Make sure StringList handles typed env values.
+ let config = ConfigBuilder::new()
+ .env("CARGO_KEY1", "123")
+ .env("CARGO_KEY2", "true")
+ .env("CARGO_KEY3", "1 2")
+ .build();
+ let x = config.get::<StringList>("key1").unwrap();
+ assert_eq!(x.as_slice(), &["123".to_string()]);
+ let x = config.get::<StringList>("key2").unwrap();
+ assert_eq!(x.as_slice(), &["true".to_string()]);
+ let x = config.get::<StringList>("key3").unwrap();
+ assert_eq!(x.as_slice(), &["1".to_string(), "2".to_string()]);
+}
+
+#[cargo_test]
+fn string_list_wrong_type() {
+ // What happens if StringList is given the wrong type.
+ write_config("some_list = 123");
+ let config = ConfigBuilder::new().build();
+ assert_error(
+ config.get::<StringList>("some_list").unwrap_err(),
+ "\
+invalid configuration for key `some_list`
+expected a string or array of strings, but found a integer for `some_list` in [..]/.cargo/config",
+ );
+
+ write_config("some_list = \"1 2\"");
+ let config = ConfigBuilder::new().build();
+ let x = config.get::<StringList>("some_list").unwrap();
+ assert_eq!(x.as_slice(), &["1".to_string(), "2".to_string()]);
+}
+
+#[cargo_test]
+fn string_list_advanced_env() {
+ // StringList with advanced env.
+ let config = ConfigBuilder::new()
+ .unstable_flag("advanced-env")
+ .env("CARGO_KEY1", "[]")
+ .env("CARGO_KEY2", "['1 2', '3']")
+ .env("CARGO_KEY3", "[123]")
+ .build();
+ let x = config.get::<StringList>("key1").unwrap();
+ assert_eq!(x.as_slice(), &[] as &[String]);
+ let x = config.get::<StringList>("key2").unwrap();
+ assert_eq!(x.as_slice(), &["1 2".to_string(), "3".to_string()]);
+ assert_error(
+ config.get::<StringList>("key3").unwrap_err(),
+ "error in environment variable `CARGO_KEY3`: expected string, found integer",
+ );
+}
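+
+// Taken together with `string_list_tricky_env` above: without `advanced-env`,
+// env values are split on whitespace into strings ("1 2" -> ["1", "2"]); with
+// `advanced-env`, values that look like TOML arrays are parsed as arrays whose
+// elements must be strings. This summarizes the two tests, not a wider
+// guarantee.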
+
+#[cargo_test]
+fn parse_strip_with_string() {
+ write_config(
+ "\
+[profile.release]
+strip = 'debuginfo'
+",
+ );
+
+ let config = new_config();
+
+ let p: cargo_toml::TomlProfile = config.get("profile.release").unwrap();
+ let strip = p.strip.unwrap();
+ assert_eq!(
+ strip,
+ cargo_toml::StringOrBool::String("debuginfo".to_string())
+ );
+}
+
+#[cargo_test]
+fn cargo_target_empty_cfg() {
+ write_config(
+ "\
+[build]
+target-dir = ''
+",
+ );
+
+ let config = new_config();
+
+ assert_error(
+ config.target_dir().unwrap_err(),
+ "the target directory is set to an empty string in [..]/.cargo/config",
+ );
+}
+
+#[cargo_test]
+fn cargo_target_empty_env() {
+ let project = project().build();
+
+ project.cargo("check")
+ .env("CARGO_TARGET_DIR", "")
+ .with_stderr("error: the target directory is set to an empty string in the `CARGO_TARGET_DIR` environment variable")
+ .with_status(101)
+ .run()
+}
+
+#[cargo_test]
+fn all_profile_options() {
+ // Check that all profile options can be serialized/deserialized.
+ let base_settings = cargo_toml::TomlProfile {
+ opt_level: Some(cargo_toml::TomlOptLevel("0".to_string())),
+ lto: Some(cargo_toml::StringOrBool::String("thin".to_string())),
+ codegen_backend: Some(InternedString::new("example")),
+ codegen_units: Some(123),
+ debug: Some(cargo_toml::U32OrBool::U32(1)),
+ split_debuginfo: Some("packed".to_string()),
+ debug_assertions: Some(true),
+ rpath: Some(true),
+ panic: Some("abort".to_string()),
+ overflow_checks: Some(true),
+ incremental: Some(true),
+ dir_name: Some(InternedString::new("dir_name")),
+ inherits: Some(InternedString::new("debug")),
+ strip: Some(cargo_toml::StringOrBool::String("symbols".to_string())),
+ package: None,
+ build_override: None,
+ rustflags: None,
+ };
+ let mut overrides = BTreeMap::new();
+ let key = cargo_toml::ProfilePackageSpec::Spec(PackageIdSpec::parse("foo").unwrap());
+ overrides.insert(key, base_settings.clone());
+ let profile = cargo_toml::TomlProfile {
+ build_override: Some(Box::new(base_settings.clone())),
+ package: Some(overrides),
+ ..base_settings
+ };
+ let profile_toml = toml::to_string(&profile).unwrap();
+ let roundtrip: cargo_toml::TomlProfile = toml::from_str(&profile_toml).unwrap();
+ let roundtrip_toml = toml::to_string(&roundtrip).unwrap();
+ compare::assert_match_exact(&profile_toml, &roundtrip_toml);
+}
+
+#[cargo_test]
+fn value_in_array() {
+ // Value<String> in an array should work
+ let root_path = paths::root().join(".cargo/config.toml");
+ write_config_at(
+ &root_path,
+ "\
+[net.ssh]
+known-hosts = [
+ \"example.com ...\",
+ \"example.net ...\",
+]
+",
+ );
+
+ let foo_path = paths::root().join("foo/.cargo/config.toml");
+ write_config_at(
+ &foo_path,
+ "\
+[net.ssh]
+known-hosts = [
+ \"example.org ...\",
+]
+",
+ );
+
+ let config = ConfigBuilder::new()
+ .cwd("foo")
+ // environment variables aren't practical for known-hosts because of
+ // space splitting, but one is included here to validate that env-sourced
+ // values still flow through (particularly if other Vec<Value> config vars
+ // are added in the future).
+ .env("CARGO_NET_SSH_KNOWN_HOSTS", "env-example")
+ .build();
+ let net_config = config.net_config().unwrap();
+ let kh = net_config
+ .ssh
+ .as_ref()
+ .unwrap()
+ .known_hosts
+ .as_ref()
+ .unwrap();
+ assert_eq!(kh.len(), 4);
+ assert_eq!(kh[0].val, "example.org ...");
+ assert_eq!(kh[0].definition, Definition::Path(foo_path.clone()));
+ assert_eq!(kh[1].val, "example.com ...");
+ assert_eq!(kh[1].definition, Definition::Path(root_path.clone()));
+ assert_eq!(kh[2].val, "example.net ...");
+ assert_eq!(kh[2].definition, Definition::Path(root_path.clone()));
+ assert_eq!(kh[3].val, "env-example");
+ assert_eq!(
+ kh[3].definition,
+ Definition::Environment("CARGO_NET_SSH_KNOWN_HOSTS".to_string())
+ );
+}
diff --git a/src/tools/cargo/tests/testsuite/config_cli.rs b/src/tools/cargo/tests/testsuite/config_cli.rs
new file mode 100644
index 000000000..1120e279d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/config_cli.rs
@@ -0,0 +1,564 @@
+//! Tests for the --config CLI option.
+
+use super::config::{
+ assert_error, assert_match, read_output, write_config, write_config_at, ConfigBuilder,
+};
+use cargo::util::config::Definition;
+use cargo_test_support::paths;
+use std::{collections::HashMap, fs};
+
+#[cargo_test]
+fn basic() {
+ // Simple example.
+ let config = ConfigBuilder::new().config_arg("foo='bar'").build();
+ assert_eq!(config.get::<String>("foo").unwrap(), "bar");
+}
+
+#[cargo_test]
+fn cli_priority() {
+ // Command line takes priority over files and env vars.
+ write_config(
+ "
+ demo_list = ['a']
+ [build]
+ jobs = 3
+ rustc = 'file'
+ [term]
+ quiet = false
+ verbose = false
+ ",
+ );
+ let config = ConfigBuilder::new().build();
+ assert_eq!(config.get::<i32>("build.jobs").unwrap(), 3);
+ assert_eq!(config.get::<String>("build.rustc").unwrap(), "file");
+ assert_eq!(config.get::<bool>("term.quiet").unwrap(), false);
+ assert_eq!(config.get::<bool>("term.verbose").unwrap(), false);
+
+ let config = ConfigBuilder::new()
+ .env("CARGO_BUILD_JOBS", "2")
+ .env("CARGO_BUILD_RUSTC", "env")
+ .env("CARGO_TERM_VERBOSE", "false")
+ .config_arg("build.jobs=1")
+ .config_arg("build.rustc='cli'")
+ .config_arg("term.verbose=true")
+ .build();
+ assert_eq!(config.get::<i32>("build.jobs").unwrap(), 1);
+ assert_eq!(config.get::<String>("build.rustc").unwrap(), "cli");
+ assert_eq!(config.get::<bool>("term.verbose").unwrap(), true);
+
+ // Setting both term.verbose and term.quiet is invalid and is tested
+ // in the run test suite.
+ let config = ConfigBuilder::new()
+ .env("CARGO_TERM_QUIET", "false")
+ .config_arg("term.quiet=true")
+ .build();
+ assert_eq!(config.get::<bool>("term.quiet").unwrap(), true);
+}
+
+#[cargo_test]
+fn merge_primitives_for_multiple_cli_occurrences() {
+ let config_path0 = ".cargo/file0.toml";
+ write_config_at(config_path0, "k = 'file0'");
+ let config_path1 = ".cargo/file1.toml";
+ write_config_at(config_path1, "k = 'file1'");
+
+ // k=env0
+ let config = ConfigBuilder::new().env("CARGO_K", "env0").build();
+ assert_eq!(config.get::<String>("k").unwrap(), "env0");
+
+ // k=env0
+ // --config k='cli0'
+ // --config k='cli1'
+ let config = ConfigBuilder::new()
+ .env("CARGO_K", "env0")
+ .config_arg("k='cli0'")
+ .config_arg("k='cli1'")
+ .build();
+ assert_eq!(config.get::<String>("k").unwrap(), "cli1");
+
+ // Env has a lower priority when comparing with file from CLI arg.
+ //
+ // k=env0
+ // --config k='cli0'
+ // --config k='cli1'
+ // --config .cargo/file0.toml
+ let config = ConfigBuilder::new()
+ .env("CARGO_K", "env0")
+ .config_arg("k='cli0'")
+ .config_arg("k='cli1'")
+ .config_arg(config_path0)
+ .build();
+ assert_eq!(config.get::<String>("k").unwrap(), "file0");
+
+ // k=env0
+ // --config k='cli0'
+ // --config k='cli1'
+ // --config .cargo/file0.toml
+ // --config k='cli2'
+ let config = ConfigBuilder::new()
+ .env("CARGO_K", "env0")
+ .config_arg("k='cli0'")
+ .config_arg("k='cli1'")
+ .config_arg(config_path0)
+ .config_arg("k='cli2'")
+ .build();
+ assert_eq!(config.get::<String>("k").unwrap(), "cli2");
+
+ // k=env0
+ // --config k='cli0'
+ // --config k='cli1'
+ // --config .cargo/file0.toml
+ // --config k='cli2'
+ // --config .cargo/file1.toml
+ let config = ConfigBuilder::new()
+ .env("CARGO_K", "env0")
+ .config_arg("k='cli0'")
+ .config_arg("k='cli1'")
+ .config_arg(config_path0)
+ .config_arg("k='cli2'")
+ .config_arg(config_path1)
+ .build();
+ assert_eq!(config.get::<String>("k").unwrap(), "file1");
+}
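+
+// Summary of the scalar precedence observed in this test (not a spec): each
+// `--config` occurrence, whether an inline dotted key or a path to a file, is
+// applied left to right and the last one to set `k` wins, while the `CARGO_K`
+// environment variable ranks below all of them.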
+
+#[cargo_test]
+fn merges_array() {
+ // Array entries are appended.
+ write_config(
+ "
+ [build]
+ rustflags = ['--file']
+ ",
+ );
+ let config = ConfigBuilder::new()
+ .config_arg("build.rustflags = ['--cli']")
+ .build();
+ assert_eq!(
+ config.get::<Vec<String>>("build.rustflags").unwrap(),
+ ["--file", "--cli"]
+ );
+
+ // With normal env.
+ let config = ConfigBuilder::new()
+ .env("CARGO_BUILD_RUSTFLAGS", "--env1 --env2")
+ .config_arg("build.rustflags = ['--cli']")
+ .build();
+ // The order of cli/env is a little questionable here, but fixing it would
+ // require much more complex merging logic.
+ assert_eq!(
+ config.get::<Vec<String>>("build.rustflags").unwrap(),
+ ["--file", "--cli", "--env1", "--env2"]
+ );
+
+ // With advanced-env.
+ let config = ConfigBuilder::new()
+ .unstable_flag("advanced-env")
+ .env("CARGO_BUILD_RUSTFLAGS", "--env")
+ .config_arg("build.rustflags = ['--cli']")
+ .build();
+ assert_eq!(
+ config.get::<Vec<String>>("build.rustflags").unwrap(),
+ ["--file", "--cli", "--env"]
+ );
+
+ // Merges multiple instances.
+ let config = ConfigBuilder::new()
+ .config_arg("build.rustflags=['--one']")
+ .config_arg("build.rustflags=['--two']")
+ .build();
+ assert_eq!(
+ config.get::<Vec<String>>("build.rustflags").unwrap(),
+ ["--file", "--one", "--two"]
+ );
+}
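+
+// Observed append order for arrays in this test: config file entries first,
+// then `--config` CLI entries, then environment entries, with repeated
+// `--config` arguments appended in the order given. As noted above, the
+// cli/env ordering falls out of the current merge logic rather than a
+// deliberate contract.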
+
+#[cargo_test]
+fn string_list_array() {
+ // Using the StringList type.
+ write_config(
+ "
+ [build]
+ rustflags = ['--file']
+ ",
+ );
+ let config = ConfigBuilder::new()
+ .config_arg("build.rustflags = ['--cli']")
+ .build();
+ assert_eq!(
+ config
+ .get::<cargo::util::config::StringList>("build.rustflags")
+ .unwrap()
+ .as_slice(),
+ ["--file", "--cli"]
+ );
+
+ // With normal env.
+ let config = ConfigBuilder::new()
+ .env("CARGO_BUILD_RUSTFLAGS", "--env1 --env2")
+ .config_arg("build.rustflags = ['--cli']")
+ .build();
+ assert_eq!(
+ config
+ .get::<cargo::util::config::StringList>("build.rustflags")
+ .unwrap()
+ .as_slice(),
+ ["--file", "--cli", "--env1", "--env2"]
+ );
+
+ // With advanced-env.
+ let config = ConfigBuilder::new()
+ .unstable_flag("advanced-env")
+ .env("CARGO_BUILD_RUSTFLAGS", "['--env']")
+ .config_arg("build.rustflags = ['--cli']")
+ .build();
+ assert_eq!(
+ config
+ .get::<cargo::util::config::StringList>("build.rustflags")
+ .unwrap()
+ .as_slice(),
+ ["--file", "--cli", "--env"]
+ );
+}
+
+#[cargo_test]
+fn merges_table() {
+ // Tables are merged.
+ write_config(
+ "
+ [foo]
+ key1 = 1
+ key2 = 2
+ key3 = 3
+ ",
+ );
+ let config = ConfigBuilder::new()
+ .config_arg("foo.key2 = 4")
+ .config_arg("foo.key3 = 5")
+ .config_arg("foo.key4 = 6")
+ .build();
+ assert_eq!(config.get::<i32>("foo.key1").unwrap(), 1);
+ assert_eq!(config.get::<i32>("foo.key2").unwrap(), 4);
+ assert_eq!(config.get::<i32>("foo.key3").unwrap(), 5);
+ assert_eq!(config.get::<i32>("foo.key4").unwrap(), 6);
+
+ // With env.
+ let config = ConfigBuilder::new()
+ .env("CARGO_FOO_KEY3", "7")
+ .env("CARGO_FOO_KEY4", "8")
+ .env("CARGO_FOO_KEY5", "9")
+ .config_arg("foo.key2 = 4")
+ .config_arg("foo.key3 = 5")
+ .config_arg("foo.key4 = 6")
+ .build();
+ assert_eq!(config.get::<i32>("foo.key1").unwrap(), 1);
+ assert_eq!(config.get::<i32>("foo.key2").unwrap(), 4);
+ assert_eq!(config.get::<i32>("foo.key3").unwrap(), 5);
+ assert_eq!(config.get::<i32>("foo.key4").unwrap(), 6);
+ assert_eq!(config.get::<i32>("foo.key5").unwrap(), 9);
+}
+
+#[cargo_test]
+fn merge_array_mixed_def_paths() {
+ // Merging of arrays with different def sites.
+ write_config(
+ "
+ paths = ['file']
+ ",
+ );
+ // Create a directory for CWD to differentiate the paths.
+ let somedir = paths::root().join("somedir");
+ fs::create_dir(&somedir).unwrap();
+ let config = ConfigBuilder::new()
+ .cwd(&somedir)
+ .config_arg("paths=['cli']")
+ // env is currently ignored for get_list()
+ .env("CARGO_PATHS", "env")
+ .build();
+ let paths = config.get_list("paths").unwrap().unwrap();
+ // The definition for the root value is somewhat arbitrary, but it currently
+ // starts with the file because that is what is loaded first.
+ assert_eq!(paths.definition, Definition::Path(paths::root()));
+ assert_eq!(paths.val.len(), 2);
+ assert_eq!(paths.val[0].0, "file");
+ assert_eq!(paths.val[0].1.root(&config), paths::root());
+ assert_eq!(paths.val[1].0, "cli");
+ assert_eq!(paths.val[1].1.root(&config), somedir);
+}
+
+#[cargo_test]
+fn enforces_format() {
+ // These dotted key expressions should all be fine.
+ let config = ConfigBuilder::new()
+ .config_arg("a=true")
+ .config_arg(" b.a = true ")
+ .config_arg("c.\"b\".'a'=true")
+ .config_arg("d.\"=\".'='=true")
+ .config_arg("e.\"'\".'\"'=true")
+ .build();
+ assert_eq!(config.get::<bool>("a").unwrap(), true);
+ assert_eq!(
+ config.get::<HashMap<String, bool>>("b").unwrap(),
+ HashMap::from([("a".to_string(), true)])
+ );
+ assert_eq!(
+ config
+ .get::<HashMap<String, HashMap<String, bool>>>("c")
+ .unwrap(),
+ HashMap::from([("b".to_string(), HashMap::from([("a".to_string(), true)]))])
+ );
+ assert_eq!(
+ config
+ .get::<HashMap<String, HashMap<String, bool>>>("d")
+ .unwrap(),
+ HashMap::from([("=".to_string(), HashMap::from([("=".to_string(), true)]))])
+ );
+ assert_eq!(
+ config
+ .get::<HashMap<String, HashMap<String, bool>>>("e")
+ .unwrap(),
+ HashMap::from([("'".to_string(), HashMap::from([("\"".to_string(), true)]))])
+ );
+
+ // But anything that's not a dotted key expression should be disallowed.
+ let _ = ConfigBuilder::new()
+ .config_arg("[a] foo=true")
+ .build_err()
+ .unwrap_err();
+ let _ = ConfigBuilder::new()
+ .config_arg("a = true\nb = true")
+ .build_err()
+ .unwrap_err();
+
+ // We also disallow overwriting with tables since it makes merging unclear.
+ let _ = ConfigBuilder::new()
+ .config_arg("a = { first = true, second = false }")
+ .build_err()
+ .unwrap_err();
+ let _ = ConfigBuilder::new()
+ .config_arg("a = { first = true }")
+ .build_err()
+ .unwrap_err();
+}
+
+#[cargo_test]
+fn unused_key() {
+ // Unused key passed on command line.
+ let config = ConfigBuilder::new().config_arg("build.unused = 2").build();
+
+ config.build_config().unwrap();
+ let output = read_output(config);
+ let expected = "\
+warning: unused config key `build.unused` in `--config cli option`
+";
+ assert_match(expected, &output);
+}
+
+#[cargo_test]
+fn rerooted_remains() {
+ // Re-rooting keeps cli args.
+ let somedir = paths::root().join("somedir");
+ fs::create_dir_all(somedir.join(".cargo")).unwrap();
+ fs::write(
+ somedir.join(".cargo").join("config"),
+ "
+ a = 'file1'
+ b = 'file2'
+ ",
+ )
+ .unwrap();
+ let mut config = ConfigBuilder::new()
+ .cwd(&somedir)
+ .config_arg("b='cli1'")
+ .config_arg("c='cli2'")
+ .build();
+ assert_eq!(config.get::<String>("a").unwrap(), "file1");
+ assert_eq!(config.get::<String>("b").unwrap(), "cli1");
+ assert_eq!(config.get::<String>("c").unwrap(), "cli2");
+
+ config.reload_rooted_at(paths::root()).unwrap();
+
+ assert_eq!(config.get::<Option<String>>("a").unwrap(), None);
+ assert_eq!(config.get::<String>("b").unwrap(), "cli1");
+ assert_eq!(config.get::<String>("c").unwrap(), "cli2");
+}
+
+#[cargo_test]
+fn bad_parse() {
+ // Fail to TOML parse.
+ let config = ConfigBuilder::new().config_arg("abc").build_err();
+ assert_error(
+ config.unwrap_err(),
+ "\
+failed to parse value from --config argument `abc` as a dotted key expression
+
+Caused by:
+ TOML parse error at line 1, column 4
+ |
+1 | abc
+ | ^
+expected `.`, `=`
+",
+ );
+
+ let config = ConfigBuilder::new().config_arg("").build_err();
+ assert_error(
+ config.unwrap_err(),
+ "--config argument `` was not a TOML dotted key expression (such as `build.jobs = 2`)",
+ );
+}
+
+#[cargo_test]
+fn too_many_values() {
+ // Currently restricted to only 1 value.
+ let config = ConfigBuilder::new().config_arg("a=1\nb=2").build_err();
+ assert_error(
+ config.unwrap_err(),
+ "\
+--config argument `a=1
+b=2` was not a TOML dotted key expression (such as `build.jobs = 2`)",
+ );
+}
+
+#[cargo_test]
+fn no_disallowed_values() {
+ let config = ConfigBuilder::new()
+ .config_arg("registry.token=\"hello\"")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ "registry.token cannot be set through --config for security reasons",
+ );
+ let config = ConfigBuilder::new()
+ .config_arg("registries.crates-io.token=\"hello\"")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ "registries.crates-io.token cannot be set through --config for security reasons",
+ );
+ let config = ConfigBuilder::new()
+ .config_arg("registry.secret-key=\"hello\"")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ "registry.secret-key cannot be set through --config for security reasons",
+ );
+ let config = ConfigBuilder::new()
+ .config_arg("registries.crates-io.secret-key=\"hello\"")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ "registries.crates-io.secret-key cannot be set through --config for security reasons",
+ );
+}
+
+#[cargo_test]
+fn no_inline_table_value() {
+ // Disallow inline tables
+ let config = ConfigBuilder::new()
+ .config_arg("a.b={c = \"d\"}")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ "--config argument `a.b={c = \"d\"}` sets a value to an inline table, which is not accepted"
+ );
+}
+
+#[cargo_test]
+fn no_array_of_tables_values() {
+ // Disallow array-of-tables when not in dotted form
+ let config = ConfigBuilder::new()
+ .config_arg("[[a.b]]\nc = \"d\"")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ "\
+--config argument `[[a.b]]
+c = \"d\"` was not a TOML dotted key expression (such as `build.jobs = 2`)",
+ );
+}
+
+#[cargo_test]
+fn no_comments() {
+ // Disallow comments in dotted form.
+ let config = ConfigBuilder::new()
+ .config_arg("a.b = \"c\" # exactly")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ "\
+--config argument `a.b = \"c\" # exactly` includes non-whitespace decoration",
+ );
+
+ let config = ConfigBuilder::new()
+ .config_arg("# exactly\na.b = \"c\"")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ "\
+--config argument `# exactly\na.b = \"c\"` includes non-whitespace decoration",
+ );
+}
+
+#[cargo_test]
+fn bad_cv_convert() {
+ // ConfigValue does not support all TOML types.
+ let config = ConfigBuilder::new().config_arg("a=2019-12-01").build_err();
+ assert_error(
+ config.unwrap_err(),
+ "\
+failed to convert --config argument `a=2019-12-01`
+
+Caused by:
+ failed to parse key `a`
+
+Caused by:
+ found TOML configuration value of unknown type `datetime`",
+ );
+}
+
+#[cargo_test]
+fn fail_to_merge_multiple_args() {
+ // Error message when multiple args fail to merge.
+ let config = ConfigBuilder::new()
+ .config_arg("foo='a'")
+ .config_arg("foo=['a']")
+ .build_err();
+ // This is a little repetitive, but hopefully the user can figure it out.
+ assert_error(
+ config.unwrap_err(),
+ "\
+failed to merge --config argument `foo=['a']`
+
+Caused by:
+ failed to merge key `foo` between --config cli option and --config cli option
+
+Caused by:
+ failed to merge config value from `--config cli option` into `--config cli option`: \
+ expected string, but found array",
+ );
+}
+
+#[cargo_test]
+fn cli_path() {
+ // --config path_to_file
+ fs::write(paths::root().join("myconfig.toml"), "key = 123").unwrap();
+ let config = ConfigBuilder::new()
+ .cwd(paths::root())
+ .config_arg("myconfig.toml")
+ .build();
+ assert_eq!(config.get::<u32>("key").unwrap(), 123);
+
+ let config = ConfigBuilder::new().config_arg("missing.toml").build_err();
+ assert_error(
+ config.unwrap_err(),
+ "\
+failed to parse value from --config argument `missing.toml` as a dotted key expression
+
+Caused by:
+ TOML parse error at line 1, column 13
+ |
+1 | missing.toml
+ | ^
+expected `.`, `=`
+",
+ );
+}
diff --git a/src/tools/cargo/tests/testsuite/config_include.rs b/src/tools/cargo/tests/testsuite/config_include.rs
new file mode 100644
index 000000000..ae568065a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/config_include.rs
@@ -0,0 +1,285 @@
+//! Tests for `include` config field.
+
+use super::config::{assert_error, write_config, write_config_at, ConfigBuilder};
+use cargo_test_support::{no_such_file_err_msg, project};
+
+#[cargo_test]
+fn gated() {
+ // Requires -Z flag.
+ write_config("include='other'");
+ write_config_at(
+ ".cargo/other",
+ "
+ othervalue = 1
+ ",
+ );
+ let config = ConfigBuilder::new().build();
+ assert_eq!(config.get::<Option<i32>>("othervalue").unwrap(), None);
+ let config = ConfigBuilder::new().unstable_flag("config-include").build();
+ assert_eq!(config.get::<i32>("othervalue").unwrap(), 1);
+}
+
+#[cargo_test]
+fn simple() {
+ // Simple test.
+ write_config_at(
+ ".cargo/config",
+ "
+ include = 'other'
+ key1 = 1
+ key2 = 2
+ ",
+ );
+ write_config_at(
+ ".cargo/other",
+ "
+ key2 = 3
+ key3 = 4
+ ",
+ );
+ let config = ConfigBuilder::new().unstable_flag("config-include").build();
+ assert_eq!(config.get::<i32>("key1").unwrap(), 1);
+ assert_eq!(config.get::<i32>("key2").unwrap(), 2);
+ assert_eq!(config.get::<i32>("key3").unwrap(), 4);
+}
+
+#[cargo_test]
+fn works_with_cli() {
+ write_config_at(
+ ".cargo/config.toml",
+ "
+ include = 'other.toml'
+ [build]
+ rustflags = ['-W', 'unused']
+ ",
+ );
+ write_config_at(
+ ".cargo/other.toml",
+ "
+ [build]
+ rustflags = ['-W', 'unsafe-code']
+ ",
+ );
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("check -v")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 [..]
+[RUNNING] `rustc [..]-W unused`
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("check -v -Z config-include")
+ .masquerade_as_nightly_cargo(&["config-include"])
+ .with_stderr(
+ "\
+[DIRTY] foo v0.0.1 ([..]): the rustflags changed
+[CHECKING] foo v0.0.1 [..]
+[RUNNING] `rustc [..]-W unsafe-code -W unused`
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn left_to_right() {
+ // How it merges multiple includes.
+ write_config_at(
+ ".cargo/config",
+ "
+ include = ['one', 'two']
+ primary = 1
+ ",
+ );
+ write_config_at(
+ ".cargo/one",
+ "
+ one = 1
+ primary = 2
+ ",
+ );
+ write_config_at(
+ ".cargo/two",
+ "
+ two = 2
+ primary = 3
+ ",
+ );
+ let config = ConfigBuilder::new().unstable_flag("config-include").build();
+ assert_eq!(config.get::<i32>("primary").unwrap(), 1);
+ assert_eq!(config.get::<i32>("one").unwrap(), 1);
+ assert_eq!(config.get::<i32>("two").unwrap(), 2);
+}
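+
+// Reading of this test: both includes are loaded and merged, and the including
+// file's own `primary = 1` wins over the values set by `one` and `two`. The
+// relative ordering between the two includes is not distinguished by these
+// assertions.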
+
+#[cargo_test]
+fn missing_file() {
+ // Error when there's a missing file.
+ write_config("include='missing'");
+ let config = ConfigBuilder::new()
+ .unstable_flag("config-include")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ &format!(
+ "\
+could not load Cargo configuration
+
+Caused by:
+ failed to load config include `missing` from `[..]/.cargo/config`
+
+Caused by:
+ failed to read configuration file `[..]/.cargo/missing`
+
+Caused by:
+ {}",
+ no_such_file_err_msg()
+ ),
+ );
+}
+
+#[cargo_test]
+fn cycle() {
+ // Detects a cycle.
+ write_config_at(".cargo/config", "include='one'");
+ write_config_at(".cargo/one", "include='two'");
+ write_config_at(".cargo/two", "include='config'");
+ let config = ConfigBuilder::new()
+ .unstable_flag("config-include")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ "\
+could not load Cargo configuration
+
+Caused by:
+ failed to load config include `one` from `[..]/.cargo/config`
+
+Caused by:
+ failed to load config include `two` from `[..]/.cargo/one`
+
+Caused by:
+ failed to load config include `config` from `[..]/.cargo/two`
+
+Caused by:
+ config `include` cycle detected with path `[..]/.cargo/config`",
+ );
+}
+
+#[cargo_test]
+fn cli_include() {
+ // Using --config with include.
+ // CLI takes priority over files.
+ write_config_at(
+ ".cargo/config",
+ "
+ foo = 1
+ bar = 2
+ ",
+ );
+ write_config_at(".cargo/config-foo", "foo = 2");
+ let config = ConfigBuilder::new()
+ .unstable_flag("config-include")
+ .config_arg("include='.cargo/config-foo'")
+ .build();
+ assert_eq!(config.get::<i32>("foo").unwrap(), 2);
+ assert_eq!(config.get::<i32>("bar").unwrap(), 2);
+}
+
+#[cargo_test]
+fn bad_format() {
+ // Not a valid format.
+ write_config("include = 1");
+ let config = ConfigBuilder::new()
+ .unstable_flag("config-include")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ "\
+could not load Cargo configuration
+
+Caused by:
+ `include` expected a string or list, but found integer in `[..]/.cargo/config`",
+ );
+}
+
+#[cargo_test]
+fn cli_include_failed() {
+ // Error message when CLI include fails to load.
+ let config = ConfigBuilder::new()
+ .unstable_flag("config-include")
+ .config_arg("include='foobar'")
+ .build_err();
+ assert_error(
+ config.unwrap_err(),
+ &format!(
+ "\
+failed to load --config include
+
+Caused by:
+ failed to load config include `foobar` from `--config cli option`
+
+Caused by:
+ failed to read configuration file `[..]/foobar`
+
+Caused by:
+ {}",
+ no_such_file_err_msg()
+ ),
+ );
+}
+
+#[cargo_test]
+fn cli_merge_failed() {
+ // Error message when CLI include merge fails.
+ write_config("foo = ['a']");
+ write_config_at(
+ ".cargo/other",
+ "
+ foo = 'b'
+ ",
+ );
+ let config = ConfigBuilder::new()
+ .unstable_flag("config-include")
+ .config_arg("include='.cargo/other'")
+ .build_err();
+ // Maybe this error message should mention it was from an include file?
+ assert_error(
+ config.unwrap_err(),
+ "\
+failed to merge --config key `foo` into `[..]/.cargo/config`
+
+Caused by:
+ failed to merge config value from `[..]/.cargo/other` into `[..]/.cargo/config`: \
+ expected array, but found string",
+ );
+}
+
+#[cargo_test]
+fn cli_include_take_priority_over_env() {
+ write_config_at(".cargo/include.toml", "k='include'");
+
+ // k=env
+ let config = ConfigBuilder::new().env("CARGO_K", "env").build();
+ assert_eq!(config.get::<String>("k").unwrap(), "env");
+
+ // k=env
+ // --config 'include=".cargo/include.toml"'
+ let config = ConfigBuilder::new()
+ .env("CARGO_K", "env")
+ .unstable_flag("config-include")
+ .config_arg("include='.cargo/include.toml'")
+ .build();
+ assert_eq!(config.get::<String>("k").unwrap(), "include");
+
+ // k=env
+ // --config '.cargo/foo.toml'
+ write_config_at(".cargo/foo.toml", "include='include.toml'");
+ let config = ConfigBuilder::new()
+ .env("CARGO_K", "env")
+ .unstable_flag("config-include")
+ .config_arg(".cargo/foo.toml")
+ .build();
+ assert_eq!(config.get::<String>("k").unwrap(), "include");
+}
diff --git a/src/tools/cargo/tests/testsuite/corrupt_git.rs b/src/tools/cargo/tests/testsuite/corrupt_git.rs
new file mode 100644
index 000000000..2569e460b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/corrupt_git.rs
@@ -0,0 +1,159 @@
+//! Tests for corrupt git repos.
+
+use cargo_test_support::paths;
+use cargo_test_support::{basic_manifest, git, project};
+use cargo_util::paths as cargopaths;
+use std::fs;
+use std::path::{Path, PathBuf};
+
+#[cargo_test]
+fn deleting_database_files() {
+ let project = project();
+ let git_project = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = {{ git = '{}' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ project.cargo("check").run();
+
+ let mut files = Vec::new();
+ find_files(&paths::home().join(".cargo/git/db"), &mut files);
+ assert!(!files.is_empty());
+
+ let log = "cargo::sources::git=trace";
+ for file in files {
+ if !file.exists() {
+ continue;
+ }
+ println!("deleting {}", file.display());
+ cargopaths::remove_file(&file).unwrap();
+ project.cargo("check -v").env("CARGO_LOG", log).run();
+
+ if !file.exists() {
+ continue;
+ }
+ println!("truncating {}", file.display());
+ make_writable(&file);
+ fs::OpenOptions::new()
+ .write(true)
+ .open(&file)
+ .unwrap()
+ .set_len(2)
+ .unwrap();
+ project.cargo("check -v").env("CARGO_LOG", log).run();
+ }
+}
+
+#[cargo_test]
+fn deleting_checkout_files() {
+ let project = project();
+ let git_project = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = {{ git = '{}' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ project.cargo("check").run();
+
+ let dir = paths::home()
+ .join(".cargo/git/checkouts")
+ // get the first entry in the checkouts dir for the package's location
+ .read_dir()
+ .unwrap()
+ .next()
+ .unwrap()
+ .unwrap()
+ .path()
+ // get the first child of that checkout dir for our checkout
+ .read_dir()
+ .unwrap()
+ .next()
+ .unwrap()
+ .unwrap()
+ .path()
+ // and throw on .git to corrupt things
+ .join(".git");
+ let mut files = Vec::new();
+ find_files(&dir, &mut files);
+ assert!(!files.is_empty());
+
+ let log = "cargo::sources::git=trace";
+ for file in files {
+ if !file.exists() {
+ continue;
+ }
+ println!("deleting {}", file.display());
+ cargopaths::remove_file(&file).unwrap();
+ project.cargo("check -v").env("CARGO_LOG", log).run();
+
+ if !file.exists() {
+ continue;
+ }
+ println!("truncating {}", file.display());
+ make_writable(&file);
+ fs::OpenOptions::new()
+ .write(true)
+ .open(&file)
+ .unwrap()
+ .set_len(2)
+ .unwrap();
+ project.cargo("check -v").env("CARGO_LOG", log).run();
+ }
+}
+
+fn make_writable(path: &Path) {
+ let mut p = path.metadata().unwrap().permissions();
+ p.set_readonly(false);
+ fs::set_permissions(path, p).unwrap();
+}
+
+fn find_files(path: &Path, dst: &mut Vec<PathBuf>) {
+ for e in path.read_dir().unwrap() {
+ let e = e.unwrap();
+ let path = e.path();
+ if e.file_type().unwrap().is_dir() {
+ find_files(&path, dst);
+ } else {
+ dst.push(path);
+ }
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/credential_process.rs b/src/tools/cargo/tests/testsuite/credential_process.rs
new file mode 100644
index 000000000..8c202c6a3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/credential_process.rs
@@ -0,0 +1,504 @@
+//! Tests for credential-process.
+
+use cargo_test_support::registry::TestRegistry;
+use cargo_test_support::{basic_manifest, cargo_process, paths, project, registry, Project};
+use std::fs::{self, read_to_string};
+
+fn toml_bin(proj: &Project, name: &str) -> String {
+ proj.bin(name).display().to_string().replace('\\', "\\\\")
+}
+
+#[cargo_test]
+fn gated() {
+ let _alternative = registry::RegistryBuilder::new()
+ .alternative()
+ .no_configure_token()
+ .build();
+
+ let cratesio = registry::RegistryBuilder::new()
+ .no_configure_token()
+ .build();
+
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [registry]
+ credential-process = "false"
+ "#,
+ )
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(cratesio.index_url())
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] no token found, please run `cargo login`
+or use environment variable CARGO_REGISTRY_TOKEN
+",
+ )
+ .run();
+
+ p.change_file(
+ ".cargo/config",
+ r#"
+ [registry.alternative]
+ credential-process = "false"
+ "#,
+ );
+
+ p.cargo("publish --no-verify --registry alternative")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] no token found for `alternative`, please run `cargo login --registry alternative`
+or use environment variable CARGO_REGISTRIES_ALTERNATIVE_TOKEN
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warn_both_token_and_process() {
+ // Specifying both `credential-process` and a token in config for the same registry should error.
+ let _server = registry::RegistryBuilder::new()
+ .http_api()
+ .http_index()
+ .alternative()
+ .no_configure_token()
+ .build();
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [registries.alternative]
+ token = "alternative-sekrit"
+ credential-process = "false"
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ description = "foo"
+ authors = []
+ license = "MIT"
+ homepage = "https://example.com/"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify --registry alternative -Z credential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] both `token` and `credential-process` were specified in the config for registry `alternative`.
+Only one of these values may be set, remove one or the other to proceed.
+",
+ )
+ .run();
+
+ // Try with global credential-process, and registry-specific `token`.
+ // This should silently use the config token, and not run the "false" exe.
+ p.change_file(
+ ".cargo/config",
+ r#"
+ [registry]
+ credential-process = "false"
+
+ [registries.alternative]
+ token = "alternative-sekrit"
+ "#,
+ );
+ p.cargo("publish --no-verify --registry alternative -Z credential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.1.0 [..]
+[PACKAGED] [..]
+[UPLOADING] foo v0.1.0 [..]
+[UPLOADED] foo v0.1.0 [..]
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.1.0 [..]
+",
+ )
+ .run();
+}
+
+/// Setup for a test that will issue a command that needs to fetch a token.
+///
+/// This does the following:
+///
+/// * Spawn a thread that will act as an API server.
+/// * Create a simple credential-process that will generate a fake token.
+/// * Create a simple `foo` project to run the test against.
+/// * Configure the credential-process config.
+///
+/// Returns the simple `foo` project to test against and the API server handle.
+fn get_token_test() -> (Project, TestRegistry) {
+ // API server that checks that the token is included correctly.
+ let server = registry::RegistryBuilder::new()
+ .no_configure_token()
+ .token(cargo_test_support::registry::Token::Plaintext(
+ "sekrit".to_string(),
+ ))
+ .alternative()
+ .http_api()
+ .build();
+ // The credential process to use.
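+ // It appends a "+" to runs.log on every invocation (so tests can count how
+ // often it ran) and prints the fake token on stdout.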
+ let cred_proj = project()
+ .at("cred_proj")
+ .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ use std::fs::File;
+ use std::io::Write;
+ fn main() {
+ let mut f = File::options()
+ .write(true)
+ .create(true)
+ .append(true)
+ .open("runs.log")
+ .unwrap();
+ write!(f, "+");
+ println!("sekrit");
+ } "#,
+ )
+ .build();
+ cred_proj.cargo("build").run();
+
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [registries.alternative]
+ index = "{}"
+ credential-process = ["{}"]
+ "#,
+ server.index_url(),
+ toml_bin(&cred_proj, "test-cred")
+ ),
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ description = "foo"
+ authors = []
+ license = "MIT"
+ homepage = "https://example.com/"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ (p, server)
+}
+
+#[cargo_test]
+fn publish() {
+ // Checks that credential-process is used for `cargo publish`.
+ let (p, _t) = get_token_test();
+
+ p.cargo("publish --no-verify --registry alternative -Z credential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.1.0 [..]
+[PACKAGED] [..]
+[UPLOADING] foo v0.1.0 [..]
+[UPLOADED] foo v0.1.0 [..]
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.1.0 [..]
+",
+ )
+ .run();
+
+ let calls = read_to_string(p.root().join("runs.log")).unwrap().len();
+ assert_eq!(calls, 1);
+}
+
+#[cargo_test]
+fn basic_unsupported() {
+ // A credential process configured without the `{action}` argument can't be used for login/logout.
+ let registry = registry::RegistryBuilder::new()
+ .no_configure_token()
+ .build();
+ cargo_util::paths::append(
+ &paths::home().join(".cargo/config"),
+ br#"
+ [registry]
+ credential-process = "false"
+ "#,
+ )
+ .unwrap();
+
+ cargo_process("login -Z credential-process abcdefg")
+ .replace_crates_io(registry.index_url())
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[ERROR] credential process `false` cannot be used to log in, \
+the credential-process configuration value must pass the \
+`{action}` argument in the config to support this command
+",
+ )
+ .run();
+
+ cargo_process("logout -Z credential-process")
+ .replace_crates_io(registry.index_url())
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] credential process `false` cannot be used to log out, \
+the credential-process configuration value must pass the \
+`{action}` argument in the config to support this command
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn login() {
+ let server = registry::RegistryBuilder::new()
+ .no_configure_token()
+ .build();
+ // The credential process to use.
+ let cred_proj = project()
+ .at("cred_proj")
+ .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ use std::io::Read;
+
+ fn main() {{
+ assert_eq!(std::env::var("CARGO_REGISTRY_NAME_OPT").unwrap(), "crates-io");
+ assert_eq!(std::env::var("CARGO_REGISTRY_INDEX_URL").unwrap(), "https://github.com/rust-lang/crates.io-index");
+ assert_eq!(std::env::args().skip(1).next().unwrap(), "store");
+ let mut buffer = String::new();
+ std::io::stdin().read_to_string(&mut buffer).unwrap();
+ assert_eq!(buffer, "abcdefg\n");
+ std::fs::write("token-store", buffer).unwrap();
+ }}
+ "#,
+ )
+ .build();
+ cred_proj.cargo("build").run();
+
+ cargo_util::paths::append(
+ &paths::home().join(".cargo/config"),
+ format!(
+ r#"
+ [registry]
+ credential-process = ["{}", "{{action}}"]
+ "#,
+ toml_bin(&cred_proj, "test-cred")
+ )
+ .as_bytes(),
+ )
+ .unwrap();
+
+ cargo_process("login -Z credential-process abcdefg")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .replace_crates_io(server.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[LOGIN] token for `crates.io` saved
+",
+ )
+ .run();
+ assert_eq!(
+ fs::read_to_string(paths::root().join("token-store")).unwrap(),
+ "abcdefg\n"
+ );
+}
+
+#[cargo_test]
+fn logout() {
+ let server = registry::RegistryBuilder::new()
+ .no_configure_token()
+ .build();
+ // The credential process to use.
+ let cred_proj = project()
+ .at("cred_proj")
+ .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ use std::io::Read;
+
+ fn main() {{
+ assert_eq!(std::env::var("CARGO_REGISTRY_NAME_OPT").unwrap(), "crates-io");
+ assert_eq!(std::env::var("CARGO_REGISTRY_INDEX_URL").unwrap(), "https://github.com/rust-lang/crates.io-index");
+ assert_eq!(std::env::args().skip(1).next().unwrap(), "erase");
+ std::fs::write("token-store", "").unwrap();
+ eprintln!("token for `crates-io` has been erased!")
+ }}
+ "#,
+ )
+ .build();
+ cred_proj.cargo("build").run();
+
+ cargo_util::paths::append(
+ &paths::home().join(".cargo/config"),
+ format!(
+ r#"
+ [registry]
+ credential-process = ["{}", "{{action}}"]
+ "#,
+ toml_bin(&cred_proj, "test-cred")
+ )
+ .as_bytes(),
+ )
+ .unwrap();
+
+ cargo_process("logout -Z credential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .replace_crates_io(server.index_url())
+ .with_stderr(
+ "\
+token for `crates-io` has been erased!
+[LOGOUT] token for `crates-io` has been removed from local storage
+[NOTE] This does not revoke the token on the registry server.
+ If you need to revoke the token, visit <https://crates.io/me> \
+ and follow the instructions there.
+",
+ )
+ .run();
+ assert_eq!(
+ fs::read_to_string(paths::root().join("token-store")).unwrap(),
+ ""
+ );
+}
+
+#[cargo_test]
+fn yank() {
+ let (p, _t) = get_token_test();
+
+ p.cargo("yank --version 0.1.0 --registry alternative -Z credential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[YANK] foo@0.1.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn owner() {
+ let (p, _t) = get_token_test();
+
+ p.cargo("owner --add username --registry alternative -Z credential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[OWNER] completed!
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn libexec_path() {
+ // `cargo:`-prefixed names resolve to executables in the sysroot's libexec directory.
+ let server = registry::RegistryBuilder::new()
+ .no_configure_token()
+ .build();
+ cargo_util::paths::append(
+ &paths::home().join(".cargo/config"),
+ br#"
+ [registry]
+ credential-process = "cargo:doesnotexist"
+ "#,
+ )
+ .unwrap();
+
+ cargo_process("login -Z credential-process abcdefg")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .replace_crates_io(server.index_url())
+ .with_status(101)
+ .with_stderr(
+ // FIXME: Update "Caused by" error message once rust/pull/87704 is merged.
+ // On Windows, changing to a custom executable resolver has changed the
+ // error messages.
+ &format!("\
+[UPDATING] [..]
+[ERROR] failed to execute `[..]libexec/cargo-credential-doesnotexist[EXE]` to store authentication token for registry `crates-io`
+
+Caused by:
+ [..]
+"),
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_token_output() {
+ // Error when credential process does not output the expected format for a token.
+ let _server = registry::RegistryBuilder::new()
+ .alternative()
+ .no_configure_token()
+ .build();
+ let cred_proj = project()
+ .at("cred_proj")
+ .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0"))
+ .file("src/main.rs", r#"fn main() { print!("a\nb\n"); } "#)
+ .build();
+ cred_proj.cargo("build").run();
+
+ cargo_util::paths::append(
+ &paths::home().join(".cargo/config"),
+ format!(
+ r#"
+ [registry]
+ credential-process = ["{}"]
+ "#,
+ toml_bin(&cred_proj, "test-cred")
+ )
+ .as_bytes(),
+ )
+ .unwrap();
+
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify --registry alternative -Z credential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] credential process `[..]test-cred[EXE]` returned more than one line of output; expected a single token
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/cross_compile.rs b/src/tools/cargo/tests/testsuite/cross_compile.rs
new file mode 100644
index 000000000..cc9644550
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cross_compile.rs
@@ -0,0 +1,1342 @@
+//! Tests for cross compiling with --target.
+//!
+//! See `cargo_test_support::cross_compile` for more detail.
+
+use cargo_test_support::rustc_host;
+use cargo_test_support::{basic_bin_manifest, basic_manifest, cross_compile, project};
+
+#[cargo_test]
+fn simple_cross() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
+ fn main() {{
+ assert_eq!(std::env::var("TARGET").unwrap(), "{}");
+ }}
+ "#,
+ cross_compile::alternate()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
+ use std::env;
+ fn main() {{
+ assert_eq!(env::consts::ARCH, "{}");
+ }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ let target = cross_compile::alternate();
+ p.cargo("build -v --target").arg(&target).run();
+ assert!(p.target_bin(target, "foo").is_file());
+
+ if cross_compile::can_run_on_host() {
+ p.process(&p.target_bin(target, "foo")).run();
+ }
+}
+
+#[cargo_test]
+fn simple_cross_config() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [build]
+ target = "{}"
+ "#,
+ cross_compile::alternate()
+ ),
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
+ fn main() {{
+ assert_eq!(std::env::var("TARGET").unwrap(), "{}");
+ }}
+ "#,
+ cross_compile::alternate()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
+ use std::env;
+ fn main() {{
+ assert_eq!(env::consts::ARCH, "{}");
+ }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ let target = cross_compile::alternate();
+ p.cargo("build -v").run();
+ assert!(p.target_bin(target, "foo").is_file());
+
+ if cross_compile::can_run_on_host() {
+ p.process(&p.target_bin(target, "foo")).run();
+ }
+}
+
+#[cargo_test]
+fn simple_deps() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() { bar::bar(); }")
+ .build();
+ let _p2 = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let target = cross_compile::alternate();
+ p.cargo("build --target").arg(&target).run();
+ assert!(p.target_bin(target, "foo").is_file());
+
+ if cross_compile::can_run_on_host() {
+ p.process(&p.target_bin(target, "foo")).run();
+ }
+}
+
+/// Always set the parameters so that `cross_compile::alternate()` is the
+/// target that actually gets picked.
+fn per_crate_target_test(
+ default_target: Option<&'static str>,
+ forced_target: Option<&'static str>,
+ arg_target: Option<&'static str>,
+) {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ cargo-features = ["per-package-target"]
+
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ build = "build.rs"
+ {}
+ {}
+ "#,
+ default_target
+ .map(|t| format!(r#"default-target = "{}""#, t))
+ .unwrap_or(String::new()),
+ forced_target
+ .map(|t| format!(r#"forced-target = "{}""#, t))
+ .unwrap_or(String::new()),
+ ),
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
+ fn main() {{
+ assert_eq!(std::env::var("TARGET").unwrap(), "{}");
+ }}
+ "#,
+ cross_compile::alternate()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
+ use std::env;
+ fn main() {{
+ assert_eq!(env::consts::ARCH, "{}");
+ }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ let mut cmd = p.cargo("build -v");
+ if let Some(t) = arg_target {
+ cmd.arg("--target").arg(&t);
+ }
+ cmd.masquerade_as_nightly_cargo(&["per-package-target"])
+ .run();
+ assert!(p.target_bin(cross_compile::alternate(), "foo").is_file());
+
+ if cross_compile::can_run_on_host() {
+ p.process(&p.target_bin(cross_compile::alternate(), "foo"))
+ .run();
+ }
+}
+
+#[cargo_test]
+fn per_crate_default_target_is_default() {
+ per_crate_target_test(Some(cross_compile::alternate()), None, None);
+}
+
+#[cargo_test]
+fn per_crate_default_target_gets_overridden() {
+ per_crate_target_test(
+ Some(cross_compile::unused()),
+ None,
+ Some(cross_compile::alternate()),
+ );
+}
+
+#[cargo_test]
+fn per_crate_forced_target_is_default() {
+ per_crate_target_test(None, Some(cross_compile::alternate()), None);
+}
+
+#[cargo_test]
+fn per_crate_forced_target_does_not_get_overridden() {
+ per_crate_target_test(
+ None,
+ Some(cross_compile::alternate()),
+ Some(cross_compile::unused()),
+ );
+}
+
+#[cargo_test]
+fn workspace_with_multiple_targets() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["native", "cross"]
+ "#,
+ )
+ .file(
+ "native/Cargo.toml",
+ r#"
+ cargo-features = ["per-package-target"]
+
+ [package]
+ name = "native"
+ version = "0.0.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "native/build.rs",
+ &format!(
+ r#"
+ fn main() {{
+ assert_eq!(std::env::var("TARGET").unwrap(), "{}");
+ }}
+ "#,
+ cross_compile::native()
+ ),
+ )
+ .file(
+ "native/src/main.rs",
+ &format!(
+ r#"
+ use std::env;
+ fn main() {{
+ assert_eq!(env::consts::ARCH, "{}");
+ }}
+ "#,
+ cross_compile::native_arch()
+ ),
+ )
+ .file(
+ "cross/Cargo.toml",
+ &format!(
+ r#"
+ cargo-features = ["per-package-target"]
+
+ [package]
+ name = "cross"
+ version = "0.0.0"
+ authors = []
+ build = "build.rs"
+ default-target = "{}"
+ "#,
+ cross_compile::alternate(),
+ ),
+ )
+ .file(
+ "cross/build.rs",
+ &format!(
+ r#"
+ fn main() {{
+ assert_eq!(std::env::var("TARGET").unwrap(), "{}");
+ }}
+ "#,
+ cross_compile::alternate()
+ ),
+ )
+ .file(
+ "cross/src/main.rs",
+ &format!(
+ r#"
+ use std::env;
+ fn main() {{
+ assert_eq!(env::consts::ARCH, "{}");
+ }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ let mut cmd = p.cargo("build -v");
+ cmd.masquerade_as_nightly_cargo(&["per-package-target"])
+ .run();
+
+ assert!(p.bin("native").is_file());
+ assert!(p.target_bin(cross_compile::alternate(), "cross").is_file());
+
+ p.process(&p.bin("native")).run();
+ if cross_compile::can_run_on_host() {
+ p.process(&p.target_bin(cross_compile::alternate(), "cross"))
+ .run();
+ }
+}
+
+#[cargo_test]
+fn linker() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let target = cross_compile::alternate();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}]
+ linker = "my-linker-tool"
+ "#,
+ target
+ ),
+ )
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/foo.rs",
+ &format!(
+ r#"
+ use std::env;
+ fn main() {{
+ assert_eq!(env::consts::ARCH, "{}");
+ }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ p.cargo("build -v --target")
+ .arg(&target)
+ .with_status(101)
+ .with_stderr_contains(&format!(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name foo src/foo.rs [..]--crate-type bin \
+ --emit=[..]link[..]-C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [CWD]/target/{target}/debug/deps \
+ --target {target} \
+ -C linker=my-linker-tool \
+ -L dependency=[CWD]/target/{target}/debug/deps \
+ -L dependency=[CWD]/target/debug/deps`
+",
+ target = target,
+ ))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "plugins are unstable")]
+fn plugin_with_extra_dylib_dep() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(plugin)]
+ #![plugin(bar)]
+
+ fn main() {}
+ "#,
+ )
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "bar"
+ plugin = true
+
+ [dependencies.baz]
+ path = "../baz"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(rustc_private)]
+
+ extern crate baz;
+ extern crate rustc_driver;
+
+ use rustc_driver::plugin::Registry;
+
+ #[no_mangle]
+ pub fn __rustc_plugin_registrar(reg: &mut Registry) {
+ println!("{}", baz::baz());
+ }
+ "#,
+ )
+ .build();
+ let _baz = project()
+ .at("baz")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "baz"
+ crate_type = ["dylib"]
+ "#,
+ )
+ .file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
+ .build();
+
+ let target = cross_compile::alternate();
+ foo.cargo("build --target").arg(&target).run();
+}
+
+#[cargo_test]
+fn cross_tests() {
+ if !cross_compile::can_run_on_host() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.0"
+
+ [[bin]]
+ name = "bar"
+ "#,
+ )
+ .file(
+ "src/bin/bar.rs",
+ &format!(
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ use std::env;
+ fn main() {{
+ assert_eq!(env::consts::ARCH, "{}");
+ }}
+ #[test] fn test() {{ main() }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ &format!(
+ r#"
+ use std::env;
+ pub fn foo() {{ assert_eq!(env::consts::ARCH, "{}"); }}
+ #[test] fn test_foo() {{ foo() }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ let target = cross_compile::alternate();
+ p.cargo("test --target")
+ .arg(&target)
+ .with_stderr(&format!(
+ "\
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/{triple}/debug/deps/foo-[..][EXE])
+[RUNNING] [..] (target/{triple}/debug/deps/bar-[..][EXE])",
+ triple = target
+ ))
+ .with_stdout_contains("test test_foo ... ok")
+ .with_stdout_contains("test test ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn no_cross_doctests() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ //! ```
+ //! extern crate foo;
+ //! assert!(true);
+ //! ```
+ "#,
+ )
+ .build();
+
+ let host_output = "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[DOCTEST] foo
+";
+
+ println!("a");
+ p.cargo("test").with_stderr(&host_output).run();
+
+ println!("b");
+ let target = rustc_host();
+ p.cargo("test -v --target")
+ .arg(&target)
+ // Unordered since the two `rustc` invocations happen concurrently.
+ .with_stderr_unordered(&format!(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo [..]--crate-type lib[..]
+[RUNNING] `rustc --crate-name foo [..]--test[..]
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[CWD]/target/{target}/debug/deps/foo-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]--target {target}[..]`
+",
+ ))
+ .with_stdout(
+ "
+running 0 tests
+
+test result: ok. 0 passed[..]
+
+
+running 1 test
+test src/lib.rs - (line 2) ... ok
+
+test result: ok. 1 passed[..]
+
+",
+ )
+ .run();
+
+ println!("c");
+ let target = cross_compile::alternate();
+
+ // This will build the library, but does not build or run doc tests.
+ // This should probably be a warning or error.
+ p.cargo("test -v --doc --target")
+ .arg(&target)
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[NOTE] skipping doctests for foo v0.0.1 ([ROOT]/foo) (lib), \
+cross-compilation doctests are not yet supported
+See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile \
+for more information.
+",
+ )
+ .run();
+
+ if !cross_compile::can_run_on_host() {
+ return;
+ }
+
+ // This tests the library, but does not run the doc tests.
+ p.cargo("test -v --target")
+ .arg(&target)
+ .with_stderr(&format!(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo [..]--test[..]
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[CWD]/target/{triple}/debug/deps/foo-[..][EXE]`
+[NOTE] skipping doctests for foo v0.0.1 ([ROOT]/foo) (lib), \
+cross-compilation doctests are not yet supported
+See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile \
+for more information.
+",
+ triple = target
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn simple_cargo_run() {
+ if !cross_compile::can_run_on_host() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
+ use std::env;
+ fn main() {{
+ assert_eq!(env::consts::ARCH, "{}");
+ }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ let target = cross_compile::alternate();
+ p.cargo("run --target").arg(&target).run();
+}
+
+#[cargo_test]
+fn cross_with_a_build_script() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let target = cross_compile::alternate();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ build = 'build.rs'
+ "#,
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
+ use std::env;
+ use std::path::PathBuf;
+ fn main() {{
+ assert_eq!(env::var("TARGET").unwrap(), "{0}");
+ let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+ assert_eq!(path.file_name().unwrap().to_str().unwrap(), "out");
+ path.pop();
+ assert!(path.file_name().unwrap().to_str().unwrap()
+ .starts_with("foo-"));
+ path.pop();
+ assert_eq!(path.file_name().unwrap().to_str().unwrap(), "build");
+ path.pop();
+ assert_eq!(path.file_name().unwrap().to_str().unwrap(), "debug");
+ path.pop();
+ assert_eq!(path.file_name().unwrap().to_str().unwrap(), "{0}");
+ path.pop();
+ assert_eq!(path.file_name().unwrap().to_str().unwrap(), "target");
+ }}
+ "#,
+ target
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v --target")
+ .arg(&target)
+ .with_stderr(&format!(
+ "\
+[COMPILING] foo v0.0.0 ([CWD])
+[RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..]`
+[RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build`
+[RUNNING] `rustc [..] src/main.rs [..] --target {target} [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ target = target,
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn build_script_needed_for_host_and_target() {
+ if cross_compile::disabled() {
+ return;
+ }
+
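+ // d1's build script emits `cargo:rustc-flags=-L /path/to/$TARGET`, so the
+ // assertions below expect the host path on host-compiled units (the build
+ // script and d2) and the cross target's path on the final binary.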
+ let target = cross_compile::alternate();
+ let host = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ build = 'build.rs'
+
+ [dependencies.d1]
+ path = "d1"
+ [build-dependencies.d2]
+ path = "d2"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate d2;
+ fn main() { d2::d2(); }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
+ #[allow(unused_extern_crates)]
+ extern crate d1;
+ fn main() { d1::d1(); }
+ ",
+ )
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.0"
+ authors = []
+ build = 'build.rs'
+ "#,
+ )
+ .file("d1/src/lib.rs", "pub fn d1() {}")
+ .file(
+ "d1/build.rs",
+ r#"
+ use std::env;
+ fn main() {
+ let target = env::var("TARGET").unwrap();
+ println!("cargo:rustc-flags=-L /path/to/{}", target);
+ }
+ "#,
+ )
+ .file(
+ "d2/Cargo.toml",
+ r#"
+ [package]
+ name = "d2"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.d1]
+ path = "../d1"
+ "#,
+ )
+ .file(
+ "d2/src/lib.rs",
+ "
+ #[allow(unused_extern_crates)]
+ extern crate d1;
+ pub fn d2() { d1::d1(); }
+ ",
+ )
+ .build();
+
+ p.cargo("build -v --target")
+ .arg(&target)
+ .with_stderr_contains(&"[COMPILING] d1 v0.0.0 ([CWD]/d1)")
+ .with_stderr_contains(
+ "[RUNNING] `rustc [..] d1/build.rs [..] --out-dir [CWD]/target/debug/build/d1-[..]`",
+ )
+ .with_stderr_contains("[RUNNING] `[CWD]/target/debug/build/d1-[..]/build-script-build`")
+ .with_stderr_contains("[RUNNING] `rustc [..] d1/src/lib.rs [..]`")
+ .with_stderr_contains("[COMPILING] d2 v0.0.0 ([CWD]/d2)")
+ .with_stderr_contains(&format!(
+ "[RUNNING] `rustc [..] d2/src/lib.rs [..] -L /path/to/{host}`",
+ host = host
+ ))
+ .with_stderr_contains("[COMPILING] foo v0.0.0 ([CWD])")
+ .with_stderr_contains(&format!(
+ "[RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..] \
+ -L /path/to/{host}`",
+ host = host
+ ))
+ .with_stderr_contains(&format!(
+ "[RUNNING] `rustc [..] src/main.rs [..] --target {target} [..] \
+ -L /path/to/{target}`",
+ target = target
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn build_deps_for_the_right_arch() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.d2]
+ path = "d2"
+ "#,
+ )
+ .file("src/main.rs", "extern crate d2; fn main() {}")
+ .file("d1/Cargo.toml", &basic_manifest("d1", "0.0.0"))
+ .file("d1/src/lib.rs", "pub fn d1() {}")
+ .file(
+ "d2/Cargo.toml",
+ r#"
+ [package]
+ name = "d2"
+ version = "0.0.0"
+ authors = []
+ build = "build.rs"
+
+ [build-dependencies.d1]
+ path = "../d1"
+ "#,
+ )
+ .file("d2/build.rs", "extern crate d1; fn main() {}")
+ .file("d2/src/lib.rs", "")
+ .build();
+
+ let target = cross_compile::alternate();
+ p.cargo("build -v --target").arg(&target).run();
+}
+
+#[cargo_test]
+fn build_script_only_host() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ build = "build.rs"
+
+ [build-dependencies.d1]
+ path = "d1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("build.rs", "extern crate d1; fn main() {}")
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("d1/src/lib.rs", "pub fn d1() {}")
+ .file(
+ "d1/build.rs",
+ r#"
+ use std::env;
+
+ fn main() {
+ assert!(env::var("OUT_DIR").unwrap().replace("\\", "/")
+ .contains("target/debug/build/d1-"),
+ "bad: {:?}", env::var("OUT_DIR"));
+ }
+ "#,
+ )
+ .build();
+
+ let target = cross_compile::alternate();
+ p.cargo("build -v --target").arg(&target).run();
+}
+
+#[cargo_test]
+fn plugin_build_script_right_arch() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+
+ [lib]
+ name = "foo"
+ plugin = true
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v --target")
+ .arg(cross_compile::alternate())
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_script_with_platform_specific_dependencies() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let target = cross_compile::alternate();
+ let host = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+
+ [build-dependencies.d1]
+ path = "d1"
+ "#,
+ )
+ .file(
+ "build.rs",
+ "
+ #[allow(unused_extern_crates)]
+ extern crate d1;
+ fn main() {}
+ ",
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "d1/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.0"
+ authors = []
+
+ [target.{}.dependencies]
+ d2 = {{ path = "../d2" }}
+ "#,
+ host
+ ),
+ )
+ .file(
+ "d1/src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate d2;",
+ )
+ .file("d2/Cargo.toml", &basic_manifest("d2", "0.0.0"))
+ .file("d2/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v --target")
+ .arg(&target)
+ .with_stderr(&format!(
+ "\
+[COMPILING] d2 v0.0.0 ([..])
+[RUNNING] `rustc [..] d2/src/lib.rs [..]`
+[COMPILING] d1 v0.0.0 ([..])
+[RUNNING] `rustc [..] d1/src/lib.rs [..]`
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..] --target {target} [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ target = target
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn platform_specific_dependencies_do_not_leak() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let target = cross_compile::alternate();
+ let host = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+
+ [dependencies.d1]
+ path = "d1"
+
+ [build-dependencies.d1]
+ path = "d1"
+ "#,
+ )
+ .file("build.rs", "extern crate d1; fn main() {}")
+ .file("src/lib.rs", "")
+ .file(
+ "d1/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.0"
+ authors = []
+
+ [target.{}.dependencies]
+ d2 = {{ path = "../d2" }}
+ "#,
+ host
+ ),
+ )
+ .file("d1/src/lib.rs", "extern crate d2;")
+ .file("d1/Cargo.toml", &basic_manifest("d1", "0.0.0"))
+ .file("d2/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v --target")
+ .arg(&target)
+ .with_status(101)
+ .with_stderr_contains("[..] can't find crate for `d2`[..]")
+ .run();
+}
+
+#[cargo_test]
+fn platform_specific_variables_reflected_in_build_scripts() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let target = cross_compile::alternate();
+ let host = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+
+ [target.{host}.dependencies]
+ d1 = {{ path = "d1" }}
+
+ [target.{target}.dependencies]
+ d2 = {{ path = "d2" }}
+ "#,
+ host = host,
+ target = target
+ ),
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
+ use std::env;
+
+ fn main() {{
+ let platform = env::var("TARGET").unwrap();
+ let (expected, not_expected) = match &platform[..] {{
+ "{host}" => ("DEP_D1_VAL", "DEP_D2_VAL"),
+ "{target}" => ("DEP_D2_VAL", "DEP_D1_VAL"),
+ _ => panic!("unknown platform")
+ }};
+
+ env::var(expected).ok()
+ .expect(&format!("missing {{}}", expected));
+ env::var(not_expected).err()
+ .expect(&format!("found {{}}", not_expected));
+ }}
+ "#,
+ host = host,
+ target = target
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.0"
+ authors = []
+ links = "d1"
+ build = "build.rs"
+ "#,
+ )
+ .file("d1/build.rs", r#"fn main() { println!("cargo:val=1") }"#)
+ .file("d1/src/lib.rs", "")
+ .file(
+ "d2/Cargo.toml",
+ r#"
+ [package]
+ name = "d2"
+ version = "0.0.0"
+ authors = []
+ links = "d2"
+ build = "build.rs"
+ "#,
+ )
+ .file("d2/build.rs", r#"fn main() { println!("cargo:val=1") }"#)
+ .file("d2/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v").run();
+ p.cargo("build -v --target").arg(&target).run();
+}
+
+#[cargo_test]
+#[cfg_attr(
+ target_os = "macos",
+ ignore = "don't have a dylib cross target on macos"
+)]
+fn cross_test_dylib() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let target = cross_compile::alternate();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "foo"
+ crate_type = ["dylib"]
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate bar as the_bar;
+
+ pub fn bar() { the_bar::baz(); }
+
+ #[test]
+ fn foo() { bar(); }
+ "#,
+ )
+ .file(
+ "tests/test.rs",
+ r#"
+ extern crate foo as the_foo;
+
+ #[test]
+ fn foo() { the_foo::bar(); }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "bar"
+ crate_type = ["dylib"]
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ &format!(
+ r#"
+ use std::env;
+ pub fn baz() {{
+ assert_eq!(env::consts::ARCH, "{}");
+ }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ p.cargo("test --target")
+ .arg(&target)
+ .with_stderr(&format!(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/{arch}/debug/deps/foo-[..][EXE])
+[RUNNING] [..] (target/{arch}/debug/deps/test-[..][EXE])",
+ arch = cross_compile::alternate()
+ ))
+ .with_stdout_contains_n("test foo ... ok", 2)
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zdoctest-xcompile is unstable")]
+fn doctest_xcompile_linker() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let target = cross_compile::alternate();
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}]
+ linker = "my-linker-tool"
+ "#,
+ target
+ ),
+ )
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```
+ /// assert_eq!(1, 1);
+ /// ```
+ pub fn foo() {}
+ "#,
+ )
+ .build();
+
+ // Fails because `my-linker-tool` doesn't actually exist.
+ p.cargo("test --doc -v -Zdoctest-xcompile --target")
+ .arg(&target)
+ .with_status(101)
+ .masquerade_as_nightly_cargo(&["doctest-xcompile"])
+ .with_stderr_contains(&format!(
+ "\
+[RUNNING] `rustdoc --crate-type lib --crate-name foo --test [..]\
+ --target {target} [..] -C linker=my-linker-tool[..]
+",
+ target = target,
+ ))
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/cross_publish.rs b/src/tools/cargo/tests/testsuite/cross_publish.rs
new file mode 100644
index 000000000..83e0ecab7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cross_publish.rs
@@ -0,0 +1,122 @@
+//! Tests for publishing using the `--target` flag.
+
+use std::fs::File;
+
+use cargo_test_support::{cross_compile, project, publish, registry};
+
+#[cargo_test]
+fn simple_cross_package() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ repository = "bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
+ use std::env;
+ fn main() {{
+ assert_eq!(env::consts::ARCH, "{}");
+ }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ let target = cross_compile::alternate();
+
+ p.cargo("package --target")
+ .arg(&target)
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.0 ([CWD])
+[VERIFYING] foo v0.0.0 ([CWD])
+[COMPILING] foo v0.0.0 ([CWD]/target/package/foo-0.0.0)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
+
+ // Check that the tarball contains the files
+ let f = File::open(&p.root().join("target/package/foo-0.0.0.crate")).unwrap();
+ publish::validate_crate_contents(
+ f,
+ "foo-0.0.0.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ &[],
+ );
+}
+
+#[cargo_test]
+fn publish_with_target() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ repository = "bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
+ use std::env;
+ fn main() {{
+ assert_eq!(env::consts::ARCH, "{}");
+ }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ let target = cross_compile::alternate();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .arg("--target")
+ .arg(&target)
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[PACKAGING] foo v0.0.0 ([CWD])
+[VERIFYING] foo v0.0.0 ([CWD])
+[COMPILING] foo v0.0.0 ([CWD]/target/package/foo-0.0.0)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] [..]
+[UPLOADING] foo v0.0.0 ([CWD])
+[UPLOADED] foo v0.0.0 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.0 at registry `crates-io`
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/custom_target.rs b/src/tools/cargo/tests/testsuite/custom_target.rs
new file mode 100644
index 000000000..b7ad4d835
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/custom_target.rs
@@ -0,0 +1,250 @@
+//! Tests for custom json target specifications.
+
+use cargo_test_support::{basic_manifest, project};
+use std::fs;
+
+const MINIMAL_LIB: &str = r#"
+#![feature(no_core)]
+#![feature(lang_items)]
+#![no_core]
+
+#[lang = "sized"]
+pub trait Sized {
+ // Empty.
+}
+#[lang = "copy"]
+pub trait Copy {
+ // Empty.
+}
+"#;
+
+const SIMPLE_SPEC: &str = r#"
+{
+ "llvm-target": "x86_64-unknown-none-gnu",
+ "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128",
+ "arch": "x86_64",
+ "target-endian": "little",
+ "target-pointer-width": "64",
+ "target-c-int-width": "32",
+ "os": "none",
+ "linker-flavor": "ld.lld",
+ "linker": "rust-lld",
+ "executables": true
+}
+"#;
+
+#[cargo_test(nightly, reason = "requires features no_core, lang_items")]
+fn custom_target_minimal() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ &"
+ __MINIMAL_LIB__
+
+ pub fn foo() -> u32 {
+ 42
+ }
+ "
+ .replace("__MINIMAL_LIB__", MINIMAL_LIB),
+ )
+ .file("custom-target.json", SIMPLE_SPEC)
+ .build();
+
+ p.cargo("build --lib --target custom-target.json -v").run();
+ p.cargo("build --lib --target src/../custom-target.json -v")
+ .run();
+
+ // Ensure that the correct style of flag is passed to --target with doc tests.
+ p.cargo("test --doc --target src/../custom-target.json -v -Zdoctest-xcompile")
+ .masquerade_as_nightly_cargo(&["doctest-xcompile", "no_core", "lang_items"])
+ .with_stderr_contains("[RUNNING] `rustdoc [..]--target [..]foo/custom-target.json[..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "requires features no_core, lang_items, auto_traits")]
+fn custom_target_dependency() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.0.1"
+ authors = ["author@example.com"]
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(no_core)]
+ #![feature(lang_items)]
+ #![feature(auto_traits)]
+ #![no_core]
+
+ extern crate bar;
+
+ pub fn foo() -> u32 {
+ bar::bar()
+ }
+
+ #[lang = "freeze"]
+ unsafe auto trait Freeze {}
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file(
+ "bar/src/lib.rs",
+ &"
+ __MINIMAL_LIB__
+
+ pub fn bar() -> u32 {
+ 42
+ }
+ "
+ .replace("__MINIMAL_LIB__", MINIMAL_LIB),
+ )
+ .file("custom-target.json", SIMPLE_SPEC)
+ .build();
+
+ p.cargo("build --lib --target custom-target.json -v").run();
+}
+
+#[cargo_test(nightly, reason = "requires features no_core, lang_items")]
+fn custom_bin_target() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ &"
+ #![no_main]
+ __MINIMAL_LIB__
+ "
+ .replace("__MINIMAL_LIB__", MINIMAL_LIB),
+ )
+ .file("custom-bin-target.json", SIMPLE_SPEC)
+ .build();
+
+ p.cargo("build --target custom-bin-target.json -v").run();
+}
+
+#[cargo_test(nightly, reason = "requires features no_core, lang_items")]
+fn changing_spec_rebuilds() {
+ // Changing the .json file will trigger a rebuild.
+ let p = project()
+ .file(
+ "src/lib.rs",
+ &"
+ __MINIMAL_LIB__
+
+ pub fn foo() -> u32 {
+ 42
+ }
+ "
+ .replace("__MINIMAL_LIB__", MINIMAL_LIB),
+ )
+ .file("custom-target.json", SIMPLE_SPEC)
+ .build();
+
+ p.cargo("build --lib --target custom-target.json -v").run();
+ p.cargo("build --lib --target custom-target.json -v")
+ .with_stderr(
+ "\
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ let spec_path = p.root().join("custom-target.json");
+ let spec = fs::read_to_string(&spec_path).unwrap();
+ // Some arbitrary change that I hope is safe.
+ let spec = spec.replace('{', "{\n\"vendor\": \"unknown\",\n");
+ fs::write(&spec_path, spec).unwrap();
+ p.cargo("build --lib --target custom-target.json -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 [..]
+[RUNNING] `rustc [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "requires features no_core, lang_items")]
+fn changing_spec_relearns_crate_types() {
+ // Changing the .json file will invalidate the cache of crate types.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [lib]
+ crate-type = ["cdylib"]
+ "#,
+ )
+ .file("src/lib.rs", MINIMAL_LIB)
+ .file("custom-target.json", SIMPLE_SPEC)
+ .build();
+
+ p.cargo("build --lib --target custom-target.json -v")
+ .with_status(101)
+ .with_stderr("error: cannot produce cdylib for `foo [..]")
+ .run();
+
+ // Enable dynamic linking.
+ let spec_path = p.root().join("custom-target.json");
+ let spec = fs::read_to_string(&spec_path).unwrap();
+ let spec = spec.replace('{', "{\n\"dynamic-linking\": true,\n");
+ fs::write(&spec_path, spec).unwrap();
+
+ p.cargo("build --lib --target custom-target.json -v")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[RUNNING] `rustc [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "requires features no_core, lang_items")]
+fn custom_target_ignores_filepath() {
+ // Changing the path of the .json file will not trigger a rebuild.
+ let p = project()
+ .file(
+ "src/lib.rs",
+ &"
+ __MINIMAL_LIB__
+
+ pub fn foo() -> u32 {
+ 42
+ }
+ "
+ .replace("__MINIMAL_LIB__", MINIMAL_LIB),
+ )
+ .file("b/custom-target.json", SIMPLE_SPEC)
+ .file("a/custom-target.json", SIMPLE_SPEC)
+ .build();
+
+ // Should build the library the first time.
+ p.cargo("build --lib --target a/custom-target.json")
+ .with_stderr(
+ "\
+[..]Compiling foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ // But not the second time, even though the path to the custom target is different.
+ p.cargo("build --lib --target b/custom-target.json")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/death.rs b/src/tools/cargo/tests/testsuite/death.rs
new file mode 100644
index 000000000..f0e182d01
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/death.rs
@@ -0,0 +1,101 @@
+//! Tests for ctrl-C handling.
+
+use std::fs;
+use std::io::{self, Read};
+use std::net::TcpListener;
+use std::process::{Child, Stdio};
+use std::thread;
+
+use cargo_test_support::{project, slow_cpu_multiplier};
+
+#[cargo_test]
+fn ctrl_c_kills_everyone() {
+ let listener = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = listener.local_addr().unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ &format!(
+ r#"
+ use std::net::TcpStream;
+ use std::io::Read;
+
+ fn main() {{
+ let mut socket = TcpStream::connect("{}").unwrap();
+ let _ = socket.read(&mut [0; 10]);
+ panic!("that read should never return");
+ }}
+ "#,
+ addr
+ ),
+ )
+ .build();
+
+ let mut cargo = p.cargo("check").build_command();
+ cargo
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1");
+ let mut child = cargo.spawn().unwrap();
+
+ let mut sock = listener.accept().unwrap().0;
+ ctrl_c(&mut child);
+
+ assert!(!child.wait().unwrap().success());
+ match sock.read(&mut [0; 10]) {
+ Ok(n) => assert_eq!(n, 0),
+ Err(e) => assert_eq!(e.kind(), io::ErrorKind::ConnectionReset),
+ }
+
+ // Ok so what we just did was spawn cargo that spawned a build script, then
+ // we killed cargo in hopes of it killing the build script as well. If all
+ // went well the build script is now dead. On Windows, however, this is
+ // enforced with job objects which means that it may actually be in the
+ // *process* of being torn down at this point.
+ //
+ // Now on Windows we can't completely remove a file until all handles to it
+ have been closed, including those that represent running processes. So if
+ we were to return here then there may still be an open reference to some
+ file in the build directory. What we actually want to do is wait for the
+ build script to *completely* exit. Take care of that by blowing away the
+ // build directory here, and panicking if we eventually spin too long
+ // without being able to.
+ for i in 0..10 {
+ match fs::remove_dir_all(&p.root().join("target")) {
+ Ok(()) => return,
+ Err(e) => println!("attempt {}: {}", i, e),
+ }
+ thread::sleep(slow_cpu_multiplier(100));
+ }
+
+ panic!(
+ "couldn't remove build directory after a few tries, seems like \
+ we won't be able to!"
+ );
+}
+
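+// On Unix, send SIGINT to the child's whole process group (hence the negative
+// pid), mimicking a terminal ctrl-C; on Windows just kill the child process.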
+#[cfg(unix)]
+pub fn ctrl_c(child: &mut Child) {
+ let r = unsafe { libc::kill(-(child.id() as i32), libc::SIGINT) };
+ if r < 0 {
+ panic!("failed to kill: {}", io::Error::last_os_error());
+ }
+}
+
+#[cfg(windows)]
+pub fn ctrl_c(child: &mut Child) {
+ child.kill().unwrap();
+}
diff --git a/src/tools/cargo/tests/testsuite/dep_info.rs b/src/tools/cargo/tests/testsuite/dep_info.rs
new file mode 100644
index 000000000..e9ea47792
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/dep_info.rs
@@ -0,0 +1,600 @@
+//! Tests for dep-info files. This includes the dep-info file Cargo creates in
+//! the output directory, and the ones stored in the fingerprint.
+
+use cargo_test_support::compare::assert_match_exact;
+use cargo_test_support::paths::{self, CargoPathExt};
+use cargo_test_support::registry::Package;
+use cargo_test_support::{
+ basic_bin_manifest, basic_manifest, main_file, project, rustc_host, Project,
+};
+use filetime::FileTime;
+use std::fs;
+use std::path::Path;
+use std::str;
+
+// Helper for testing dep-info files in the fingerprint dir.
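+// The blob is decoded as: a little-endian u32 entry count, then for each entry
+// a one-byte path-type tag followed by a length-prefixed (little-endian u32)
+// path, exactly as the reader functions below expect.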
+#[track_caller]
+fn assert_deps(project: &Project, fingerprint: &str, test_cb: impl Fn(&Path, &[(u8, &str)])) {
+ let mut files = project
+ .glob(fingerprint)
+ .map(|f| f.expect("unwrap glob result"))
+ // Filter out `.json` entries.
+ .filter(|f| f.extension().is_none());
+ let info_path = files
+ .next()
+ .unwrap_or_else(|| panic!("expected 1 dep-info file at {}, found 0", fingerprint));
+ assert!(files.next().is_none(), "expected only 1 dep-info file");
+ let dep_info = fs::read(&info_path).unwrap();
+ let dep_info = &mut &dep_info[..];
+ let deps = (0..read_usize(dep_info))
+ .map(|_| {
+ (
+ read_u8(dep_info),
+ str::from_utf8(read_bytes(dep_info)).unwrap(),
+ )
+ })
+ .collect::<Vec<_>>();
+ test_cb(&info_path, &deps);
+
+ fn read_usize(bytes: &mut &[u8]) -> usize {
+ let ret = &bytes[..4];
+ *bytes = &bytes[4..];
+
+ u32::from_le_bytes(ret.try_into().unwrap()) as usize
+ }
+
+ fn read_u8(bytes: &mut &[u8]) -> u8 {
+ let ret = bytes[0];
+ *bytes = &bytes[1..];
+ ret
+ }
+
+ fn read_bytes<'a>(bytes: &mut &'a [u8]) -> &'a [u8] {
+ let n = read_usize(bytes);
+ let ret = &bytes[..n];
+ *bytes = &bytes[n..];
+ ret
+ }
+}
+
+fn assert_deps_contains(project: &Project, fingerprint: &str, expected: &[(u8, &str)]) {
+ assert_deps(project, fingerprint, |info_path, entries| {
+ for (e_kind, e_path) in expected {
+ let pattern = glob::Pattern::new(e_path).unwrap();
+ let count = entries
+ .iter()
+ .filter(|(kind, path)| kind == e_kind && pattern.matches(path))
+ .count();
+ if count != 1 {
+ panic!(
+ "Expected 1 match of {} {} in {:?}, got {}:\n{:#?}",
+ e_kind, e_path, info_path, count, entries
+ );
+ }
+ }
+ })
+}
+
+#[cargo_test]
+fn build_dep_info() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("build").run();
+
+ let depinfo_bin_path = &p.bin("foo").with_extension("d");
+
+ assert!(depinfo_bin_path.is_file());
+
+ let depinfo = p.read_file(depinfo_bin_path.to_str().unwrap());
+
+ let bin_path = p.bin("foo");
+ let src_path = p.root().join("src").join("foo.rs");
+ if !depinfo.lines().any(|line| {
+ line.starts_with(&format!("{}:", bin_path.display()))
+ && line.contains(src_path.to_str().unwrap())
+ }) {
+ panic!(
+ "Could not find {:?}: {:?} in {:?}",
+ bin_path, src_path, depinfo_bin_path
+ );
+ }
+}
+
+#[cargo_test]
+fn build_dep_info_lib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex"
+ crate-type = ["lib"]
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "")
+ .build();
+
+ p.cargo("build --example=ex").run();
+ assert!(p.example_lib("ex", "lib").with_extension("d").is_file());
+}
+
+#[cargo_test]
+fn build_dep_info_rlib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex"
+ crate-type = ["rlib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "")
+ .build();
+
+ p.cargo("build --example=ex").run();
+ assert!(p.example_lib("ex", "rlib").with_extension("d").is_file());
+}
+
+#[cargo_test]
+fn build_dep_info_dylib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex"
+ crate-type = ["dylib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "")
+ .build();
+
+ p.cargo("build --example=ex").run();
+ assert!(p.example_lib("ex", "dylib").with_extension("d").is_file());
+}
+
+#[cargo_test]
+fn dep_path_inside_target_has_correct_path() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("a"))
+ .file("target/debug/blah", "")
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let x = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/target/debug/blah"));
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ let depinfo_path = &p.bin("a").with_extension("d");
+
+ assert!(depinfo_path.is_file(), "{:?}", depinfo_path);
+
+ let depinfo = p.read_file(depinfo_path.to_str().unwrap());
+
+ let bin_path = p.bin("a");
+ let target_debug_blah = Path::new("target").join("debug").join("blah");
+ if !depinfo.lines().any(|line| {
+ line.starts_with(&format!("{}:", bin_path.display()))
+ && line.contains(target_debug_blah.to_str().unwrap())
+ }) {
+ panic!(
+ "Could not find {:?}: {:?} in {:?}",
+ bin_path, target_debug_blah, depinfo_path
+ );
+ }
+}
+
+#[cargo_test]
+fn no_rewrite_if_no_change() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("build").run();
+ let dep_info = p.root().join("target/debug/libfoo.d");
+ let metadata1 = dep_info.metadata().unwrap();
+ p.cargo("build").run();
+ let metadata2 = dep_info.metadata().unwrap();
+
+ assert_eq!(
+ FileTime::from_last_modification_time(&metadata1),
+ FileTime::from_last_modification_time(&metadata2),
+ );
+}
+
+#[cargo_test(nightly, reason = "-Z binary-dep-depinfo is unstable")]
+fn relative_depinfo_paths_ws() {
+ // Test relative dep-info paths in a workspace with --target with
+ // proc-macros and other dependency kinds.
+ Package::new("regdep", "0.1.0")
+ .file("src/lib.rs", "pub fn f() {}")
+ .publish();
+ Package::new("pmdep", "0.1.0")
+ .file("src/lib.rs", "pub fn f() {}")
+ .publish();
+ Package::new("bdep", "0.1.0")
+ .file("src/lib.rs", "pub fn f() {}")
+ .publish();
+
+ let p = project()
+ /*********** Workspace ***********/
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo"]
+ "#,
+ )
+ /*********** Main Project ***********/
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ pm = {path = "../pm"}
+ bar = {path = "../bar"}
+ regdep = "0.1"
+
+ [build-dependencies]
+ bdep = "0.1"
+ bar = {path = "../bar"}
+ "#,
+ )
+ .file(
+ "foo/src/main.rs",
+ r#"
+ pm::noop!{}
+
+ fn main() {
+ bar::f();
+ regdep::f();
+ }
+ "#,
+ )
+ .file("foo/build.rs", "fn main() { bdep::f(); }")
+ /*********** Proc Macro ***********/
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+ edition = "2018"
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ pmdep = "0.1"
+ "#,
+ )
+ .file(
+ "pm/src/lib.rs",
+ r#"
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[proc_macro]
+ pub fn noop(_item: TokenStream) -> TokenStream {
+ pmdep::f();
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ /*********** Path Dependency `bar` ***********/
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn f() {}")
+ .build();
+
+ let host = rustc_host();
+ p.cargo("build -Z binary-dep-depinfo --target")
+ .arg(&host)
+ .masquerade_as_nightly_cargo(&["binary-dep-depinfo"])
+ .with_stderr_contains("[COMPILING] foo [..]")
+ .run();
+
+ assert_deps_contains(
+ &p,
+ "target/debug/.fingerprint/pm-*/dep-lib-pm",
+ &[(0, "src/lib.rs"), (1, "debug/deps/libpmdep-*.rlib")],
+ );
+
+ assert_deps_contains(
+ &p,
+ &format!("target/{}/debug/.fingerprint/foo-*/dep-bin-foo", host),
+ &[
+ (0, "src/main.rs"),
+ (
+ 1,
+ &format!(
+ "debug/deps/{}pm-*.{}",
+ paths::get_lib_prefix("proc-macro"),
+ paths::get_lib_extension("proc-macro")
+ ),
+ ),
+ (1, &format!("{}/debug/deps/libbar-*.rlib", host)),
+ (1, &format!("{}/debug/deps/libregdep-*.rlib", host)),
+ ],
+ );
+
+ assert_deps_contains(
+ &p,
+ "target/debug/.fingerprint/foo-*/dep-build-script-build-script-build",
+ &[(0, "build.rs"), (1, "debug/deps/libbdep-*.rlib")],
+ );
+
+ // Make sure it stays fresh.
+ p.cargo("build -Z binary-dep-depinfo --target")
+ .arg(&host)
+ .masquerade_as_nightly_cargo(&["binary-dep-depinfo"])
+ .with_stderr("[FINISHED] dev [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Z binary-dep-depinfo is unstable")]
+fn relative_depinfo_paths_no_ws() {
+ // Test relative dep-info paths without a workspace with proc-macros and
+ // other dependency kinds.
+ Package::new("regdep", "0.1.0")
+ .file("src/lib.rs", "pub fn f() {}")
+ .publish();
+ Package::new("pmdep", "0.1.0")
+ .file("src/lib.rs", "pub fn f() {}")
+ .publish();
+ Package::new("bdep", "0.1.0")
+ .file("src/lib.rs", "pub fn f() {}")
+ .publish();
+
+ let p = project()
+ /*********** Main Project ***********/
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ pm = {path = "pm"}
+ bar = {path = "bar"}
+ regdep = "0.1"
+
+ [build-dependencies]
+ bdep = "0.1"
+ bar = {path = "bar"}
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ pm::noop!{}
+
+ fn main() {
+ bar::f();
+ regdep::f();
+ }
+ "#,
+ )
+ .file("build.rs", "fn main() { bdep::f(); }")
+ /*********** Proc Macro ***********/
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+ edition = "2018"
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ pmdep = "0.1"
+ "#,
+ )
+ .file(
+ "pm/src/lib.rs",
+ r#"
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[proc_macro]
+ pub fn noop(_item: TokenStream) -> TokenStream {
+ pmdep::f();
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ /*********** Path Dependency `bar` ***********/
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn f() {}")
+ .build();
+
+ p.cargo("build -Z binary-dep-depinfo")
+ .masquerade_as_nightly_cargo(&["binary-dep-depinfo"])
+ .with_stderr_contains("[COMPILING] foo [..]")
+ .run();
+
+ assert_deps_contains(
+ &p,
+ "target/debug/.fingerprint/pm-*/dep-lib-pm",
+ &[(0, "src/lib.rs"), (1, "debug/deps/libpmdep-*.rlib")],
+ );
+
+ assert_deps_contains(
+ &p,
+ "target/debug/.fingerprint/foo-*/dep-bin-foo",
+ &[
+ (0, "src/main.rs"),
+ (
+ 1,
+ &format!(
+ "debug/deps/{}pm-*.{}",
+ paths::get_lib_prefix("proc-macro"),
+ paths::get_lib_extension("proc-macro")
+ ),
+ ),
+ (1, "debug/deps/libbar-*.rlib"),
+ (1, "debug/deps/libregdep-*.rlib"),
+ ],
+ );
+
+ assert_deps_contains(
+ &p,
+ "target/debug/.fingerprint/foo-*/dep-build-script-build-script-build",
+ &[(0, "build.rs"), (1, "debug/deps/libbdep-*.rlib")],
+ );
+
+ // Make sure it stays fresh.
+ p.cargo("build -Z binary-dep-depinfo")
+ .masquerade_as_nightly_cargo(&["binary-dep-depinfo"])
+ .with_stderr("[FINISHED] dev [..]")
+ .run();
+}
+
+#[cargo_test]
+fn reg_dep_source_not_tracked() {
+ // Make sure source files in the dep-info file are not tracked for registry dependencies.
+ Package::new("regdep", "0.1.0")
+ .file("src/lib.rs", "pub fn f() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ regdep = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "pub fn f() { regdep::f(); }")
+ .build();
+
+ p.cargo("check").run();
+
+ assert_deps(
+ &p,
+ "target/debug/.fingerprint/regdep-*/dep-lib-regdep",
+ |info_path, entries| {
+ for (kind, path) in entries {
+ if *kind == 1 {
+ panic!(
+ "Did not expect package root relative path type: {:?} in {:?}",
+ path, info_path
+ );
+ }
+ }
+ },
+ );
+}
+
+#[cargo_test(nightly, reason = "-Z binary-dep-depinfo is unstable")]
+fn canonical_path() {
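+ // Skip on platforms/filesystems where symlinks cannot be created.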
+ if !cargo_test_support::symlink_supported() {
+ return;
+ }
+ Package::new("regdep", "0.1.0")
+ .file("src/lib.rs", "pub fn f() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ regdep = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "pub fn f() { regdep::f(); }")
+ .build();
+
+ let real = p.root().join("real_target");
+ real.mkdir_p();
+ p.symlink(real, "target");
+
+ p.cargo("check -Z binary-dep-depinfo")
+ .masquerade_as_nightly_cargo(&["binary-dep-depinfo"])
+ .run();
+
+ assert_deps_contains(
+ &p,
+ "target/debug/.fingerprint/foo-*/dep-lib-foo",
+ &[(0, "src/lib.rs"), (1, "debug/deps/libregdep-*.rmeta")],
+ );
+}
+
+#[cargo_test]
+fn non_local_build_script() {
+ // Non-local build script information is not included.
+ Package::new("bar", "1.0.0")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").run();
+ let contents = p.read_file("target/debug/foo.d");
+ assert_match_exact(
+ "[ROOT]/foo/target/debug/foo[EXE]: [ROOT]/foo/src/main.rs",
+ &contents,
+ );
+}
diff --git a/src/tools/cargo/tests/testsuite/direct_minimal_versions.rs b/src/tools/cargo/tests/testsuite/direct_minimal_versions.rs
new file mode 100644
index 000000000..0e62d6ce0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/direct_minimal_versions.rs
@@ -0,0 +1,236 @@
+//! Tests for minimal-version resolution.
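+//!
+//! With `-Zdirect-minimal-versions`, direct dependencies resolve to their
+//! minimal matching versions while transitive dependencies still resolve
+//! maximally (see the `indirect` test below).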
+//!
+//! Note: Some tests are located in the resolver-tests package.
+
+use cargo_test_support::project;
+use cargo_test_support::registry::Package;
+
+#[cargo_test]
+fn simple() {
+ Package::new("dep", "1.0.0").publish();
+ Package::new("dep", "1.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [dependencies]
+ dep = "1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("generate-lockfile -Zdirect-minimal-versions")
+ .masquerade_as_nightly_cargo(&["direct-minimal-versions"])
+ .run();
+
+ let lock = p.read_lockfile();
+
+ assert!(
+ lock.contains("1.0.0"),
+ "dep minimal version must be present"
+ );
+ assert!(
+ !lock.contains("1.1.0"),
+ "dep maximimal version cannot be present"
+ );
+}
+
+#[cargo_test]
+fn mixed_dependencies() {
+ Package::new("dep", "1.0.0").publish();
+ Package::new("dep", "1.1.0").publish();
+ Package::new("dep", "1.2.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [dependencies]
+ dep = "1.0"
+
+ [dev-dependencies]
+ dep = "1.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("generate-lockfile -Zdirect-minimal-versions")
+ .masquerade_as_nightly_cargo(&["direct-minimal-versions"])
+ .with_status(101)
+ .with_stderr(
+ r#"[UPDATING] [..]
+[ERROR] failed to select a version for `dep`.
+ ... required by package `foo v0.0.1 ([CWD])`
+versions that meet the requirements `^1.1` are: 1.1.0
+
+all possible versions conflict with previously selected packages.
+
+ previously selected package `dep v1.0.0`
+ ... which satisfies dependency `dep = "^1.0"` of package `foo v0.0.1 ([CWD])`
+
+failed to select a version for `dep` which could resolve this conflict
+"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn yanked() {
+ Package::new("dep", "1.0.0").yanked(true).publish();
+ Package::new("dep", "1.1.0").publish();
+ Package::new("dep", "1.2.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [dependencies]
+ dep = "1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("generate-lockfile -Zdirect-minimal-versions")
+ .masquerade_as_nightly_cargo(&["direct-minimal-versions"])
+ .run();
+
+ let lock = p.read_lockfile();
+
+ assert!(
+ lock.contains("1.1.0"),
+ "dep minimal version must be present"
+ );
+ assert!(
+ !lock.contains("1.0.0"),
+ "yanked minimal version must be skipped"
+ );
+ assert!(
+ !lock.contains("1.2.0"),
+ "dep maximimal version cannot be present"
+ );
+}
+
+#[cargo_test]
+fn indirect() {
+ Package::new("indirect", "2.0.0").publish();
+ Package::new("indirect", "2.1.0").publish();
+ Package::new("indirect", "2.2.0").publish();
+ Package::new("direct", "1.0.0")
+ .dep("indirect", "2.1")
+ .publish();
+ Package::new("direct", "1.1.0")
+ .dep("indirect", "2.1")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [dependencies]
+ direct = "1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("generate-lockfile -Zdirect-minimal-versions")
+ .masquerade_as_nightly_cargo(&["direct-minimal-versions"])
+ .run();
+
+ let lock = p.read_lockfile();
+
+ assert!(
+ lock.contains("1.0.0"),
+ "direct minimal version must be present"
+ );
+ assert!(
+ !lock.contains("1.1.0"),
+ "direct maximimal version cannot be present"
+ );
+ assert!(
+ !lock.contains("2.0.0"),
+ "indirect minimal version cannot be present"
+ );
+ assert!(
+ !lock.contains("2.1.0"),
+ "indirect minimal version cannot be present"
+ );
+ assert!(
+ lock.contains("2.2.0"),
+ "indirect maximal version must be present"
+ );
+}
+
+#[cargo_test]
+fn indirect_conflict() {
+ Package::new("indirect", "2.0.0").publish();
+ Package::new("indirect", "2.1.0").publish();
+ Package::new("indirect", "2.2.0").publish();
+ Package::new("direct", "1.0.0")
+ .dep("indirect", "2.1")
+ .publish();
+ Package::new("direct", "1.1.0")
+ .dep("indirect", "2.1")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [dependencies]
+ direct = "1.0"
+ indirect = "2.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("generate-lockfile -Zdirect-minimal-versions")
+ .masquerade_as_nightly_cargo(&["direct-minimal-versions"])
+ .with_status(101)
+ .with_stderr(
+ r#"[UPDATING] [..]
+[ERROR] failed to select a version for `indirect`.
+ ... required by package `direct v1.0.0`
+ ... which satisfies dependency `direct = "^1.0"` of package `foo v0.0.1 ([CWD])`
+versions that meet the requirements `^2.1` are: 2.2.0, 2.1.0
+
+all possible versions conflict with previously selected packages.
+
+ previously selected package `indirect v2.0.0`
+ ... which satisfies dependency `indirect = "^2.0"` of package `foo v0.0.1 ([CWD])`
+
+failed to select a version for `indirect` which could resolve this conflict
+"#,
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/directory.rs b/src/tools/cargo/tests/testsuite/directory.rs
new file mode 100644
index 000000000..0e28de039
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/directory.rs
@@ -0,0 +1,774 @@
+//! Tests for directory sources.
+
+use std::collections::HashMap;
+use std::fs;
+use std::str;
+
+use serde::Serialize;
+
+use cargo_test_support::cargo_process;
+use cargo_test_support::git;
+use cargo_test_support::paths;
+use cargo_test_support::registry::{cksum, Package};
+use cargo_test_support::{basic_manifest, project, t, ProjectBuilder};
+
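+/// Sets up source replacement so that crates.io is served from the local `index/` directory.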
+fn setup() {
+ let root = paths::root();
+ t!(fs::create_dir(&root.join(".cargo")));
+ t!(fs::write(
+ root.join(".cargo/config"),
+ r#"
+ [source.crates-io]
+ replace-with = 'my-awesome-local-registry'
+
+ [source.my-awesome-local-registry]
+ directory = 'index'
+ "#
+ ));
+}
+
+struct VendorPackage {
+ p: Option<ProjectBuilder>,
+ cksum: Checksum,
+}
+
+#[derive(Serialize)]
+struct Checksum {
+ package: Option<String>,
+ files: HashMap<String, String>,
+}
+
+impl VendorPackage {
+ fn new(name: &str) -> VendorPackage {
+ VendorPackage {
+ p: Some(project().at(&format!("index/{}", name))),
+ cksum: Checksum {
+ package: Some(String::new()),
+ files: HashMap::new(),
+ },
+ }
+ }
+
+ fn file(&mut self, name: &str, contents: &str) -> &mut VendorPackage {
+ self.p = Some(self.p.take().unwrap().file(name, contents));
+ self.cksum
+ .files
+ .insert(name.to_string(), cksum(contents.as_bytes()));
+ self
+ }
+
+ fn disable_checksum(&mut self) -> &mut VendorPackage {
+ self.cksum.package = None;
+ self
+ }
+
+ fn no_manifest(mut self) -> Self {
+ self.p = self.p.map(|pb| pb.no_manifest());
+ self
+ }
+
+ fn build(&mut self) {
+ let p = self.p.take().unwrap();
+ let json = serde_json::to_string(&self.cksum).unwrap();
+ let p = p.file(".cargo-checksum.json", &json);
+ let _ = p.build();
+ }
+}
+
+#[cargo_test]
+fn simple() {
+ setup();
+
+ VendorPackage::new("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.1.0 ([CWD])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn simple_install() {
+ setup();
+
+ VendorPackage::new("foo")
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ VendorPackage::new("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = "0.0.1"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate foo; pub fn main() { foo::foo(); }",
+ )
+ .build();
+
+ cargo_process("install bar")
+ .with_stderr(
+ "\
+[INSTALLING] bar v0.1.0
+[COMPILING] foo v0.0.1
+[COMPILING] bar v0.1.0
+[FINISHED] release [optimized] target(s) in [..]s
+[INSTALLING] [..]bar[..]
+[INSTALLED] package `bar v0.1.0` (executable `bar[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn simple_install_fail() {
+ setup();
+
+ VendorPackage::new("foo")
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ VendorPackage::new("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = "0.1.0"
+ baz = "9.8.7"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate foo; pub fn main() { foo::foo(); }",
+ )
+ .build();
+
+ cargo_process("install bar")
+ .with_status(101)
+ .with_stderr(
+ " Installing bar v0.1.0
+error: failed to compile `bar v0.1.0`, intermediate artifacts can be found at `[..]`
+
+Caused by:
+ no matching package found
+ searched package name: `baz`
+ perhaps you meant: bar or foo
+ location searched: registry `crates-io`
+ required by package `bar v0.1.0`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn install_without_feature_dep() {
+ setup();
+
+ VendorPackage::new("foo")
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ VendorPackage::new("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = "0.0.1"
+ baz = { version = "9.8.7", optional = true }
+
+ [features]
+ wantbaz = ["baz"]
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate foo; pub fn main() { foo::foo(); }",
+ )
+ .build();
+
+ cargo_process("install bar")
+ .with_stderr(
+ "\
+[INSTALLING] bar v0.1.0
+[COMPILING] foo v0.0.1
+[COMPILING] bar v0.1.0
+[FINISHED] release [optimized] target(s) in [..]s
+[INSTALLING] [..]bar[..]
+[INSTALLED] package `bar v0.1.0` (executable `bar[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn not_there() {
+ setup();
+
+ let _ = project().at("index").build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: no matching package named `bar` found
+location searched: [..]
+required by package `foo v0.1.0 ([..])`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn multiple() {
+ setup();
+
+ VendorPackage::new("bar-0.1.0")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .file(".cargo-checksum", "")
+ .build();
+
+ VendorPackage::new("bar-0.2.0")
+ .file("Cargo.toml", &basic_manifest("bar", "0.2.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .file(".cargo-checksum", "")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.1.0 ([CWD])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn crates_io_then_directory() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ let cksum = Package::new("bar", "0.1.0")
+ .file("src/lib.rs", "pub fn bar() -> u32 { 0 }")
+ .publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 ([..])
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.1.0 ([CWD])
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ setup();
+
+ let mut v = VendorPackage::new("bar");
+ v.file("Cargo.toml", &basic_manifest("bar", "0.1.0"));
+ v.file("src/lib.rs", "pub fn bar() -> u32 { 1 }");
+ v.cksum.package = Some(cksum);
+ v.build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.1.0 ([CWD])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn crates_io_then_bad_checksum() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("bar", "0.1.0").publish();
+
+ p.cargo("check").run();
+ setup();
+
+ VendorPackage::new("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: checksum for `bar v0.1.0` changed between lock files
+
+this could be indicative of a few possible errors:
+
+ * the lock file is corrupt
+ * a replacement source in use (e.g., a mirror) returned a different checksum
+ * the source itself may be corrupt in one way or another
+
+unable to verify that `bar v0.1.0` is the same as when the lockfile was generated
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_file_checksum() {
+ setup();
+
+ VendorPackage::new("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ t!(fs::write(
+ paths::root().join("index/bar/src/lib.rs"),
+ "fn bar() -> u32 { 0 }"
+ ));
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: the listed checksum of `[..]lib.rs` has changed:
+expected: [..]
+actual: [..]
+
+directory sources are not intended to be edited, if modifications are \
+required then it is recommended that `[patch]` is used with a forked copy of \
+the source
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn only_dot_files_ok() {
+ setup();
+
+ VendorPackage::new("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+ VendorPackage::new("foo")
+ .no_manifest()
+ .file(".bar", "")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn random_files_ok() {
+ setup();
+
+ VendorPackage::new("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+ VendorPackage::new("foo")
+ .no_manifest()
+ .file("bar", "")
+ .file("../test", "")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn git_lock_file_doesnt_change() {
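+ // Replacing the git source with a directory source must not change Cargo.lock.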
+ let git = git::new("git", |p| {
+ p.file("Cargo.toml", &basic_manifest("git", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+
+ VendorPackage::new("git")
+ .file("Cargo.toml", &basic_manifest("git", "0.5.0"))
+ .file("src/lib.rs", "")
+ .disable_checksum()
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ git = {{ git = '{0}' }}
+ "#,
+ git.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ let lock1 = p.read_lockfile();
+
+ let root = paths::root();
+ t!(fs::create_dir(&root.join(".cargo")));
+ t!(fs::write(
+ root.join(".cargo/config"),
+ format!(
+ r#"
+ [source.my-git-repo]
+ git = '{}'
+ replace-with = 'my-awesome-local-registry'
+
+ [source.my-awesome-local-registry]
+ directory = 'index'
+ "#,
+ git.url()
+ )
+ ));
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] [..]
+[CHECKING] [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ let lock2 = p.read_lockfile();
+ assert_eq!(lock1, lock2, "lock files changed");
+}
+
+#[cargo_test]
+fn git_override_requires_lockfile() {
+ VendorPackage::new("git")
+ .file("Cargo.toml", &basic_manifest("git", "0.5.0"))
+ .file("src/lib.rs", "")
+ .disable_checksum()
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ git = { git = 'https://example.com/' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let root = paths::root();
+ t!(fs::create_dir(&root.join(".cargo")));
+ t!(fs::write(
+ root.join(".cargo/config"),
+ r#"
+ [source.my-git-repo]
+ git = 'https://example.com/'
+ replace-with = 'my-awesome-local-registry'
+
+ [source.my-awesome-local-registry]
+ directory = 'index'
+ "#
+ ));
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to get `git` as a dependency of package `foo v0.0.1 ([..])`
+
+Caused by:
+ failed to load source for dependency `git`
+
+Caused by:
+ Unable to update [..]
+
+Caused by:
+ the source my-git-repo requires a lock file to be present first before it can be
+ used against vendored source code
+
+ remove the source replacement configuration, generate a lock file, and then
+ restore the source replacement configuration to continue the build
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn workspace_different_locations() {
+ let p = project()
+ .no_manifest()
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+
+ [dependencies]
+ baz = "*"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .file("foo/vendor/baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("foo/vendor/baz/src/lib.rs", "")
+ .file("foo/vendor/baz/.cargo-checksum.json", "{\"files\":{}}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = 'bar'
+ version = '0.1.0'
+
+ [dependencies]
+ baz = "*"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ target-dir = './target'
+
+ [source.crates-io]
+ replace-with = 'my-awesome-local-registry'
+
+ [source.my-awesome-local-registry]
+ directory = 'foo/vendor'
+ "#,
+ )
+ .build();
+
+ p.cargo("check").cwd("foo").run();
+ p.cargo("check")
+ .cwd("bar")
+ .with_stderr(
+ "\
+[CHECKING] bar [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn version_missing() {
+ setup();
+
+ VendorPackage::new("foo")
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ VendorPackage::new("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = "2"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install bar")
+ .with_stderr(
+ "\
+[INSTALLING] bar v0.1.0
+error: failed to compile [..]
+
+Caused by:
+ failed to select a version for the requirement `foo = \"^2\"`
+ candidate versions found which didn't match: 0.0.1
+ location searched: directory source `[..] (which is replacing registry `[..]`)
+ required by package `bar v0.1.0`
+ perhaps a crate was updated and forgotten to be re-vendored?
+",
+ )
+ .with_status(101)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/doc.rs b/src/tools/cargo/tests/testsuite/doc.rs
new file mode 100644
index 000000000..739bcf376
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/doc.rs
@@ -0,0 +1,2503 @@
+//! Tests for the `cargo doc` command.
+
+use cargo::core::compiler::RustDocFingerprint;
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_lib_manifest, basic_manifest, git, project};
+use cargo_test_support::{rustc_host, symlink_supported, tools};
+use std::fs;
+use std::str;
+
+#[cargo_test]
+fn simple() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ p.cargo("doc")
+ .with_stderr(
+ "\
+[..] foo v0.0.1 ([CWD])
+[..] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+}
+
+#[cargo_test]
+fn doc_no_libs() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "foo"
+ doc = false
+ "#,
+ )
+ .file("src/main.rs", "bad code")
+ .build();
+
+ p.cargo("doc").run();
+}
+
+#[cargo_test]
+fn doc_twice() {
+ let p = project().file("src/lib.rs", "pub fn foo() {}").build();
+
+ p.cargo("doc")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("doc").with_stdout("").run();
+}
+
+#[cargo_test]
+fn doc_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar; pub fn foo() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("doc")
+ .with_stderr(
+ "\
+[..] bar v0.0.1 ([CWD]/bar)
+[..] bar v0.0.1 ([CWD]/bar)
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+ assert!(p.root().join("target/doc/bar/index.html").is_file());
+
+ // Verify that it only emits rmeta for the dependency.
+ assert_eq!(p.glob("target/debug/**/*.rlib").count(), 0);
+ assert_eq!(p.glob("target/debug/deps/libbar-*.rmeta").count(), 1);
+
+ p.cargo("doc")
+ .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint")
+ .with_stdout("")
+ .run();
+
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+ assert!(p.root().join("target/doc/bar/index.html").is_file());
+}
+
+#[cargo_test]
+fn doc_no_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar; pub fn foo() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("doc --no-deps")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.0.1 ([CWD]/bar)
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+ assert!(!p.root().join("target/doc/bar/index.html").is_file());
+}
+
+#[cargo_test]
+fn doc_only_bin() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; pub fn foo() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("doc -v").run();
+
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/bar/index.html").is_file());
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+}
+
+#[cargo_test]
+fn doc_multiple_targets_same_name_lib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "bar"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [lib]
+ name = "foo_lib"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ [lib]
+ name = "foo_lib"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc --workspace")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: document output filename collision
+The lib `foo_lib` in package `foo v0.1.0 ([ROOT]/foo/foo)` has the same name as \
+the lib `foo_lib` in package `bar v0.1.0 ([ROOT]/foo/bar)`.
+Only one may be documented at once since they output to the same path.
+Consider documenting only one, renaming one, or marking one with `doc = false` in Cargo.toml.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_multiple_targets_same_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "bar"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [[bin]]
+ name = "foo_lib"
+ path = "src/foo_lib.rs"
+ "#,
+ )
+ .file("foo/src/foo_lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ [lib]
+ name = "foo_lib"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc --workspace")
+ .with_stderr_unordered(
+ "\
+warning: output filename collision.
+The bin target `foo_lib` in package `foo v0.1.0 ([ROOT]/foo/foo)` \
+has the same output filename as the lib target `foo_lib` in package \
+`bar v0.1.0 ([ROOT]/foo/bar)`.
+Colliding filename is: [ROOT]/foo/target/doc/foo_lib/index.html
+The targets should have unique names.
+This is a known bug where multiple crates with the same name use
+the same path; see <https://github.com/rust-lang/cargo/issues/6313>.
+[DOCUMENTING] bar v0.1.0 ([ROOT]/foo/bar)
+[DOCUMENTING] foo v0.1.0 ([ROOT]/foo/foo)
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_multiple_targets_same_name_bin() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "bar"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("foo/src/bin/foo-cli.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ "#,
+ )
+ .file("bar/src/bin/foo-cli.rs", "")
+ .build();
+
+ p.cargo("doc --workspace")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: document output filename collision
+The bin `foo-cli` in package `foo v0.1.0 ([ROOT]/foo/foo)` has the same name as \
+the bin `foo-cli` in package `bar v0.1.0 ([ROOT]/foo/bar)`.
+Only one may be documented at once since they output to the same path.
+Consider documenting only one, renaming one, or marking one with `doc = false` in Cargo.toml.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_multiple_targets_same_name_undoced() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "bar"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [[bin]]
+ name = "foo-cli"
+ "#,
+ )
+ .file("foo/src/foo-cli.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ [[bin]]
+ name = "foo-cli"
+ doc = false
+ "#,
+ )
+ .file("bar/src/foo-cli.rs", "")
+ .build();
+
+ p.cargo("doc --workspace").run();
+}
+
+#[cargo_test]
+fn doc_lib_bin_same_name_documents_lib() {
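+ // When a lib and a bin share the same name, plain `cargo doc` documents the lib.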
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ //! Binary documentation
+ extern crate foo;
+ fn main() {
+ foo::foo();
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ //! Library documentation
+ pub fn foo() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("doc")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ let doc_html = p.read_file("target/doc/foo/index.html");
+ assert!(doc_html.contains("Library"));
+ assert!(!doc_html.contains("Binary"));
+}
+
+#[cargo_test]
+fn doc_lib_bin_same_name_documents_lib_when_requested() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ //! Binary documentation
+ extern crate foo;
+ fn main() {
+ foo::foo();
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ //! Library documentation
+ pub fn foo() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("doc --lib")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ let doc_html = p.read_file("target/doc/foo/index.html");
+ assert!(doc_html.contains("Library"));
+ assert!(!doc_html.contains("Binary"));
+}
+
+#[cargo_test]
+fn doc_lib_bin_same_name_documents_named_bin_when_requested() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ //! Binary documentation
+ extern crate foo;
+ fn main() {
+ foo::foo();
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ //! Library documentation
+ pub fn foo() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("doc --bin foo")
+ // The checking/documenting lines are sometimes swapped since they run
+ // concurrently.
+ .with_stderr_unordered(
+ "\
+warning: output filename collision.
+The bin target `foo` in package `foo v0.0.1 ([ROOT]/foo)` \
+has the same output filename as the lib target `foo` in package `foo v0.0.1 ([ROOT]/foo)`.
+Colliding filename is: [ROOT]/foo/target/doc/foo/index.html
+The targets should have unique names.
+This is a known bug where multiple crates with the same name use
+the same path; see <https://github.com/rust-lang/cargo/issues/6313>.
+[CHECKING] foo v0.0.1 ([CWD])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ let doc_html = p.read_file("target/doc/foo/index.html");
+ assert!(!doc_html.contains("Library"));
+ assert!(doc_html.contains("Binary"));
+}
+
+#[cargo_test]
+fn doc_lib_bin_same_name_documents_bins_when_requested() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ //! Binary documentation
+ extern crate foo;
+ fn main() {
+ foo::foo();
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ //! Library documentation
+ pub fn foo() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("doc --bins")
+ // The checking/documenting lines are sometimes swapped since they run
+ // concurrently.
+ .with_stderr_unordered(
+ "\
+warning: output filename collision.
+The bin target `foo` in package `foo v0.0.1 ([ROOT]/foo)` \
+has the same output filename as the lib target `foo` in package `foo v0.0.1 ([ROOT]/foo)`.
+Colliding filename is: [ROOT]/foo/target/doc/foo/index.html
+The targets should have unique names.
+This is a known bug where multiple crates with the same name use
+the same path; see <https://github.com/rust-lang/cargo/issues/6313>.
+[CHECKING] foo v0.0.1 ([CWD])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ let doc_html = p.read_file("target/doc/foo/index.html");
+ assert!(!doc_html.contains("Library"));
+ assert!(doc_html.contains("Binary"));
+}
+
+#[cargo_test]
+fn doc_lib_bin_example_same_name_documents_named_example_when_requested() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ //! Binary documentation
+ extern crate foo;
+ fn main() {
+ foo::foo();
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ //! Library documentation
+ pub fn foo() {}
+ "#,
+ )
+ .file(
+ "examples/ex1.rs",
+ r#"
+ //! Example1 documentation
+ pub fn x() { f(); }
+ "#,
+ )
+ .build();
+
+ p.cargo("doc --example ex1")
+ // The checking/documenting lines are sometimes swapped since they run
+ // concurrently.
+ .with_stderr_unordered(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+
+ let doc_html = p.read_file("target/doc/ex1/index.html");
+ assert!(!doc_html.contains("Library"));
+ assert!(!doc_html.contains("Binary"));
+ assert!(doc_html.contains("Example1"));
+}
+
+#[cargo_test]
+fn doc_lib_bin_example_same_name_documents_examples_when_requested() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ //! Binary documentation
+ extern crate foo;
+ fn main() {
+ foo::foo();
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ //! Library documentation
+ pub fn foo() {}
+ "#,
+ )
+ .file(
+ "examples/ex1.rs",
+ r#"
+ //! Example1 documentation
+ pub fn example1() { f(); }
+ "#,
+ )
+ .file(
+ "examples/ex2.rs",
+ r#"
+ //! Example2 documentation
+ pub fn example2() { f(); }
+ "#,
+ )
+ .build();
+
+ p.cargo("doc --examples")
+ // The checking/documenting lines are sometimes swapped since they run
+ // concurrently.
+ .with_stderr_unordered(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+
+ let example_doc_html_1 = p.read_file("target/doc/ex1/index.html");
+ let example_doc_html_2 = p.read_file("target/doc/ex2/index.html");
+
+ assert!(!example_doc_html_1.contains("Library"));
+ assert!(!example_doc_html_1.contains("Binary"));
+
+ assert!(!example_doc_html_2.contains("Library"));
+ assert!(!example_doc_html_2.contains("Binary"));
+
+ assert!(example_doc_html_1.contains("Example1"));
+ assert!(example_doc_html_2.contains("Example2"));
+}
+
+#[cargo_test]
+fn doc_dash_p() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "extern crate a;")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.b]
+ path = "../b"
+ "#,
+ )
+ .file("a/src/lib.rs", "extern crate b;")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc -p a")
+ .with_stderr(
+ "\
+[..] b v0.0.1 ([CWD]/b)
+[..] b v0.0.1 ([CWD]/b)
+[DOCUMENTING] a v0.0.1 ([CWD]/a)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_all_exclude() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }")
+ .build();
+
+ p.cargo("doc --workspace --exclude baz")
+ .with_stderr_does_not_contain("[DOCUMENTING] baz v0.1.0 [..]")
+ .with_stderr(
+ "\
+[DOCUMENTING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_all_exclude_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }")
+ .build();
+
+ p.cargo("doc --workspace --exclude '*z'")
+ .with_stderr_does_not_contain("[DOCUMENTING] baz v0.1.0 [..]")
+ .with_stderr(
+ "\
+[DOCUMENTING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_same_name() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/main.rs", "fn main() {}")
+ .file("examples/main.rs", "fn main() {}")
+ .file("tests/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("doc").run();
+}
+
+#[cargo_test(nightly, reason = "no_core, lang_items requires nightly")]
+fn doc_target() {
+ const TARGET: &str = "arm-unknown-linux-gnueabihf";
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(no_core, lang_items)]
+ #![no_core]
+
+ #[lang = "sized"]
+ trait Sized {}
+
+ extern {
+ pub static A: u32;
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("doc --verbose --target").arg(TARGET).run();
+ assert!(p.root().join(&format!("target/{}/doc", TARGET)).is_dir());
+ assert!(p
+ .root()
+ .join(&format!("target/{}/doc/foo/index.html", TARGET))
+ .is_file());
+}
+
+#[cargo_test]
+fn target_specific_not_documented() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [target.foo.dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "not rust")
+ .build();
+
+ p.cargo("doc").run();
+}
+
+#[cargo_test]
+fn output_not_captured() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file(
+ "a/src/lib.rs",
+ "
+ /// ```
+ /// `
+ /// ```
+ pub fn foo() {}
+ ",
+ )
+ .build();
+
+ p.cargo("doc")
+ .with_stderr_contains("[..]unknown start of token: `")
+ .run();
+}
+
+#[cargo_test]
+fn target_specific_documented() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [target.foo.dependencies]
+ a = {{ path = "a" }}
+ [target.{}.dependencies]
+ a = {{ path = "a" }}
+ "#,
+ rustc_host()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate a;
+
+ /// test
+ pub fn foo() {}
+ ",
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file(
+ "a/src/lib.rs",
+ "
+ /// test
+ pub fn foo() {}
+ ",
+ )
+ .build();
+
+ p.cargo("doc").run();
+}
+
+#[cargo_test]
+fn no_document_build_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [build-dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file(
+ "a/src/lib.rs",
+ "
+ /// ```
+ /// ☃
+ /// ```
+ pub fn foo() {}
+ ",
+ )
+ .build();
+
+ p.cargo("doc").run();
+}
+
+#[cargo_test]
+fn doc_release() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("check --release").run();
+ p.cargo("doc --release -v")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([..])
+[RUNNING] `rustdoc [..] src/lib.rs [..]`
+[FINISHED] release [optimized] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_multiple_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+
+ [dependencies.baz]
+ path = "baz"
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar; pub fn foo() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("doc -p bar -p baz -v").run();
+
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/bar/index.html").is_file());
+ assert!(p.root().join("target/doc/baz/index.html").is_file());
+}
+
+#[cargo_test]
+fn features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+
+ [features]
+ foo = ["bar/bar"]
+ "#,
+ )
+ .file("src/lib.rs", r#"#[cfg(feature = "foo")] pub fn foo() {}"#)
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ bar = []
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-cfg=bar");
+ }
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"#[cfg(feature = "bar")] pub fn bar() {}"#,
+ )
+ .build();
+ p.cargo("doc --features foo")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 [..]
+[DOCUMENTING] bar v0.0.1 [..]
+[DOCUMENTING] foo v0.0.1 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ assert!(p.root().join("target/doc").is_dir());
+ assert!(p.root().join("target/doc/foo/fn.foo.html").is_file());
+ assert!(p.root().join("target/doc/bar/fn.bar.html").is_file());
+ // Check that turning the feature off will remove the files.
+ p.cargo("doc")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 [..]
+[DOCUMENTING] bar v0.0.1 [..]
+[DOCUMENTING] foo v0.0.1 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ assert!(!p.root().join("target/doc/foo/fn.foo.html").is_file());
+ assert!(!p.root().join("target/doc/bar/fn.bar.html").is_file());
+ // And switching back will rebuild and bring them back.
+ p.cargo("doc --features foo")
+ .with_stderr(
+ "\
+[DOCUMENTING] bar v0.0.1 [..]
+[DOCUMENTING] foo v0.0.1 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ assert!(p.root().join("target/doc/foo/fn.foo.html").is_file());
+ assert!(p.root().join("target/doc/bar/fn.bar.html").is_file());
+}
+
+#[cargo_test]
+fn rerun_when_dir_removed() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ /// dox
+ pub fn foo() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("doc").run();
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+
+ fs::remove_dir_all(p.root().join("target/doc/foo")).unwrap();
+
+ p.cargo("doc").run();
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+}
+
+#[cargo_test]
+fn document_only_lib() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ /// dox
+ pub fn foo() {}
+ "#,
+ )
+ .file(
+ "src/bin/bar.rs",
+ r#"
+ /// ```
+ /// ☃
+ /// ```
+ pub fn foo() {}
+ fn main() { foo(); }
+ "#,
+ )
+ .build();
+ p.cargo("doc --lib").run();
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+}
+
+#[cargo_test]
+fn plugins_no_use_target() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("doc --target=x86_64-unknown-openbsd -v").run();
+}
+
+#[cargo_test]
+fn doc_all_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ // The order in which bar is compiled or documented is not deterministic
+ p.cargo("doc --workspace")
+ .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Checking bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+ .run();
+}
+
+#[cargo_test]
+fn doc_all_virtual_manifest() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ // The order in which bar and baz are documented is not guaranteed
+ p.cargo("doc --workspace")
+ .with_stderr_contains("[..] Documenting baz v0.1.0 ([..])")
+ .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+ .run();
+}
+
+#[cargo_test]
+fn doc_virtual_manifest_all_implied() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ // The order in which bar and baz are documented is not guaranteed
+ p.cargo("doc")
+ .with_stderr_contains("[..] Documenting baz v0.1.0 ([..])")
+ .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+ .run();
+}
+
+#[cargo_test]
+fn doc_virtual_manifest_one_project() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }")
+ .build();
+
+ p.cargo("doc -p bar")
+ .with_stderr_does_not_contain("[DOCUMENTING] baz v0.1.0 [..]")
+ .with_stderr(
+ "\
+[DOCUMENTING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_virtual_manifest_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("doc -p '*z'")
+ .with_stderr_does_not_contain("[DOCUMENTING] bar v0.1.0 [..]")
+ .with_stderr(
+ "\
+[DOCUMENTING] baz v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_all_member_dependency_same_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ Package::new("bar", "0.1.0").publish();
+
+ p.cargo("doc --workspace")
+ .with_stderr_unordered(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 (registry `dummy-registry`)
+warning: output filename collision.
+The lib target `bar` in package `bar v0.1.0` has the same output filename as \
+the lib target `bar` in package `bar v0.1.0 ([ROOT]/foo/bar)`.
+Colliding filename is: [ROOT]/foo/target/doc/bar/index.html
+The targets should have unique names.
+This is a known bug where multiple crates with the same name use
+the same path; see <https://github.com/rust-lang/cargo/issues/6313>.
+[DOCUMENTING] bar v0.1.0
+[CHECKING] bar v0.1.0
+[DOCUMENTING] bar v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_workspace_open_help_message() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "bar"]
+ "#,
+ )
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ // The order in which bar is compiled or documented is not deterministic
+ p.cargo("doc --workspace --open")
+ .env("BROWSER", tools::echo())
+ .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+ .with_stderr_contains("[..] Opening [..]/bar/index.html")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zextern-html-root-url is unstable")]
+fn doc_extern_map_local() {
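+ // Exercises `-Zrustdoc-map` with `doc.extern-map.std = 'local'` set in .cargo/config.toml.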
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(".cargo/config.toml", "doc.extern-map.std = 'local'")
+ .build();
+
+ p.cargo("doc -v --no-deps -Zrustdoc-map --open")
+ .env("BROWSER", tools::echo())
+ .masquerade_as_nightly_cargo(&["rustdoc-map"])
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.1.0 [..]
+[RUNNING] `rustdoc --crate-type lib --crate-name foo src/lib.rs [..]--crate-version 0.1.0`
+[FINISHED] [..]
+ Opening [CWD]/target/doc/foo/index.html
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn open_no_doc_crate() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ doc = false
+ "#,
+ )
+ .file("src/lib.rs", "#[cfg(feature)] pub fn f();")
+ .build();
+
+ p.cargo("doc --open")
+ .env("BROWSER", "do_not_run_me")
+ .with_status(101)
+ .with_stderr_contains("error: no crates with documentation")
+ .run();
+}
+
+#[cargo_test]
+fn doc_workspace_open_different_library_and_package_names() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [lib]
+ name = "foolib"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc --open")
+ .env("BROWSER", tools::echo())
+ .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+ .with_stderr_contains("[..] [CWD]/target/doc/foolib/index.html")
+ .with_stdout_contains("[CWD]/target/doc/foolib/index.html")
+ .run();
+
+ p.change_file(
+ ".cargo/config.toml",
+ &format!(
+ r#"
+ [doc]
+ browser = ["{}", "a"]
+ "#,
+ tools::echo().display().to_string().replace('\\', "\\\\")
+ ),
+ );
+
+ // check that the cargo config overrides the browser env var
+ p.cargo("doc --open")
+ .env("BROWSER", "do_not_run_me")
+ .with_stdout_contains("a [CWD]/target/doc/foolib/index.html")
+ .run();
+}
+
+#[cargo_test]
+fn doc_workspace_open_binary() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [[bin]]
+ name = "foobin"
+ path = "src/main.rs"
+ "#,
+ )
+ .file("foo/src/main.rs", "")
+ .build();
+
+ p.cargo("doc --open")
+ .env("BROWSER", tools::echo())
+ .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+ .with_stderr_contains("[..] Opening [CWD]/target/doc/foobin/index.html")
+ .run();
+}
+
+#[cargo_test]
+fn doc_workspace_open_binary_and_library() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [lib]
+ name = "foolib"
+ [[bin]]
+ name = "foobin"
+ path = "src/main.rs"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .file("foo/src/main.rs", "")
+ .build();
+
+ p.cargo("doc --open")
+ .env("BROWSER", tools::echo())
+ .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+ .with_stderr_contains("[..] Opening [CWD]/target/doc/foolib/index.html")
+ .run();
+}
+
+#[cargo_test]
+fn doc_edition() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ edition = "2018"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("doc -v")
+ .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]")
+ .run();
+
+ p.cargo("test -v")
+ .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]")
+ .run();
+}
+
+#[cargo_test]
+fn doc_target_edition() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ edition = "2018"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("doc -v")
+ .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]")
+ .run();
+
+ p.cargo("test -v")
+ .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]")
+ .run();
+}
+
+// Tests an issue where depending on different versions of the same crate
+// through `cfg`-specific dependency tables caused `cargo doc` to fail.
+#[cargo_test]
+fn issue_5345() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [target.'cfg(all(windows, target_arch = "x86"))'.dependencies]
+ bar = "0.1"
+
+ [target.'cfg(not(all(windows, target_arch = "x86")))'.dependencies]
+ bar = "0.2"
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;")
+ .build();
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.2.0").publish();
+
+ foo.cargo("check").run();
+ foo.cargo("doc").run();
+}
+
+#[cargo_test]
+fn doc_private_items() {
+ let foo = project()
+ .file("src/lib.rs", "mod private { fn private_item() {} }")
+ .build();
+ foo.cargo("doc --document-private-items").run();
+
+ assert!(foo.root().join("target/doc").is_dir());
+ assert!(foo
+ .root()
+ .join("target/doc/foo/private/index.html")
+ .is_file());
+}
+
+#[cargo_test]
+fn doc_private_ws() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "fn p() {}")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/lib.rs", "fn p2() {}")
+ .file("b/src/bin/b-cli.rs", "fn main() {}")
+ .build();
+ p.cargo("doc --workspace --bins --lib --document-private-items -v")
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..] a/src/lib.rs [..]--document-private-items[..]",
+ )
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..] b/src/lib.rs [..]--document-private-items[..]",
+ )
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..] b/src/bin/b-cli.rs [..]--document-private-items[..]",
+ )
+ .run();
+}
+
+const BAD_INTRA_LINK_LIB: &str = r#"
+#![deny(broken_intra_doc_links)]
+
+/// [bad_link]
+pub fn foo() {}
+"#;
+
+#[cargo_test]
+fn doc_cap_lints() {
+ let a = git::new("a", |p| {
+ p.file("Cargo.toml", &basic_lib_manifest("a"))
+ .file("src/lib.rs", BAD_INTRA_LINK_LIB)
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = {{ git = '{}' }}
+ "#,
+ a.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("doc")
+ .with_stderr_unordered(
+ "\
+[UPDATING] git repository `[..]`
+[DOCUMENTING] a v0.5.0 ([..])
+[CHECKING] a v0.5.0 ([..])
+[DOCUMENTING] foo v0.0.1 ([..])
+[FINISHED] dev [..]
+",
+ )
+ .run();
+
+ p.root().join("target").rm_rf();
+
+ p.cargo("doc -vv")
+ .with_stderr_contains("[WARNING] [..]`bad_link`[..]")
+ .run();
+}
+
+#[cargo_test]
+fn doc_message_format() {
+ let p = project().file("src/lib.rs", BAD_INTRA_LINK_LIB).build();
+
+ p.cargo("doc --message-format=json")
+ .with_status(101)
+ .with_json_contains_unordered(
+ r#"
+ {
+ "message": {
+ "children": "{...}",
+ "code": "{...}",
+ "level": "error",
+ "message": "{...}",
+ "rendered": "{...}",
+ "spans": "{...}"
+ },
+ "package_id": "foo [..]",
+ "manifest_path": "[..]",
+ "reason": "compiler-message",
+ "target": "{...}"
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_json_artifacts() {
+ // Checks the output of json artifact messages.
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/somebin.rs", "fn main() {}")
+ .build();
+
+ p.cargo("doc --message-format=json")
+ .with_json_contains_unordered(
+ r#"
+{
+ "reason": "compiler-artifact",
+ "package_id": "foo 0.0.1 [..]",
+ "manifest_path": "[ROOT]/foo/Cargo.toml",
+ "target":
+ {
+ "kind": ["lib"],
+ "crate_types": ["lib"],
+ "name": "foo",
+ "src_path": "[ROOT]/foo/src/lib.rs",
+ "edition": "2015",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ },
+ "profile": "{...}",
+ "features": [],
+ "filenames": ["[ROOT]/foo/target/debug/deps/libfoo-[..].rmeta"],
+ "executable": null,
+ "fresh": false
+}
+
+{
+ "reason": "compiler-artifact",
+ "package_id": "foo 0.0.1 [..]",
+ "manifest_path": "[ROOT]/foo/Cargo.toml",
+ "target":
+ {
+ "kind": ["lib"],
+ "crate_types": ["lib"],
+ "name": "foo",
+ "src_path": "[ROOT]/foo/src/lib.rs",
+ "edition": "2015",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ },
+ "profile": "{...}",
+ "features": [],
+ "filenames": ["[ROOT]/foo/target/doc/foo/index.html"],
+ "executable": null,
+ "fresh": false
+}
+
+{
+ "reason": "compiler-artifact",
+ "package_id": "foo 0.0.1 [..]",
+ "manifest_path": "[ROOT]/foo/Cargo.toml",
+ "target":
+ {
+ "kind": ["bin"],
+ "crate_types": ["bin"],
+ "name": "somebin",
+ "src_path": "[ROOT]/foo/src/bin/somebin.rs",
+ "edition": "2015",
+ "doc": true,
+ "doctest": false,
+ "test": true
+ },
+ "profile": "{...}",
+ "features": [],
+ "filenames": ["[ROOT]/foo/target/doc/somebin/index.html"],
+ "executable": null,
+ "fresh": false
+}
+
+{"reason":"build-finished","success":true}
+"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn short_message_format() {
+ let p = project().file("src/lib.rs", BAD_INTRA_LINK_LIB).build();
+ p.cargo("doc --message-format=short")
+ .with_status(101)
+ .with_stderr_contains("src/lib.rs:4:6: error: [..]`bad_link`[..]")
+ .run();
+}
+
+#[cargo_test]
+fn doc_example() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [[example]]
+ crate-type = ["lib"]
+ name = "ex1"
+ doc = true
+ "#,
+ )
+ .file("src/lib.rs", "pub fn f() {}")
+ .file(
+ "examples/ex1.rs",
+ r#"
+ use foo::f;
+
+ /// Example
+ pub fn x() { f(); }
+ "#,
+ )
+ .build();
+
+ p.cargo("doc").run();
+ assert!(p
+ .build_dir()
+ .join("doc")
+ .join("ex1")
+ .join("fn.x.html")
+ .exists());
+}
+
+#[cargo_test]
+fn doc_example_with_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [[example]]
+ crate-type = ["lib"]
+ name = "ex"
+ doc = true
+
+ [dev-dependencies]
+ a = {path = "a"}
+ b = {path = "b"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "examples/ex.rs",
+ r#"
+ use a::fun;
+
+ /// Example
+ pub fn x() { fun(); }
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+
+ [dependencies]
+ b = {path = "../b"}
+ "#,
+ )
+ .file("a/src/fun.rs", "pub fn fun() {}")
+ .file("a/src/lib.rs", "pub mod fun;")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.0.1"
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc --examples").run();
+ assert!(p
+ .build_dir()
+ .join("doc")
+ .join("ex")
+ .join("fn.x.html")
+ .exists());
+}
+
+#[cargo_test]
+fn bin_private_items() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
+ pub fn foo_pub() {}
+ fn foo_priv() {}
+ struct FooStruct;
+ enum FooEnum {}
+ trait FooTrait {}
+ type FooType = u32;
+ mod foo_mod {}
+
+ ",
+ )
+ .build();
+
+ p.cargo("doc")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+ assert!(p.root().join("target/doc/foo/fn.foo_pub.html").is_file());
+ assert!(p.root().join("target/doc/foo/fn.foo_priv.html").is_file());
+ assert!(p
+ .root()
+ .join("target/doc/foo/struct.FooStruct.html")
+ .is_file());
+ assert!(p.root().join("target/doc/foo/enum.FooEnum.html").is_file());
+ assert!(p
+ .root()
+ .join("target/doc/foo/trait.FooTrait.html")
+ .is_file());
+ assert!(p.root().join("target/doc/foo/type.FooType.html").is_file());
+ assert!(p.root().join("target/doc/foo/foo_mod/index.html").is_file());
+}
+
+#[cargo_test]
+fn bin_private_items_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
+ fn foo_priv() {}
+ pub fn foo_pub() {}
+ ",
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file(
+ "bar/src/lib.rs",
+ "
+ #[allow(dead_code)]
+ fn bar_priv() {}
+ pub fn bar_pub() {}
+ ",
+ )
+ .build();
+
+ p.cargo("doc")
+ .with_stderr_unordered(
+ "\
+[DOCUMENTING] bar v0.0.1 ([..])
+[CHECKING] bar v0.0.1 ([..])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ assert!(p.root().join("target/doc/foo/index.html").is_file());
+ assert!(p.root().join("target/doc/foo/fn.foo_pub.html").is_file());
+ assert!(p.root().join("target/doc/foo/fn.foo_priv.html").is_file());
+
+ assert!(p.root().join("target/doc/bar/index.html").is_file());
+ assert!(p.root().join("target/doc/bar/fn.bar_pub.html").is_file());
+ assert!(!p.root().join("target/doc/bar/fn.bar_priv.html").exists());
+}
+
+#[cargo_test]
+fn crate_versions() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.2.4"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("doc -v")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v1.2.4 [..]
+[RUNNING] `rustdoc --crate-type lib --crate-name foo src/lib.rs [..]--crate-version 1.2.4`
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ let output_path = p.root().join("target/doc/foo/index.html");
+ let output_documentation = fs::read_to_string(&output_path).unwrap();
+
+ assert!(output_documentation.contains("Version 1.2.4"));
+}
+
+#[cargo_test]
+fn crate_versions_flag_is_overridden() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.2.4"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let output_documentation = || {
+ let output_path = p.root().join("target/doc/foo/index.html");
+ fs::read_to_string(&output_path).unwrap()
+ };
+ let asserts = |html: String| {
+ assert!(!html.contains("1.2.4"));
+ assert!(html.contains("Version 2.0.3"));
+ };
+
+ p.cargo("doc")
+ .env("RUSTDOCFLAGS", "--crate-version 2.0.3")
+ .run();
+ asserts(output_documentation());
+
+ p.build_dir().rm_rf();
+
+ p.cargo("rustdoc -- --crate-version 2.0.3").run();
+ asserts(output_documentation());
+}
+
+#[cargo_test(nightly, reason = "-Zdoctest-in-workspace is unstable")]
+fn doc_test_in_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = [
+ "crate-a",
+ "crate-b",
+ ]
+ "#,
+ )
+ .file(
+ "crate-a/Cargo.toml",
+ r#"
+ [package]
+ name = "crate-a"
+ version = "0.1.0"
+ "#,
+ )
+ .file(
+ "crate-a/src/lib.rs",
+ "\
+ //! ```
+ //! assert_eq!(1, 1);
+ //! ```
+ ",
+ )
+ .file(
+ "crate-b/Cargo.toml",
+ r#"
+ [package]
+ name = "crate-b"
+ version = "0.1.0"
+ "#,
+ )
+ .file(
+ "crate-b/src/lib.rs",
+ "\
+ //! ```
+ //! assert_eq!(1, 1);
+ //! ```
+ ",
+ )
+ .build();
+ p.cargo("test -Zdoctest-in-workspace --doc -vv")
+ .masquerade_as_nightly_cargo(&["doctest-in-workspace"])
+ .with_stderr_contains("[DOCTEST] crate-a")
+ .with_stdout_contains(
+ "
+running 1 test
+test crate-a/src/lib.rs - (line 1) ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]
+
+",
+ )
+ .with_stderr_contains("[DOCTEST] crate-b")
+ .with_stdout_contains(
+ "
+running 1 test
+test crate-b/src/lib.rs - (line 1) ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_fingerprint_is_versioning_consistent() {
+ // Random rustc verbose version
+ let old_rustc_verbose_version = format!(
+ "\
+rustc 1.41.1 (f3e1a954d 2020-02-24)
+binary: rustc
+commit-hash: f3e1a954d2ead4e2fc197c7da7d71e6c61bad196
+commit-date: 2020-02-24
+host: {}
+release: 1.41.1
+LLVM version: 9.0
+",
+ rustc_host()
+ );
+
+ // Create the dummy project.
+ let dummy_project = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.2.4"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "//! These are the docs!")
+ .build();
+
+ dummy_project.cargo("doc").run();
+
+ let fingerprint: RustDocFingerprint =
+ serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json"))
+ .expect("JSON Serde fail");
+
+ // Check that the fingerprint contains the actual rustc version
+ // which has been used to compile the docs.
+ let output = std::process::Command::new("rustc")
+ .arg("-vV")
+ .output()
+ .expect("Failed to get actual rustc verbose version");
+ assert_eq!(
+ fingerprint.rustc_vv,
+ (String::from_utf8_lossy(&output.stdout).as_ref())
+ );
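+
+ // Illustration only (inferred from the serde_json calls and the assertion above,
+ // not quoted from the upstream `RustDocFingerprint` definition): the fingerprint
+ // file is a small JSON document along the lines of
+ //     { "rustc_vv": "<full output of `rustc -vV`>" }
+ // which deserializes into a struct exposing a `rustc_vv: String`-like field.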
+
+ // As shown above, the `doc/` folder has been generated and the rustdoc
+ // fingerprint file records the rustc version that was actually used.
+ // Now we overwrite that fingerprint with one containing an old rustc version,
+ // and we also place a bogus file inside the `doc/` folder to ensure it gets
+ // removed, as we expect, on the next doc compilation.
+ dummy_project.change_file(
+ "target/.rustdoc_fingerprint.json",
+ &old_rustc_verbose_version,
+ );
+
+ fs::write(
+ dummy_project.build_dir().join("doc/bogus_file"),
+ String::from("This is a bogus file and should be removed!"),
+ )
+ .expect("Error writing test bogus file");
+
+ // Now if we trigger another compilation, since the fingerprint contains an old version
+ // of rustc, cargo should remove the entire `doc/` folder (including the fingerprint)
+ // and generate a new one with the actual version.
+ // It should also remove the bogus file we created above.
+ dummy_project.cargo("doc").run();
+
+ assert!(!dummy_project.build_dir().join("doc/bogus_file").exists());
+
+ let fingerprint: RustDocFingerprint =
+ serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json"))
+ .expect("JSON Serde fail");
+
+ // Check that the fingerprint contains the actual rustc version
+ // which has been used to compile the docs.
+ assert_eq!(
+ fingerprint.rustc_vv,
+ (String::from_utf8_lossy(&output.stdout).as_ref())
+ );
+}
+
+#[cargo_test]
+fn doc_fingerprint_respects_target_paths() {
+ // Random rustc verbose version
+ let old_rustc_verbose_version = format!(
+ "\
+rustc 1.41.1 (f3e1a954d 2020-02-24)
+binary: rustc
+commit-hash: f3e1a954d2ead4e2fc197c7da7d71e6c61bad196
+commit-date: 2020-02-24
+host: {}
+release: 1.41.1
+LLVM version: 9.0
+",
+ rustc_host()
+ );
+
+ // Create the dummy project.
+ let dummy_project = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.2.4"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "//! These are the docs!")
+ .build();
+
+ dummy_project.cargo("doc --target").arg(rustc_host()).run();
+
+ let fingerprint: RustDocFingerprint =
+ serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json"))
+ .expect("JSON Serde fail");
+
+ // Check that the fingerprint contains the actual rustc version
+ // which has been used to compile the docs.
+ let output = std::process::Command::new("rustc")
+ .arg("-vV")
+ .output()
+ .expect("Failed to get actual rustc verbose version");
+ assert_eq!(
+ fingerprint.rustc_vv,
+ (String::from_utf8_lossy(&output.stdout).as_ref())
+ );
+
+ // As shown above, the `doc/` folder has been generated and the rustdoc
+ // fingerprint file records the rustc version that was actually used.
+ // Now we overwrite that fingerprint with one containing an old rustc version,
+ // and we also place a bogus file inside the `doc/` folder to ensure it gets
+ // removed, as we expect, on the next doc compilation.
+ dummy_project.change_file(
+ "target/.rustdoc_fingerprint.json",
+ &old_rustc_verbose_version,
+ );
+
+ fs::write(
+ dummy_project
+ .build_dir()
+ .join(rustc_host())
+ .join("doc/bogus_file"),
+ String::from("This is a bogus file and should be removed!"),
+ )
+ .expect("Error writing test bogus file");
+
+ // Now if we trigger another compilation, since the fingerprint contains an old version
+ // of rustc, cargo should remove the entire `doc/` folder (including the fingerprint)
+ // and generate a new one with the actual version.
+ // It should also remove the bogus file we created above.
+ dummy_project.cargo("doc --target").arg(rustc_host()).run();
+
+ assert!(!dummy_project
+ .build_dir()
+ .join(rustc_host())
+ .join("doc/bogus_file")
+ .exists());
+
+ let fingerprint: RustDocFingerprint =
+ serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json"))
+ .expect("JSON Serde fail");
+
+ // Check that the fingerprint contains the actual rustc version
+ // which has been used to compile the docs.
+ assert_eq!(
+ fingerprint.rustc_vv,
+ (String::from_utf8_lossy(&output.stdout).as_ref())
+ );
+}
+
+#[cargo_test]
+fn doc_fingerprint_unusual_behavior() {
+ // Checks for some unusual circumstances with clearing the doc directory.
+ if !symlink_supported() {
+ return;
+ }
+ let p = project().file("src/lib.rs", "").build();
+ p.build_dir().mkdir_p();
+ let real_doc = p.root().join("doc");
+ real_doc.mkdir_p();
+ let build_doc = p.build_dir().join("doc");
+ p.symlink(&real_doc, &build_doc);
+ fs::write(real_doc.join("somefile"), "test").unwrap();
+ fs::write(real_doc.join(".hidden"), "test").unwrap();
+ p.cargo("doc").run();
+ // Make sure for the first run, it does not delete any files and does not
+ // break the symlink.
+ assert!(build_doc.join("somefile").exists());
+ assert!(real_doc.join("somefile").exists());
+ assert!(real_doc.join(".hidden").exists());
+ assert!(real_doc.join("foo/index.html").exists());
+ // Pretend that the last build was generated by an older version.
+ p.change_file(
+ "target/.rustdoc_fingerprint.json",
+ "{\"rustc_vv\": \"I am old\"}",
+ );
+ // Change file to trigger a new build.
+ p.change_file("src/lib.rs", "// changed");
+ p.cargo("doc")
+ .with_stderr(
+ "[DOCUMENTING] foo [..]\n\
+ [FINISHED] [..]",
+ )
+ .run();
+ // This will delete somefile, but not .hidden.
+ assert!(!real_doc.join("somefile").exists());
+ assert!(real_doc.join(".hidden").exists());
+ assert!(real_doc.join("foo/index.html").exists());
+ // And also check the -Z flag behavior.
+ p.change_file(
+ "target/.rustdoc_fingerprint.json",
+ "{\"rustc_vv\": \"I am old\"}",
+ );
+ // Change file to trigger a new build.
+ p.change_file("src/lib.rs", "// changed2");
+ fs::write(real_doc.join("somefile"), "test").unwrap();
+ p.cargo("doc -Z skip-rustdoc-fingerprint")
+ .masquerade_as_nightly_cargo(&["skip-rustdoc-fingerprint"])
+ .with_stderr(
+ "[DOCUMENTING] foo [..]\n\
+ [FINISHED] [..]",
+ )
+ .run();
+ // Should not have deleted anything.
+ assert!(build_doc.join("somefile").exists());
+ assert!(real_doc.join("somefile").exists());
+}
+
+#[cargo_test]
+fn lib_before_bin() {
+ // Checks that the library is documented before the binary.
+ // Previously they were built concurrently, which can cause issues
+ // if the bin has intra-doc links to the lib.
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ /// Hi
+ pub fn abc() {}
+ "#,
+ )
+ .file(
+ "src/bin/somebin.rs",
+ r#"
+ //! See [`foo::abc`]
+ fn main() {}
+ "#,
+ )
+ .build();
+
+ // Run check first. This just helps ensure that the test clearly shows the
+ // order of the rustdoc commands.
+ p.cargo("check").run();
+
+ // The order of output here should be deterministic.
+ p.cargo("doc -v")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo [..]
+[RUNNING] `rustdoc --crate-type lib --crate-name foo src/lib.rs [..]
+[RUNNING] `rustdoc --crate-type bin --crate-name somebin src/bin/somebin.rs [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // And the link should exist.
+ let bin_html = p.read_file("target/doc/somebin/index.html");
+ assert!(bin_html.contains("../foo/fn.abc.html"));
+}
+
+#[cargo_test]
+fn doc_lib_false() {
+ // doc = false for a library
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [lib]
+ doc = false
+
+ [dependencies]
+ bar = {path = "bar"}
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;")
+ .file("src/bin/some-bin.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [lib]
+ doc = false
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0 [..]
+[CHECKING] foo v0.1.0 [..]
+[DOCUMENTING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ assert!(!p.build_dir().join("doc/foo").exists());
+ assert!(!p.build_dir().join("doc/bar").exists());
+ assert!(p.build_dir().join("doc/some_bin").exists());
+}
+
+#[cargo_test]
+fn doc_lib_false_dep() {
+ // doc = false for a dependency
+ // Ensures that the rmeta gets produced
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [lib]
+ doc = false
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0 [..]
+[DOCUMENTING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ assert!(p.build_dir().join("doc/foo").exists());
+ assert!(!p.build_dir().join("doc/bar").exists());
+}
+
+#[cargo_test]
+fn link_to_private_item() {
+ let main = r#"
+ //! [bar]
+ #[allow(dead_code)]
+ fn bar() {}
+ "#;
+ let p = project().file("src/lib.rs", main).build();
+ p.cargo("doc")
+ .with_stderr_contains("[..] documentation for `foo` links to private item `bar`")
+ .run();
+ // Check that binaries don't emit a private_intra_doc_links warning.
+ fs::rename(p.root().join("src/lib.rs"), p.root().join("src/main.rs")).unwrap();
+ p.cargo("doc")
+ .with_stderr(
+ "[DOCUMENTING] foo [..]\n\
+ [FINISHED] [..]",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/docscrape.rs b/src/tools/cargo/tests/testsuite/docscrape.rs
new file mode 100644
index 000000000..c536a6738
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/docscrape.rs
@@ -0,0 +1,637 @@
+//! Tests for the `cargo doc` command with `-Zrustdoc-scrape-examples`.
+
+use cargo_test_support::project;
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn basic() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("examples/ex.rs", "fn main() { foo::foo(); }")
+ .file("src/lib.rs", "pub fn foo() {}\npub fn bar() { foo(); }")
+ .build();
+
+ p.cargo("doc -Zunstable-options -Zrustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[SCRAPING] foo v0.0.1 ([CWD])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_stderr("[FINISHED] [..]")
+ .run();
+
+ let doc_html = p.read_file("target/doc/foo/fn.foo.html");
+ assert!(doc_html.contains("Examples found in repository"));
+ assert!(!doc_html.contains("More examples"));
+
+ // Ensure that the reverse-dependency has its sources generated
+ assert!(p.build_dir().join("doc/src/ex/ex.rs.html").exists());
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn avoid_build_script_cycle() {
+ let p = project()
+ // package with build dependency
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ links = "foo"
+
+ [workspace]
+ members = ["bar"]
+
+ [build-dependencies]
+ bar = {path = "bar"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main(){}")
+ // dependency
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ links = "bar"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file("bar/build.rs", "fn main(){}")
+ .build();
+
+ p.cargo("doc --workspace -Zunstable-options -Zrustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .run();
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn complex_reverse_dependencies() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dev-dependencies]
+ a = {path = "a", features = ["feature"]}
+ b = {path = "b"}
+
+ [workspace]
+ members = ["b"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "fn main() {}")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ b = {path = "../b"}
+
+ [features]
+ feature = []
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc --workspace --examples -Zunstable-options -Zrustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .run();
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn crate_with_dash() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "da-sh"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file("examples/a.rs", "fn main() { da_sh::foo(); }")
+ .build();
+
+ p.cargo("doc -Zunstable-options -Zrustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .run();
+
+ let doc_html = p.read_file("target/doc/da_sh/fn.foo.html");
+ assert!(doc_html.contains("Examples found in repository"));
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn configure_target() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ doc-scrape-examples = true
+
+ [[bin]]
+ name = "a_bin"
+ doc-scrape-examples = true
+
+ [[example]]
+ name = "a"
+ doc-scrape-examples = false
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "pub fn foo() {} fn lib_must_appear() { foo(); }",
+ )
+ .file(
+ "examples/a.rs",
+ "fn example_must_not_appear() { foo::foo(); }",
+ )
+ .file(
+ "src/bin/a_bin.rs",
+ "fn bin_must_appear() { foo::foo(); } fn main(){}",
+ )
+ .build();
+
+ p.cargo("doc -Zunstable-options -Zrustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .run();
+
+ let doc_html = p.read_file("target/doc/foo/fn.foo.html");
+ assert!(doc_html.contains("lib_must_appear"));
+ assert!(doc_html.contains("bin_must_appear"));
+ assert!(!doc_html.contains("example_must_not_appear"));
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn configure_profile_issue_10500() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.dev]
+ panic = "abort"
+ "#,
+ )
+ .file("examples/ex.rs", "fn main() { foo::foo(); }")
+ .file("src/lib.rs", "pub fn foo() {}\npub fn bar() { foo(); }")
+ .build();
+
+ p.cargo("doc -Zunstable-options -Zrustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .run();
+
+ let doc_html = p.read_file("target/doc/foo/fn.foo.html");
+ assert!(doc_html.contains("Examples found in repository"));
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn issue_10545() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ resolver = "2"
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ edition = "2021"
+
+ [features]
+ default = ["foo"]
+ foo = []
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.0.1"
+ authors = []
+ edition = "2021"
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("doc --workspace -Zunstable-options -Zrustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .run();
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn cache() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("examples/ex.rs", "fn main() { foo::foo(); }")
+ .file("src/lib.rs", "pub fn foo() {}\npub fn bar() { foo(); }")
+ .build();
+
+ p.cargo("doc -Zunstable-options -Zrustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[SCRAPING] foo v0.0.1 ([CWD])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("doc -Zunstable-options -Zrustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_stderr(
+ "\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn no_fail_bad_lib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() { CRASH_THE_BUILD() }")
+ .file("examples/ex.rs", "fn main() { foo::foo(); }")
+ .file("examples/ex2.rs", "fn main() { foo::foo(); }")
+ .build();
+
+ p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_stderr_unordered(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[SCRAPING] foo v0.0.1 ([CWD])
+warning: failed to check lib in package `foo` as a prerequisite for scraping examples from: example \"ex\", example \"ex2\"
+ Try running with `--verbose` to see the error message.
+ If an example should not be scanned, then consider adding `doc-scrape-examples = false` to its `[[example]]` definition in Cargo.toml
+warning: `foo` (lib) generated 1 warning
+warning: failed to scan example \"ex\" in package `foo` for example code usage
+ Try running with `--verbose` to see the error message.
+ If an example should not be scanned, then consider adding `doc-scrape-examples = false` to its `[[example]]` definition in Cargo.toml
+warning: `foo` (example \"ex\") generated 1 warning
+warning: failed to scan example \"ex2\" in package `foo` for example code usage
+ Try running with `--verbose` to see the error message.
+ If an example should not be scanned, then consider adding `doc-scrape-examples = false` to its `[[example]]` definition in Cargo.toml
+warning: `foo` (example \"ex2\") generated 1 warning
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn fail_bad_build_script() {
+ // See rust-lang/cargo#11623
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() { panic!(\"You shall not pass\")}")
+ .file("examples/ex.rs", "fn main() {}")
+ .build();
+
+ // `cargo doc` fails
+ p.cargo("doc")
+ .with_status(101)
+ .with_stderr_contains("[..]You shall not pass[..]")
+ .run();
+
+ // scrape examples should fail whenever `cargo doc` fails.
+ p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_status(101)
+ .with_stderr_contains("[..]You shall not pass[..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn no_fail_bad_example() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("examples/ex1.rs", "DOES NOT COMPILE")
+ .file("examples/ex2.rs", "fn main() { foo::foo(); }")
+ .file("src/lib.rs", "pub fn foo(){}")
+ .build();
+
+ p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[SCRAPING] foo v0.0.1 ([CWD])
+warning: failed to scan example \"ex1\" in package `foo` for example code usage
+ Try running with `--verbose` to see the error message.
+ If an example should not be scanned, then consider adding `doc-scrape-examples = false` to its `[[example]]` definition in Cargo.toml
+warning: `foo` (example \"ex1\") generated 1 warning
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+
+ p.cargo("clean").run();
+
+ p.cargo("doc -v -Zunstable-options -Z rustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_stderr_unordered(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo[..]
+[SCRAPING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc[..] --crate-name ex1[..]
+[RUNNING] `rustdoc[..] --crate-name ex2[..]
+[RUNNING] `rustdoc[..] --crate-name foo[..]
+error: expected one of `!` or `::`, found `NOT`
+ --> examples/ex1.rs:1:6
+ |
+1 | DOES NOT COMPILE
+ | ^^^ expected one of `!` or `::`
+
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+
+ let doc_html = p.read_file("target/doc/foo/fn.foo.html");
+ assert!(doc_html.contains("Examples found in repository"));
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn no_scrape_with_dev_deps() {
+ // Tests that a crate with dev-dependencies does not have its examples
+ // scraped unless explicitly prompted to check them. See
+ // `UnitGenerator::create_docscrape_proposals` for details on why.
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dev-dependencies]
+ a = {path = "a"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "fn main() { a::f(); }")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("a/src/lib.rs", "pub fn f() {}")
+ .build();
+
+ // If --examples is not provided, then the example is not scanned, and a warning
+ // should be raised.
+ p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_stderr(
+ "\
+warning: Rustdoc did not scrape the following examples because they require dev-dependencies: ex
+ If you want Rustdoc to scrape these examples, then add `doc-scrape-examples = true`
+ to the [[example]] target configuration of at least one example.
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+
+ // If --examples is provided, then the example is scanned.
+ p.cargo("doc --examples -Zunstable-options -Z rustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_stderr_unordered(
+ "\
+[CHECKING] a v0.0.1 ([CWD]/a)
+[CHECKING] foo v0.0.1 ([CWD])
+[DOCUMENTING] a v0.0.1 ([CWD]/a)
+[SCRAPING] foo v0.0.1 ([CWD])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn use_dev_deps_if_explicitly_enabled() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex"
+ doc-scrape-examples = true
+
+ [dev-dependencies]
+ a = {path = "a"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "fn main() { a::f(); }")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("a/src/lib.rs", "pub fn f() {}")
+ .build();
+
+ // Even though --examples is not provided, the example is still scanned (and its
+ // dev-dependency built) because it explicitly sets `doc-scrape-examples = true`.
+ p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_stderr_unordered(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[CHECKING] a v0.0.1 ([CWD]/a)
+[SCRAPING] foo v0.0.1 ([CWD])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn only_scrape_documented_targets() {
+ // Package bar has `doc = false` and should not be eligible for documentation.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ doc = false
+
+ [workspace]
+ members = ["foo"]
+
+ [dependencies]
+ foo = {{ path = "foo" }}
+ "#
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "pub fn main() { foo::foo(); }")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("foo/src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ p.cargo("doc --workspace -Zunstable-options -Zrustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .run();
+
+ let doc_html = p.read_file("target/doc/foo/fn.foo.html");
+ let example_found = doc_html.contains("Examples found in repository");
+ assert!(!example_found);
+}
+
+#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
+fn issue_11496() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "repro"
+ version = "0.1.0"
+ edition = "2021"
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "fn main(){}")
+ .build();
+
+ p.cargo("doc -Zunstable-options -Zrustdoc-scrape-examples")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/edition.rs b/src/tools/cargo/tests/testsuite/edition.rs
new file mode 100644
index 000000000..377a86ec0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/edition.rs
@@ -0,0 +1,124 @@
+//! Tests for edition setting.
+
+use cargo::core::Edition;
+use cargo_test_support::{basic_lib_manifest, project};
+
+#[cargo_test]
+fn edition_works_for_build_script() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+ edition = '2018'
+
+ [build-dependencies]
+ a = { path = 'a' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ a::foo();
+ }
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_lib_manifest("a"))
+ .file("a/src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ p.cargo("check -v").run();
+}
+
+#[cargo_test]
+fn edition_unstable_gated() {
+ // During the period when a new edition is coming up but is not yet stable,
+ // this test verifies that it cannot be used on stable. If there is no
+ // next edition, the test does nothing.
+ let next = match Edition::LATEST_UNSTABLE {
+ Some(next) => next,
+ None => {
+ eprintln!("Next edition is currently not available, skipping test.");
+ return;
+ }
+ };
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "{}"
+ "#,
+ next
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(&format!(
+ "\
+[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+ feature `edition{next}` is required
+
+ The package requires the Cargo feature called `edition{next}`, \
+ but that feature is not stabilized in this version of Cargo (1.[..]).
+ Consider trying a newer version of Cargo (this may require the nightly release).
+ See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#edition-{next} \
+ for more information about the status of this feature.
+",
+ next = next
+ ))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "fundamentally always nightly")]
+fn edition_unstable() {
+ // During the period when a new edition is coming up but is not yet stable,
+ // this test verifies that it can be used with `cargo-features`. If
+ // there is no next edition, the test does nothing.
+ let next = match Edition::LATEST_UNSTABLE {
+ Some(next) => next,
+ None => {
+ eprintln!("Next edition is currently not available, skipping test.");
+ return;
+ }
+ };
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ cargo-features = ["edition{next}"]
+
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "{next}"
+ "#,
+ next = next
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["always_nightly"])
+ .with_stderr(
+ "\
+[CHECKING] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/error.rs b/src/tools/cargo/tests/testsuite/error.rs
new file mode 100644
index 000000000..410902c21
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/error.rs
@@ -0,0 +1,19 @@
+//! General error tests that don't belong anywhere else.
+
+use cargo_test_support::cargo_process;
+
+#[cargo_test]
+fn internal_error() {
+ cargo_process("init")
+ .env("__CARGO_TEST_INTERNAL_ERROR", "1")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] internal error test
+[NOTE] this is an unexpected cargo internal error
+[NOTE] we would appreciate a bug report: https://github.com/rust-lang/cargo/issues/
+[NOTE] cargo [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/features.rs b/src/tools/cargo/tests/testsuite/features.rs
new file mode 100644
index 000000000..848e05677
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/features.rs
@@ -0,0 +1,2084 @@
+//! Tests for `[features]` table.
+
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::registry::{Dependency, Package};
+use cargo_test_support::{basic_manifest, project};
+
+#[cargo_test]
+fn invalid1() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ bar = ["baz"]
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ feature `bar` includes `baz` which is neither a dependency nor another feature
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn same_name() {
+ // Feature with the same name as a dependency.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ bar = ["baz"]
+ baz = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -f")
+ .arg("{p} [{f}]")
+ .with_stderr("")
+ .with_stdout(
+ "\
+foo v0.0.1 ([..]) []
+└── bar v1.0.0 ([..]) []
+",
+ )
+ .run();
+
+ p.cargo("tree --features bar -f")
+ .arg("{p} [{f}]")
+ .with_stderr("")
+ .with_stdout(
+ "\
+foo v0.0.1 ([..]) [bar,baz]
+└── bar v1.0.0 ([..]) []
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid3() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ bar = ["baz"]
+
+ [dependencies.baz]
+ path = "foo"
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ feature `bar` includes `baz`, but `baz` is not an optional dependency
+ A non-optional dependency of the same name is defined; consider adding `optional = true` to its definition.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid4() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ features = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to select a version for `bar`.
+ ... required by package `foo v0.0.1 ([..])`
+versions that meet the requirements `*` are: 0.0.1
+
+the package `foo` depends on `bar`, with features: `bar` but `bar` does not have these features.
+
+
+failed to select a version for `bar` which could resolve this conflict",
+ )
+ .run();
+
+ p.change_file("Cargo.toml", &basic_manifest("foo", "0.0.1"));
+
+ p.cargo("check --features test")
+ .with_status(101)
+ .with_stderr("error: Package `foo v0.0.1 ([..])` does not have the feature `test`")
+ .run();
+}
+
+#[cargo_test]
+fn invalid5() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dev-dependencies.bar]
+ path = "bar"
+ optional = true
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ dev-dependencies are not allowed to be optional: `bar`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid6() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ foo = ["bar/baz"]
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check --features foo")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ feature `foo` includes `bar/baz`, but `bar` is not a dependency
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid7() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ foo = ["bar/baz"]
+ bar = []
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check --features foo")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ feature `foo` includes `bar/baz`, but `bar` is not a dependency
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid8() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ features = ["foo/bar"]
+ "#,
+ )
+ .file("src/main.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check --features foo")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ feature `foo/bar` in dependency `bar` is not allowed to contain slashes
+ If you want to enable features [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid9() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check --features bar")
+ .with_stderr(
+ "\
+error: Package `foo v0.0.1 ([..])` does not have feature `bar`. It has a required dependency with that name, but only optional dependencies can be used as features.
+",
+ ).with_status(101).run();
+}
+
+#[cargo_test]
+fn invalid10() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ features = ["baz"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.baz]
+ path = "baz"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+ .file("bar/baz/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").with_stderr("\
+error: failed to select a version for `bar`.
+ ... required by package `foo v0.0.1 ([..])`
+versions that meet the requirements `*` are: 0.0.1
+
+the package `foo` depends on `bar`, with features: `baz` but `bar` does not have these features.
+ It has a required dependency with that name, but only optional dependencies can be used as features.
+
+
+failed to select a version for `bar` which could resolve this conflict
+").with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn no_transitive_dep_feature_requirement() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.derived]
+ path = "derived"
+
+ [features]
+ default = ["derived/bar/qux"]
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ extern crate derived;
+ fn main() { derived::test(); }
+ "#,
+ )
+ .file(
+ "derived/Cargo.toml",
+ r#"
+ [package]
+ name = "derived"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("derived/src/lib.rs", "extern crate bar; pub use bar::test;")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ qux = []
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ #[cfg(feature = "qux")]
+ pub fn test() { print!("test"); }
+ "#,
+ )
+ .build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ multiple slashes in feature `derived/bar/qux` (included by feature `default`) are not allowed
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_feature_doesnt_build() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ optional = true
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(feature = "bar")]
+ extern crate bar;
+ #[cfg(feature = "bar")]
+ fn main() { bar::bar(); println!("bar") }
+ #[cfg(not(feature = "bar"))]
+ fn main() {}
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.process(&p.bin("foo")).with_stdout("").run();
+
+ p.cargo("build --features bar -v")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[RUNNING] `rustc --crate-name bar [..]
+[DIRTY-MSVC] foo v0.0.1 ([CWD]): the list of features changed
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.process(&p.bin("foo")).with_stdout("bar\n").run();
+}
+
+#[cargo_test]
+fn default_feature_pulled_in() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["bar"]
+
+ [dependencies.bar]
+ path = "bar"
+ optional = true
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(feature = "bar")]
+ extern crate bar;
+ #[cfg(feature = "bar")]
+ fn main() { bar::bar(); println!("bar") }
+ #[cfg(not(feature = "bar"))]
+ fn main() {}
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.process(&p.bin("foo")).with_stdout("bar\n").run();
+
+ p.cargo("build --no-default-features -v")
+ .with_stderr(
+ "\
+[DIRTY-MSVC] foo v0.0.1 ([CWD]): the list of features changed
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.process(&p.bin("foo")).with_stdout("").run();
+}
+
+#[cargo_test]
+fn cyclic_feature() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["default"]
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr("[ERROR] cyclic feature dependency: feature `default` depends on itself")
+ .run();
+}
+
+#[cargo_test]
+fn cyclic_feature2() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ foo = ["bar"]
+ bar = ["foo"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn groups_on_groups_on_groups() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["f1"]
+ f1 = ["f2", "bar"]
+ f2 = ["f3", "f4"]
+ f3 = ["f5", "f6", "baz"]
+ f4 = ["f5", "f7"]
+ f5 = ["f6"]
+ f6 = ["f7"]
+ f7 = ["bar"]
+
+ [dependencies.bar]
+ path = "bar"
+ optional = true
+
+ [dependencies.baz]
+ path = "baz"
+ optional = true
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate bar;
+ #[allow(unused_extern_crates)]
+ extern crate baz;
+ fn main() {}
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] ba[..] v0.0.1 ([CWD]/ba[..])
+[CHECKING] ba[..] v0.0.1 ([CWD]/ba[..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn many_cli_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ optional = true
+
+ [dependencies.baz]
+ path = "baz"
+ optional = true
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate bar;
+ #[allow(unused_extern_crates)]
+ extern crate baz;
+ fn main() {}
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check --features")
+ .arg("bar baz")
+ .with_stderr(
+ "\
+[CHECKING] ba[..] v0.0.1 ([CWD]/ba[..])
+[CHECKING] ba[..] v0.0.1 ([CWD]/ba[..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn union_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.d1]
+ path = "d1"
+ features = ["f1"]
+ [dependencies.d2]
+ path = "d2"
+ features = ["f2"]
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate d1;
+ extern crate d2;
+ fn main() {
+ d2::f1();
+ d2::f2();
+ }
+ "#,
+ )
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ f1 = ["d2"]
+
+ [dependencies.d2]
+ path = "../d2"
+ features = ["f1"]
+ optional = true
+ "#,
+ )
+ .file("d1/src/lib.rs", "")
+ .file(
+ "d2/Cargo.toml",
+ r#"
+ [package]
+ name = "d2"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ f1 = []
+ f2 = []
+ "#,
+ )
+ .file(
+ "d2/src/lib.rs",
+ r#"
+ #[cfg(feature = "f1")] pub fn f1() {}
+ #[cfg(feature = "f2")] pub fn f2() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] d2 v0.0.1 ([CWD]/d2)
+[CHECKING] d1 v0.0.1 ([CWD]/d1)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn many_features_no_rebuilds() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies.a]
+ path = "a"
+ features = ["fall"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ authors = []
+
+ [features]
+ ftest = []
+ ftest2 = []
+ fall = ["ftest", "ftest2"]
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] a v0.1.0 ([CWD]/a)
+[CHECKING] b v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.root().move_into_the_past();
+
+ p.cargo("check -v")
+ .with_stderr(
+ "\
+[FRESH] a v0.1.0 ([..]/a)
+[FRESH] b v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+// Tests that all cmd lines work with `--features ""`
+#[cargo_test]
+fn empty_features() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ p.cargo("check --features").arg("").run();
+}
+
+// Tests that a package feature can transitively enable a feature of a dependency (`bar/baz`).
+#[cargo_test]
+fn transitive_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ foo = ["bar/baz"]
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() { bar::baz(); }")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ baz = []
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"#[cfg(feature = "baz")] pub fn baz() {}"#,
+ )
+ .build();
+
+ p.cargo("check --features foo").run();
+}
+
+#[cargo_test]
+fn everything_in_the_lockfile() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ f1 = ["d1/f1"]
+ f2 = ["d2"]
+
+ [dependencies.d1]
+ path = "d1"
+ [dependencies.d2]
+ path = "d2"
+ optional = true
+ [dependencies.d3]
+ path = "d3"
+ optional = true
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ f1 = []
+ "#,
+ )
+ .file("d1/src/lib.rs", "")
+ .file("d2/Cargo.toml", &basic_manifest("d2", "0.0.2"))
+ .file("d2/src/lib.rs", "")
+ .file(
+ "d3/Cargo.toml",
+ r#"
+ [package]
+ name = "d3"
+ version = "0.0.3"
+ authors = []
+
+ [features]
+ f3 = []
+ "#,
+ )
+ .file("d3/src/lib.rs", "")
+ .build();
+
+ p.cargo("fetch").run();
+ let lockfile = p.read_lockfile();
+ assert!(
+ lockfile.contains(r#"name = "d1""#),
+ "d1 not found\n{}",
+ lockfile
+ );
+ assert!(
+ lockfile.contains(r#"name = "d2""#),
+ "d2 not found\n{}",
+ lockfile
+ );
+ assert!(
+ lockfile.contains(r#"name = "d3""#),
+ "d3 not found\n{}",
+ lockfile
+ );
+}
+
+#[cargo_test]
+fn no_rebuild_when_frobbing_default_feature() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ b = { path = "b" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ a = { path = "../a", features = ["f1"], default-features = false }
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ authors = []
+
+ [features]
+ default = ["f1"]
+ f1 = []
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+ p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn unions_work_with_no_default_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ b = { path = "b" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate a; pub fn foo() { a::a(); }")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ a = { path = "../a", features = [], default-features = false }
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ authors = []
+
+ [features]
+ default = ["f1"]
+ f1 = []
+ "#,
+ )
+ .file("a/src/lib.rs", r#"#[cfg(feature = "f1")] pub fn a() {}"#)
+ .build();
+
+ p.cargo("check").run();
+ p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn optional_and_dev_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = { path = "foo", optional = true }
+ [dev-dependencies]
+ foo = { path = "foo" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] test v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn activating_feature_activates_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = { path = "foo", optional = true }
+
+ [features]
+ a = ["foo/a"]
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate foo; pub fn bar() { foo::bar(); }",
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [features]
+ a = []
+ "#,
+ )
+ .file("foo/src/lib.rs", r#"#[cfg(feature = "a")] pub fn bar() {}"#)
+ .build();
+
+ p.cargo("check --features a -v").run();
+}
+
+#[cargo_test]
+fn dep_feature_in_cmd_line() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.derived]
+ path = "derived"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ extern crate derived;
+ fn main() { derived::test(); }
+ "#,
+ )
+ .file(
+ "derived/Cargo.toml",
+ r#"
+ [package]
+ name = "derived"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+
+ [features]
+ default = []
+ derived-feat = ["bar/some-feat"]
+ "#,
+ )
+ .file("derived/src/lib.rs", "extern crate bar; pub use bar::test;")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ some-feat = []
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ #[cfg(feature = "some-feat")]
+ pub fn test() { print!("test"); }
+ "#,
+ )
+ .build();
+
+ // The foo project requires that feature "some-feat" in "bar" is enabled.
+ // Building without any features enabled should fail:
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]unresolved import `bar::test`")
+ .run();
+
+ // We should be able to enable the feature "derived-feat", which enables "some-feat",
+    // on the command line. With the feature enabled, the build should succeed:
+ p.cargo("check --features derived/derived-feat").run();
+
+ // Trying to enable features of transitive dependencies is an error
+ p.cargo("check --features bar/some-feat")
+ .with_status(101)
+ .with_stderr("error: package `foo v0.0.1 ([..])` does not have a dependency named `bar`")
+ .run();
+
+ // Hierarchical feature specification should still be disallowed
+ p.cargo("check --features derived/bar/some-feat")
+ .with_status(101)
+ .with_stderr("[ERROR] multiple slashes in feature `derived/bar/some-feat` is not allowed")
+ .run();
+}
+
+#[cargo_test]
+fn all_features_flag_enables_all_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ foo = []
+ bar = []
+
+ [dependencies.baz]
+ path = "baz"
+ optional = true
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(feature = "foo")]
+ pub fn foo() {}
+
+ #[cfg(feature = "bar")]
+ pub fn bar() {
+ extern crate baz;
+ baz::baz();
+ }
+
+ fn main() {
+ foo();
+ bar();
+ }
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check --all-features").run();
+}
+
+#[cargo_test]
+fn many_cli_features_comma_delimited() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ optional = true
+
+ [dependencies.baz]
+ path = "baz"
+ optional = true
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate bar;
+ #[allow(unused_extern_crates)]
+ extern crate baz;
+ fn main() {}
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check --features bar,baz")
+ .with_stderr(
+ "\
+[CHECKING] ba[..] v0.0.1 ([CWD]/ba[..])
+[CHECKING] ba[..] v0.0.1 ([CWD]/ba[..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn many_cli_features_comma_and_space_delimited() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ optional = true
+
+ [dependencies.baz]
+ path = "baz"
+ optional = true
+
+ [dependencies.bam]
+ path = "bam"
+ optional = true
+
+ [dependencies.bap]
+ path = "bap"
+ optional = true
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate bar;
+ #[allow(unused_extern_crates)]
+ extern crate baz;
+ #[allow(unused_extern_crates)]
+ extern crate bam;
+ #[allow(unused_extern_crates)]
+ extern crate bap;
+ fn main() {}
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .file("bam/Cargo.toml", &basic_manifest("bam", "0.0.1"))
+ .file("bam/src/lib.rs", "pub fn bam() {}")
+ .file("bap/Cargo.toml", &basic_manifest("bap", "0.0.1"))
+ .file("bap/src/lib.rs", "pub fn bap() {}")
+ .build();
+
+ p.cargo("check --features")
+ .arg("bar,baz bam bap")
+ .with_stderr(
+ "\
+[CHECKING] ba[..] v0.0.1 ([CWD]/ba[..])
+[CHECKING] ba[..] v0.0.1 ([CWD]/ba[..])
+[CHECKING] ba[..] v0.0.1 ([CWD]/ba[..])
+[CHECKING] ba[..] v0.0.1 ([CWD]/ba[..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn only_dep_is_optional() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ foo = ['bar']
+
+ [dependencies]
+ bar = { version = "0.1", optional = true }
+
+ [dev-dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn all_features_all_crates() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [workspace]
+ members = ['bar']
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ foo = []
+ "#,
+ )
+ .file("bar/src/main.rs", "#[cfg(feature = \"foo\")] fn main() {}")
+ .build();
+
+ p.cargo("check --all-features --workspace").run();
+}
+
+#[cargo_test]
+fn feature_off_dylib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ crate-type = ["dylib"]
+
+ [features]
+ f1 = []
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn hello() -> &'static str {
+ if cfg!(feature = "f1") {
+ "f1"
+ } else {
+ "no f1"
+ }
+ }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+
+ [dependencies]
+ foo = { path = ".." }
+ "#,
+ )
+ .file(
+ "bar/src/main.rs",
+ r#"
+ extern crate foo;
+
+ fn main() {
+ assert_eq!(foo::hello(), "no f1");
+ }
+ "#,
+ )
+ .build();
+
+ // Build the dylib with `f1` feature.
+ p.cargo("check --features f1").run();
+ // Check that building without `f1` uses a dylib without `f1`.
+ p.cargo("run -p bar").run();
+}
+
+#[cargo_test]
+fn warn_if_default_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ optional = true
+
+ [features]
+ default-features = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ r#"
+[WARNING] `default-features = [".."]` was found in [features]. Did you mean to use `default = [".."]`?
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+ "#.trim(),
+ ).run();
+}
+
+#[cargo_test]
+fn no_feature_for_non_optional_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(not(feature = "bar"))]
+ fn main() {
+ }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+ "#,
+ )
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("check --features bar/a").run();
+}
+
+#[cargo_test]
+fn features_option_given_twice() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+ b = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(all(feature = "a", feature = "b"))]
+ fn main() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("check --features a --features b").run();
+}
+
+#[cargo_test]
+fn multi_multi_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+ b = []
+ c = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(all(feature = "a", feature = "b", feature = "c"))]
+ fn main() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("check --features a --features").arg("b c").run();
+}
+
+#[cargo_test]
+fn cli_parse_ok() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(feature = "a")]
+ fn main() {
+ assert_eq!(std::env::args().nth(1).unwrap(), "b");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run --features a b").run();
+}
+
+#[cargo_test]
+fn all_features_virtual_ws() {
+ // What happens with `--all-features` in the root of a virtual workspace.
+ // Some of this behavior is a little strange (member dependencies also
+    // have all features enabled; one might expect `f4` to be disabled).
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ b = {path="../b", optional=true}
+
+ [features]
+ default = ["f1"]
+ f1 = []
+ f2 = []
+ "#,
+ )
+ .file(
+ "a/src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(feature="f1") {
+ println!("f1");
+ }
+ if cfg!(feature="f2") {
+ println!("f2");
+ }
+ #[cfg(feature="b")]
+ b::f();
+ }
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [features]
+ default = ["f3"]
+ f3 = []
+ f4 = []
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
+ pub fn f() {
+ if cfg!(feature="f3") {
+ println!("f3");
+ }
+ if cfg!(feature="f4") {
+ println!("f4");
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run").with_stdout("f1\n").run();
+ p.cargo("run --all-features")
+ .with_stdout("f1\nf2\nf3\nf4\n")
+ .run();
+ // In `a`, it behaves differently. :(
+ p.cargo("run --all-features")
+ .cwd("a")
+ .with_stdout("f1\nf2\nf3\n")
+ .run();
+}
+
+#[cargo_test]
+fn slash_optional_enables() {
+ // --features dep/feat will enable `dep` and set its feature.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = {path="dep", optional=true}
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(not(feature="dep"))]
+ compile_error!("dep not set");
+ "#,
+ )
+ .file(
+ "dep/Cargo.toml",
+ r#"
+ [package]
+ name = "dep"
+ version = "0.1.0"
+
+ [features]
+ feat = []
+ "#,
+ )
+ .file(
+ "dep/src/lib.rs",
+ r#"
+ #[cfg(not(feature="feat"))]
+ compile_error!("feat not set");
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]dep not set[..]")
+ .run();
+
+ p.cargo("check --features dep/feat").run();
+}
+
+#[cargo_test]
+fn registry_summary_order_doesnt_matter() {
+ // Checks for an issue where the resolver depended on the order of entries
+ // in the registry summary. If there was a non-optional dev-dependency
+ // that appeared before an optional normal dependency, then the resolver
+    // would not activate the optional dependency with the `pkg/featname`
+    // feature syntax.
+ Package::new("dep", "0.1.0")
+ .feature("feat1", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(feature="feat1")]
+ pub fn work() {
+ println!("it works");
+ }
+ "#,
+ )
+ .publish();
+ Package::new("bar", "0.1.0")
+ .feature("bar_feat", &["dep/feat1"])
+ .add_dep(Dependency::new("dep", "0.1.0").dev())
+ .add_dep(Dependency::new("dep", "0.1.0").optional(true))
+ .file(
+ "src/lib.rs",
+ r#"
+ // This will fail to compile without `dep` optional dep activated.
+ extern crate dep;
+
+ pub fn doit() {
+ dep::work();
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ bar = { version="0.1", features = ["bar_feat"] }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ bar::doit();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[DOWNLOADED] [..]
+[COMPILING] dep v0.1.0
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+ )
+ .with_stdout("it works")
+ .run();
+}
+
+#[cargo_test]
+fn nonexistent_required_features() {
+ Package::new("required_dependency", "0.1.0")
+ .feature("simple", &[])
+ .publish();
+ Package::new("optional_dependency", "0.2.0")
+ .feature("optional", &[])
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [features]
+ existing = []
+ fancy = ["optional_dependency"]
+ [dependencies]
+ required_dependency = { version = "0.1", optional = false}
+ optional_dependency = { version = "0.2", optional = true}
+ [[example]]
+ name = "ololo"
+ required-features = ["not_present",
+ "existing",
+ "fancy",
+ "required_dependency/not_existing",
+ "required_dependency/simple",
+ "optional_dependency/optional",
+ "not_specified_dependency/some_feature"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("examples/ololo.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check --examples")
+ .with_stderr_contains(
+ "\
+[WARNING] invalid feature `not_present` in required-features of target `ololo`: \
+ `not_present` is not present in [features] section
+[WARNING] invalid feature `required_dependency/not_existing` in required-features \
+ of target `ololo`: feature `not_existing` does not exist in package \
+ `required_dependency v0.1.0`
+[WARNING] invalid feature `not_specified_dependency/some_feature` in required-features \
+ of target `ololo`: dependency `not_specified_dependency` does not exist
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_feature_names_warning() {
+ // Warnings for more restricted feature syntax.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ # Some valid, but unusual names, shouldn't warn.
+ "c++17" = []
+ "128bit" = []
+ "_foo" = []
+ "feat-name" = []
+ "feat_name" = []
+ "foo.bar" = []
+
+ # Invalid names.
+ "+foo" = []
+ "-foo" = []
+ ".foo" = []
+ "foo:bar" = []
+ "foo?" = []
+ "?foo" = []
+ "ⒶⒷⒸ" = []
+ "a¼" = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Unfortunately the warnings are duplicated due to the Summary being
+ // loaded twice (once in the Workspace, and once in PackageRegistry) and
+ // Cargo does not have a de-duplication system. This should probably be
+ // OK, since I'm not expecting this to affect anyone.
+ p.cargo("check")
+ .with_stderr("\
+[WARNING] invalid character `+` in feature `+foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`)
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, and please leave a comment if this will be a problem for your project.
+[WARNING] invalid character `-` in feature `-foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`)
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, and please leave a comment if this will be a problem for your project.
+[WARNING] invalid character `.` in feature `.foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`)
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, and please leave a comment if this will be a problem for your project.
+[WARNING] invalid character `?` in feature `?foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`)
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, and please leave a comment if this will be a problem for your project.
+[WARNING] invalid character `¼` in feature `a¼` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters)
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, and please leave a comment if this will be a problem for your project.
+[WARNING] invalid character `:` in feature `foo:bar` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters)
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, and please leave a comment if this will be a problem for your project.
+[WARNING] invalid character `?` in feature `foo?` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters)
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, and please leave a comment if this will be a problem for your project.
+[WARNING] invalid character `Ⓐ` in feature `ⒶⒷⒸ` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`)
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, and please leave a comment if this will be a problem for your project.
+[WARNING] invalid character `Ⓑ` in feature `ⒶⒷⒸ` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters)
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, and please leave a comment if this will be a problem for your project.
+[WARNING] invalid character `Ⓒ` in feature `ⒶⒷⒸ` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters)
+This was previously accepted but is being phased out; it will become a hard error in a future release.
+For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, and please leave a comment if this will be a problem for your project.
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+")
+ .run();
+}
+
+#[cargo_test]
+fn invalid_feature_names_error() {
+ // Errors for more restricted feature syntax.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ "foo/bar" = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ feature named `foo/bar` is not allowed to contain slashes
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn default_features_conflicting_warning() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ a = { path = "a", features = ["f1"], default-features = false, default_features = false }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ authors = []
+
+ [features]
+ default = ["f1"]
+ f1 = []
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr_contains(
+"[WARNING] conflicting between `default-features` and `default_features` in the `a` dependency.\n
+ `default_features` is ignored and not recommended for use in the future"
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/features2.rs b/src/tools/cargo/tests/testsuite/features2.rs
new file mode 100644
index 000000000..494c83f1e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/features2.rs
@@ -0,0 +1,2553 @@
+//! Tests for the new feature resolver.
+
+use cargo_test_support::cross_compile::{self, alternate};
+use cargo_test_support::install::cargo_home;
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::publish::validate_crate_contents;
+use cargo_test_support::registry::{Dependency, Package};
+use cargo_test_support::{basic_manifest, cargo_process, project, rustc_host, Project};
+use std::fs::File;
+
+/// Switches Cargo.toml to use `resolver = "2"`.
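+///
+/// For illustration only (hypothetical manifest): a file that begins with
+///
+/// ```toml
+/// [package]
+/// name = "foo"
+/// ```
+///
+/// ends up as
+///
+/// ```toml
+/// [package]
+/// resolver = "2"
+/// name = "foo"
+/// ```
+///
+/// because the setting is inserted directly after the `[package]` (or
+/// `[workspace]`) section header.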
+pub fn switch_to_resolver_2(p: &Project) {
+ let mut manifest = p.read_file("Cargo.toml");
+ if manifest.contains("resolver =") {
+ panic!("did not expect manifest to already contain a resolver setting");
+ }
+ if let Some(index) = manifest.find("[workspace]\n") {
+ manifest.insert_str(index + 12, "resolver = \"2\"\n");
+ } else if let Some(index) = manifest.find("[package]\n") {
+ manifest.insert_str(index + 10, "resolver = \"2\"\n");
+ } else {
+ panic!("expected [package] or [workspace] in manifest");
+ }
+ p.change_file("Cargo.toml", &manifest);
+}
+
+#[cargo_test]
+fn inactivate_targets() {
+ // Basic test of `itarget`. A shared dependency where an inactive [target]
+ // changes the features.
+ Package::new("common", "1.0.0")
+ .feature("f1", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(feature = "f1")]
+ compile_error!("f1 should not activate");
+ "#,
+ )
+ .publish();
+
+ Package::new("bar", "1.0.0")
+ .add_dep(
+ Dependency::new("common", "1.0")
+ .target("cfg(whatever)")
+ .enable_features(&["f1"]),
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ common = "1.0"
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
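+    // `bar` only asks for `common/f1` behind a `cfg(whatever)` target that
+    // never matches; the old resolver unifies the feature onto the shared
+    // `common` anyway, hitting its compile_error, while resolver "2" does not.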
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]f1 should not activate[..]")
+ .run();
+
+ switch_to_resolver_2(&p);
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn inactive_target_optional() {
+    // Activating optional [target] dependencies for an inactive target.
+ Package::new("common", "1.0.0")
+ .feature("f1", &[])
+ .feature("f2", &[])
+ .feature("f3", &[])
+ .feature("f4", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn f() {
+ if cfg!(feature="f1") { println!("f1"); }
+ if cfg!(feature="f2") { println!("f2"); }
+ if cfg!(feature="f3") { println!("f3"); }
+ if cfg!(feature="f4") { println!("f4"); }
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ common = "1.0"
+
+ [target.'cfg(whatever)'.dependencies]
+ dep1 = {path='dep1', optional=true}
+ dep2 = {path='dep2', optional=true, features=["f3"]}
+ common = {version="1.0", optional=true, features=["f4"]}
+
+ [features]
+ foo1 = ["dep1/f2"]
+ foo2 = ["dep2"]
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(feature="foo1") { println!("foo1"); }
+ if cfg!(feature="foo2") { println!("foo2"); }
+ if cfg!(feature="dep1") { println!("dep1"); }
+ if cfg!(feature="dep2") { println!("dep2"); }
+ if cfg!(feature="common") { println!("common"); }
+ common::f();
+ }
+ "#,
+ )
+ .file(
+ "dep1/Cargo.toml",
+ r#"
+ [package]
+ name = "dep1"
+ version = "0.1.0"
+
+ [dependencies]
+ common = {version="1.0", features=["f1"]}
+
+ [features]
+ f2 = ["common/f2"]
+ "#,
+ )
+ .file(
+ "dep1/src/lib.rs",
+ r#"compile_error!("dep1 should not build");"#,
+ )
+ .file(
+ "dep2/Cargo.toml",
+ r#"
+ [package]
+ name = "dep2"
+ version = "0.1.0"
+
+ [dependencies]
+ common = "1.0"
+
+ [features]
+ f3 = ["common/f3"]
+ "#,
+ )
+ .file(
+ "dep2/src/lib.rs",
+ r#"compile_error!("dep2 should not build");"#,
+ )
+ .build();
+
+ p.cargo("run --all-features")
+ .with_stdout("foo1\nfoo2\ndep1\ndep2\ncommon\nf1\nf2\nf3\nf4\n")
+ .run();
+ p.cargo("run --features dep1")
+ .with_stdout("dep1\nf1\n")
+ .run();
+ p.cargo("run --features foo1")
+ .with_stdout("foo1\ndep1\nf1\nf2\n")
+ .run();
+ p.cargo("run --features dep2")
+ .with_stdout("dep2\nf3\n")
+ .run();
+ p.cargo("run --features common")
+ .with_stdout("common\nf4\n")
+ .run();
+
+ switch_to_resolver_2(&p);
+ p.cargo("run --all-features")
+ .with_stdout("foo1\nfoo2\ndep1\ndep2\ncommon")
+ .run();
+ p.cargo("run --features dep1").with_stdout("dep1\n").run();
+ p.cargo("run --features foo1").with_stdout("foo1\n").run();
+ p.cargo("run --features dep2").with_stdout("dep2\n").run();
+ p.cargo("run --features common").with_stdout("common").run();
+}
+
+#[cargo_test]
+fn itarget_proc_macro() {
+ // itarget inside a proc-macro while cross-compiling
+ if cross_compile::disabled() {
+ return;
+ }
+ Package::new("hostdep", "1.0.0").publish();
+ Package::new("pm", "1.0.0")
+ .proc_macro(true)
+ .target_dep("hostdep", "1.0", rustc_host())
+ .file("src/lib.rs", "extern crate hostdep;")
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ pm = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Old behavior
+ p.cargo("check").run();
+ p.cargo("check --target").arg(alternate()).run();
+
+ // New behavior
+ switch_to_resolver_2(&p);
+ p.cargo("check").run();
+ p.cargo("check --target").arg(alternate()).run();
+ // For good measure, just make sure things don't break.
+ p.cargo("check --target").arg(alternate()).run();
+}
+
+#[cargo_test]
+fn decouple_host_deps() {
+ // Basic test for `host_dep` decouple.
+ Package::new("common", "1.0.0")
+ .feature("f1", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(feature = "f1")]
+ pub fn foo() {}
+ #[cfg(not(feature = "f1"))]
+ pub fn bar() {}
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [build-dependencies]
+ common = {version="1.0", features=["f1"]}
+
+ [dependencies]
+ common = "1.0"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ use common::foo;
+ fn main() {}
+ "#,
+ )
+ .file("src/lib.rs", "use common::bar;")
+ .build();
+
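+    // Reading aid: the build script imports `common::foo` (which needs `f1`)
+    // while the library imports `common::bar` (which needs `f1` to be off), so
+    // unifying the two uses under the old resolver breaks one of the imports.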
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]unresolved import `common::bar`[..]")
+ .run();
+
+ switch_to_resolver_2(&p);
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn decouple_host_deps_nested() {
+ // `host_dep` decouple of transitive dependencies.
+ Package::new("common", "1.0.0")
+ .feature("f1", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(feature = "f1")]
+ pub fn foo() {}
+ #[cfg(not(feature = "f1"))]
+ pub fn bar() {}
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [build-dependencies]
+ bdep = {path="bdep"}
+
+ [dependencies]
+ common = "1.0"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ use bdep::foo;
+ fn main() {}
+ "#,
+ )
+ .file("src/lib.rs", "use common::bar;")
+ .file(
+ "bdep/Cargo.toml",
+ r#"
+ [package]
+ name = "bdep"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ common = {version="1.0", features=["f1"]}
+ "#,
+ )
+ .file("bdep/src/lib.rs", "pub use common::foo;")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]unresolved import `common::bar`[..]")
+ .run();
+
+ switch_to_resolver_2(&p);
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn decouple_dev_deps() {
+ // Basic test for `dev_dep` decouple.
+ Package::new("common", "1.0.0")
+ .feature("f1", &[])
+ .feature("f2", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ // const ensures it uses the correct dependency at *build time*
+ // compared to *link time*.
+ #[cfg(all(feature="f1", not(feature="f2")))]
+ pub const X: u32 = 1;
+
+ #[cfg(all(feature="f1", feature="f2"))]
+ pub const X: u32 = 3;
+
+ pub fn foo() -> u32 {
+ let mut res = 0;
+ if cfg!(feature = "f1") {
+ res |= 1;
+ }
+ if cfg!(feature = "f2") {
+ res |= 2;
+ }
+ res
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ common = {version="1.0", features=["f1"]}
+
+ [dev-dependencies]
+ common = {version="1.0", features=["f2"]}
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let expected: u32 = std::env::args().skip(1).next().unwrap().parse().unwrap();
+ assert_eq!(foo::foo(), expected);
+ assert_eq!(foo::build_time(), expected);
+ assert_eq!(common::foo(), expected);
+ assert_eq!(common::X, expected);
+ }
+
+ #[test]
+ fn test_bin() {
+ assert_eq!(foo::foo(), 3);
+ assert_eq!(common::foo(), 3);
+ assert_eq!(common::X, 3);
+ assert_eq!(foo::build_time(), 3);
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() -> u32 {
+ common::foo()
+ }
+
+ pub fn build_time() -> u32 {
+ common::X
+ }
+
+ #[test]
+ fn test_lib() {
+ assert_eq!(foo(), 3);
+ assert_eq!(common::foo(), 3);
+ assert_eq!(common::X, 3);
+ }
+ "#,
+ )
+ .file(
+ "tests/t1.rs",
+ r#"
+ #[test]
+ fn test_t1() {
+ assert_eq!(foo::foo(), 3);
+ assert_eq!(common::foo(), 3);
+ assert_eq!(common::X, 3);
+ assert_eq!(foo::build_time(), 3);
+ }
+
+ #[test]
+ fn test_main() {
+ // Features are unified for main when run with `cargo test`,
+ // even with the new resolver.
+ let s = std::process::Command::new("target/debug/foo")
+ .arg("3")
+ .status().unwrap();
+ assert!(s.success());
+ }
+ "#,
+ )
+ .build();
+
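+    // Reading aid for the expected values below: in `common`, `f1` contributes
+    // 1 and `f2` contributes 2, so 3 means the normal and dev-dependency
+    // features were unified, while 1 means only `f1` is active.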
+ // Old behavior
+ p.cargo("run 3").run();
+ p.cargo("test").run();
+
+ // New behavior
+ switch_to_resolver_2(&p);
+ p.cargo("run 1").run();
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn build_script_runtime_features() {
+ // Check that the CARGO_FEATURE_* environment variable is set correctly.
+ //
+ // This has a common dependency between build/normal/dev-deps, and it
+ // queries which features it was built with in different circumstances.
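+    //
+    // Reading aid: `common` (below) encodes its active features as bits,
+    // normal = 1, dev = 2, build = 4, so an expectation of 7 means all three
+    // were unified.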
+ Package::new("common", "1.0.0")
+ .feature("normal", &[])
+ .feature("dev", &[])
+ .feature("build", &[])
+ .file(
+ "build.rs",
+ r#"
+ fn is_set(name: &str) -> bool {
+ std::env::var(name) == Ok("1".to_string())
+ }
+
+ fn main() {
+ let mut res = 0;
+ if is_set("CARGO_FEATURE_NORMAL") {
+ res |= 1;
+ }
+ if is_set("CARGO_FEATURE_DEV") {
+ res |= 2;
+ }
+ if is_set("CARGO_FEATURE_BUILD") {
+ res |= 4;
+ }
+ println!("cargo:rustc-cfg=RunCustomBuild=\"{}\"", res);
+
+ let mut res = 0;
+ if cfg!(feature = "normal") {
+ res |= 1;
+ }
+ if cfg!(feature = "dev") {
+ res |= 2;
+ }
+ if cfg!(feature = "build") {
+ res |= 4;
+ }
+ println!("cargo:rustc-cfg=CustomBuild=\"{}\"", res);
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() -> u32 {
+ let mut res = 0;
+ if cfg!(feature = "normal") {
+ res |= 1;
+ }
+ if cfg!(feature = "dev") {
+ res |= 2;
+ }
+ if cfg!(feature = "build") {
+ res |= 4;
+ }
+ res
+ }
+
+ pub fn build_time() -> u32 {
+ #[cfg(RunCustomBuild="1")] return 1;
+ #[cfg(RunCustomBuild="3")] return 3;
+ #[cfg(RunCustomBuild="4")] return 4;
+ #[cfg(RunCustomBuild="5")] return 5;
+ #[cfg(RunCustomBuild="7")] return 7;
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [build-dependencies]
+ common = {version="1.0", features=["build"]}
+
+ [dependencies]
+ common = {version="1.0", features=["normal"]}
+
+ [dev-dependencies]
+ common = {version="1.0", features=["dev"]}
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ assert_eq!(common::foo(), common::build_time());
+ println!("cargo:rustc-cfg=from_build=\"{}\"", common::foo());
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() -> u32 {
+ common::foo()
+ }
+
+ pub fn build_time() -> u32 {
+ common::build_time()
+ }
+
+ #[test]
+ fn test_lib() {
+ assert_eq!(common::foo(), common::build_time());
+ assert_eq!(common::foo(),
+ std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap());
+ }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ assert_eq!(common::foo(), common::build_time());
+ assert_eq!(common::foo(),
+ std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap());
+ }
+
+ #[test]
+ fn test_bin() {
+ assert_eq!(common::foo(), common::build_time());
+ assert_eq!(common::foo(),
+ std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap());
+ }
+ "#,
+ )
+ .file(
+ "tests/t1.rs",
+ r#"
+ #[test]
+ fn test_t1() {
+ assert_eq!(common::foo(), common::build_time());
+ assert_eq!(common::foo(),
+ std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap());
+ }
+
+ #[test]
+ fn test_main() {
+ // Features are unified for main when run with `cargo test`,
+ // even with the new resolver.
+ let s = std::process::Command::new("target/debug/foo")
+ .status().unwrap();
+ assert!(s.success());
+ }
+ "#,
+ )
+ .build();
+
+ // Old way, unifies all 3.
+ p.cargo("run").env("CARGO_FEATURE_EXPECT", "7").run();
+ p.cargo("test").env("CARGO_FEATURE_EXPECT", "7").run();
+
+ // New behavior.
+ switch_to_resolver_2(&p);
+
+ // normal + build unify
+ p.cargo("run").env("CARGO_FEATURE_EXPECT", "1").run();
+
+ // dev_deps are still unified with `cargo test`
+ p.cargo("test").env("CARGO_FEATURE_EXPECT", "3").run();
+}
+
+#[cargo_test]
+fn cyclical_dev_dep() {
+ // Check how a cyclical dev-dependency will work.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [features]
+ dev = []
+
+ [dev-dependencies]
+ foo = { path = '.', features = ["dev"] }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn assert_dev(enabled: bool) {
+ assert_eq!(enabled, cfg!(feature="dev"));
+ }
+
+ #[test]
+ fn test_in_lib() {
+ assert_dev(true);
+ }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let expected: bool = std::env::args().skip(1).next().unwrap().parse().unwrap();
+ foo::assert_dev(expected);
+ }
+ "#,
+ )
+ .file(
+ "tests/t1.rs",
+ r#"
+ #[test]
+ fn integration_links() {
+ foo::assert_dev(true);
+ // The lib linked with main.rs will also be unified.
+ let s = std::process::Command::new("target/debug/foo")
+ .arg("true")
+ .status().unwrap();
+ assert!(s.success());
+ }
+ "#,
+ )
+ .build();
+
+ // Old way unifies features.
+ p.cargo("run true").run();
+ // dev feature should always be enabled in tests.
+ p.cargo("test").run();
+
+ // New behavior.
+ switch_to_resolver_2(&p);
+ // Should decouple main.
+ p.cargo("run false").run();
+
+ // And this should be no different.
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn all_feature_opts() {
+ // All feature options at once.
+ Package::new("common", "1.0.0")
+ .feature("normal", &[])
+ .feature("build", &[])
+ .feature("dev", &[])
+ .feature("itarget", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn feats() -> u32 {
+ let mut res = 0;
+ if cfg!(feature="normal") { res |= 1; }
+ if cfg!(feature="build") { res |= 2; }
+ if cfg!(feature="dev") { res |= 4; }
+ if cfg!(feature="itarget") { res |= 8; }
+ res
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ common = {version = "1.0", features=["normal"]}
+
+ [dev-dependencies]
+ common = {version = "1.0", features=["dev"]}
+
+ [build-dependencies]
+ common = {version = "1.0", features=["build"]}
+
+ [target.'cfg(whatever)'.dependencies]
+ common = {version = "1.0", features=["itarget"]}
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ expect();
+ }
+
+ fn expect() {
+ let expected: u32 = std::env::var("EXPECTED_FEATS").unwrap().parse().unwrap();
+ assert_eq!(expected, common::feats());
+ }
+
+ #[test]
+ fn from_test() {
+ expect();
+ }
+ "#,
+ )
+ .build();
+
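+    // Reading aid: `common::feats()` encodes normal = 1, build = 2, dev = 4,
+    // itarget = 8, so 15 means everything was unified.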
+ p.cargo("run").env("EXPECTED_FEATS", "15").run();
+ p.cargo("test").env("EXPECTED_FEATS", "15").run();
+
+ // New behavior.
+ switch_to_resolver_2(&p);
+ // Only normal feature.
+ p.cargo("run").env("EXPECTED_FEATS", "1").run();
+
+ // only normal+dev
+ p.cargo("test").env("EXPECTED_FEATS", "5").run();
+}
+
+#[cargo_test]
+fn required_features_host_dep() {
+ // Check that required-features handles build-dependencies correctly.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [[bin]]
+ name = "x"
+ required-features = ["bdep/f1"]
+
+ [build-dependencies]
+ bdep = {path="bdep"}
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file(
+ "src/bin/x.rs",
+ r#"
+ fn main() {}
+ "#,
+ )
+ .file(
+ "bdep/Cargo.toml",
+ r#"
+ [package]
+ name = "bdep"
+ version = "0.1.0"
+
+ [features]
+ f1 = []
+ "#,
+ )
+ .file("bdep/src/lib.rs", "")
+ .build();
+
+ p.cargo("run")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] target `x` in package `foo` requires the features: `bdep/f1`
+Consider enabling them by passing, e.g., `--features=\"bdep/f1\"`
+",
+ )
+ .run();
+
+ // New behavior.
+ switch_to_resolver_2(&p);
+ p.cargo("run --features bdep/f1").run();
+}
+
+#[cargo_test]
+fn disabled_shared_host_dep() {
+ // Check for situation where an optional dep of a shared dep is enabled in
+ // a normal dependency, but disabled in an optional one. The unit tree is:
+ // foo
+ // ├── foo build.rs
+ // | └── common (BUILD dependency, NO FEATURES)
+ // └── common (Normal dependency, default features)
+ // └── somedep
+ Package::new("somedep", "1.0.0")
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn f() { println!("hello from somedep"); }
+ "#,
+ )
+ .publish();
+ Package::new("common", "1.0.0")
+ .feature("default", &["somedep"])
+ .add_dep(Dependency::new("somedep", "1.0").optional(true))
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn check_somedep() -> bool {
+ #[cfg(feature="somedep")]
+ {
+ extern crate somedep;
+ somedep::f();
+ true
+ }
+ #[cfg(not(feature="somedep"))]
+ {
+ println!("no somedep");
+ false
+ }
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ edition = "2018"
+ resolver = "2"
+
+ [dependencies]
+ common = "1.0"
+
+ [build-dependencies]
+ common = {version = "1.0", default-features = false}
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "fn main() { assert!(common::check_somedep()); }",
+ )
+ .file(
+ "build.rs",
+ "fn main() { assert!(!common::check_somedep()); }",
+ )
+ .build();
+
+ p.cargo("run -v").with_stdout("hello from somedep").run();
+}
+
+#[cargo_test]
+fn required_features_inactive_dep() {
+    // required-features with an inactive dep.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "2"
+
+ [target.'cfg(whatever)'.dependencies]
+ bar = {path="bar"}
+
+ [[bin]]
+ name = "foo"
+ required-features = ["feat1"]
+
+ [features]
+ feat1 = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+
+ p.cargo("check --features=feat1")
+ .with_stderr("[CHECKING] foo[..]\n[FINISHED] [..]")
+ .run();
+}
+
+#[cargo_test]
+fn decouple_proc_macro() {
+ // proc macro features are not shared
+ Package::new("common", "1.0.0")
+ .feature("somefeat", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ pub const fn foo() -> bool { cfg!(feature="somefeat") }
+ #[cfg(feature="somefeat")]
+ pub const FEAT_ONLY_CONST: bool = true;
+ "#,
+ )
+ .publish();
+ Package::new("pm", "1.0.0")
+ .proc_macro(true)
+ .feature_dep("common", "1.0", &["somefeat"])
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate proc_macro;
+ extern crate common;
+ #[proc_macro]
+ pub fn foo(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+ assert!(common::foo());
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ edition = "2018"
+
+ [dependencies]
+ pm = "1.0"
+ common = "1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ //! Test with docs.
+ //!
+ //! ```rust
+ //! pm::foo!{}
+ //! fn main() {
+ //! let expected = std::env::var_os("TEST_EXPECTS_ENABLED").is_some();
+ //! assert_eq!(expected, common::foo(), "common is wrong");
+ //! }
+ //! ```
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ pm::foo!{}
+ fn main() {
+ println!("it is {}", common::foo());
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .env("TEST_EXPECTS_ENABLED", "1")
+ .with_stdout("it is true")
+ .run();
+ // Make sure the test is fallible.
+ p.cargo("test --doc")
+ .with_status(101)
+ .with_stdout_contains("[..]common is wrong[..]")
+ .run();
+ p.cargo("test --doc").env("TEST_EXPECTS_ENABLED", "1").run();
+ p.cargo("doc").run();
+ assert!(p
+ .build_dir()
+ .join("doc/common/constant.FEAT_ONLY_CONST.html")
+ .exists());
+ // cargo doc should clean in-between runs, but it doesn't, and leaves stale files.
+ // https://github.com/rust-lang/cargo/issues/6783 (same for removed items)
+ p.build_dir().join("doc").rm_rf();
+
+ // New behavior.
+ switch_to_resolver_2(&p);
+ p.cargo("run").with_stdout("it is false").run();
+
+ p.cargo("test --doc").run();
+ p.cargo("doc").run();
+ assert!(!p
+ .build_dir()
+ .join("doc/common/constant.FEAT_ONLY_CONST.html")
+ .exists());
+}
+
+#[cargo_test]
+fn proc_macro_ws() {
+    // Checks for a bug with a proc-macro in a workspace with a dependency (it shouldn't panic).
+ //
+ // Note, debuginfo is explicitly requested here to preserve the intent of this non-regression
+ // test: that will disable the debuginfo build dependencies optimization. Otherwise, it would
+ // initially trigger when the crates are built independently, but rebuild them with debuginfo
+ // when it sees the shared build/runtime dependency when checking the complete workspace.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "pm"]
+ resolver = "2"
+
+ [profile.dev.build-override]
+ debug = true
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ feat1 = []
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ foo = { path = "../foo", features=["feat1"] }
+ "#,
+ )
+ .file("pm/src/lib.rs", "")
+ .build();
+
+ p.cargo("check -p pm -v")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]--cfg[..]feat1[..]")
+ .run();
+    // It may be surprising that `foo` doesn't get built separately. That is
+ // because pm might have other units (binaries, tests, etc.), and so the
+ // feature resolver must assume that normal deps get unified with it. This
+ // is related to the bigger issue where the features selected in a
+ // workspace depend on which packages are selected.
+ p.cargo("check --workspace -v")
+ .with_stderr(
+ "\
+[FRESH] foo v0.1.0 [..]
+[FRESH] pm v0.1.0 [..]
+[FINISHED] dev [..]
+",
+ )
+ .run();
+ // Selecting just foo will build without unification.
+ p.cargo("check -p foo -v")
+ // Make sure `foo` is built without feat1
+ .with_stderr_line_without(&["[RUNNING] `rustc --crate-name foo"], &["--cfg[..]feat1"])
+ .run();
+}
+
+#[cargo_test]
+fn has_dev_dep_for_test() {
+ // Check for a bug where the decision on whether or not "dev dependencies"
+ // should be used did not consider `check --profile=test`.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dev-dependencies]
+ dep = { path = 'dep', features = ['f1'] }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[test]
+ fn t1() {
+ dep::f();
+ }
+ "#,
+ )
+ .file(
+ "dep/Cargo.toml",
+ r#"
+ [package]
+ name = "dep"
+ version = "0.1.0"
+
+ [features]
+ f1 = []
+ "#,
+ )
+ .file(
+ "dep/src/lib.rs",
+ r#"
+ #[cfg(feature = "f1")]
+ pub fn f() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("check -v")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.1.0 [..]
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("check -v --profile=test")
+ .with_stderr(
+ "\
+[CHECKING] dep v0.1.0 [..]
+[RUNNING] `rustc --crate-name dep [..]
+[CHECKING] foo v0.1.0 [..]
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // New resolver should not be any different.
+ switch_to_resolver_2(&p);
+ p.cargo("check -v --profile=test")
+ .with_stderr(
+ "\
+[FRESH] dep [..]
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_dep_activated() {
+ // Build dependencies always match the host for [target.*.build-dependencies].
+ if cross_compile::disabled() {
+ return;
+ }
+ Package::new("somedep", "1.0.0")
+ .file("src/lib.rs", "")
+ .publish();
+ Package::new("targetdep", "1.0.0").publish();
+ Package::new("hostdep", "1.0.0")
+ // Check that "for_host" is sticky.
+ .target_dep("somedep", "1.0", rustc_host())
+ .feature("feat1", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate somedep;
+
+ #[cfg(not(feature="feat1"))]
+ compile_error!{"feat1 missing"}
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ # This should never be selected.
+ [target.'{}'.build-dependencies]
+ targetdep = "1.0"
+
+ [target.'{}'.build-dependencies]
+ hostdep = {{version="1.0", features=["feat1"]}}
+ "#,
+ alternate(),
+ rustc_host()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").run();
+ p.cargo("check --target").arg(alternate()).run();
+
+ // New behavior.
+ switch_to_resolver_2(&p);
+ p.cargo("check").run();
+ p.cargo("check --target").arg(alternate()).run();
+}
+
+#[cargo_test]
+fn resolver_bad_setting() {
+ // Unknown setting in `resolver`
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+ `resolver` setting `foo` is not valid, valid options are \"1\" or \"2\"
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn resolver_original() {
+ // resolver="1" uses old unification behavior.
+ Package::new("common", "1.0.0")
+ .feature("f1", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(feature = "f1")]
+ compile_error!("f1 should not activate");
+ "#,
+ )
+ .publish();
+
+ Package::new("bar", "1.0.0")
+ .add_dep(
+ Dependency::new("common", "1.0")
+ .target("cfg(whatever)")
+ .enable_features(&["f1"]),
+ )
+ .publish();
+
+ let manifest = |resolver| {
+ format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "{}"
+
+ [dependencies]
+ common = "1.0"
+ bar = "1.0"
+ "#,
+ resolver
+ )
+ };
+
+ let p = project()
+ .file("Cargo.toml", &manifest("1"))
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]f1 should not activate[..]")
+ .run();
+
+ p.change_file("Cargo.toml", &manifest("2"));
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn resolver_not_both() {
+ // Can't specify resolver in both workspace and package.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ resolver = "2"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "2"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+ cannot specify `resolver` field in both `[workspace]` and `[package]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn resolver_ws_member() {
+ // Can't specify `resolver` in a ws member.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ resolver = "2"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+warning: resolver for the non root package will be ignored, specify resolver at the workspace root:
+package: [..]/foo/a/Cargo.toml
+workspace: [..]/foo/Cargo.toml
+[CHECKING] a v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn resolver_ws_root_and_member() {
+ // Check when specified in both ws root and member.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a"]
+ resolver = "2"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ resolver = "2"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ // Ignores if they are the same.
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] a v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn resolver_enables_new_features() {
+ // resolver="2" enables all the things.
+ Package::new("common", "1.0.0")
+ .feature("normal", &[])
+ .feature("build", &[])
+ .feature("dev", &[])
+ .feature("itarget", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn feats() -> u32 {
+ let mut res = 0;
+ if cfg!(feature="normal") { res |= 1; }
+ if cfg!(feature="build") { res |= 2; }
+ if cfg!(feature="dev") { res |= 4; }
+ if cfg!(feature="itarget") { res |= 8; }
+ res
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ resolver = "2"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ common = {version = "1.0", features=["normal"]}
+
+ [dev-dependencies]
+ common = {version = "1.0", features=["dev"]}
+
+ [build-dependencies]
+ common = {version = "1.0", features=["build"]}
+
+ [target.'cfg(whatever)'.dependencies]
+ common = {version = "1.0", features=["itarget"]}
+ "#,
+ )
+ .file(
+ "a/src/main.rs",
+ r#"
+ fn main() {
+ expect();
+ }
+
+ fn expect() {
+ let expected: u32 = std::env::var("EXPECTED_FEATS").unwrap().parse().unwrap();
+ assert_eq!(expected, common::feats());
+ }
+
+ #[test]
+ fn from_test() {
+ expect();
+ }
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [features]
+ ping = []
+ "#,
+ )
+ .file(
+ "b/src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(feature="ping") {
+ println!("pong");
+ }
+ }
+ "#,
+ )
+ .build();
+
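+    // Reading aid: `common::feats()` encodes normal = 1, build = 2, dev = 4,
+    // itarget = 8.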
+ // Only normal.
+ p.cargo("run --bin a")
+ .env("EXPECTED_FEATS", "1")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] common [..]
+[COMPILING] common v1.0.0
+[COMPILING] a v0.1.0 [..]
+[FINISHED] [..]
+[RUNNING] `target/debug/a[EXE]`
+",
+ )
+ .run();
+
+ // only normal+dev
+ p.cargo("test").cwd("a").env("EXPECTED_FEATS", "5").run();
+
+ // Can specify features of packages from a different directory.
+ p.cargo("run -p b --features=ping")
+ .cwd("a")
+ .with_stdout("pong")
+ .run();
+}
+
+#[cargo_test]
+fn install_resolve_behavior() {
+ // install honors the resolver behavior.
+ Package::new("common", "1.0.0")
+ .feature("f1", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(feature = "f1")]
+ compile_error!("f1 should not activate");
+ "#,
+ )
+ .publish();
+
+ Package::new("bar", "1.0.0").dep("common", "1.0").publish();
+
+ Package::new("foo", "1.0.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ resolver = "2"
+
+ [target.'cfg(whatever)'.dependencies]
+ common = {version="1.0", features=["f1"]}
+
+ [dependencies]
+ bar = "1.0"
+
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .publish();
+
+ cargo_process("install foo").run();
+}
+
+#[cargo_test]
+fn package_includes_resolve_behavior() {
+ // `cargo package` will inherit the correct resolve behavior.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a"]
+ resolver = "2"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ authors = ["Zzz"]
+ description = "foo"
+ license = "MIT"
+ homepage = "https://example.com/"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("package").cwd("a").run();
+
+ let rewritten_toml = format!(
+ r#"{}
+[package]
+name = "a"
+version = "0.1.0"
+authors = ["Zzz"]
+description = "foo"
+homepage = "https://example.com/"
+license = "MIT"
+resolver = "2"
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ );
+
+ let f = File::open(&p.root().join("target/package/a-0.1.0.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "a-0.1.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
+ &[("Cargo.toml", &rewritten_toml)],
+ );
+}
+
+#[cargo_test]
+fn tree_all() {
+ // `cargo tree` with the new feature resolver.
+ Package::new("log", "0.4.8").feature("serde", &[]).publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "2"
+
+ [target.'cfg(whatever)'.dependencies]
+ log = {version="*", features=["serde"]}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("tree --target=all")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── log v0.4.8
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn shared_dep_same_but_dependencies() {
+    // Checks for a nondeterminism bug. This scenario creates a shared
+ // dependency `dep` which needs to be built twice (once as normal, and
+ // once as a build dep). However, in both cases the flags to `dep` are the
+ // same, the only difference is what it links to. The normal dependency
+ // should link to `subdep` with the feature disabled, and the build
+ // dependency should link to it with it enabled. Crucially, the `--target`
+    // flag must not be specified; otherwise Unit.kind would differ, the
+    // collision would be avoided, and the bug would not manifest.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bin1", "bin2"]
+ resolver = "2"
+ "#,
+ )
+ .file(
+ "bin1/Cargo.toml",
+ r#"
+ [package]
+ name = "bin1"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = { path = "../dep" }
+ "#,
+ )
+ .file("bin1/src/main.rs", "fn main() { dep::feat_func(); }")
+ .file(
+ "bin2/Cargo.toml",
+ r#"
+ [package]
+ name = "bin2"
+ version = "0.1.0"
+
+ [build-dependencies]
+ dep = { path = "../dep" }
+ subdep = { path = "../subdep", features = ["feat"] }
+ "#,
+ )
+ .file("bin2/build.rs", "fn main() { dep::feat_func(); }")
+ .file("bin2/src/main.rs", "fn main() {}")
+ .file(
+ "dep/Cargo.toml",
+ r#"
+ [package]
+ name = "dep"
+ version = "0.1.0"
+
+ [dependencies]
+ subdep = { path = "../subdep" }
+ "#,
+ )
+ .file(
+ "dep/src/lib.rs",
+ "pub fn feat_func() { subdep::feat_func(); }",
+ )
+ .file(
+ "subdep/Cargo.toml",
+ r#"
+ [package]
+ name = "subdep"
+ version = "0.1.0"
+
+ [features]
+ feat = []
+ "#,
+ )
+ .file(
+ "subdep/src/lib.rs",
+ r#"
+ pub fn feat_func() {
+ #[cfg(feature = "feat")] println!("cargo:warning=feat: enabled");
+ #[cfg(not(feature = "feat"))] println!("cargo:warning=feat: not enabled");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build --bin bin1 --bin bin2")
+ // unordered because bin1 and bin2 build at the same time
+ .with_stderr_unordered(
+ "\
+[COMPILING] subdep [..]
+[COMPILING] dep [..]
+[COMPILING] bin2 [..]
+[COMPILING] bin1 [..]
+warning: feat: enabled
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.process(p.bin("bin1"))
+ .with_stdout("cargo:warning=feat: not enabled")
+ .run();
+
+ // Make sure everything stays cached.
+ p.cargo("build -v --bin bin1 --bin bin2")
+ .with_stderr_unordered(
+ "\
+[FRESH] subdep [..]
+[FRESH] dep [..]
+[FRESH] bin1 [..]
+warning: feat: enabled
+[FRESH] bin2 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_proc_macro() {
+ // Running `cargo test` on a proc-macro, with a shared dependency that has
+ // different features.
+ //
+ // There was a bug where `shared` was built twice (once with feature "B"
+ // and once without), and both copies linked into the unit test. This
+ // would cause a type failure when used in an intermediate dependency
+ // (the-macro-support).
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "runtime"
+ version = "0.1.0"
+ resolver = "2"
+
+ [dependencies]
+ the-macro = { path = "the-macro", features = ['a'] }
+ [build-dependencies]
+ shared = { path = "shared", features = ['b'] }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "the-macro/Cargo.toml",
+ r#"
+ [package]
+ name = "the-macro"
+ version = "0.1.0"
+ [lib]
+ proc-macro = true
+ test = false
+ [dependencies]
+ the-macro-support = { path = "../the-macro-support" }
+ shared = { path = "../shared" }
+ [dev-dependencies]
+ runtime = { path = ".." }
+ [features]
+ a = []
+ "#,
+ )
+ .file(
+ "the-macro/src/lib.rs",
+ "
+ fn _test() {
+ the_macro_support::foo(shared::Foo);
+ }
+ ",
+ )
+ .file(
+ "the-macro-support/Cargo.toml",
+ r#"
+ [package]
+ name = "the-macro-support"
+ version = "0.1.0"
+ [dependencies]
+ shared = { path = "../shared" }
+ "#,
+ )
+ .file(
+ "the-macro-support/src/lib.rs",
+ "
+ pub fn foo(_: shared::Foo) {}
+ ",
+ )
+ .file(
+ "shared/Cargo.toml",
+ r#"
+ [package]
+ name = "shared"
+ version = "0.1.0"
+ [features]
+ b = []
+ "#,
+ )
+ .file("shared/src/lib.rs", "pub struct Foo;")
+ .build();
+ p.cargo("test --manifest-path the-macro/Cargo.toml").run();
+}
+
+#[cargo_test]
+fn doc_optional() {
+ // Checks for a bug where `cargo doc` was failing with an inactive target
+ // that enables a shared optional dependency.
+ Package::new("spin", "1.0.0").publish();
+ Package::new("bar", "1.0.0")
+ .add_dep(Dependency::new("spin", "1.0").optional(true))
+ .publish();
+ // The enabler package enables the `spin` feature, which we don't want.
+ Package::new("enabler", "1.0.0")
+ .feature_dep("bar", "1.0", &["spin"])
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "2"
+
+ [target.'cfg(whatever)'.dependencies]
+ enabler = "1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("doc")
+ .with_stderr_unordered(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] spin v1.0.0 [..]
+[DOWNLOADED] bar v1.0.0 [..]
+[DOCUMENTING] bar v1.0.0
+[CHECKING] bar v1.0.0
+[DOCUMENTING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn minimal_download() {
+ // Various checks that it only downloads the minimum set of dependencies
+ // needed in various situations.
+ //
+ // This checks several permutations of the different
+ // host_dep/dev_dep/itarget settings. These 3 are planned to be stabilized
+ // together, so there isn't much need to be concerned about how the behave
+    // together, so there isn't much need to be concerned about how they behave
+ // independently. Specifically:
+ //
+ // * `cargo test` forces dev_dep decoupling to be disabled.
+ // * `cargo tree --target=all` forces ignore_inactive_targets off and decouple_dev_deps off.
+ // * `cargo tree --target=all -e normal` forces ignore_inactive_targets off.
+ //
+ // However, `cargo tree` is a little weird because it downloads everything
+    // anyway.
+ //
+ // So to summarize the different permutations:
+ //
+ // dev_dep | host_dep | itarget | Notes
+ // --------|----------|---------|----------------------------
+ // | | | -Zfeatures=compare (new resolver should behave same as old)
+ // | | ✓ | This scenario should not happen.
+ // | ✓ | | `cargo tree --target=all -Zfeatures=all`†
+ // | ✓ | ✓ | `cargo test`
+ // ✓ | | | This scenario should not happen.
+ // ✓ | | ✓ | This scenario should not happen.
+ // ✓ | ✓ | | `cargo tree --target=all -e normal -Z features=all`†
+ // ✓ | ✓ | ✓ | A normal build.
+ //
+ // † — However, `cargo tree` downloads everything.
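+    //
+    // (Roughly, per the v2 feature resolver: `host_dep` resolves features of
+    // build-dependencies and proc-macros separately from normal dependencies,
+    // `dev_dep` avoids enabling dev-dependency features unless dev targets are
+    // built, and `itarget` ignores dependencies of targets that are not enabled.)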
+ Package::new("normal", "1.0.0").publish();
+ Package::new("normal_pm", "1.0.0").publish();
+ Package::new("normal_opt", "1.0.0").publish();
+ Package::new("dev_dep", "1.0.0").publish();
+ Package::new("dev_dep_pm", "1.0.0").publish();
+ Package::new("build_dep", "1.0.0").publish();
+ Package::new("build_dep_pm", "1.0.0").publish();
+ Package::new("build_dep_opt", "1.0.0").publish();
+
+ Package::new("itarget_normal", "1.0.0").publish();
+ Package::new("itarget_normal_pm", "1.0.0").publish();
+ Package::new("itarget_dev_dep", "1.0.0").publish();
+ Package::new("itarget_dev_dep_pm", "1.0.0").publish();
+ Package::new("itarget_build_dep", "1.0.0").publish();
+ Package::new("itarget_build_dep_pm", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ normal = "1.0"
+ normal_pm = "1.0"
+ normal_opt = { version = "1.0", optional = true }
+
+ [dev-dependencies]
+ dev_dep = "1.0"
+ dev_dep_pm = "1.0"
+
+ [build-dependencies]
+ build_dep = "1.0"
+ build_dep_pm = "1.0"
+ build_dep_opt = { version = "1.0", optional = true }
+
+ [target.'cfg(whatever)'.dependencies]
+ itarget_normal = "1.0"
+ itarget_normal_pm = "1.0"
+
+ [target.'cfg(whatever)'.dev-dependencies]
+ itarget_dev_dep = "1.0"
+ itarget_dev_dep_pm = "1.0"
+
+ [target.'cfg(whatever)'.build-dependencies]
+ itarget_build_dep = "1.0"
+ itarget_build_dep_pm = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+
+ let clear = || {
+ cargo_home().join("registry/cache").rm_rf();
+ cargo_home().join("registry/src").rm_rf();
+ p.build_dir().rm_rf();
+ };
+
+ // none
+ // Should be the same as `-Zfeatures=all`
+ p.cargo("check -Zfeatures=compare")
+ .masquerade_as_nightly_cargo(&["features=compare"])
+ .with_stderr_unordered(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] normal_pm v1.0.0 [..]
+[DOWNLOADED] normal v1.0.0 [..]
+[DOWNLOADED] build_dep_pm v1.0.0 [..]
+[DOWNLOADED] build_dep v1.0.0 [..]
+[COMPILING] build_dep v1.0.0
+[COMPILING] build_dep_pm v1.0.0
+[CHECKING] normal_pm v1.0.0
+[CHECKING] normal v1.0.0
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ clear();
+
+ // New behavior
+ switch_to_resolver_2(&p);
+
+ // all
+ p.cargo("check")
+ .with_stderr_unordered(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] normal_pm v1.0.0 [..]
+[DOWNLOADED] normal v1.0.0 [..]
+[DOWNLOADED] build_dep_pm v1.0.0 [..]
+[DOWNLOADED] build_dep v1.0.0 [..]
+[COMPILING] build_dep v1.0.0
+[COMPILING] build_dep_pm v1.0.0
+[CHECKING] normal v1.0.0
+[CHECKING] normal_pm v1.0.0
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ clear();
+
+ // This disables decouple_dev_deps.
+ p.cargo("test --no-run")
+ .with_stderr_unordered(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] normal_pm v1.0.0 [..]
+[DOWNLOADED] normal v1.0.0 [..]
+[DOWNLOADED] dev_dep_pm v1.0.0 [..]
+[DOWNLOADED] dev_dep v1.0.0 [..]
+[DOWNLOADED] build_dep_pm v1.0.0 [..]
+[DOWNLOADED] build_dep v1.0.0 [..]
+[COMPILING] build_dep v1.0.0
+[COMPILING] build_dep_pm v1.0.0
+[COMPILING] normal_pm v1.0.0
+[COMPILING] normal v1.0.0
+[COMPILING] dev_dep_pm v1.0.0
+[COMPILING] dev_dep v1.0.0
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+[EXECUTABLE] unittests src/lib.rs (target/debug/deps/foo-[..][EXE])
+",
+ )
+ .run();
+ clear();
+
+ // This disables itarget, but leaves decouple_dev_deps enabled.
+ p.cargo("tree -e normal --target=all")
+ .with_stderr_unordered(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] normal v1.0.0 [..]
+[DOWNLOADED] normal_pm v1.0.0 [..]
+[DOWNLOADED] build_dep v1.0.0 [..]
+[DOWNLOADED] build_dep_pm v1.0.0 [..]
+[DOWNLOADED] itarget_normal v1.0.0 [..]
+[DOWNLOADED] itarget_normal_pm v1.0.0 [..]
+[DOWNLOADED] itarget_build_dep v1.0.0 [..]
+[DOWNLOADED] itarget_build_dep_pm v1.0.0 [..]
+",
+ )
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo)
+├── itarget_normal v1.0.0
+├── itarget_normal_pm v1.0.0
+├── normal v1.0.0
+└── normal_pm v1.0.0
+",
+ )
+ .run();
+ clear();
+
+ // This disables itarget and decouple_dev_deps.
+ p.cargo("tree --target=all")
+ .with_stderr_unordered(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] normal_pm v1.0.0 [..]
+[DOWNLOADED] normal v1.0.0 [..]
+[DOWNLOADED] itarget_normal_pm v1.0.0 [..]
+[DOWNLOADED] itarget_normal v1.0.0 [..]
+[DOWNLOADED] itarget_dev_dep_pm v1.0.0 [..]
+[DOWNLOADED] itarget_dev_dep v1.0.0 [..]
+[DOWNLOADED] itarget_build_dep_pm v1.0.0 [..]
+[DOWNLOADED] itarget_build_dep v1.0.0 [..]
+[DOWNLOADED] dev_dep_pm v1.0.0 [..]
+[DOWNLOADED] dev_dep v1.0.0 [..]
+[DOWNLOADED] build_dep_pm v1.0.0 [..]
+[DOWNLOADED] build_dep v1.0.0 [..]
+",
+ )
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo)
+├── itarget_normal v1.0.0
+├── itarget_normal_pm v1.0.0
+├── normal v1.0.0
+└── normal_pm v1.0.0
+[build-dependencies]
+├── build_dep v1.0.0
+├── build_dep_pm v1.0.0
+├── itarget_build_dep v1.0.0
+└── itarget_build_dep_pm v1.0.0
+[dev-dependencies]
+├── dev_dep v1.0.0
+├── dev_dep_pm v1.0.0
+├── itarget_dev_dep v1.0.0
+└── itarget_dev_dep_pm v1.0.0
+",
+ )
+ .run();
+ clear();
+}
+
+#[cargo_test]
+fn pm_with_int_shared() {
+ // This is a somewhat complex scenario of a proc-macro in a workspace with
+ // an integration test where the proc-macro is used for other things, and
+ // *everything* is built at once (`--workspace --all-targets
+ // --all-features`). There was a bug where the UnitFor settings were being
+ // incorrectly computed based on the order that the graph was traversed.
+ //
+ // There are some uncertainties about exactly how proc-macros should behave
+    // with `--workspace`; see https://github.com/rust-lang/cargo/issues/8312.
+ //
+ // This uses a const-eval hack to do compile-time feature checking.
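+    // (The `[(); 0 - !(cond) as usize]` array type fails to compile whenever
+    // `cond` is false, because the length underflows during const evaluation.)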
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "pm", "shared"]
+ resolver = "2"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ pm = { path = "../pm" }
+ shared = { path = "../shared", features = ["norm-feat"] }
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
+ // foo->shared always has both features set
+ const _CHECK: [(); 0] = [(); 0-!(shared::FEATS==3) as usize];
+ "#,
+ )
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ shared = { path = "../shared", features = ["host-feat"] }
+ "#,
+ )
+ .file(
+ "pm/src/lib.rs",
+ r#"
+ // pm->shared always has just host
+ const _CHECK: [(); 0] = [(); 0-!(shared::FEATS==1) as usize];
+ "#,
+ )
+ .file(
+ "pm/tests/pm_test.rs",
+ r#"
+ // integration test gets both set
+ const _CHECK: [(); 0] = [(); 0-!(shared::FEATS==3) as usize];
+ "#,
+ )
+ .file(
+ "shared/Cargo.toml",
+ r#"
+ [package]
+ name = "shared"
+ version = "0.1.0"
+
+ [features]
+ norm-feat = []
+ host-feat = []
+ "#,
+ )
+ .file(
+ "shared/src/lib.rs",
+ r#"
+ pub const FEATS: u32 = {
+ if cfg!(feature="norm-feat") && cfg!(feature="host-feat") {
+ 3
+ } else if cfg!(feature="norm-feat") {
+ 2
+ } else if cfg!(feature="host-feat") {
+ 1
+ } else {
+ 0
+ }
+ };
+ "#,
+ )
+ .build();
+
+ p.cargo("build --workspace --all-targets --all-features -v")
+ .with_stderr_unordered(
+ "\
+[COMPILING] shared [..]
+[RUNNING] `rustc --crate-name shared [..]--crate-type lib [..]
+[RUNNING] `rustc --crate-name shared [..]--crate-type lib [..]
+[RUNNING] `rustc --crate-name shared [..]--test[..]
+[COMPILING] pm [..]
+[RUNNING] `rustc --crate-name pm [..]--crate-type proc-macro[..]
+[RUNNING] `rustc --crate-name pm [..]--test[..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..]--test[..]
+[RUNNING] `rustc --crate-name pm_test [..]--test[..]
+[RUNNING] `rustc --crate-name foo [..]--crate-type lib[..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // And again, should stay fresh.
+ p.cargo("build --workspace --all-targets --all-features -v")
+ .with_stderr_unordered(
+ "\
+[FRESH] shared [..]
+[FRESH] pm [..]
+[FRESH] foo [..]
+[FINISHED] [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_proc_macro() {
+ // Checks for a bug when documenting a proc-macro with a dependency. The
+ // doc unit builder was not carrying the "for host" setting through the
+ // dependencies, and the `pm-dep` dependency was causing a panic because
+ // it was looking for target features instead of host features.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "2"
+
+ [dependencies]
+ pm = { path = "pm" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ pm-dep = { path = "../pm-dep" }
+ "#,
+ )
+ .file("pm/src/lib.rs", "")
+ .file("pm-dep/Cargo.toml", &basic_manifest("pm-dep", "0.1.0"))
+ .file("pm-dep/src/lib.rs", "")
+ .build();
+
+ // Unfortunately this cannot check the output because what it prints is
+ // nondeterministic. Sometimes it says "Compiling pm-dep" and sometimes
+ // "Checking pm-dep". This is because it is both building it and checking
+ // it in parallel (building so it can build the proc-macro, and checking
+ // so rustdoc can load it).
+ p.cargo("doc").run();
+}
+
+#[cargo_test]
+fn edition_2021_default_2() {
+ // edition = 2021 defaults to v2 resolver.
+ Package::new("common", "1.0.0")
+ .feature("f1", &[])
+ .file("src/lib.rs", "")
+ .publish();
+
+ Package::new("bar", "1.0.0")
+ .add_dep(
+ Dependency::new("common", "1.0")
+ .target("cfg(whatever)")
+ .enable_features(&["f1"]),
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ common = "1.0"
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // First without edition.
+ p.cargo("tree -f")
+ .arg("{p} feats:{f}")
+ .with_stdout(
+ "\
+foo v0.1.0 [..]
+├── bar v1.0.0 feats:
+└── common v1.0.0 feats:f1
+",
+ )
+ .run();
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["edition2021"]
+
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2021"
+
+ [dependencies]
+ common = "1.0"
+ bar = "1.0"
+ "#,
+ );
+
+ // Importantly, this does not include `f1` on `common`.
+ p.cargo("tree -f")
+ .arg("{p} feats:{f}")
+ .with_stdout(
+ "\
+foo v0.1.0 [..]
+├── bar v1.0.0 feats:
+└── common v1.0.0 feats:
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn all_features_merges_with_features() {
+ Package::new("dep", "0.1.0")
+ .feature("feat1", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(feature="feat1")]
+ pub fn work() {
+ println!("it works");
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [features]
+ a = []
+
+ [dependencies]
+ dep = "0.1"
+
+ [[example]]
+ name = "ex"
+ required-features = ["a", "dep/feat1"]
+ "#,
+ )
+ .file(
+ "examples/ex.rs",
+ r#"
+ fn main() {
+ dep::work();
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("run --example ex --all-features --features dep/feat1")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[COMPILING] dep v0.1.0
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+[RUNNING] `target/debug/examples/ex[EXE]`
+",
+ )
+ .with_stdout("it works")
+ .run();
+
+ switch_to_resolver_2(&p);
+
+ p.cargo("run --example ex --all-features --features dep/feat1")
+ .with_stderr(
+ "\
+[FINISHED] [..]
+[RUNNING] `target/debug/examples/ex[EXE]`
+",
+ )
+ .with_stdout("it works")
+ .run();
+}
+
+#[cargo_test]
+fn dep_with_optional_host_deps_activated() {
+ // To prevent regression like rust-lang/cargo#11330
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2021"
+
+ [dependencies]
+ serde = { path = "serde", features = ["derive", "build"] }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "serde/Cargo.toml",
+ r#"
+ [package]
+ name = "serde"
+ version = "0.1.0"
+ edition = "2021"
+
+ [dependencies]
+ serde_derive = { path = "../serde_derive", optional = true }
+
+ [build-dependencies]
+ serde_build = { path = "../serde_build", optional = true }
+
+ [features]
+ derive = ["dep:serde_derive"]
+ build = ["dep:serde_build"]
+ "#,
+ )
+ .file("serde/src/lib.rs", "")
+ .file("serde/build.rs", "fn main() {}")
+ .file(
+ "serde_derive/Cargo.toml",
+ r#"
+ [package]
+ name = "serde_derive"
+ version = "0.1.0"
+ edition = "2021"
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file("serde_derive/src/lib.rs", "")
+ .file(
+ "serde_build/Cargo.toml",
+ &basic_manifest("serde_build", "0.1.0"),
+ )
+ .file("serde_build/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[COMPILING] serde_build v0.1.0 ([CWD]/serde_build)
+[COMPILING] serde_derive v0.1.0 ([CWD]/serde_derive)
+[COMPILING] serde v0.1.0 ([CWD]/serde)
+[CHECKING] foo v0.1.0 ([CWD])
+[FINISHED] dev [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/features_namespaced.rs b/src/tools/cargo/tests/testsuite/features_namespaced.rs
new file mode 100644
index 000000000..8ec2fc2e3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/features_namespaced.rs
@@ -0,0 +1,1215 @@
+//! Tests for namespaced features.
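+//!
+//! With namespaced features, a feature can enable an optional dependency
+//! with `dep:name` (e.g. `feat = ["dep:bar"]`), which does not create an
+//! implicit feature named after the dependency.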
+
+use super::features2::switch_to_resolver_2;
+use cargo_test_support::registry::{Dependency, Package, RegistryBuilder};
+use cargo_test_support::{project, publish};
+
+#[cargo_test]
+fn dependency_with_crate_syntax() {
+ // Registry dependency uses dep: syntax.
+ Package::new("baz", "1.0.0").publish();
+ Package::new("bar", "1.0.0")
+ .add_dep(Dependency::new("baz", "1.0").optional(true))
+ .feature("feat", &["dep:baz"])
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {version="1.0", features=["feat"]}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[DOWNLOADED] [..]
+[CHECKING] baz v1.0.0
+[CHECKING] bar v1.0.0
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn namespaced_invalid_feature() {
+ // Specifies a feature that doesn't exist.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ bar = ["baz"]
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ feature `bar` includes `baz` which is neither a dependency nor another feature
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn namespaced_invalid_dependency() {
+ // Specifies a dep:name that doesn't exist.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [features]
+ bar = ["dep:baz"]
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ feature `bar` includes `dep:baz`, but `baz` is not listed as a dependency
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn namespaced_non_optional_dependency() {
+ // Specifies a dep:name for a dependency that is not optional.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [features]
+ bar = ["dep:baz"]
+
+ [dependencies]
+ baz = "0.1"
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+    p.cargo("check")
+        .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ feature `bar` includes `dep:baz`, but `baz` is not an optional dependency
+ A non-optional dependency of the same name is defined; consider adding `optional = true` to its definition.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn namespaced_implicit_feature() {
+ // Backwards-compatible with old syntax.
+ Package::new("baz", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [features]
+ bar = ["baz"]
+
+ [dependencies]
+ baz = { version = "0.1", optional = true }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[CHECKING] foo v0.0.1 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("check --features baz")
+ .with_stderr(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.1.0 [..]
+[CHECKING] baz v0.1.0
+[CHECKING] foo v0.0.1 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn namespaced_shadowed_dep() {
+ // An optional dependency is not listed in the features table, and its
+ // implicit feature is overridden.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [features]
+ baz = []
+
+ [dependencies]
+ baz = { version = "0.1", optional = true }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ optional dependency `baz` is not included in any feature
+ Make sure that `dep:baz` is included in one of features in the [features] table.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn namespaced_shadowed_non_optional() {
+ // Able to specify a feature with the same name as a required dependency.
+ Package::new("baz", "0.1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [features]
+ baz = []
+
+ [dependencies]
+ baz = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn namespaced_implicit_non_optional() {
+    // Includes a non-optional dependency in the [features] table.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [features]
+ bar = ["baz"]
+
+ [dependencies]
+ baz = "0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").with_status(101).with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ feature `bar` includes `baz`, but `baz` is not an optional dependency
+ A non-optional dependency of the same name is defined; consider adding `optional = true` to its definition.
+",
+ ).run();
+}
+
+#[cargo_test]
+fn namespaced_same_name() {
+ // Explicitly listing an optional dependency in the [features] table.
+ Package::new("baz", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [features]
+ baz = ["dep:baz"]
+
+ [dependencies]
+ baz = { version = "0.1", optional = true }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(feature="baz") { println!("baz"); }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] [..]
+[RUNNING] [..]
+",
+ )
+ .with_stdout("")
+ .run();
+
+ p.cargo("run --features baz")
+ .with_stderr(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.1.0 [..]
+[COMPILING] baz v0.1.0
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] [..]
+[RUNNING] [..]
+",
+ )
+ .with_stdout("baz")
+ .run();
+}
+
+#[cargo_test]
+fn no_implicit_feature() {
+ // Using `dep:` will not create an implicit feature.
+ Package::new("regex", "1.0.0").publish();
+ Package::new("lazy_static", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ regex = { version = "1.0", optional = true }
+ lazy_static = { version = "1.0", optional = true }
+
+ [features]
+ regex = ["dep:regex", "dep:lazy_static"]
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(feature = "regex") { println!("regex"); }
+ if cfg!(feature = "lazy_static") { println!("lazy_static"); }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+ )
+ .with_stdout("")
+ .run();
+
+ p.cargo("run --features regex")
+ .with_stderr_unordered(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] regex v1.0.0 [..]
+[DOWNLOADED] lazy_static v1.0.0 [..]
+[COMPILING] regex v1.0.0
+[COMPILING] lazy_static v1.0.0
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+ )
+ .with_stdout("regex")
+ .run();
+
+ p.cargo("run --features lazy_static")
+ .with_stderr(
+ "\
+[ERROR] Package `foo v0.1.0 [..]` does not have feature `lazy_static`. \
+It has an optional dependency with that name, but that dependency uses the \"dep:\" \
+syntax in the features table, so it does not have an implicit feature with that name.
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn crate_syntax_bad_name() {
+ // "dep:bar" = []
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version="1.0", optional=true }
+
+ [features]
+ "dep:bar" = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check --features dep:bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at [..]/foo/Cargo.toml`
+
+Caused by:
+ feature named `dep:bar` is not allowed to start with `dep:`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn crate_syntax_in_dep() {
+ // features = ["dep:baz"]
+ Package::new("baz", "1.0.0").publish();
+ Package::new("bar", "1.0.0")
+ .add_dep(Dependency::new("baz", "1.0").optional(true))
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", features = ["dep:baz"] }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ feature `dep:baz` in dependency `bar` is not allowed to use explicit `dep:` syntax
+ If you want to enable [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn crate_syntax_cli() {
+ // --features dep:bar
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", optional=true }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check --features dep:bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] feature `dep:bar` is not allowed to use explicit `dep:` syntax
+",
+ )
+ .run();
+
+ switch_to_resolver_2(&p);
+ p.cargo("check --features dep:bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] feature `dep:bar` is not allowed to use explicit `dep:` syntax
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn crate_required_features() {
+ // required-features = ["dep:bar"]
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", optional=true }
+
+ [[bin]]
+ name = "foo"
+ required-features = ["dep:bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] invalid feature `dep:bar` in required-features of target `foo`: \
+`dep:` prefixed feature values are not allowed in required-features
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn json_exposed() {
+ // Checks that the implicit dep: values are exposed in JSON.
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", optional=true }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("metadata --no-deps")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "name": "foo",
+ "version": "0.1.0",
+ "id": "foo 0.1.0 [..]",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "homepage": null,
+ "documentation": null,
+ "source": null,
+ "dependencies": "{...}",
+ "targets": "{...}",
+ "features": {
+ "bar": ["dep:bar"]
+ },
+ "manifest_path": "[..]foo/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "edition": "2015",
+ "links": null
+ }
+ ],
+ "workspace_members": "{...}",
+ "resolve": null,
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]foo",
+ "metadata": null
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn crate_feature_with_explicit() {
+ // crate_name/feat_name syntax where crate_name already has a feature defined.
+ // NOTE: I don't know if this is actually ideal behavior.
+ Package::new("bar", "1.0.0")
+ .feature("bar_feat", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(not(feature="bar_feat"))]
+ compile_error!("bar_feat is not enabled");
+ "#,
+ )
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version="1.0", optional = true }
+
+ [features]
+ f1 = ["bar/bar_feat"]
+ bar = ["dep:bar", "f2"]
+ f2 = []
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(not(feature="bar"))]
+ compile_error!("bar should be enabled");
+
+ #[cfg(not(feature="f2"))]
+ compile_error!("f2 should be enabled");
+ "#,
+ )
+ .build();
+
+ p.cargo("check --features f1")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.0 [..]
+[CHECKING] bar v1.0.0
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn optional_explicit_without_crate() {
+ // "feat" syntax when there is no implicit "feat" feature because it is
+ // explicitly listed elsewhere.
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", optional = true }
+
+ [features]
+ feat1 = ["dep:bar"]
+ feat2 = ["bar"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at [..]
+
+Caused by:
+ feature `feat2` includes `bar`, but `bar` is an optional dependency without an implicit feature
+ Use `dep:bar` to enable the dependency.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn tree() {
+ Package::new("baz", "1.0.0").publish();
+ Package::new("bar", "1.0.0")
+ .add_dep(Dependency::new("baz", "1.0").optional(true))
+ .feature("feat1", &["dep:baz"])
+ .feature("feat2", &[])
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", features = ["feat1"], optional=true }
+
+ [features]
+ a = ["bar/feat2"]
+ bar = ["dep:bar"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -e features")
+ .with_stdout("foo v0.1.0 ([ROOT]/foo)")
+ .run();
+
+ p.cargo("tree -e features --features a")
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo)
+├── bar feature \"default\"
+│ └── bar v1.0.0
+│ └── baz feature \"default\"
+│ └── baz v1.0.0
+└── bar feature \"feat1\"
+ └── bar v1.0.0 (*)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --features a -i bar")
+ .with_stdout(
+ "\
+bar v1.0.0
+├── bar feature \"default\"
+│ └── foo v0.1.0 ([ROOT]/foo)
+│ ├── foo feature \"a\" (command-line)
+│ ├── foo feature \"bar\"
+│ │ └── foo feature \"a\" (command-line)
+│ └── foo feature \"default\" (command-line)
+├── bar feature \"feat1\"
+│ └── foo v0.1.0 ([ROOT]/foo) (*)
+└── bar feature \"feat2\"
+ └── foo feature \"a\" (command-line)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --features bar")
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo)
+├── bar feature \"default\"
+│ └── bar v1.0.0
+│ └── baz feature \"default\"
+│ └── baz v1.0.0
+└── bar feature \"feat1\"
+ └── bar v1.0.0 (*)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --features bar -i bar")
+ .with_stdout(
+ "\
+bar v1.0.0
+├── bar feature \"default\"
+│ └── foo v0.1.0 ([ROOT]/foo)
+│ ├── foo feature \"bar\" (command-line)
+│ └── foo feature \"default\" (command-line)
+└── bar feature \"feat1\"
+ └── foo v0.1.0 ([ROOT]/foo) (*)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn tree_no_implicit() {
+ // tree without an implicit feature
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", optional=true }
+
+ [features]
+ a = ["dep:bar"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -e features")
+ .with_stdout("foo v0.1.0 ([ROOT]/foo)")
+ .run();
+
+ p.cargo("tree -e features --all-features")
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo)
+└── bar feature \"default\"
+ └── bar v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -e features -i bar --all-features")
+ .with_stdout(
+ "\
+bar v1.0.0
+└── bar feature \"default\"
+ └── foo v0.1.0 ([ROOT]/foo)
+ ├── foo feature \"a\" (command-line)
+ └── foo feature \"default\" (command-line)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_no_implicit() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+
+ // Does not include implicit features or dep: syntax on publish.
+ Package::new("opt-dep1", "1.0.0").publish();
+ Package::new("opt-dep2", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ description = "foo"
+ license = "MIT"
+ homepage = "https://example.com/"
+
+ [dependencies]
+ opt-dep1 = { version = "1.0", optional = true }
+ opt-dep2 = { version = "1.0", optional = true }
+
+ [features]
+ feat = ["opt-dep1"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.1.0 [..]
+[PACKAGED] [..]
+[UPLOADING] foo v0.1.0 [..]
+[UPLOADED] foo v0.1.0 [..]
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.1.0 [..]
+",
+ )
+ .run();
+
+ publish::validate_upload_with_contents(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "normal",
+ "name": "opt-dep1",
+ "optional": true,
+ "target": null,
+ "version_req": "^1.0"
+ },
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "normal",
+ "name": "opt-dep2",
+ "optional": true,
+ "target": null,
+ "version_req": "^1.0"
+ }
+ ],
+ "description": "foo",
+ "documentation": null,
+ "features": {
+ "feat": ["opt-dep1"]
+ },
+ "homepage": "https://example.com/",
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "vers": "0.1.0"
+ }
+ "#,
+ "foo-0.1.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
+ &[(
+ "Cargo.toml",
+ &format!(
+ r#"{}
+[package]
+name = "foo"
+version = "0.1.0"
+description = "foo"
+homepage = "https://example.com/"
+license = "MIT"
+
+[dependencies.opt-dep1]
+version = "1.0"
+optional = true
+
+[dependencies.opt-dep2]
+version = "1.0"
+optional = true
+
+[features]
+feat = ["opt-dep1"]
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ ),
+ )],
+ );
+}
+
+#[cargo_test]
+fn publish() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+
+ // Publish behavior with explicit dep: syntax.
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ description = "foo"
+ license = "MIT"
+ homepage = "https://example.com/"
+
+ [dependencies]
+ bar = { version = "1.0", optional = true }
+
+ [features]
+ feat1 = []
+ feat2 = ["dep:bar"]
+ feat3 = ["feat2"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.1.0 [..]
+[VERIFYING] foo v0.1.0 [..]
+[UPDATING] [..]
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+[PACKAGED] [..]
+[UPLOADING] foo v0.1.0 [..]
+[UPLOADED] foo v0.1.0 [..]
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.1.0 [..]
+",
+ )
+ .run();
+
+ publish::validate_upload_with_contents(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "normal",
+ "name": "bar",
+ "optional": true,
+ "target": null,
+ "version_req": "^1.0"
+ }
+ ],
+ "description": "foo",
+ "documentation": null,
+ "features": {
+ "feat1": [],
+ "feat2": ["dep:bar"],
+ "feat3": ["feat2"]
+ },
+ "homepage": "https://example.com/",
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "vers": "0.1.0"
+ }
+ "#,
+ "foo-0.1.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
+ &[(
+ "Cargo.toml",
+ &format!(
+ r#"{}
+[package]
+name = "foo"
+version = "0.1.0"
+description = "foo"
+homepage = "https://example.com/"
+license = "MIT"
+
+[dependencies.bar]
+version = "1.0"
+optional = true
+
+[features]
+feat1 = []
+feat2 = ["dep:bar"]
+feat3 = ["feat2"]
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ ),
+ )],
+ );
+}
+
+#[cargo_test]
+fn namespaced_feature_together() {
+ // Check for an error when `dep:` is used with `/`
+ Package::new("bar", "1.0.0")
+ .feature("bar-feat", &[])
+ .publish();
+
+ // Non-optional shouldn't have extra err.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+
+ [features]
+ f1 = ["dep:bar/bar-feat"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
+
+Caused by:
+ feature `f1` includes `dep:bar/bar-feat` with both `dep:` and `/`
+ To fix this, remove the `dep:` prefix.
+",
+ )
+ .run();
+
+ // Weak dependency shouldn't have extra err.
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {version = "1.0", optional = true }
+
+ [features]
+ f1 = ["dep:bar?/bar-feat"]
+ "#,
+ );
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
+
+Caused by:
+ feature `f1` includes `dep:bar?/bar-feat` with both `dep:` and `/`
+ To fix this, remove the `dep:` prefix.
+",
+ )
+ .run();
+
+ // If dep: is already specified, shouldn't have extra err.
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {version = "1.0", optional = true }
+
+ [features]
+ f1 = ["dep:bar", "dep:bar/bar-feat"]
+ "#,
+ );
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
+
+Caused by:
+ feature `f1` includes `dep:bar/bar-feat` with both `dep:` and `/`
+ To fix this, remove the `dep:` prefix.
+",
+ )
+ .run();
+
+ // Only when the other 3 cases aren't true should it give some extra help.
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {version = "1.0", optional = true }
+
+ [features]
+ f1 = ["dep:bar/bar-feat"]
+ "#,
+ );
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
+
+Caused by:
+ feature `f1` includes `dep:bar/bar-feat` with both `dep:` and `/`
+ To fix this, remove the `dep:` prefix.
+ If the intent is to avoid creating an implicit feature `bar` for an optional \
+ dependency, then consider replacing this with two values:
+ \"dep:bar\", \"bar/bar-feat\"
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/fetch.rs b/src/tools/cargo/tests/testsuite/fetch.rs
new file mode 100644
index 000000000..f90131a59
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/fetch.rs
@@ -0,0 +1,135 @@
+//! Tests for the `cargo fetch` command.
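+//!
+//! `cargo fetch` downloads a project's dependencies from the network without
+//! compiling them; with `--target`, only that target's dependencies are fetched.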
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::rustc_host;
+use cargo_test_support::{basic_manifest, cross_compile, project};
+
+#[cargo_test]
+fn no_deps() {
+ let p = project()
+ .file("src/main.rs", "mod a; fn main() {}")
+ .file("src/a.rs", "")
+ .build();
+
+ p.cargo("fetch").with_stdout("").run();
+}
+
+#[cargo_test]
+fn fetch_all_platform_dependencies_when_no_target_is_given() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ Package::new("d1", "1.2.3")
+ .file("Cargo.toml", &basic_manifest("d1", "1.2.3"))
+ .file("src/lib.rs", "")
+ .publish();
+
+ Package::new("d2", "0.1.2")
+ .file("Cargo.toml", &basic_manifest("d2", "0.1.2"))
+ .file("src/lib.rs", "")
+ .publish();
+
+ let target = cross_compile::alternate();
+ let host = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [target.{host}.dependencies]
+ d1 = "1.2.3"
+
+ [target.{target}.dependencies]
+ d2 = "0.1.2"
+ "#,
+ host = host,
+ target = target
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("fetch")
+ .with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]")
+ .with_stderr_contains("[DOWNLOADED] d2 v0.1.2 [..]")
+ .run();
+}
+
+#[cargo_test]
+fn fetch_platform_specific_dependencies() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ Package::new("d1", "1.2.3")
+ .file("Cargo.toml", &basic_manifest("d1", "1.2.3"))
+ .file("src/lib.rs", "")
+ .publish();
+
+ Package::new("d2", "0.1.2")
+ .file("Cargo.toml", &basic_manifest("d2", "0.1.2"))
+ .file("src/lib.rs", "")
+ .publish();
+
+ let target = cross_compile::alternate();
+ let host = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [target.{host}.dependencies]
+ d1 = "1.2.3"
+
+ [target.{target}.dependencies]
+ d2 = "0.1.2"
+ "#,
+ host = host,
+ target = target
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("fetch --target")
+ .arg(&host)
+ .with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]")
+ .with_stderr_does_not_contain("[DOWNLOADED] d2 v0.1.2 [..]")
+ .run();
+
+ p.cargo("fetch --target")
+ .arg(&target)
+ .with_stderr_contains("[DOWNLOADED] d2 v0.1.2[..]")
+ .with_stderr_does_not_contain("[DOWNLOADED] d1 v1.2.3 [..]")
+ .run();
+}
+
+#[cargo_test]
+fn fetch_warning() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ misspelled = "wut"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch")
+ .with_stderr("[WARNING] unused manifest key: package.misspelled")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/fix.rs b/src/tools/cargo/tests/testsuite/fix.rs
new file mode 100644
index 000000000..54a021c03
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/fix.rs
@@ -0,0 +1,1855 @@
+//! Tests for the `cargo fix` command.
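+//!
+//! `cargo fix` applies fixes suggested by rustc diagnostics directly to the
+//! source code; with `--edition`, it migrates code to the next edition.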
+
+use cargo::core::Edition;
+use cargo_test_support::compare::assert_match_exact;
+use cargo_test_support::git::{self, init};
+use cargo_test_support::paths::{self, CargoPathExt};
+use cargo_test_support::registry::{Dependency, Package};
+use cargo_test_support::tools;
+use cargo_test_support::{basic_manifest, is_nightly, project};
+
+#[cargo_test]
+fn do_not_fix_broken_builds() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() {
+ let mut x = 3;
+ drop(x);
+ }
+
+ pub fn foo2() {
+ let _x: u32 = "a";
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .with_status(101)
+ .with_stderr_contains("[ERROR] could not compile `foo` (lib) due to previous error")
+ .run();
+ assert!(p.read_file("src/lib.rs").contains("let mut x = 3;"));
+}
+
+#[cargo_test]
+fn fix_broken_if_requested() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ fn foo(a: &u32) -> u32 { a + 1 }
+ pub fn bar() {
+ foo(1);
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs --broken-code")
+ .env("__CARGO_FIX_YOLO", "1")
+ .run();
+}
+
+#[cargo_test]
+fn broken_fixes_backed_out() {
+ // This works as follows:
+ // - Create a `rustc` shim (the "foo" project) which will pretend that the
+ // verification step fails.
+ // - There is an empty build script so `foo` has `OUT_DIR` to track the steps.
+    // - During the first "check", `foo` creates a file in OUT_DIR, and the
+    //   check completes successfully with a warning diagnostic to remove the
+    //   unused `mut`.
+    // - rustfix removes the `mut`.
+    // - During the second "check", which verifies the changes, `foo` swaps out
+    //   the content with something that fails to compile. It creates a second
+    //   file so it won't do anything in the third check.
+ // - cargo fix discovers that the fix failed, and it backs out the changes.
+ // - The third "check" is done to display the original diagnostics of the
+ // original code.
+ let p = project()
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+ [workspace]
+ "#,
+ )
+ .file(
+ "foo/src/main.rs",
+ r#"
+ use std::env;
+ use std::fs;
+ use std::io::Write;
+ use std::path::{Path, PathBuf};
+ use std::process::{self, Command};
+
+ fn main() {
+ // Ignore calls to things like --print=file-names and compiling build.rs.
+ // Also compatible for rustc invocations with `@path` argfile.
+ let is_lib_rs = env::args_os()
+ .map(PathBuf::from)
+ .flat_map(|p| if let Some(p) = p.to_str().unwrap_or_default().strip_prefix("@") {
+ fs::read_to_string(p).unwrap().lines().map(PathBuf::from).collect()
+ } else {
+ vec![p]
+ })
+ .any(|l| l == Path::new("src/lib.rs"));
+ if is_lib_rs {
+ let path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+ let first = path.join("first");
+ let second = path.join("second");
+ if first.exists() && !second.exists() {
+ fs::write("src/lib.rs", b"not rust code").unwrap();
+ fs::File::create(&second).unwrap();
+ } else {
+ fs::File::create(&first).unwrap();
+ }
+ }
+
+ let status = Command::new("rustc")
+ .args(env::args().skip(1))
+ .status()
+ .expect("failed to run rustc");
+ process::exit(status.code().unwrap_or(2));
+ }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = 'bar'
+ version = '0.1.0'
+ [workspace]
+ "#,
+ )
+ .file("bar/build.rs", "fn main() {}")
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ pub fn foo() {
+ let mut x = 3;
+ drop(x);
+ }
+ "#,
+ )
+ .build();
+
+ // Build our rustc shim
+ p.cargo("build").cwd("foo").run();
+
+ // Attempt to fix code, but our shim will always fail the second compile
+ p.cargo("fix --allow-no-vcs --lib")
+ .cwd("bar")
+ .env("__CARGO_FIX_YOLO", "1")
+ .env("RUSTC", p.root().join("foo/target/debug/foo"))
+ .with_stderr_contains(
+ "warning: failed to automatically apply fixes suggested by rustc \
+ to crate `bar`\n\
+ \n\
+ after fixes were automatically applied the compiler reported \
+ errors within these files:\n\
+ \n \
+ * src/lib.rs\n\
+ \n\
+ This likely indicates a bug in either rustc or cargo itself,\n\
+ and we would appreciate a bug report! You're likely to see \n\
+ a number of compiler warnings after this message which cargo\n\
+ attempted to fix but failed. If you could open an issue at\n\
+ [..]\n\
+ quoting the full output of this command we'd be very appreciative!\n\
+ Note that you may be able to make some more progress in the near-term\n\
+ fixing code with the `--broken-code` flag\n\
+ \n\
+ The following errors were reported:\n\
+ error: expected one of `!` or `::`, found `rust`\n\
+ ",
+ )
+ .with_stderr_contains("Original diagnostics will follow.")
+ .with_stderr_contains("[WARNING] variable does not need to be mutable")
+ .with_stderr_does_not_contain("[..][FIXED][..]")
+ .run();
+
+ // Make sure the fix which should have been applied was backed out
+ assert!(p.read_file("bar/src/lib.rs").contains("let mut x = 3;"));
+}
+
+#[cargo_test]
+fn fix_path_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = 'bar' }
+
+ [workspace]
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate bar;
+
+ pub fn foo() -> u32 {
+ let mut x = 3;
+ x
+ }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ pub fn foo() -> u32 {
+ let mut x = 3;
+ x
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs -p foo -p bar")
+ .env("__CARGO_FIX_YOLO", "1")
+ .with_stdout("")
+ .with_stderr_unordered(
+ "\
+[CHECKING] bar v0.1.0 ([..])
+[FIXED] bar/src/lib.rs (1 fix)
+[CHECKING] foo v0.1.0 ([..])
+[FIXED] src/lib.rs (1 fix)
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn do_not_fix_non_relevant_deps() {
+ let p = project()
+ .no_manifest()
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = '../bar' }
+
+ [workspace]
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ pub fn foo() -> u32 {
+ let mut x = 3;
+ x
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .cwd("foo")
+ .run();
+
+ assert!(p.read_file("bar/src/lib.rs").contains("mut"));
+}
+
+#[cargo_test]
+fn prepare_for_2018() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #![allow(unused)]
+
+ mod foo {
+ pub const FOO: &str = "fooo";
+ }
+
+ mod bar {
+ use ::foo::FOO;
+ }
+
+ fn main() {
+ let x = ::foo::FOO;
+ }
+ "#,
+ )
+ .build();
+
+ let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[MIGRATING] src/lib.rs from 2015 edition to 2018
+[FIXED] src/lib.rs (2 fixes)
+[FINISHED] [..]
+";
+ p.cargo("fix --edition --allow-no-vcs")
+ .with_stderr(stderr)
+ .with_stdout("")
+ .run();
+
+ println!("{}", p.read_file("src/lib.rs"));
+ assert!(p.read_file("src/lib.rs").contains("use crate::foo::FOO;"));
+ assert!(p
+ .read_file("src/lib.rs")
+ .contains("let x = crate::foo::FOO;"));
+}
+
+#[cargo_test]
+fn local_paths() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ use test::foo;
+
+ mod test {
+ pub fn foo() {}
+ }
+
+ pub fn f() {
+ foo();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --edition --allow-no-vcs")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([..])
+[MIGRATING] src/lib.rs from 2015 edition to 2018
+[FIXED] src/lib.rs (1 fix)
+[FINISHED] [..]
+",
+ )
+ .with_stdout("")
+ .run();
+
+ println!("{}", p.read_file("src/lib.rs"));
+ assert!(p.read_file("src/lib.rs").contains("use crate::test::foo;"));
+}
+
+#[cargo_test]
+fn upgrade_extern_crate() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = '2018'
+
+ [workspace]
+
+ [dependencies]
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![warn(rust_2018_idioms)]
+ extern crate bar;
+
+ use bar::bar;
+
+ pub fn foo() {
+ ::bar::bar();
+ bar();
+ }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let stderr = "\
+[CHECKING] bar v0.1.0 ([..])
+[CHECKING] foo v0.1.0 ([..])
+[FIXED] src/lib.rs (1 fix)
+[FINISHED] [..]
+";
+ p.cargo("fix --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .with_stderr(stderr)
+ .with_stdout("")
+ .run();
+ println!("{}", p.read_file("src/lib.rs"));
+ assert!(!p.read_file("src/lib.rs").contains("extern crate"));
+}
+
+#[cargo_test]
+fn specify_rustflags() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #![allow(unused)]
+
+ mod foo {
+ pub const FOO: &str = "fooo";
+ }
+
+ fn main() {
+ let x = ::foo::FOO;
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --edition --allow-no-vcs")
+ .env("RUSTFLAGS", "-C linker=cc")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([..])
+[MIGRATING] src/lib.rs from 2015 edition to 2018
+[FIXED] src/lib.rs (1 fix)
+[FINISHED] [..]
+",
+ )
+ .with_stdout("")
+ .run();
+}
+
+#[cargo_test]
+fn no_changes_necessary() {
+ let p = project().file("src/lib.rs", "").build();
+
+ let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[FINISHED] [..]
+";
+ p.cargo("fix --allow-no-vcs")
+ .with_stderr(stderr)
+ .with_stdout("")
+ .run();
+}
+
+#[cargo_test]
+fn fixes_extra_mut() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() -> u32 {
+ let mut x = 3;
+ x
+ }
+ "#,
+ )
+ .build();
+
+ let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[FIXED] src/lib.rs (1 fix)
+[FINISHED] [..]
+";
+ p.cargo("fix --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .with_stderr(stderr)
+ .with_stdout("")
+ .run();
+}
+
+#[cargo_test]
+fn fixes_two_missing_ampersands() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() -> u32 {
+ let mut x = 3;
+ let mut y = 3;
+ x + y
+ }
+ "#,
+ )
+ .build();
+
+ let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[FIXED] src/lib.rs (2 fixes)
+[FINISHED] [..]
+";
+ p.cargo("fix --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .with_stderr(stderr)
+ .with_stdout("")
+ .run();
+}
+
+#[cargo_test]
+fn tricky() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() -> u32 {
+ let mut x = 3; let mut y = 3;
+ x + y
+ }
+ "#,
+ )
+ .build();
+
+ let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[FIXED] src/lib.rs (2 fixes)
+[FINISHED] [..]
+";
+ p.cargo("fix --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .with_stderr(stderr)
+ .with_stdout("")
+ .run();
+}
+
+#[cargo_test]
+fn preserve_line_endings() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "fn add(a: &u32) -> u32 { a + 1 }\r\n\
+ pub fn foo() -> u32 { let mut x = 3; add(&x) }\r\n\
+ ",
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .run();
+ assert!(p.read_file("src/lib.rs").contains("\r\n"));
+}
+
+#[cargo_test]
+fn fix_deny_warnings() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "#![deny(warnings)]
+ pub fn foo() { let mut x = 3; drop(x); }
+ ",
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .run();
+}
+
+#[cargo_test]
+fn fix_deny_warnings_but_not_others() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ #![deny(unused_mut)]
+
+ pub fn foo() -> u32 {
+ let mut x = 3;
+ x
+ }
+
+ pub fn bar() {
+ #[allow(unused_mut)]
+ let mut _y = 4;
+ }
+ ",
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .run();
+ assert!(!p.read_file("src/lib.rs").contains("let mut x = 3;"));
+ assert!(p.read_file("src/lib.rs").contains("let mut _y = 4;"));
+}
+
+#[cargo_test]
+fn fix_two_files() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ pub mod bar;
+
+ pub fn foo() -> u32 {
+ let mut x = 3;
+ x
+ }
+ ",
+ )
+ .file(
+ "src/bar.rs",
+ "
+ pub fn foo() -> u32 {
+ let mut x = 3;
+ x
+ }
+
+ ",
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .with_stderr_contains("[FIXED] src/bar.rs (1 fix)")
+ .with_stderr_contains("[FIXED] src/lib.rs (1 fix)")
+ .run();
+ assert!(!p.read_file("src/lib.rs").contains("let mut x = 3;"));
+ assert!(!p.read_file("src/bar.rs").contains("let mut x = 3;"));
+}
+
+#[cargo_test]
+fn fixes_missing_ampersand() {
+ let p = project()
+ .file("src/main.rs", "fn main() { let mut x = 3; drop(x); }")
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() { let mut x = 3; drop(x); }
+
+ #[test]
+ pub fn foo2() { let mut x = 3; drop(x); }
+ "#,
+ )
+ .file(
+ "tests/a.rs",
+ r#"
+ #[test]
+ pub fn foo() { let mut x = 3; drop(x); }
+ "#,
+ )
+ .file("examples/foo.rs", "fn main() { let mut x = 3; drop(x); }")
+ .file("build.rs", "fn main() { let mut x = 3; drop(x); }")
+ .build();
+
+ p.cargo("fix --all-targets --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .with_stdout("")
+ .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])")
+ .with_stderr_contains("[FIXED] build.rs (1 fix)")
+ // Don't assert number of fixes for this one, as we don't know if we're
+ // fixing it once or twice! We run this all concurrently, and if we
+ // compile (and fix) in `--test` mode first, we get two fixes. Otherwise
+ // we'll fix one non-test thing, and then fix another one later in
+ // test mode.
+ .with_stderr_contains("[FIXED] src/lib.rs[..]")
+ .with_stderr_contains("[FIXED] src/main.rs (1 fix)")
+ .with_stderr_contains("[FIXED] examples/foo.rs (1 fix)")
+ .with_stderr_contains("[FIXED] tests/a.rs (1 fix)")
+ .with_stderr_contains("[FINISHED] [..]")
+ .run();
+ p.cargo("check").run();
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn fix_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ bar = []
+
+ [workspace]
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(feature = "bar")]
+ pub fn foo() -> u32 { let mut x = 3; x }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs").run();
+ p.cargo("check").run();
+ p.cargo("fix --features bar --allow-no-vcs").run();
+ p.cargo("check --features bar").run();
+}
+
+#[cargo_test]
+fn shows_warnings() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "#[deprecated] fn bar() {} pub fn foo() { let _ = bar(); }",
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs")
+ .with_stderr_contains("[..]warning: use of deprecated[..]")
+ .run();
+}
+
+#[cargo_test]
+fn warns_if_no_vcs_detected() {
+ let p = project().file("src/lib.rs", "pub fn foo() {}").build();
+
+ p.cargo("fix")
+ .with_status(101)
+ .with_stderr(
+ "error: no VCS found for this package and `cargo fix` can potentially perform \
+ destructive changes; if you'd like to suppress this error pass `--allow-no-vcs`\
+ ",
+ )
+ .run();
+ p.cargo("fix --allow-no-vcs").run();
+}
+
+#[cargo_test]
+fn warns_about_dirty_working_directory() {
+ let p = git::new("foo", |p| p.file("src/lib.rs", "pub fn foo() {}"));
+
+ p.change_file("src/lib.rs", "");
+
+ p.cargo("fix")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: the working directory of this package has uncommitted changes, \
+and `cargo fix` can potentially perform destructive changes; if you'd \
+like to suppress this error pass `--allow-dirty`, `--allow-staged`, or \
+commit the changes to these files:
+
+ * src/lib.rs (dirty)
+
+
+",
+ )
+ .run();
+ p.cargo("fix --allow-dirty").run();
+}
+
+#[cargo_test]
+fn warns_about_staged_working_directory() {
+ let (p, repo) = git::new_repo("foo", |p| p.file("src/lib.rs", "pub fn foo() {}"));
+
+ p.change_file("src/lib.rs", "pub fn bar() {}");
+ git::add(&repo);
+
+ p.cargo("fix")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: the working directory of this package has uncommitted changes, \
+and `cargo fix` can potentially perform destructive changes; if you'd \
+like to suppress this error pass `--allow-dirty`, `--allow-staged`, or \
+commit the changes to these files:
+
+ * src/lib.rs (staged)
+
+
+",
+ )
+ .run();
+ p.cargo("fix --allow-staged").run();
+}
+
+#[cargo_test]
+fn errors_about_untracked_files() {
+ let mut git_project = project().at("foo");
+ git_project = git_project.file("src/lib.rs", "pub fn foo() {}");
+ let p = git_project.build();
+ let _ = init(&p.root());
+
+ p.cargo("fix")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: the working directory of this package has uncommitted changes, \
+and `cargo fix` can potentially perform destructive changes; if you'd \
+like to suppress this error pass `--allow-dirty`, `--allow-staged`, or \
+commit the changes to these files:
+
+ * Cargo.toml (dirty)
+ * src/ (dirty)
+
+
+",
+ )
+ .run();
+ p.cargo("fix --allow-dirty").run();
+}
+
+#[cargo_test]
+fn does_not_warn_about_clean_working_directory() {
+ let p = git::new("foo", |p| p.file("src/lib.rs", "pub fn foo() {}"));
+ p.cargo("fix").run();
+}
+
+#[cargo_test]
+fn does_not_warn_about_dirty_ignored_files() {
+ let p = git::new("foo", |p| {
+ p.file("src/lib.rs", "pub fn foo() {}")
+ .file(".gitignore", "bar\n")
+ });
+
+ p.change_file("bar", "");
+
+ p.cargo("fix").run();
+}
+
+#[cargo_test]
+fn fix_all_targets_by_default() {
+ let p = project()
+ .file("src/lib.rs", "pub fn foo() { let mut x = 3; drop(x); }")
+ .file("tests/foo.rs", "pub fn foo() { let mut x = 3; drop(x); }")
+ .build();
+ p.cargo("fix --allow-no-vcs")
+ .env("__CARGO_FIX_YOLO", "1")
+ .run();
+ assert!(!p.read_file("src/lib.rs").contains("let mut x"));
+ assert!(!p.read_file("tests/foo.rs").contains("let mut x"));
+}
+
+#[cargo_test]
+fn prepare_for_unstable() {
+ // During the period when a new edition is coming up but is not yet stable,
+ // this test verifies that it cannot be migrated to on a stable toolchain.
+ // If there is no next edition, this test does nothing.
+ let next = match Edition::LATEST_UNSTABLE {
+ Some(next) => next,
+ None => {
+ eprintln!("Next edition is currently not available, skipping test.");
+ return;
+ }
+ };
+ let latest_stable = Edition::LATEST_STABLE;
+ let prev = latest_stable.previous().unwrap();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "{}"
+ "#,
+ latest_stable
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // -j1 to make the error more deterministic (otherwise there can be
+ // multiple errors since they run in parallel).
+ p.cargo("fix --edition --allow-no-vcs -j1")
+ .with_stderr(&format_args!("\
+[CHECKING] foo [..]
+[WARNING] `src/lib.rs` is on the latest edition, but trying to migrate to edition {next}.
+Edition {next} is unstable and not allowed in this release, consider trying the nightly release channel.
+
+If you are trying to migrate from the previous edition ({prev}), the
+process requires following these steps:
+
+1. Start with `edition = \"{prev}\"` in `Cargo.toml`
+2. Run `cargo fix --edition`
+3. Modify `Cargo.toml` to set `edition = \"{latest_stable}\"`
+4. Run `cargo build` or `cargo test` to verify the fixes worked
+
+More details may be found at
+https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html
+
+[FINISHED] [..]
+", next=next, latest_stable=latest_stable, prev=prev))
+ .run();
+
+ if !is_nightly() {
+ // The rest of this test is fundamentally always nightly.
+ return;
+ }
+
+ p.cargo("fix --edition --allow-no-vcs")
+ .masquerade_as_nightly_cargo(&["always_nightly"])
+ .with_stderr(&format!(
+ "\
+[CHECKING] foo [..]
+[MIGRATING] src/lib.rs from {latest_stable} edition to {next}
+[FINISHED] [..]
+",
+ latest_stable = latest_stable,
+ next = next,
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn prepare_for_latest_stable() {
+ // This is the stable counterpart of prepare_for_unstable.
+ let latest_stable = Edition::LATEST_STABLE;
+ let previous = latest_stable.previous().unwrap();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+ edition = '{}'
+ "#,
+ previous
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("fix --edition --allow-no-vcs")
+ .with_stderr(&format!(
+ "\
+[CHECKING] foo [..]
+[MIGRATING] src/lib.rs from {} edition to {}
+[FINISHED] [..]
+",
+ previous, latest_stable
+ ))
+ .run();
+}
+
+#[cargo_test(nightly, reason = "fundamentally always nightly")]
+fn prepare_for_already_on_latest_unstable() {
+ // During the period when a new edition is coming up but is not yet stable,
+ // this test checks what happens if you are already on the latest edition.
+ // If there is no next edition, this test does nothing.
+ let next_edition = match Edition::LATEST_UNSTABLE {
+ Some(next) => next,
+ None => {
+ eprintln!("Next edition is currently not available, skipping test.");
+ return;
+ }
+ };
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ cargo-features = ["edition{}"]
+
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+ edition = '{}'
+ "#,
+ next_edition, next_edition
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("fix --edition --allow-no-vcs")
+ .masquerade_as_nightly_cargo(&["always_nightly"])
+ .with_stderr_contains("[CHECKING] foo [..]")
+ .with_stderr_contains(&format!(
+ "\
+[WARNING] `src/lib.rs` is already on the latest edition ({next_edition}), unable to migrate further
+",
+ next_edition = next_edition
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn prepare_for_already_on_latest_stable() {
+ // Stable counterpart of prepare_for_already_on_latest_unstable.
+ if Edition::LATEST_UNSTABLE.is_some() {
+ eprintln!("This test cannot run while the latest edition is unstable, skipping.");
+ return;
+ }
+ let latest_stable = Edition::LATEST_STABLE;
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+ edition = '{}'
+ "#,
+ latest_stable
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("fix --edition --allow-no-vcs")
+ .with_stderr_contains("[CHECKING] foo [..]")
+ .with_stderr_contains(&format!(
+ "\
+[WARNING] `src/lib.rs` is already on the latest edition ({latest_stable}), unable to migrate further
+",
+ latest_stable = latest_stable
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn fix_overlapping() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo<T>() {}
+ pub struct A;
+
+ pub mod bar {
+ pub fn baz() {
+ ::foo::<::A>();
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs --edition --lib")
+ .with_stderr(
+ "\
+[CHECKING] foo [..]
+[MIGRATING] src/lib.rs from 2015 edition to 2018
+[FIXED] src/lib.rs (2 fixes)
+[FINISHED] dev [..]
+",
+ )
+ .run();
+
+ let contents = p.read_file("src/lib.rs");
+ println!("{}", contents);
+ assert!(contents.contains("crate::foo::<crate::A>()"));
+}
+
+#[cargo_test]
+fn fix_idioms() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+ edition = '2018'
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ use std::any::Any;
+ pub fn foo() {
+ let _x: Box<Any> = Box::new(3);
+ }
+ "#,
+ )
+ .build();
+
+ let stderr = "\
+[CHECKING] foo [..]
+[FIXED] src/lib.rs (1 fix)
+[FINISHED] [..]
+";
+ p.cargo("fix --edition-idioms --allow-no-vcs")
+ .with_stderr(stderr)
+ .run();
+
+ assert!(p.read_file("src/lib.rs").contains("Box<dyn Any>"));
+}
+
+#[cargo_test]
+fn idioms_2015_ok() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("fix --edition-idioms --allow-no-vcs").run();
+}
+
+#[cargo_test]
+fn shows_warnings_on_second_run_without_changes() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #[deprecated]
+ fn bar() {}
+
+ pub fn foo() {
+ let _ = bar();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs")
+ .with_stderr_contains("[..]warning: use of deprecated[..]")
+ .run();
+
+ p.cargo("fix --allow-no-vcs")
+ .with_stderr_contains("[..]warning: use of deprecated[..]")
+ .run();
+}
+
+#[cargo_test]
+fn shows_warnings_on_second_run_without_changes_on_multiple_targets() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #[deprecated]
+ fn bar() {}
+
+ pub fn foo() {
+ let _ = bar();
+ }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[deprecated]
+ fn bar() {}
+
+ fn main() {
+ let _ = bar();
+ }
+ "#,
+ )
+ .file(
+ "tests/foo.rs",
+ r#"
+ #[deprecated]
+ fn bar() {}
+
+ #[test]
+ fn foo_test() {
+ let _ = bar();
+ }
+ "#,
+ )
+ .file(
+ "tests/bar.rs",
+ r#"
+ #[deprecated]
+ fn bar() {}
+
+ #[test]
+ fn foo_test() {
+ let _ = bar();
+ }
+ "#,
+ )
+ .file(
+ "examples/fooxample.rs",
+ r#"
+ #[deprecated]
+ fn bar() {}
+
+ fn main() {
+ let _ = bar();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs --all-targets")
+ .with_stderr_contains(" --> examples/fooxample.rs:6:29")
+ .with_stderr_contains(" --> src/lib.rs:6:29")
+ .with_stderr_contains(" --> src/main.rs:6:29")
+ .with_stderr_contains(" --> tests/bar.rs:7:29")
+ .with_stderr_contains(" --> tests/foo.rs:7:29")
+ .run();
+
+ p.cargo("fix --allow-no-vcs --all-targets")
+ .with_stderr_contains(" --> examples/fooxample.rs:6:29")
+ .with_stderr_contains(" --> src/lib.rs:6:29")
+ .with_stderr_contains(" --> src/main.rs:6:29")
+ .with_stderr_contains(" --> tests/bar.rs:7:29")
+ .with_stderr_contains(" --> tests/foo.rs:7:29")
+ .run();
+}
+
+#[cargo_test]
+fn doesnt_rebuild_dependencies() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = 'bar' }
+
+ [workspace]
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("fix --allow-no-vcs -p foo")
+ .env("__CARGO_FIX_YOLO", "1")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0 ([..])
+[CHECKING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("fix --allow-no-vcs -p foo")
+ .env("__CARGO_FIX_YOLO", "1")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn does_not_crash_with_rustc_wrapper() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("fix --allow-no-vcs")
+ .env("RUSTC_WRAPPER", tools::echo_wrapper())
+ .run();
+ p.build_dir().rm_rf();
+ p.cargo("fix --allow-no-vcs --verbose")
+ .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper())
+ .run();
+}
+
+#[cargo_test]
+fn uses_workspace_wrapper_and_primary_wrapper_override() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("fix --allow-no-vcs --verbose")
+ .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper())
+ .with_stderr_contains("WRAPPER CALLED: rustc src/lib.rs --crate-name foo [..]")
+ .run();
+}
+
+#[cargo_test]
+fn only_warn_for_relevant_crates() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = { path = 'a' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ "
+ pub fn foo() {}
+ pub mod bar {
+ use foo;
+ pub fn baz() { foo() }
+ }
+ ",
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs --edition")
+ .with_stderr(
+ "\
+[CHECKING] a v0.1.0 ([..])
+[CHECKING] foo v0.1.0 ([..])
+[MIGRATING] src/lib.rs from 2015 edition to 2018
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fix_to_broken_code() {
+ let p = project()
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+ [workspace]
+ "#,
+ )
+ .file(
+ "foo/src/main.rs",
+ r#"
+ use std::env;
+ use std::fs;
+ use std::io::Write;
+ use std::path::{Path, PathBuf};
+ use std::process::{self, Command};
+
+ fn main() {
+ // Ignore calls to things like --print=file-names and compiling build.rs.
+ // Also compatible with rustc invocations that use an `@path` argfile.
+ let is_lib_rs = env::args_os()
+ .map(PathBuf::from)
+ .flat_map(|p| if let Some(p) = p.to_str().unwrap_or_default().strip_prefix("@") {
+ fs::read_to_string(p).unwrap().lines().map(PathBuf::from).collect()
+ } else {
+ vec![p]
+ })
+ .any(|l| l == Path::new("src/lib.rs"));
+ if is_lib_rs {
+ let path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+ let path = path.join("foo");
+ if path.exists() {
+ panic!()
+ } else {
+ fs::File::create(&path).unwrap();
+ }
+ }
+
+ let status = Command::new("rustc")
+ .args(env::args().skip(1))
+ .status()
+ .expect("failed to run rustc");
+ process::exit(status.code().unwrap_or(2));
+ }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = 'bar'
+ version = '0.1.0'
+ [workspace]
+ "#,
+ )
+ .file("bar/build.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "pub fn foo() { let mut x = 3; drop(x); }")
+ .build();
+
+ // Build our rustc shim
+ p.cargo("build").cwd("foo").run();
+
+ // Attempt to fix code, but our shim will always fail the second compile
+ p.cargo("fix --allow-no-vcs --broken-code")
+ .cwd("bar")
+ .env("RUSTC", p.root().join("foo/target/debug/foo"))
+ .with_status(101)
+ .with_stderr_contains("[WARNING] failed to automatically apply fixes [..]")
+ .run();
+
+ assert_eq!(
+ p.read_file("bar/src/lib.rs"),
+ "pub fn foo() { let x = 3; drop(x); }"
+ );
+}
+
+#[cargo_test]
+fn fix_with_common() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "tests/t1.rs",
+ "mod common; #[test] fn t1() { common::try(); }",
+ )
+ .file(
+ "tests/t2.rs",
+ "mod common; #[test] fn t2() { common::try(); }",
+ )
+ .file("tests/common/mod.rs", "pub fn try() {}")
+ .build();
+
+ p.cargo("fix --edition --allow-no-vcs").run();
+
+ assert_eq!(p.read_file("tests/common/mod.rs"), "pub fn r#try() {}");
+}
+
+#[cargo_test]
+fn fix_in_existing_repo_weird_ignore() {
+ // Check that the ignore rules don't end up ignoring the repo itself.
+ let p = git::new("foo", |project| {
+ project
+ .file("src/lib.rs", "")
+ .file(".gitignore", "foo\ninner\nCargo.lock\ntarget\n")
+ .file("inner/file", "")
+ });
+
+ p.cargo("fix").run();
+ // It is questionable whether this is the right behavior. It should
+ // probably check whether any source file for the current project is
+ // ignored.
+ p.cargo("fix")
+ .cwd("inner")
+ .with_stderr_contains("[ERROR] no VCS found[..]")
+ .with_status(101)
+ .run();
+ p.cargo("fix").cwd("src").run();
+}
+
+#[cargo_test]
+fn fix_color_message() {
+ // Check that color appears in diagnostics.
+ let p = project()
+ .file("src/lib.rs", "std::compile_error!{\"color test\"}")
+ .build();
+
+ p.cargo("fix --allow-no-vcs --color=always")
+ .with_stderr_contains("[..]\x1b[[..]")
+ .with_status(101)
+ .run();
+
+ p.cargo("fix --allow-no-vcs --color=never")
+ .with_stderr_contains("error: color test")
+ .with_stderr_does_not_contain("[..]\x1b[[..]")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn edition_v2_resolver_report() {
+ // Show a report if the V2 resolver shows differences.
+ Package::new("common", "1.0.0")
+ .feature("f1", &[])
+ .feature("dev-feat", &[])
+ .add_dep(Dependency::new("opt_dep", "1.0").optional(true))
+ .publish();
+ Package::new("opt_dep", "1.0.0").publish();
+
+ Package::new("bar", "1.0.0")
+ .add_dep(
+ Dependency::new("common", "1.0")
+ .target("cfg(whatever)")
+ .enable_features(&["f1"]),
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ common = "1.0"
+ bar = "1.0"
+
+ [build-dependencies]
+ common = { version = "1.0", features = ["opt_dep"] }
+
+ [dev-dependencies]
+ common = { version="1.0", features=["dev-feat"] }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("fix --edition --allow-no-vcs")
+ .with_stderr_unordered("\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] common v1.0.0 [..]
+[DOWNLOADED] bar v1.0.0 [..]
+[DOWNLOADED] opt_dep v1.0.0 [..]
+note: Switching to Edition 2021 will enable the use of the version 2 feature resolver in Cargo.
+This may cause some dependencies to be built with fewer features enabled than previously.
+More information about the resolver changes may be found at https://doc.rust-lang.org/nightly/edition-guide/rust-2021/default-cargo-resolver.html
+When building the following dependencies, the given features will no longer be used:
+
+ common v1.0.0 removed features: dev-feat, f1, opt_dep
+ common v1.0.0 (as host dependency) removed features: dev-feat, f1
+
+The following differences only apply when building with dev-dependencies:
+
+ common v1.0.0 removed features: f1, opt_dep
+
+[CHECKING] opt_dep v1.0.0
+[CHECKING] common v1.0.0
+[CHECKING] bar v1.0.0
+[CHECKING] foo v0.1.0 [..]
+[MIGRATING] src/lib.rs from 2018 edition to 2021
+[FINISHED] [..]
+")
+ .run();
+}
+
+#[cargo_test]
+fn rustfix_handles_multi_spans() {
+ // Checks that rustfix handles a single diagnostic with multiple
+ // suggestion spans (non_fmt_panic in this case).
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() {
+ panic!(format!("hey"));
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs").run();
+ assert!(p.read_file("src/lib.rs").contains(r#"panic!("hey");"#));
+}
+
+#[cargo_test]
+fn fix_edition_2021() {
+ // Can migrate to 2021, even when the relevant lints are allowed.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![allow(ellipsis_inclusive_range_patterns)]
+
+ pub fn f() -> bool {
+ let x = 123;
+ match x {
+ 0...100 => true,
+ _ => false,
+ }
+ }
+ "#,
+ )
+ .build();
+ p.cargo("fix --edition --allow-no-vcs")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.1.0 [..]
+[MIGRATING] src/lib.rs from 2018 edition to 2021
+[FIXED] src/lib.rs (1 fix)
+[FINISHED] [..]
+",
+ )
+ .run();
+ assert!(p.read_file("src/lib.rs").contains(r#"0..=100 => true,"#));
+}
+
+#[cargo_test]
+fn fix_shared_cross_workspace() {
+ // Fixing a file that is shared between multiple packages in the same workspace.
+ // Make sure two processes don't try to fix the same file at the same time.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "bar"]
+ "#,
+ )
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "pub mod shared;")
+ // This will fix both the unused binding and the bare trait object.
+ .file("foo/src/shared.rs", "pub fn fixme(x: Box<&Fn() -> ()>) {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ #[path="../../foo/src/shared.rs"]
+ pub mod shared;
+ "#,
+ )
+ .build();
+
+ // The output here can be either of these two, depending on who runs first:
+ // [FIXED] bar/src/../../foo/src/shared.rs (2 fixes)
+ // [FIXED] foo/src/shared.rs (2 fixes)
+ p.cargo("fix --allow-no-vcs")
+ .with_stderr_unordered(
+ "\
+[CHECKING] foo v0.1.0 [..]
+[CHECKING] bar v0.1.0 [..]
+[FIXED] [..]foo/src/shared.rs (2 fixes)
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ assert_match_exact(
+ "pub fn fixme(_x: Box<&dyn Fn() -> ()>) {}",
+ &p.read_file("foo/src/shared.rs"),
+ );
+}
+
+#[cargo_test]
+fn abnormal_exit() {
+ // rustc fails unexpectedly after applying fixes; this should show some error information.
+ //
+ // This works with a proc-macro that runs three times:
+ // - First run (collect diagnostics pass): writes a file, exits normally.
+ // - Second run (verify diagnostics work): it detects the presence of the
+ // file, removes the file, and aborts the process.
+ // - Third run (collecting messages to display): file not found, exits normally.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ pm = {path="pm"}
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn f() {
+ let mut x = 1;
+ pm::crashme!();
+ }
+ "#,
+ )
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+ edition = "2018"
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "pm/src/lib.rs",
+ r#"
+ use proc_macro::TokenStream;
+ #[proc_macro]
+ pub fn crashme(_input: TokenStream) -> TokenStream {
+ // Use a file to succeed on the first pass, and fail on the second.
+ let p = std::env::var_os("ONCE_PATH").unwrap();
+ let check_path = std::path::Path::new(&p);
+ if check_path.exists() {
+ eprintln!("I'm not a diagnostic.");
+ std::fs::remove_file(check_path).unwrap();
+ std::process::abort();
+ } else {
+ std::fs::write(check_path, "").unwrap();
+ "".parse().unwrap()
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --lib --allow-no-vcs")
+ .env(
+ "ONCE_PATH",
+ paths::root().join("proc-macro-run-once").to_str().unwrap(),
+ )
+ .with_stderr_contains(
+ "[WARNING] failed to automatically apply fixes suggested by rustc to crate `foo`",
+ )
+ .with_stderr_contains("I'm not a diagnostic.")
+ // "signal: 6, SIGABRT: process abort signal" on some platforms
+ .with_stderr_contains("rustc exited abnormally: [..]")
+ .with_stderr_contains("Original diagnostics will follow.")
+ .run();
+}
+
+#[cargo_test]
+fn fix_with_run_cargo_in_proc_macros() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ use proc_macro::*;
+
+ #[proc_macro]
+ pub fn foo(_input: TokenStream) -> TokenStream {
+ let output = std::process::Command::new(env!("CARGO"))
+ .args(&["metadata", "--format-version=1"])
+ .output()
+ .unwrap();
+ eprintln!("{}", std::str::from_utf8(&output.stderr).unwrap());
+ println!("{}", std::str::from_utf8(&output.stdout).unwrap());
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ .file(
+ "src/bin/main.rs",
+ r#"
+ use foo::foo;
+
+ fn main() {
+ foo!("bar")
+ }
+ "#,
+ )
+ .build();
+ p.cargo("fix --allow-no-vcs")
+ .with_stderr_does_not_contain("error: could not find .rs file in rustc args")
+ .run();
+}
+
+#[cargo_test]
+fn non_edition_lint_migration() {
+ // Migrating to a new edition where a non-edition lint causes problems.
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file(
+ "src/lib.rs",
+ r#"
+ // This is only used in a test.
+ // To be correct, this should be gated on #[cfg(test)], but
+ // sometimes people don't do that. If the unused_imports
+ // lint removes this, then the unit test will fail to compile.
+ use std::str::from_utf8;
+
+ pub mod foo {
+ pub const FOO: &[u8] = &[102, 111, 111];
+ }
+
+ #[test]
+ fn example() {
+ assert_eq!(
+ from_utf8(::foo::FOO), Ok("foo")
+ );
+ }
+ "#,
+ )
+ .build();
+ // Check that it complains about an unused import.
+ p.cargo("check --lib")
+ .with_stderr_contains("[..]unused_imports[..]")
+ .with_stderr_contains("[..]std::str::from_utf8[..]")
+ .run();
+ p.cargo("fix --edition --allow-no-vcs").run();
+ let contents = p.read_file("src/lib.rs");
+ // Check it does not remove the "unused" import.
+ assert!(contents.contains("use std::str::from_utf8;"));
+ // Check that it made the edition migration.
+ assert!(contents.contains("from_utf8(crate::foo::FOO)"));
+}
+
+// For rust-lang/cargo#9857
+#[cargo_test]
+fn fix_in_dependency() {
+ Package::new("bar", "1.0.0")
+ .file(
+ "src/lib.rs",
+ r#"
+ #[macro_export]
+ macro_rules! m {
+ ($i:tt) => {
+ let $i = 1;
+ };
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() {
+ bar::m!(abc);
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("fix --allow-no-vcs")
+ .with_stderr_does_not_contain("[FIXED] [..]")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/freshness.rs b/src/tools/cargo/tests/testsuite/freshness.rs
new file mode 100644
index 000000000..86b186af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/freshness.rs
@@ -0,0 +1,2816 @@
+//! Tests for fingerprinting (rebuild detection).
+
+use filetime::FileTime;
+use std::fs::{self, OpenOptions};
+use std::io;
+use std::io::prelude::*;
+use std::net::TcpListener;
+use std::path::{Path, PathBuf};
+use std::process::Stdio;
+use std::thread;
+use std::time::SystemTime;
+
+use super::death;
+use cargo_test_support::paths::{self, CargoPathExt};
+use cargo_test_support::registry::Package;
+use cargo_test_support::{
+ basic_manifest, is_coarse_mtime, project, rustc_host, rustc_host_env, sleep_ms,
+};
+
+#[cargo_test]
+fn modifying_and_moving() {
+ let p = project()
+ .file("src/main.rs", "mod a; fn main() {}")
+ .file("src/a.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build").with_stdout("").run();
+ p.root().move_into_the_past();
+ p.root().join("target").move_into_the_past();
+
+ p.change_file("src/a.rs", "#[allow(unused)]fn main() {}");
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.0.1 ([CWD]): the file `src/a.rs` has changed ([..])
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap();
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains("[..]file not found[..]")
+ .run();
+}
+
+#[cargo_test]
+fn modify_only_some_files() {
+ let p = project()
+ .file("src/lib.rs", "mod a;")
+ .file("src/a.rs", "")
+ .file("src/main.rs", "mod b; fn main() {}")
+ .file("src/b.rs", "")
+ .file("tests/test.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("test").run();
+ sleep_ms(1000);
+
+ assert!(p.bin("foo").is_file());
+
+ let lib = p.root().join("src/lib.rs");
+ p.change_file("src/lib.rs", "invalid rust code");
+ p.change_file("src/b.rs", "#[allow(unused)]fn foo() {}");
+ lib.move_into_the_past();
+
+ // Make sure the binary is rebuilt, not the lib
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.0.1 ([CWD]): the file `src/b.rs` has changed ([..])
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ assert!(p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn rebuild_sub_package_then_while_package() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [dependencies.a]
+ path = "a"
+ [dependencies.b]
+ path = "b"
+ "#,
+ )
+ .file("src/lib.rs", "extern crate a; extern crate b;")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ authors = []
+ version = "0.0.1"
+ [dependencies.b]
+ path = "../b"
+ "#,
+ )
+ .file("a/src/lib.rs", "extern crate b;")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] b [..]
+[COMPILING] a [..]
+[COMPILING] foo [..]
+[FINISHED] dev [..]
+",
+ )
+ .run();
+
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+ p.change_file("b/src/lib.rs", "pub fn b() {}");
+
+ p.cargo("build -pb -v")
+ .with_stderr(
+ "\
+[DIRTY] b v0.0.1 ([..]): the file `b/src/lib.rs` has changed ([..])
+[COMPILING] b [..]
+[RUNNING] `rustc --crate-name b [..]
+[FINISHED] dev [..]
+",
+ )
+ .run();
+
+ p.change_file(
+ "src/lib.rs",
+ "extern crate a; extern crate b; pub fn toplevel() {}",
+ );
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[FRESH] b [..]
+[DIRTY] a [..]: the dependency b was rebuilt ([..])
+[COMPILING] a [..]
+[RUNNING] `rustc --crate-name a [..]
+[DIRTY] foo [..]: the dependency b was rebuilt ([..])
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..]
+[FINISHED] dev [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn changing_lib_features_caches_targets() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [features]
+ foo = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[..]Compiling foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build --features foo")
+ .with_stderr(
+ "\
+[..]Compiling foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ /* Targets should be cached from the first build */
+
+ p.cargo("build")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+
+ p.cargo("build").with_stdout("").run();
+
+ p.cargo("build --features foo")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+}
+
+#[cargo_test]
+fn changing_profiles_caches_targets() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [profile.dev]
+ panic = "abort"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[..]Compiling foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[..]Compiling foo v0.0.1 ([..])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target[..]debug[..]deps[..]foo-[..][EXE])
+[DOCTEST] foo
+",
+ )
+ .run();
+
+ /* Targets should be cached from the first build */
+
+ p.cargo("build")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+
+ p.cargo("test foo")
+ .with_stderr(
+ "\
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target[..]debug[..]deps[..]foo-[..][EXE])
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn changing_bin_paths_common_target_features_caches_targets() {
+ // Make sure the dep_crate crate is built once per feature set
+ let p = project()
+ .no_manifest()
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ target-dir = "./target"
+ "#,
+ )
+ .file(
+ "dep_crate/Cargo.toml",
+ r#"
+ [package]
+ name = "dep_crate"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ ftest = []
+ "#,
+ )
+ .file(
+ "dep_crate/src/lib.rs",
+ r#"
+ #[cfg(feature = "ftest")]
+ pub fn yo() {
+ println!("ftest on")
+ }
+ #[cfg(not(feature = "ftest"))]
+ pub fn yo() {
+ println!("ftest off")
+ }
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ dep_crate = {path = "../dep_crate", features = []}
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "a/src/main.rs",
+ r#"
+ extern crate dep_crate;
+ use dep_crate::yo;
+ fn main() {
+ yo();
+ }
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ dep_crate = {path = "../dep_crate", features = ["ftest"]}
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file(
+ "b/src/main.rs",
+ r#"
+ extern crate dep_crate;
+ use dep_crate::yo;
+ fn main() {
+ yo();
+ }
+ "#,
+ )
+ .build();
+
+ /* Build and rebuild a/. Ensure dep_crate only builds once */
+ p.cargo("run")
+ .cwd("a")
+ .with_stdout("ftest off")
+ .with_stderr(
+ "\
+[..]Compiling dep_crate v0.0.1 ([..])
+[..]Compiling a v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/a[EXE]`
+",
+ )
+ .run();
+ p.cargo("clean -p a").cwd("a").run();
+ p.cargo("run")
+ .cwd("a")
+ .with_stdout("ftest off")
+ .with_stderr(
+ "\
+[..]Compiling a v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/a[EXE]`
+",
+ )
+ .run();
+
+ /* Build and rebuild b/. Ensure dep_crate only builds once */
+ p.cargo("run")
+ .cwd("b")
+ .with_stdout("ftest on")
+ .with_stderr(
+ "\
+[..]Compiling dep_crate v0.0.1 ([..])
+[..]Compiling b v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/b[EXE]`
+",
+ )
+ .run();
+ p.cargo("clean -p b").cwd("b").run();
+ p.cargo("run")
+ .cwd("b")
+ .with_stdout("ftest on")
+ .with_stderr(
+ "\
+[..]Compiling b v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/b[EXE]`
+",
+ )
+ .run();
+
+ /* Build a/ package again. If we cache different feature dep builds correctly,
+ * this should not cause a rebuild of dep_crate */
+ p.cargo("clean -p a").cwd("a").run();
+ p.cargo("run")
+ .cwd("a")
+ .with_stdout("ftest off")
+ .with_stderr(
+ "\
+[..]Compiling a v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/a[EXE]`
+",
+ )
+ .run();
+
+ /* Build b/ package again. If we cache different feature dep builds correctly,
+ * this should not cause a rebuild */
+ p.cargo("clean -p b").cwd("b").run();
+ p.cargo("run")
+ .cwd("b")
+ .with_stdout("ftest on")
+ .with_stderr(
+ "\
+[..]Compiling b v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/b[EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn changing_bin_features_caches_targets() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [features]
+ foo = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let msg = if cfg!(feature = "foo") { "feature on" } else { "feature off" };
+ println!("{}", msg);
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.rename_run("foo", "off1").with_stdout("feature off").run();
+
+ p.cargo("build --features foo")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.rename_run("foo", "on1").with_stdout("feature on").run();
+
+ /* Targets should be cached from the first build */
+
+ let mut e = p.cargo("build -v");
+
+ // MSVC does not include a hash in the binary filename, so it gets recompiled.
+ if cfg!(target_env = "msvc") {
+ e.with_stderr(
+ "\
+[DIRTY] foo v0.0.1 ([..]): the list of features changed
+[COMPILING] foo[..]
+[RUNNING] `rustc [..]
+[FINISHED] dev[..]",
+ );
+ } else {
+ e.with_stderr("[FRESH] foo v0.0.1 ([..])\n[FINISHED] dev[..]");
+ }
+ e.run();
+ p.rename_run("foo", "off2").with_stdout("feature off").run();
+
+ let mut e = p.cargo("build --features foo -v");
+ if cfg!(target_env = "msvc") {
+ e.with_stderr(
+ "\
+[DIRTY] foo v0.0.1 ([..]): the list of features changed
+[COMPILING] foo[..]
+[RUNNING] `rustc [..]
+[FINISHED] dev[..]",
+ );
+ } else {
+ e.with_stderr(
+ "\
+[FRESH] foo v0.0.1 ([..])
+[FINISHED] dev[..]",
+ );
+ }
+ e.run();
+ p.rename_run("foo", "on2").with_stdout("feature on").run();
+}
+
+#[cargo_test]
+fn rebuild_tests_if_lib_changes() {
+ let p = project()
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file(
+ "tests/foo.rs",
+ r#"
+ extern crate foo;
+ #[test]
+ fn test() { foo::foo(); }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("test").run();
+
+ sleep_ms(1000);
+ p.change_file("src/lib.rs", "");
+
+ p.cargo("build -v").run();
+ p.cargo("test -v")
+ .with_status(101)
+ .with_stderr_contains("[..]cannot find function `foo`[..]")
+ .run();
+}
+
+#[cargo_test]
+fn no_rebuild_transitive_target_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ [dev-dependencies]
+ b = { path = "b" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("tests/foo.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [target.foo.dependencies]
+ c = { path = "../c" }
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ c = { path = "../c" }
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file("c/Cargo.toml", &basic_manifest("c", "0.0.1"))
+ .file("c/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("test --no-run")
+ .with_stderr(
+ "\
+[COMPILING] c v0.0.1 ([..])
+[COMPILING] b v0.0.1 ([..])
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[EXECUTABLE] unittests src/lib.rs (target/debug/deps/foo-[..][EXE])
+[EXECUTABLE] tests/foo.rs (target/debug/deps/foo-[..][EXE])
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rerun_if_changed_in_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "a/build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+ }
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("build").with_stdout("").run();
+}
+
+#[cargo_test]
+fn same_build_dir_cached_packages() {
+ let p = project()
+ .no_manifest()
+ .file(
+ "a1/Cargo.toml",
+ r#"
+ [package]
+ name = "a1"
+ version = "0.0.1"
+ authors = []
+ [dependencies]
+ b = { path = "../b" }
+ "#,
+ )
+ .file("a1/src/lib.rs", "")
+ .file(
+ "a2/Cargo.toml",
+ r#"
+ [package]
+ name = "a2"
+ version = "0.0.1"
+ authors = []
+ [dependencies]
+ b = { path = "../b" }
+ "#,
+ )
+ .file("a2/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.0.1"
+ authors = []
+ [dependencies]
+ c = { path = "../c" }
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file(
+ "c/Cargo.toml",
+ r#"
+ [package]
+ name = "c"
+ version = "0.0.1"
+ authors = []
+ [dependencies]
+ d = { path = "../d" }
+ "#,
+ )
+ .file("c/src/lib.rs", "")
+ .file("d/Cargo.toml", &basic_manifest("d", "0.0.1"))
+ .file("d/src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ target-dir = "./target"
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .cwd("a1")
+ .with_stderr(&format!(
+ "\
+[COMPILING] d v0.0.1 ({dir}/d)
+[COMPILING] c v0.0.1 ({dir}/c)
+[COMPILING] b v0.0.1 ({dir}/b)
+[COMPILING] a1 v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ dir = p.url().to_file_path().unwrap().to_str().unwrap()
+ ))
+ .run();
+ p.cargo("build")
+ .cwd("a2")
+ .with_stderr(
+ "\
+[COMPILING] a2 v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_rebuild_if_build_artifacts_move_backwards_in_time() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+
+ p.root().move_into_the_past();
+
+ p.cargo("build")
+ .with_stdout("")
+ .with_stderr("[FINISHED] [..]")
+ .run();
+}
+
+#[cargo_test]
+fn rebuild_if_build_artifacts_move_forward_in_time() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+
+ p.root().move_into_the_future();
+
+ p.cargo("build")
+ .env("CARGO_LOG", "")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[COMPILING] a v0.0.1 ([..])
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rebuild_if_environment_changes() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ description = "old desc"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ println!("{}", env!("CARGO_PKG_DESCRIPTION"));
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .with_stdout("old desc")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+ )
+ .run();
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ description = "new desc"
+ version = "0.0.1"
+ authors = []
+ "#,
+ );
+
+ p.cargo("run -v")
+ .with_stdout("new desc")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.0.1 ([CWD]): the metadata changed
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_rebuild_when_rename_dir() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [workspace]
+
+ [dependencies]
+ foo = { path = "foo" }
+ "#,
+ )
+ .file("src/_unused.rs", "")
+ .file("build.rs", "fn main() {}")
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.0.1"))
+ .file("foo/src/lib.rs", "")
+ .file("foo/build.rs", "fn main() {}")
+ .build();
+
+ // Make sure the most recently modified file is `src/lib.rs`, not
+ // `Cargo.toml`, to expose a historical bug where we forgot to strip
+ // `Cargo.toml` from the path when looking for the package root.
+ cargo_test_support::sleep_ms(100);
+ fs::write(p.root().join("src/lib.rs"), "").unwrap();
+
+ p.cargo("build").run();
+ let mut new = p.root();
+ new.pop();
+ new.push("bar");
+ fs::rename(p.root(), &new).unwrap();
+
+ p.cargo("build")
+ .cwd(&new)
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+}
+
+#[cargo_test]
+fn unused_optional_dep() {
+ Package::new("registry1", "0.1.0").publish();
+ Package::new("registry2", "0.1.0").publish();
+ Package::new("registry3", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "p"
+ authors = []
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+ baz = { path = "baz" }
+ registry1 = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.1"
+ authors = []
+
+ [dev-dependencies]
+ registry2 = "*"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.1"
+ authors = []
+
+ [dependencies]
+ registry3 = { version = "*", optional = true }
+ "#,
+ )
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn path_dev_dep_registry_updates() {
+ Package::new("registry1", "0.1.0").publish();
+ Package::new("registry2", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "p"
+ authors = []
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.1"
+ authors = []
+
+ [dependencies]
+ registry1 = "*"
+
+ [dev-dependencies]
+ baz = { path = "../baz"}
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.1"
+ authors = []
+
+ [dependencies]
+ registry2 = "*"
+ "#,
+ )
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn change_panic_mode() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ['bar', 'baz']
+ [profile.dev]
+ panic = 'abort'
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", "")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.1"
+ authors = []
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ bar = { path = '../bar' }
+ "#,
+ )
+ .file("baz/src/lib.rs", "extern crate bar;")
+ .build();
+
+ p.cargo("build -p bar").run();
+ p.cargo("build -p baz").run();
+}
+
+#[cargo_test]
+fn dont_rebuild_based_on_plugins() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.1"
+
+ [workspace]
+ members = ['baz']
+
+ [dependencies]
+ proc-macro-thing = { path = 'proc-macro-thing' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "proc-macro-thing/Cargo.toml",
+ r#"
+ [package]
+ name = "proc-macro-thing"
+ version = "0.1.1"
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ qux = { path = '../qux' }
+ "#,
+ )
+ .file("proc-macro-thing/src/lib.rs", "")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.1"
+
+ [dependencies]
+ qux = { path = '../qux' }
+ "#,
+ )
+ .file("baz/src/main.rs", "fn main() {}")
+ .file("qux/Cargo.toml", &basic_manifest("qux", "0.1.1"))
+ .file("qux/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("build -p baz").run();
+ p.cargo("build").with_stderr("[FINISHED] [..]\n").run();
+ p.cargo("build -p bar")
+ .with_stderr("[FINISHED] [..]\n")
+ .run();
+}
+
+#[cargo_test]
+fn reuse_workspace_lib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.1"
+
+ [workspace]
+
+ [dependencies]
+ baz = { path = 'baz' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.1"))
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("test -p baz -v --no-run")
+ .with_stderr(
+ "\
+[COMPILING] baz v0.1.1 ([..])
+[RUNNING] `rustc[..] --test [..]`
+[FINISHED] [..]
+[EXECUTABLE] `[..]/target/debug/deps/baz-[..][EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn reuse_shared_build_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ shared = {path = "shared"}
+
+ [workspace]
+ members = ["shared", "bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("shared/Cargo.toml", &basic_manifest("shared", "0.0.1"))
+ .file("shared/src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+
+ [build-dependencies]
+ shared = { path = "../shared" }
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file("bar/build.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --workspace").run();
+ // This should not recompile!
+ p.cargo("build -p foo -v")
+ .with_stderr(
+ "\
+[FRESH] shared [..]
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn changing_rustflags_is_cached() {
+ let p = project().file("src/lib.rs", "").build();
+
+ // This isn't ever cached; we always have to recompile.
+ p.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+ p.cargo("build -v")
+ .env("RUSTFLAGS", "-C linker=cc")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.0.1 ([..]): the rustflags changed
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.0.1 ([..]): the rustflags changed
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+ p.cargo("build -v")
+ .env("RUSTFLAGS", "-C linker=cc")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.0.1 ([..]): the rustflags changed
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_dependency_mtime_does_not_rebuild() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -Z mtime-on-use")
+ .masquerade_as_nightly_cargo(&["mtime-on-use"])
+ .env("RUSTFLAGS", "-C linker=cc")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 ([..])
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+ // This does not make new files, but it does update the mtime of the dependency.
+ p.cargo("build -p bar -Z mtime-on-use")
+ .masquerade_as_nightly_cargo(&["mtime-on-use"])
+ .env("RUSTFLAGS", "-C linker=cc")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+ // This should not recompile!
+ p.cargo("build -Z mtime-on-use")
+ .masquerade_as_nightly_cargo(&["mtime-on-use"])
+ .env("RUSTFLAGS", "-C linker=cc")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+}
+
+fn fingerprint_cleaner(mut dir: PathBuf, timestamp: filetime::FileTime) {
+ // Cargo is experimenting with letting outside projects develop some
+ // limited forms of GC for target_dir. This is one of the forms.
+ // Specifically, Cargo is updating the mtime of a file in
+ // target/profile/.fingerprint each time it uses the fingerprint.
+ // So a cleaner can remove files associated with a fingerprint
+ // if all the files in the fingerprint's folder are older than a timestamp, without
+ // affecting any builds that happened since that timestamp.
+ let mut cleaned = false;
+ dir.push(".fingerprint");
+ for fing in fs::read_dir(&dir).unwrap() {
+ let fing = fing.unwrap();
+
+ let outdated = |f: io::Result<fs::DirEntry>| {
+ filetime::FileTime::from_last_modification_time(&f.unwrap().metadata().unwrap())
+ <= timestamp
+ };
+ if fs::read_dir(fing.path()).unwrap().all(outdated) {
+ fs::remove_dir_all(fing.path()).unwrap();
+ println!("remove: {:?}", fing.path());
+ // A real cleaner would remove the big files in `deps` and `build` as well,
+ // but the fingerprint directory is sufficient for our tests.
+ cleaned = true;
+ }
+ }
+ assert!(
+ cleaned,
+ "called fingerprint_cleaner, but there was nothing to remove"
+ );
+}
+
+#[cargo_test]
+fn fingerprint_cleaner_does_not_rebuild() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [features]
+ a = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -Z mtime-on-use")
+ .masquerade_as_nightly_cargo(&["mtime-on-use"])
+ .run();
+ p.cargo("build -Z mtime-on-use --features a")
+ .masquerade_as_nightly_cargo(&["mtime-on-use"])
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+ let timestamp = filetime::FileTime::from_system_time(SystemTime::now());
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+ // This does not make new files, but it does update the mtime.
+ p.cargo("build -Z mtime-on-use --features a")
+ .masquerade_as_nightly_cargo(&["mtime-on-use"])
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+ fingerprint_cleaner(p.target_debug_dir(), timestamp);
+ // This should not recompile!
+ p.cargo("build -Z mtime-on-use --features a")
+ .masquerade_as_nightly_cargo(&["mtime-on-use"])
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+ // But this one should have been cleaned and so needs a rebuild
+ p.cargo("build -Z mtime-on-use")
+ .masquerade_as_nightly_cargo(&["mtime-on-use"])
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn reuse_panic_build_dep_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [build-dependencies]
+ bar = { path = "bar" }
+
+ [dev-dependencies]
+ bar = { path = "bar" }
+
+ [profile.dev]
+ panic = "abort"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ // Check that `bar` is not built twice. It is only needed once (without `panic`).
+ p.cargo("test --lib --no-run -v")
+ .with_stderr(
+ "\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build [..]
+[RUNNING] [..]build-script-build`
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--test[..]
+[FINISHED] [..]
+[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn reuse_panic_pm() {
+ // foo(panic) -> bar(panic)
+ // somepm(nopanic) -> bar(nopanic)
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = { path = "bar" }
+ somepm = { path = "somepm" }
+
+ [profile.dev]
+ panic = "abort"
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .file(
+ "somepm/Cargo.toml",
+ r#"
+ [package]
+ name = "somepm"
+ version = "0.0.1"
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ bar = { path = "../bar" }
+ "#,
+ )
+ .file("somepm/src/lib.rs", "extern crate bar;")
+ .build();
+
+ // bar is built once without panic (for proc-macro) and once with (for the
+ // normal dependency).
+ p.cargo("build -v")
+ .with_stderr_unordered(
+ "\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C debuginfo=2 [..]
+[COMPILING] somepm [..]
+[RUNNING] `rustc --crate-name somepm [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C panic=abort[..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bust_patched_dep() {
+ Package::new("registry1", "0.1.0").publish();
+ Package::new("registry2", "0.1.0")
+ .dep("registry1", "0.1.0")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ registry2 = "0.1.0"
+
+ [patch.crates-io]
+ registry1 = { path = "reg1new" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("reg1new/Cargo.toml", &basic_manifest("registry1", "0.1.0"))
+ .file("reg1new/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+
+ p.change_file("reg1new/src/lib.rs", "");
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[DIRTY] registry1 v0.1.0 ([..]): the file `reg1new/src/lib.rs` has changed ([..])
+[COMPILING] registry1 v0.1.0 ([..])
+[RUNNING] `rustc [..]
+[DIRTY] registry2 v0.1.0: the dependency registry1 was rebuilt
+[COMPILING] registry2 v0.1.0
+[RUNNING] `rustc [..]
+[DIRTY] foo v0.0.1 ([..]): the dependency registry2 was rebuilt
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[FRESH] registry1 v0.1.0 ([..])
+[FRESH] registry2 v0.1.0
+[FRESH] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rebuild_on_mid_build_file_modification() {
+ let server = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = server.local_addr().unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["root", "proc_macro_dep"]
+ "#,
+ )
+ .file(
+ "root/Cargo.toml",
+ r#"
+ [package]
+ name = "root"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ proc_macro_dep = { path = "../proc_macro_dep" }
+ "#,
+ )
+ .file(
+ "root/src/lib.rs",
+ r#"
+ #[macro_use]
+ extern crate proc_macro_dep;
+
+ #[derive(Noop)]
+ pub struct X;
+ "#,
+ )
+ .file(
+ "proc_macro_dep/Cargo.toml",
+ r#"
+ [package]
+ name = "proc_macro_dep"
+ version = "0.1.0"
+ authors = []
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "proc_macro_dep/src/lib.rs",
+ &format!(
+ r#"
+ extern crate proc_macro;
+
+ use std::io::Read;
+ use std::net::TcpStream;
+ use proc_macro::TokenStream;
+
+ #[proc_macro_derive(Noop)]
+ pub fn noop(_input: TokenStream) -> TokenStream {{
+ let mut stream = TcpStream::connect("{}").unwrap();
+ let mut v = Vec::new();
+ stream.read_to_end(&mut v).unwrap();
+ "".parse().unwrap()
+ }}
+ "#,
+ addr
+ ),
+ )
+ .build();
+ let root = p.root();
+
+ let t = thread::spawn(move || {
+ let socket = server.accept().unwrap().0;
+ sleep_ms(1000);
+ let mut file = OpenOptions::new()
+ .write(true)
+ .append(true)
+ .open(root.join("root/src/lib.rs"))
+ .unwrap();
+ writeln!(file, "// modified").expect("Failed to append to root sources");
+ drop(file);
+ drop(socket);
+ drop(server.accept().unwrap());
+ });
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[COMPILING] proc_macro_dep v0.1.0 ([..]/proc_macro_dep)
+[COMPILING] root v0.1.0 ([..]/root)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[FRESH] proc_macro_dep v0.1.0 ([..]/proc_macro_dep)
+[DIRTY] root v0.1.0 ([..]/root): the file `root/src/lib.rs` has changed ([..])
+[COMPILING] root v0.1.0 ([..]/root)
+[RUNNING] `rustc [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ t.join().ok().unwrap();
+}
+
+#[cargo_test]
+fn dirty_both_lib_and_test() {
+ // This tests that all artifacts that depend on the results of a build
+ // script will get rebuilt when the build script reruns, even for separate
+ // commands. It does the following:
+ //
+ // 1. Project "foo" has a build script which will compile a small
+ // staticlib to link against. Normally this would use the `cc` crate,
+ // but here we just use rustc to avoid the `cc` dependency.
+ // 2. Build the library.
+ // 3. Build the unit test. The staticlib intentionally has a bad value.
+ // 4. Rewrite the staticlib with the correct value.
+ // 5. Build the library again.
+ // 6. Build the unit test. This should recompile.
+
+ let slib = |n| {
+ format!(
+ r#"
+ #[no_mangle]
+ pub extern "C" fn doit() -> i32 {{
+ return {};
+ }}
+ "#,
+ n
+ )
+ };
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ extern "C" {
+ fn doit() -> i32;
+ }
+
+ #[test]
+ fn t1() {
+ assert_eq!(unsafe { doit() }, 1, "doit assert failure");
+ }
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ use std::path::PathBuf;
+ use std::process::Command;
+
+ fn main() {
+ let rustc = env::var_os("RUSTC").unwrap();
+ let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
+ assert!(
+ Command::new(rustc)
+ .args(&[
+ "--crate-type=staticlib",
+ "--out-dir",
+ out_dir.to_str().unwrap(),
+ "slib.rs"
+ ])
+ .status()
+ .unwrap()
+ .success(),
+ "slib build failed"
+ );
+ println!("cargo:rustc-link-lib=slib");
+ println!("cargo:rustc-link-search={}", out_dir.display());
+ }
+ "#,
+ )
+ .file("slib.rs", &slib(2))
+ .build();
+
+ p.cargo("build").run();
+
+ // 2 != 1
+ p.cargo("test --lib")
+ .with_status(101)
+ .with_stdout_contains("[..]doit assert failure[..]")
+ .run();
+
+ if is_coarse_mtime() {
+ // #5918
+ sleep_ms(1000);
+ }
+ // Fix the mistake.
+ p.change_file("slib.rs", &slib(1));
+
+ p.cargo("build").run();
+ // This should recompile with the new static lib, and the test should pass.
+ p.cargo("test --lib").run();
+}
+
+#[cargo_test]
+fn script_fails_stay_dirty() {
+    // Check that if a script is aborted (such as by hitting Ctrl-C), it will re-run.
+ // Steps:
+ // 1. Build to establish fingerprints.
+ // 2. Make a change that triggers the build script to re-run. Abort the
+ // script while it is running.
+ // 3. Run the build again and make sure it re-runs the script.
+ let p = project()
+ .file(
+ "build.rs",
+ r#"
+ mod helper;
+ fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+ helper::doit();
+ }
+ "#,
+ )
+ .file("helper.rs", "pub fn doit() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ if is_coarse_mtime() {
+ sleep_ms(1000);
+ }
+ p.change_file("helper.rs", r#"pub fn doit() {panic!("Crash!");}"#);
+ p.cargo("build")
+ .with_stderr_contains("[..]Crash![..]")
+ .with_status(101)
+ .run();
+ // There was a bug where this second call would be "fresh".
+ p.cargo("build")
+ .with_stderr_contains("[..]Crash![..]")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn simulated_docker_deps_stay_cached() {
+    // Test what happens in Docker, where file mtime nanoseconds are zeroed out.
+ Package::new("regdep", "1.0.0").publish();
+ Package::new("regdep_old_style", "1.0.0")
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .publish();
+ Package::new("regdep_env", "1.0.0")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-env-changed=SOMEVAR");
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+ Package::new("regdep_rerun", "1.0.0")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ pathdep = { path = "pathdep" }
+ regdep = "1.0"
+ regdep_old_style = "1.0"
+ regdep_env = "1.0"
+ regdep_rerun = "1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate pathdep;
+ extern crate regdep;
+ extern crate regdep_old_style;
+ extern crate regdep_env;
+ extern crate regdep_rerun;
+ ",
+ )
+ .file("build.rs", "fn main() {}")
+ .file("pathdep/Cargo.toml", &basic_manifest("pathdep", "1.0.0"))
+ .file("pathdep/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+
+ let already_zero = {
+ // This happens on HFS with 1-second timestamp resolution,
+ // or other filesystems where it just so happens to write exactly on a
+ // 1-second boundary.
+ let metadata = fs::metadata(p.root().join("src/lib.rs")).unwrap();
+ let mtime = FileTime::from_last_modification_time(&metadata);
+ mtime.nanoseconds() == 0
+ };
+
+ // Recursively remove `nanoseconds` from every path.
+ fn zeropath(path: &Path) {
+ for entry in walkdir::WalkDir::new(path)
+ .into_iter()
+ .filter_map(|e| e.ok())
+ {
+ let metadata = fs::metadata(entry.path()).unwrap();
+ let mtime = metadata.modified().unwrap();
+ let mtime_duration = mtime.duration_since(SystemTime::UNIX_EPOCH).unwrap();
+ let trunc_mtime = FileTime::from_unix_time(mtime_duration.as_secs() as i64, 0);
+ let atime = metadata.accessed().unwrap();
+ let atime_duration = atime.duration_since(SystemTime::UNIX_EPOCH).unwrap();
+ let trunc_atime = FileTime::from_unix_time(atime_duration.as_secs() as i64, 0);
+ if let Err(e) = filetime::set_file_times(entry.path(), trunc_atime, trunc_mtime) {
+ // Windows doesn't allow changing filetimes on some things
+ // (directories, other random things I'm not sure why). Just
+ // ignore them.
+ if e.kind() == std::io::ErrorKind::PermissionDenied {
+ println!("PermissionDenied filetime on {:?}", entry.path());
+ } else {
+ panic!("FileTime error on {:?}: {:?}", entry.path(), e);
+ }
+ }
+ }
+ }
+ zeropath(&p.root());
+ zeropath(&paths::home());
+
+ if already_zero {
+ println!("already zero");
+ // If it was already truncated, then everything stays fresh.
+ p.cargo("build -v")
+ .with_stderr_unordered(
+ "\
+[FRESH] pathdep [..]
+[FRESH] regdep [..]
+[FRESH] regdep_env [..]
+[FRESH] regdep_old_style [..]
+[FRESH] regdep_rerun [..]
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ } else {
+ println!("not already zero");
+ // It is not ideal that `foo` gets recompiled, but that is the current
+ // behavior. Currently mtimes are ignored for registry deps.
+ //
+ // Note that this behavior is due to the fact that `foo` has a build
+ // script in "old" mode where it doesn't print `rerun-if-*`. In this
+ // mode we use `Precalculated` to fingerprint a path dependency, where
+ // `Precalculated` is an opaque string which has the most recent mtime
+ // in it. It differs between builds because one has nsec=0 and the other
+ // likely has a nonzero nsec. Hence, the rebuild.
+ p.cargo("build -v")
+ .with_stderr_unordered(
+ "\
+[FRESH] pathdep [..]
+[FRESH] regdep [..]
+[FRESH] regdep_env [..]
+[FRESH] regdep_old_style [..]
+[FRESH] regdep_rerun [..]
+[DIRTY] foo [..]: the precalculated components changed
+[COMPILING] foo [..]
+[RUNNING] [..]/foo-[..]/build-script-build[..]
+[RUNNING] `rustc --crate-name foo[..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ }
+}
+
+#[cargo_test]
+fn metadata_change_invalidates() {
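+    // Changing package metadata in Cargo.toml (authors, description, homepage,
+    // repository) should invalidate the fingerprint and trigger a rebuild,
+    // while a subsequent unchanged build stays fresh.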
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+
+ for attr in &[
+ "authors = [\"foo\"]",
+ "description = \"desc\"",
+ "homepage = \"https://example.com\"",
+ "repository =\"https://example.com\"",
+ ] {
+ let mut file = OpenOptions::new()
+ .write(true)
+ .append(true)
+ .open(p.root().join("Cargo.toml"))
+ .unwrap();
+ writeln!(file, "{}", attr).unwrap();
+ p.cargo("build")
+ .with_stderr_contains("[COMPILING] foo [..]")
+ .run();
+ }
+ p.cargo("build -v")
+ .with_stderr_contains("[FRESH] foo[..]")
+ .run();
+ assert_eq!(p.glob("target/debug/deps/libfoo-*.rlib").count(), 1);
+}
+
+#[cargo_test]
+fn edition_change_invalidates() {
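+    // Changing the package edition, or the per-target edition under `[lib]`,
+    // should trigger a rebuild.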
+ const MANIFEST: &str = r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#;
+ let p = project()
+ .file("Cargo.toml", MANIFEST)
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("build").run();
+ p.change_file("Cargo.toml", &format!("{}edition = \"2018\"", MANIFEST));
+ p.cargo("build")
+ .with_stderr_contains("[COMPILING] foo [..]")
+ .run();
+ p.change_file(
+ "Cargo.toml",
+ &format!(
+ r#"{}edition = "2018"
+ [lib]
+ edition = "2015"
+ "#,
+ MANIFEST
+ ),
+ );
+ p.cargo("build")
+ .with_stderr_contains("[COMPILING] foo [..]")
+ .run();
+ p.cargo("build -v")
+ .with_stderr_contains("[FRESH] foo[..]")
+ .run();
+ assert_eq!(p.glob("target/debug/deps/libfoo-*.rlib").count(), 1);
+}
+
+#[cargo_test]
+fn rename_with_path_deps() {
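+    // Renaming the project's root directory should not rebuild anything
+    // (and should not crash).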
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ a = { path = 'a' }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate a; pub fn foo() { a::foo(); }")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ b = { path = 'b' }
+ "#,
+ )
+ .file("a/src/lib.rs", "extern crate b; pub fn foo() { b::foo() }")
+ .file(
+ "a/b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+ "#,
+ )
+ .file("a/b/src/lib.rs", "pub fn foo() { }");
+ let p = p.build();
+
+ p.cargo("build").run();
+
+    // Now rename the root directory and rerun `cargo build`. Not only should we
+    // not build anything, but we also shouldn't crash.
+ let mut new = p.root();
+ new.pop();
+ new.push("foo2");
+
+ fs::rename(p.root(), &new).unwrap();
+
+ p.cargo("build")
+ .cwd(&new)
+ .with_stderr("[FINISHED] [..]")
+ .run();
+}
+
+#[cargo_test]
+fn move_target_directory_with_path_deps() {
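+    // Moving the target directory and pointing `CARGO_TARGET_DIR` at the new
+    // location should keep everything fresh.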
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "extern crate a; pub use a::print_msg;")
+ .file(
+ "a/build.rs",
+ r###"
+ use std::env;
+ use std::fs;
+ use std::path::Path;
+
+ fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+ let out_dir = env::var("OUT_DIR").unwrap();
+ let dest_path = Path::new(&out_dir).join("hello.rs");
+ fs::write(&dest_path, r#"
+ pub fn message() -> &'static str {
+ "Hello, World!"
+ }
+ "#).unwrap();
+ }
+ "###,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
+ include!(concat!(env!("OUT_DIR"), "/hello.rs"));
+ pub fn print_msg() { message(); }
+ "#,
+ );
+ let p = p.build();
+
+ let mut parent = p.root();
+ parent.pop();
+
+ p.cargo("build").run();
+
+ let new_target = p.root().join("target2");
+ fs::rename(p.root().join("target"), &new_target).unwrap();
+
+ p.cargo("build")
+ .env("CARGO_TARGET_DIR", &new_target)
+ .with_stderr("[FINISHED] [..]")
+ .run();
+}
+
+#[cargo_test]
+fn rerun_if_changes() {
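+    // `cargo:rerun-if-env-changed` should rerun the build script whenever one
+    // of the registered environment variables changes. Note that `BAR` is only
+    // registered while `FOO` is set.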
+ let p = project()
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-env-changed=FOO");
+ if std::env::var("FOO").is_ok() {
+ println!("cargo:rerun-if-env-changed=BAR");
+ }
+ }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
+
+ p.cargo("build -v")
+ .env("FOO", "1")
+ .with_stderr(
+ "\
+[DIRTY] foo [..]: the env variable FOO changed
+[COMPILING] foo [..]
+[RUNNING] `[..]build-script-build`
+[RUNNING] `rustc [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("build")
+ .env("FOO", "1")
+ .with_stderr("[FINISHED] [..]")
+ .run();
+
+ p.cargo("build -v")
+ .env("FOO", "1")
+ .env("BAR", "1")
+ .with_stderr(
+ "\
+[DIRTY] foo [..]: the env variable BAR changed
+[COMPILING] foo [..]
+[RUNNING] `[..]build-script-build`
+[RUNNING] `rustc [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("build")
+ .env("FOO", "1")
+ .env("BAR", "1")
+ .with_stderr("[FINISHED] [..]")
+ .run();
+
+ p.cargo("build -v")
+ .env("BAR", "2")
+ .with_stderr(
+ "\
+[DIRTY] foo [..]: the env variable FOO changed
+[COMPILING] foo [..]
+[RUNNING] `[..]build-script-build`
+[RUNNING] `rustc [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("build")
+ .env("BAR", "2")
+ .with_stderr("[FINISHED] [..]")
+ .run();
+}
+
+#[cargo_test]
+fn channel_shares_filenames() {
+ // Test that different "nightly" releases use the same output filename.
+
+ // Create separate rustc binaries to emulate running different toolchains.
+ let nightly1 = format!(
+ "\
+rustc 1.44.0-nightly (38114ff16 2020-03-21)
+binary: rustc
+commit-hash: 38114ff16e7856f98b2b4be7ab4cd29b38bed59a
+commit-date: 2020-03-21
+host: {}
+release: 1.44.0-nightly
+LLVM version: 9.0
+",
+ rustc_host()
+ );
+
+ let nightly2 = format!(
+ "\
+rustc 1.44.0-nightly (a5b09d354 2020-03-31)
+binary: rustc
+commit-hash: a5b09d35473615e7142f5570f5c5fad0caf68bd2
+commit-date: 2020-03-31
+host: {}
+release: 1.44.0-nightly
+LLVM version: 9.0
+",
+ rustc_host()
+ );
+
+ let beta1 = format!(
+ "\
+rustc 1.43.0-beta.3 (4c587bbda 2020-03-25)
+binary: rustc
+commit-hash: 4c587bbda04ab55aaf56feab11dfdfe387a85d7a
+commit-date: 2020-03-25
+host: {}
+release: 1.43.0-beta.3
+LLVM version: 9.0
+",
+ rustc_host()
+ );
+
+ let beta2 = format!(
+ "\
+rustc 1.42.0-beta.5 (4e1c5f0e9 2020-02-28)
+binary: rustc
+commit-hash: 4e1c5f0e9769a588b91c977e3d81e140209ef3a2
+commit-date: 2020-02-28
+host: {}
+release: 1.42.0-beta.5
+LLVM version: 9.0
+",
+ rustc_host()
+ );
+
+ let stable1 = format!(
+ "\
+rustc 1.42.0 (b8cedc004 2020-03-09)
+binary: rustc
+commit-hash: b8cedc00407a4c56a3bda1ed605c6fc166655447
+commit-date: 2020-03-09
+host: {}
+release: 1.42.0
+LLVM version: 9.0
+",
+ rustc_host()
+ );
+
+ let stable2 = format!(
+ "\
+rustc 1.41.1 (f3e1a954d 2020-02-24)
+binary: rustc
+commit-hash: f3e1a954d2ead4e2fc197c7da7d71e6c61bad196
+commit-date: 2020-02-24
+host: {}
+release: 1.41.1
+LLVM version: 9.0
+",
+ rustc_host()
+ );
+
+ let compiler = project()
+ .at("compiler")
+ .file("Cargo.toml", &basic_manifest("compiler", "0.1.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if std::env::args_os().any(|a| a == "-vV") {
+ print!("{}", env!("FUNKY_VERSION_TEST"));
+ return;
+ }
+ let mut cmd = std::process::Command::new("rustc");
+ cmd.args(std::env::args_os().skip(1));
+ assert!(cmd.status().unwrap().success());
+ }
+ "#,
+ )
+ .build();
+
+ let makeit = |version, vv| {
+ // Force a rebuild.
+ compiler.target_debug_dir().join("deps").rm_rf();
+ compiler.cargo("build").env("FUNKY_VERSION_TEST", vv).run();
+ fs::rename(compiler.bin("compiler"), compiler.bin(version)).unwrap();
+ };
+ makeit("nightly1", nightly1);
+ makeit("nightly2", nightly2);
+ makeit("beta1", beta1);
+ makeit("beta2", beta2);
+ makeit("stable1", stable1);
+ makeit("stable2", stable2);
+
+ // Run `cargo check` with different rustc versions to observe its behavior.
+ let p = project().file("src/lib.rs", "").build();
+
+ // Runs `cargo check` and returns the rmeta filename created.
+ // Checks that the freshness matches the given value.
+ let check = |version, fresh| -> String {
+ let output = p
+ .cargo("check --message-format=json")
+ .env("RUSTC", compiler.bin(version))
+ .exec_with_output()
+ .unwrap();
+ // Collect the filenames generated.
+ let mut artifacts: Vec<_> = std::str::from_utf8(&output.stdout)
+ .unwrap()
+ .lines()
+ .filter_map(|line| {
+ let value: serde_json::Value = serde_json::from_str(line).unwrap();
+ if value["reason"].as_str().unwrap() == "compiler-artifact" {
+ assert_eq!(value["fresh"].as_bool().unwrap(), fresh);
+ let filenames = value["filenames"].as_array().unwrap();
+ assert_eq!(filenames.len(), 1);
+ Some(filenames[0].to_string())
+ } else {
+ None
+ }
+ })
+ .collect();
+ // Should only generate one rmeta file.
+ assert_eq!(artifacts.len(), 1);
+ artifacts.pop().unwrap()
+ };
+
+ let nightly1_name = check("nightly1", false);
+ assert_eq!(check("nightly1", true), nightly1_name);
+ assert_eq!(check("nightly2", false), nightly1_name); // same as before
+ assert_eq!(check("nightly2", true), nightly1_name);
+ // Should rebuild going back to nightly1.
+ assert_eq!(check("nightly1", false), nightly1_name);
+
+ let beta1_name = check("beta1", false);
+ assert_ne!(beta1_name, nightly1_name);
+ assert_eq!(check("beta1", true), beta1_name);
+ assert_eq!(check("beta2", false), beta1_name); // same as before
+ assert_eq!(check("beta2", true), beta1_name);
+ // Should rebuild going back to beta1.
+ assert_eq!(check("beta1", false), beta1_name);
+
+ let stable1_name = check("stable1", false);
+ assert_ne!(stable1_name, nightly1_name);
+ assert_ne!(stable1_name, beta1_name);
+ let stable2_name = check("stable2", false);
+ assert_ne!(stable1_name, stable2_name);
+ // Check everything is fresh.
+ assert_eq!(check("stable1", true), stable1_name);
+ assert_eq!(check("stable2", true), stable2_name);
+ assert_eq!(check("beta1", true), beta1_name);
+ assert_eq!(check("nightly1", true), nightly1_name);
+}
+
+#[cargo_test]
+fn linking_interrupted() {
+    // Interrupting during the linking phase shouldn't leave the test executable marked as "fresh".
+
+ // This is used to detect when linking starts, then to pause the linker so
+ // that the test can kill cargo.
+ let link_listener = TcpListener::bind("127.0.0.1:0").unwrap();
+ let link_addr = link_listener.local_addr().unwrap();
+
+ // This is used to detect when rustc exits.
+ let rustc_listener = TcpListener::bind("127.0.0.1:0").unwrap();
+ let rustc_addr = rustc_listener.local_addr().unwrap();
+
+ // Create a linker that we can interrupt.
+ let linker = project()
+ .at("linker")
+ .file("Cargo.toml", &basic_manifest("linker", "1.0.0"))
+ .file(
+ "src/main.rs",
+ &r#"
+ fn main() {
+ // Figure out the output filename.
+ let output = match std::env::args().find(|a| a.starts_with("/OUT:")) {
+ Some(s) => s[5..].to_string(),
+ None => {
+ let mut args = std::env::args();
+ loop {
+ if args.next().unwrap() == "-o" {
+ break;
+ }
+ }
+ args.next().unwrap()
+ }
+ };
+ std::fs::remove_file(&output).unwrap();
+ std::fs::write(&output, "").unwrap();
+ // Tell the test that we are ready to be interrupted.
+ let mut socket = std::net::TcpStream::connect("__ADDR__").unwrap();
+ // Wait for the test to kill us.
+ std::thread::sleep(std::time::Duration::new(60, 0));
+ }
+ "#
+ .replace("__ADDR__", &link_addr.to_string()),
+ )
+ .build();
+ linker.cargo("build").run();
+
+ // Create a wrapper around rustc that will tell us when rustc is finished.
+ let rustc = project()
+ .at("rustc-waiter")
+ .file("Cargo.toml", &basic_manifest("rustc-waiter", "1.0.0"))
+ .file(
+ "src/main.rs",
+ &r#"
+ fn main() {
+ let mut conn = None;
+ // Check for a normal build (not -vV or --print).
+ if std::env::args().any(|arg| arg == "t1") {
+ // Tell the test that rustc has started.
+ conn = Some(std::net::TcpStream::connect("__ADDR__").unwrap());
+ }
+ let status = std::process::Command::new("rustc")
+ .args(std::env::args().skip(1))
+ .status()
+ .expect("rustc to run");
+ std::process::exit(status.code().unwrap_or(1));
+ }
+ "#
+ .replace("__ADDR__", &rustc_addr.to_string()),
+ )
+ .build();
+ rustc.cargo("build").run();
+
+ // Build it once so that the fingerprint gets saved to disk.
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("tests/t1.rs", "")
+ .build();
+ p.cargo("test --test t1 --no-run").run();
+
+ // Make a change, start a build, then interrupt it.
+ p.change_file("src/lib.rs", "// modified");
+ let linker_env = format!("CARGO_TARGET_{}_LINKER", rustc_host_env());
+ // NOTE: This assumes that the paths to the linker or rustc are not in the
+ // fingerprint. But maybe they should be?
+ let mut cmd = p
+ .cargo("test --test t1 --no-run")
+ .env(&linker_env, linker.bin("linker"))
+ .env("RUSTC", rustc.bin("rustc-waiter"))
+ .build_command();
+ let mut child = cmd
+ .stdout(Stdio::null())
+ .stderr(Stdio::null())
+ .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1")
+ .spawn()
+ .unwrap();
+ // Wait for rustc to start.
+ let mut rustc_conn = rustc_listener.accept().unwrap().0;
+ // Wait for linking to start.
+ drop(link_listener.accept().unwrap());
+
+ // Interrupt the child.
+ death::ctrl_c(&mut child);
+ assert!(!child.wait().unwrap().success());
+ // Wait for rustc to exit. If we don't wait, then the command below could
+ // start while rustc is still being torn down.
+ let mut buf = [0];
+ drop(rustc_conn.read_exact(&mut buf));
+
+ // Build again, shouldn't be fresh.
+ p.cargo("test --test t1 -v")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.0.1 ([..]): the config settings changed
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..]
+[RUNNING] `rustc --crate-name t1 [..]
+[FINISHED] [..]
+[RUNNING] `[..]target/debug/deps/t1-[..][EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+#[cfg_attr(
+ not(all(target_arch = "x86_64", target_os = "windows", target_env = "msvc")),
+ ignore
+)]
+fn lld_is_fresh() {
+    // Check for a bug when using the lld linker: a dylib build should remain fresh.
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [target.x86_64-pc-windows-msvc]
+ linker = "rust-lld"
+ rustflags = ["-C", "link-arg=-fuse-ld=lld"]
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [lib]
+ crate-type = ["dylib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("build -v")
+ .with_stderr("[FRESH] foo [..]\n[FINISHED] [..]")
+ .run();
+}
+
+#[cargo_test]
+fn env_in_code_causes_rebuild() {
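+    // Referencing an environment variable with `option_env!` in source code
+    // should cause a rebuild whenever that variable changes.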
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ println!("{:?}", option_env!("FOO"));
+ println!("{:?}", option_env!("FOO\nBAR"));
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").env_remove("FOO").run();
+ p.cargo("build")
+ .env_remove("FOO")
+ .with_stderr("[FINISHED] [..]")
+ .run();
+ p.cargo("build -v")
+ .env("FOO", "bar")
+ .with_stderr(
+ "\
+[DIRTY] foo [..]: the environment variable FOO changed
+[COMPILING][..]
+[RUNNING] `rustc [..]
+[FINISHED][..]",
+ )
+ .run();
+ p.cargo("build")
+ .env("FOO", "bar")
+ .with_stderr("[FINISHED][..]")
+ .run();
+ p.cargo("build -v")
+ .env("FOO", "baz")
+ .with_stderr(
+ "\
+[DIRTY] foo [..]: the environment variable FOO changed
+[COMPILING][..]
+[RUNNING] `rustc [..]
+[FINISHED][..]",
+ )
+ .run();
+ p.cargo("build")
+ .env("FOO", "baz")
+ .with_stderr("[FINISHED][..]")
+ .run();
+ p.cargo("build -v")
+ .env_remove("FOO")
+ .with_stderr(
+ "\
+[DIRTY] foo [..]: the environment variable FOO changed
+[COMPILING][..]
+[RUNNING] `rustc [..]
+[FINISHED][..]",
+ )
+ .run();
+ p.cargo("build")
+ .env_remove("FOO")
+ .with_stderr("[FINISHED][..]")
+ .run();
+
+ let interesting = " #!$\nabc\r\\\t\u{8}\r\n";
+ p.cargo("build").env("FOO", interesting).run();
+ p.cargo("build")
+ .env("FOO", interesting)
+ .with_stderr("[FINISHED][..]")
+ .run();
+
+ p.cargo("build").env("FOO\nBAR", interesting).run();
+ p.cargo("build")
+ .env("FOO\nBAR", interesting)
+ .with_stderr("[FINISHED][..]")
+ .run();
+}
+
+#[cargo_test]
+fn env_build_script_no_rebuild() {
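+    // An environment variable set by the build script via `cargo:rustc-env`
+    // should not cause spurious rebuilds.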
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rustc-env=FOO=bar");
+ }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ println!("{:?}", env!("FOO"));
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn cargo_env_changes() {
+    // Checks that a change to the env var CARGO recorded in the dep-info file
+    // triggers a rebuild.
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ println!("{:?}", env!("CARGO"));
+ }
+ "#,
+ )
+ .build();
+
+ let cargo_exe = cargo_test_support::cargo_exe();
+ let other_cargo_path = p.root().join(cargo_exe.file_name().unwrap());
+ std::fs::hard_link(&cargo_exe, &other_cargo_path).unwrap();
+ let other_cargo = || {
+ let mut pb = cargo_test_support::process(&other_cargo_path);
+ pb.cwd(p.root());
+ cargo_test_support::execs().with_process_builder(pb)
+ };
+
+ p.cargo("check").run();
+ other_cargo()
+ .arg("check")
+ .arg("-v")
+ .with_stderr(
+ "\
+[DIRTY] foo v1.0.0 ([..]): the environment variable CARGO changed
+[CHECKING] foo [..]
+[RUNNING] `rustc [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // And just to confirm that without using env! it doesn't rebuild.
+ p.change_file("src/main.rs", "fn main() {}");
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ other_cargo()
+ .arg("check")
+ .arg("-v")
+ .with_stderr(
+ "\
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn changing_linker() {
+    // Changing the linker should trigger a rebuild.
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("build").run();
+ let linker_env = format!("CARGO_TARGET_{}_LINKER", rustc_host_env());
+ p.cargo("build --verbose")
+ .env(&linker_env, "nonexistent-linker")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] -C linker=nonexistent-linker [..]`
+[ERROR] [..]linker[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn verify_source_before_recompile() {
+ Package::new("bar", "0.1.0")
+ .file("src/lib.rs", "")
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("vendor --respect-source-config").run();
+ p.change_file(
+ ".cargo/config.toml",
+ r#"
+ [source.crates-io]
+ replace-with = 'vendor'
+
+ [source.vendor]
+ directory = 'vendor'
+ "#,
+ );
+ // Sanity check: vendoring works correctly.
+ p.cargo("check --verbose")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bar [CWD]/vendor/bar/src/lib.rs[..]")
+ .run();
+ // Now modify vendored crate.
+ p.change_file(
+ "vendor/bar/src/lib.rs",
+ r#"compile_error!("You shall not pass!");"#,
+ );
+    // Should ignore modified sources without any recompile.
+ p.cargo("check --verbose")
+ .with_stderr(
+ "\
+[FRESH] bar v0.1.0
+[FRESH] foo v0.1.0 ([CWD])
+[FINISHED] dev [..]
+",
+ )
+ .run();
+
+    // Set `RUSTFLAGS` to trigger a recompile.
+    //
+    // Cargo should refuse to build because of a checksum verification failure.
+    // Cargo shouldn't recompile the dependency `bar`.
+ p.cargo("check --verbose")
+ .env("RUSTFLAGS", "-W warnings")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: the listed checksum of `[CWD]/vendor/bar/src/lib.rs` has changed:
+expected: [..]
+actual: [..]
+
+directory sources are not [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/future_incompat_report.rs b/src/tools/cargo/tests/testsuite/future_incompat_report.rs
new file mode 100644
index 000000000..9f451a64c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/future_incompat_report.rs
@@ -0,0 +1,391 @@
+//! Tests for future-incompat-report messages
+//!
+//! Note that these tests use the -Zfuture-incompat-test flag for rustc.
+//! This causes rustc to treat *every* lint as future-incompatible.
+//! This is done because future-incompatible lints are inherently
+//! ephemeral, but we don't want to continually update these tests.
+//! So we pick an arbitrary lint that will likely remain stable
+//! over time.
+
+use super::config::write_config_toml;
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_manifest, project, Project};
+
+// Some code that triggers an arbitrary lint (unused_variables).
+// We use a special flag to force it to generate a report.
+const FUTURE_EXAMPLE: &'static str = "fn main() { let x = 1; }";
+// Some text that will be displayed when the lint fires.
+const FUTURE_OUTPUT: &'static str = "[..]unused_variables[..]";
+
+fn simple_project() -> Project {
+ project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.0.0"))
+ .file("src/main.rs", FUTURE_EXAMPLE)
+ .build()
+}
+
+#[cargo_test(
+ nightly,
+ reason = "-Zfuture-incompat-test requires nightly (permanently)"
+)]
+fn output_on_stable() {
+ let p = simple_project();
+
+ p.cargo("check")
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .with_stderr_contains(FUTURE_OUTPUT)
+ .with_stderr_contains("[..]cargo report[..]")
+ .run();
+}
+
+// This feature is stable and should not be gated.
+#[cargo_test]
+fn no_gate_future_incompat_report() {
+ let p = simple_project();
+
+ p.cargo("check --future-incompat-report")
+ .with_status(0)
+ .run();
+
+ p.cargo("report future-incompatibilities --id foo")
+ .with_stderr_contains("error: no reports are currently available")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test(
+ nightly,
+ reason = "-Zfuture-incompat-test requires nightly (permanently)"
+)]
+fn test_zero_future_incompat() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.0.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ // No note if --future-incompat-report is not specified.
+ p.cargo("check")
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("check --future-incompat-report")
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .with_stderr(
+ "\
+[FINISHED] [..]
+note: 0 dependencies had future-incompatible warnings
+",
+ )
+ .run();
+}
+
+#[cargo_test(
+ nightly,
+ reason = "-Zfuture-incompat-test requires nightly (permanently)"
+)]
+fn test_single_crate() {
+ let p = simple_project();
+
+ for command in &["build", "check", "rustc", "test"] {
+ let check_has_future_compat = || {
+ p.cargo(command)
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .with_stderr_contains(FUTURE_OUTPUT)
+ .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: foo v0.0.0 [..]")
+ .with_stderr_does_not_contain("[..]incompatibility[..]")
+ .run();
+ };
+
+ // Check that we show a message with no [future-incompat-report] config section
+ write_config_toml("");
+ check_has_future_compat();
+
+ // Check that we show a message with `frequency = "always"`
+ write_config_toml(
+ "\
+[future-incompat-report]
+frequency = 'always'
+",
+ );
+ check_has_future_compat();
+
+ // Check that we do not show a message with `frequency = "never"`
+ write_config_toml(
+ "\
+[future-incompat-report]
+frequency = 'never'
+",
+ );
+ p.cargo(command)
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .with_stderr_contains(FUTURE_OUTPUT)
+ .with_stderr_does_not_contain("[..]rejected[..]")
+ .with_stderr_does_not_contain("[..]incompatibility[..]")
+ .run();
+
+ // Check that passing `--future-incompat-report` overrides `frequency = 'never'`
+ p.cargo(command).arg("--future-incompat-report")
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .with_stderr_contains(FUTURE_OUTPUT)
+ .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: foo v0.0.0 [..]")
+ .with_stderr_contains(" - foo@0.0.0[..]")
+ .run();
+ }
+}
+
+#[cargo_test(
+ nightly,
+ reason = "-Zfuture-incompat-test requires nightly (permanently)"
+)]
+fn test_multi_crate() {
+ Package::new("first-dep", "0.0.1")
+ .file("src/lib.rs", FUTURE_EXAMPLE)
+ .publish();
+ Package::new("second-dep", "0.0.2")
+ .file("src/lib.rs", FUTURE_EXAMPLE)
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+
+ [dependencies]
+ first-dep = "*"
+ second-dep = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ for command in &["build", "check", "rustc", "test"] {
+ p.cargo(command)
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .with_stderr_does_not_contain(FUTURE_OUTPUT)
+ .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: first-dep v0.0.1, second-dep v0.0.2")
+ // Check that we don't have the 'triggers' message shown at the bottom of this loop,
+ // and that we don't explain how to show a per-package report
+ .with_stderr_does_not_contain("[..]triggers[..]")
+ .with_stderr_does_not_contain("[..]--package[..]")
+ .with_stderr_does_not_contain("[..]-p[..]")
+ .run();
+
+ p.cargo(command).arg("--future-incompat-report")
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: first-dep v0.0.1, second-dep v0.0.2")
+ .with_stderr_contains(" - first-dep@0.0.1")
+ .with_stderr_contains(" - second-dep@0.0.2")
+ .run();
+
+ p.cargo("report future-incompatibilities").arg("--package").arg("first-dep@0.0.1")
+ .with_stdout_contains("The package `first-dep v0.0.1` currently triggers the following future incompatibility lints:")
+ .with_stdout_contains(FUTURE_OUTPUT)
+ .with_stdout_does_not_contain("[..]second-dep-0.0.2/src[..]")
+ .run();
+
+ p.cargo("report future-incompatibilities").arg("--package").arg("second-dep@0.0.2")
+ .with_stdout_contains("The package `second-dep v0.0.2` currently triggers the following future incompatibility lints:")
+ .with_stdout_contains(FUTURE_OUTPUT)
+ .with_stdout_does_not_contain("[..]first-dep-0.0.1/src[..]")
+ .run();
+ }
+
+ // Test that passing the correct id via '--id' doesn't generate a warning message
+ let output = p
+ .cargo("check")
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .exec_with_output()
+ .unwrap();
+
+    // Extract the 'id' from the stderr. We are looking
+    // for the id in a line of the form "run `cargo report future-incompatibilities --id yZ7S`"
+    // which is generated by Cargo to tell the user what command to run.
+    // This is just to test that passing the id suppresses the warning message. Any users needing
+ // access to the report from a shell script should use the `--future-incompat-report` flag
+ let stderr = std::str::from_utf8(&output.stderr).unwrap();
+
+ // Find '--id <ID>' in the output
+ let mut iter = stderr.split(' ');
+ iter.find(|w| *w == "--id").unwrap();
+ let id = iter
+ .next()
+ .unwrap_or_else(|| panic!("Unexpected output:\n{}", stderr));
+ // Strip off the trailing '`' included in the output
+ let id: String = id.chars().take_while(|c| *c != '`').collect();
+
+ p.cargo(&format!("report future-incompatibilities --id {}", id))
+ .with_stdout_contains("The package `first-dep v0.0.1` currently triggers the following future incompatibility lints:")
+ .with_stdout_contains("The package `second-dep v0.0.2` currently triggers the following future incompatibility lints:")
+ .run();
+
+ // Test without --id, and also the full output of the report.
+ let output = p
+ .cargo("report future-incompat")
+ .exec_with_output()
+ .unwrap();
+ let output = std::str::from_utf8(&output.stdout).unwrap();
+ assert!(output.starts_with("The following warnings were discovered"));
+ let mut lines = output
+ .lines()
+ // Skip the beginning of the per-package information.
+ .skip_while(|line| !line.starts_with("The package"));
+ for expected in &["first-dep v0.0.1", "second-dep v0.0.2"] {
+ assert_eq!(
+ &format!(
+ "The package `{}` currently triggers the following future incompatibility lints:",
+ expected
+ ),
+ lines.next().unwrap(),
+ "Bad output:\n{}",
+ output
+ );
+ let mut count = 0;
+ while let Some(line) = lines.next() {
+ if line.is_empty() {
+ break;
+ }
+ count += 1;
+ }
+ assert!(count > 0);
+ }
+ assert_eq!(lines.next(), None);
+}
+
+#[cargo_test(
+ nightly,
+ reason = "-Zfuture-incompat-test requires nightly (permanently)"
+)]
+fn color() {
+ let p = simple_project();
+
+ p.cargo("check")
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .masquerade_as_nightly_cargo(&["future-incompat-test"])
+ .run();
+
+ p.cargo("report future-incompatibilities")
+ .with_stdout_does_not_contain("[..]\x1b[[..]")
+ .run();
+
+ p.cargo("report future-incompatibilities")
+ .env("CARGO_TERM_COLOR", "always")
+ .with_stdout_contains("[..]\x1b[[..]")
+ .run();
+}
+
+#[cargo_test(
+ nightly,
+ reason = "-Zfuture-incompat-test requires nightly (permanently)"
+)]
+fn bad_ids() {
+ let p = simple_project();
+
+ p.cargo("report future-incompatibilities --id 1")
+ .with_status(101)
+ .with_stderr("error: no reports are currently available")
+ .run();
+
+ p.cargo("check")
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .masquerade_as_nightly_cargo(&["future-incompat-test"])
+ .run();
+
+ p.cargo("report future-incompatibilities --id foo")
+ .with_status(1)
+ .with_stderr("error: Invalid value: could not parse `foo` as a number")
+ .run();
+
+ p.cargo("report future-incompatibilities --id 7")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: could not find report with ID 7
+Available IDs are: 1
+",
+ )
+ .run();
+}
+
+#[cargo_test(
+ nightly,
+ reason = "-Zfuture-incompat-test requires nightly (permanently)"
+)]
+fn suggestions_for_updates() {
+ Package::new("with_updates", "1.0.0")
+ .file("src/lib.rs", FUTURE_EXAMPLE)
+ .publish();
+ Package::new("big_update", "1.0.0")
+ .file("src/lib.rs", FUTURE_EXAMPLE)
+ .publish();
+ Package::new("without_updates", "1.0.0")
+ .file("src/lib.rs", FUTURE_EXAMPLE)
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ with_updates = "1"
+ big_update = "1"
+ without_updates = "1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+
+ Package::new("with_updates", "1.0.1")
+ .file("src/lib.rs", "")
+ .publish();
+ Package::new("with_updates", "1.0.2")
+ .file("src/lib.rs", "")
+ .publish();
+ Package::new("with_updates", "3.0.1")
+ .file("src/lib.rs", "")
+ .publish();
+ Package::new("big_update", "2.0.0")
+ .file("src/lib.rs", "")
+ .publish();
+
+ // This is a hack to force cargo to update the index. Cargo can't do this
+ // automatically because doing a network update on every build would be a
+ // bad idea. Under normal circumstances, we'll hope the user has done
+ // something else along the way to trigger an update (building some other
+    // project or something). This behavior could use more consideration, e.g.
+    // maybe only trigger an update if the index hasn't been updated in a long
+    // while?
+ p.cargo("update -p without_updates").run();
+
+ let update_message = "\
+- Some affected dependencies have newer versions available.
+You may want to consider updating them to a newer version to see if the issue has been fixed.
+
+big_update v1.0.0 has the following newer versions available: 2.0.0
+with_updates v1.0.0 has the following newer versions available: 1.0.1, 1.0.2, 3.0.1
+";
+
+ p.cargo("check --future-incompat-report")
+ .masquerade_as_nightly_cargo(&["future-incompat-test"])
+ .env("RUSTFLAGS", "-Zfuture-incompat-test")
+ .with_stderr_contains(update_message)
+ .run();
+
+ p.cargo("report future-incompatibilities")
+ .with_stdout_contains(update_message)
+ .run()
+}
diff --git a/src/tools/cargo/tests/testsuite/generate_lockfile.rs b/src/tools/cargo/tests/testsuite/generate_lockfile.rs
new file mode 100644
index 000000000..d2b633605
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/generate_lockfile.rs
@@ -0,0 +1,230 @@
+//! Tests for the `cargo generate-lockfile` command.
+
+use cargo_test_support::registry::{Package, RegistryBuilder};
+use cargo_test_support::{basic_manifest, paths, project, ProjectBuilder};
+use std::fs;
+
+#[cargo_test]
+fn adding_and_removing_packages() {
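+    // Adding, changing, and removing a dependency should each update
+    // Cargo.lock, and removing the dependency again restores the original
+    // lockfile contents.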
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+
+ let lock1 = p.read_lockfile();
+
+ // add a dep
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ );
+ p.cargo("generate-lockfile").run();
+ let lock2 = p.read_lockfile();
+ assert_ne!(lock1, lock2);
+
+ // change the dep
+ p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.0.2"));
+ p.cargo("generate-lockfile").run();
+ let lock3 = p.read_lockfile();
+ assert_ne!(lock1, lock3);
+ assert_ne!(lock2, lock3);
+
+ // remove the dep
+ println!("lock4");
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+ "#,
+ );
+ p.cargo("generate-lockfile").run();
+ let lock4 = p.read_lockfile();
+ assert_eq!(lock1, lock4);
+}
+
+#[cargo_test]
+fn no_index_update_sparse() {
+ let _registry = RegistryBuilder::new().http_index().build();
+ no_index_update();
+}
+
+#[cargo_test]
+fn no_index_update_git() {
+ no_index_update();
+}
+
+fn no_index_update() {
+ Package::new("serde", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [dependencies]
+ serde = "1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("generate-lockfile")
+ .with_stderr("[UPDATING] `[..]` index")
+ .run();
+
+ p.cargo("generate-lockfile -Zno-index-update")
+ .masquerade_as_nightly_cargo(&["no-index-update"])
+ .with_stdout("")
+ .with_stderr("")
+ .run();
+}
+
+#[cargo_test]
+fn preserve_metadata() {
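+    // A `[metadata]` table appended to Cargo.lock should be preserved across
+    // `cargo build` and `cargo update`.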
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+
+ let metadata = r#"
+[metadata]
+bar = "baz"
+foo = "bar"
+"#;
+ let lock = p.read_lockfile();
+ let data = lock + metadata;
+ p.change_file("Cargo.lock", &data);
+
+ // Build and make sure the metadata is still there
+ p.cargo("build").run();
+ let lock = p.read_lockfile();
+ assert!(lock.contains(metadata.trim()), "{}", lock);
+
+ // Update and make sure the metadata is still there
+ p.cargo("update").run();
+ let lock = p.read_lockfile();
+ assert!(lock.contains(metadata.trim()), "{}", lock);
+}
+
+#[cargo_test]
+fn preserve_line_endings_issue_2076() {
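+    // Regenerating the lockfile should preserve CRLF line endings (issue #2076).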
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ let lockfile = p.root().join("Cargo.lock");
+ p.cargo("generate-lockfile").run();
+ assert!(lockfile.is_file());
+ p.cargo("generate-lockfile").run();
+
+ let lock0 = p.read_lockfile();
+
+ assert!(lock0.starts_with("# This file is automatically @generated by Cargo.\n# It is not intended for manual editing.\n"));
+
+ let lock1 = lock0.replace("\n", "\r\n");
+ p.change_file("Cargo.lock", &lock1);
+
+ p.cargo("generate-lockfile").run();
+
+ let lock2 = p.read_lockfile();
+
+ assert!(lock2.starts_with("# This file is automatically @generated by Cargo.\r\n# It is not intended for manual editing.\r\n"));
+ assert_eq!(lock1, lock2);
+}
+
+#[cargo_test]
+fn cargo_update_generate_lockfile() {
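+    // `cargo update` should create Cargo.lock when it is missing.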
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ let lockfile = p.root().join("Cargo.lock");
+ assert!(!lockfile.is_file());
+ p.cargo("update").with_stdout("").run();
+ assert!(lockfile.is_file());
+
+ fs::remove_file(p.root().join("Cargo.lock")).unwrap();
+
+ assert!(!lockfile.is_file());
+ p.cargo("update").with_stdout("").run();
+ assert!(lockfile.is_file());
+}
+
+#[cargo_test]
+fn duplicate_entries_in_lockfile() {
+ let _a = ProjectBuilder::new(paths::root().join("a"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ authors = []
+ version = "0.0.1"
+
+ [dependencies]
+ common = {path="common"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let common_toml = &basic_manifest("common", "0.0.1");
+
+ let _common_in_a = ProjectBuilder::new(paths::root().join("a/common"))
+ .file("Cargo.toml", common_toml)
+ .file("src/lib.rs", "")
+ .build();
+
+ let b = ProjectBuilder::new(paths::root().join("b"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ authors = []
+ version = "0.0.1"
+
+ [dependencies]
+ common = {path="common"}
+ a = {path="../a"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let _common_in_b = ProjectBuilder::new(paths::root().join("b/common"))
+ .file("Cargo.toml", common_toml)
+ .file("src/lib.rs", "")
+ .build();
+
+ // should fail due to a duplicate package `common` in the lock file
+ b.cargo("build")
+ .with_status(101)
+ .with_stderr_contains(
+ "[..]package collision in the lockfile: packages common [..] and \
+ common [..] are different, but only one can be written to \
+ lockfile unambiguously",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/git.rs b/src/tools/cargo/tests/testsuite/git.rs
new file mode 100644
index 000000000..b170c204f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/git.rs
@@ -0,0 +1,3702 @@
+//! Tests for git support.
+
+use std::fs;
+use std::io::prelude::*;
+use std::net::{TcpListener, TcpStream};
+use std::path::Path;
+use std::str;
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::Arc;
+use std::thread;
+
+use cargo_test_support::git::cargo_uses_gitoxide;
+use cargo_test_support::paths::{self, CargoPathExt};
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_lib_manifest, basic_manifest, git, main_file, path2url, project};
+use cargo_test_support::{sleep_ms, t, Project};
+
+#[cargo_test]
+fn cargo_compile_simple_git_dep() {
+ let project = project();
+ let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file(
+ "src/dep1.rs",
+ r#"
+ pub fn hello() -> &'static str {
+ "hello world"
+ }
+ "#,
+ )
+ });
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep1]
+
+ git = '{}'
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
+ .build();
+
+ let git_root = git_project.root();
+
+ project
+ .cargo("build")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [COMPILING] dep1 v0.5.0 ({}#[..])\n\
+ [COMPILING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ path2url(&git_root),
+ path2url(&git_root),
+ ))
+ .run();
+
+ assert!(project.bin("foo").is_file());
+
+ project
+ .process(&project.bin("foo"))
+ .with_stdout("hello world\n")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_git_dep_branch() {
+ let project = project();
+ let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file(
+ "src/dep1.rs",
+ r#"
+ pub fn hello() -> &'static str {
+ "hello world"
+ }
+ "#,
+ )
+ });
+
+ // Make a new branch based on the current HEAD commit
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ let head = repo.head().unwrap().target().unwrap();
+ let head = repo.find_commit(head).unwrap();
+ repo.branch("branchy", &head, true).unwrap();
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep1]
+
+ git = '{}'
+ branch = "branchy"
+
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
+ .build();
+
+ let git_root = git_project.root();
+
+ project
+ .cargo("build")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [COMPILING] dep1 v0.5.0 ({}?branch=branchy#[..])\n\
+ [COMPILING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ path2url(&git_root),
+ path2url(&git_root),
+ ))
+ .run();
+
+ assert!(project.bin("foo").is_file());
+
+ project
+ .process(&project.bin("foo"))
+ .with_stdout("hello world\n")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_git_dep_tag() {
+ let project = project();
+ let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file(
+ "src/dep1.rs",
+ r#"
+ pub fn hello() -> &'static str {
+ "hello world"
+ }
+ "#,
+ )
+ });
+
+ // Make a tag corresponding to the current HEAD
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ let head = repo.head().unwrap().target().unwrap();
+ repo.tag(
+ "v0.1.0",
+ &repo.find_object(head, None).unwrap(),
+ &repo.signature().unwrap(),
+ "make a new tag",
+ false,
+ )
+ .unwrap();
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep1]
+
+ git = '{}'
+ tag = "v0.1.0"
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
+ .build();
+
+ let git_root = git_project.root();
+
+ project
+ .cargo("build")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [COMPILING] dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\
+ [COMPILING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ path2url(&git_root),
+ path2url(&git_root),
+ ))
+ .run();
+
+ assert!(project.bin("foo").is_file());
+
+ project
+ .process(&project.bin("foo"))
+ .with_stdout("hello world\n")
+ .run();
+
+ project.cargo("build").run();
+}
+
+#[cargo_test]
+fn cargo_compile_git_dep_pull_request() {
+ let project = project();
+ let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file(
+ "src/dep1.rs",
+ r#"
+ pub fn hello() -> &'static str {
+ "hello world"
+ }
+ "#,
+ )
+ });
+
+ // Make a reference in GitHub's pull request ref naming convention.
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ let oid = repo.refname_to_id("HEAD").unwrap();
+ let force = false;
+ let log_message = "open pull request";
+ repo.reference("refs/pull/330/head", oid, force, log_message)
+ .unwrap();
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+
+ [dependencies]
+ dep1 = {{ git = "{}", rev = "refs/pull/330/head" }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
+ .build();
+
+ let git_root = git_project.root();
+
+ project
+ .cargo("build")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [COMPILING] dep1 v0.5.0 ({}?rev=refs/pull/330/head#[..])\n\
+ [COMPILING] foo v0.0.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ path2url(&git_root),
+ path2url(&git_root),
+ ))
+ .run();
+
+ assert!(project.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn cargo_compile_with_nested_paths() {
+ let git_project = git::new("dep1", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "dep1"
+ version = "0.5.0"
+ authors = ["carlhuda@example.com"]
+
+ [dependencies.dep2]
+
+ version = "0.5.0"
+ path = "vendor/dep2"
+
+ [lib]
+
+ name = "dep1"
+ "#,
+ )
+ .file(
+ "src/dep1.rs",
+ r#"
+ extern crate dep2;
+
+ pub fn hello() -> &'static str {
+ dep2::hello()
+ }
+ "#,
+ )
+ .file("vendor/dep2/Cargo.toml", &basic_lib_manifest("dep2"))
+ .file(
+ "vendor/dep2/src/dep2.rs",
+ r#"
+ pub fn hello() -> &'static str {
+ "hello world"
+ }
+ "#,
+ )
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep1]
+
+ version = "0.5.0"
+ git = '{}'
+
+ [[bin]]
+
+ name = "foo"
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/foo.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("hello world\n").run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_malformed_nested_paths() {
+ let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file(
+ "src/dep1.rs",
+ r#"
+ pub fn hello() -> &'static str {
+ "hello world"
+ }
+ "#,
+ )
+ .file("vendor/dep2/Cargo.toml", "!INVALID!")
+ .file(
+ "vendor/dep3/Cargo.toml",
+ r#"
+ [package]
+ name = "dep3"
+ version = "0.5.0"
+ [dependencies]
+ subdep1 = { path = "../require-extra-build-step" }
+ "#,
+ )
+ .file("vendor/dep3/src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep1]
+
+ version = "0.5.0"
+ git = '{}'
+
+ [[bin]]
+
+ name = "foo"
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/foo.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("hello world\n").run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_meta_package() {
+ let git_project = git::new("meta-dep", |project| {
+ project
+ .file("dep1/Cargo.toml", &basic_lib_manifest("dep1"))
+ .file(
+ "dep1/src/dep1.rs",
+ r#"
+ pub fn hello() -> &'static str {
+ "this is dep1"
+ }
+ "#,
+ )
+ .file("dep2/Cargo.toml", &basic_lib_manifest("dep2"))
+ .file(
+ "dep2/src/dep2.rs",
+ r#"
+ pub fn hello() -> &'static str {
+ "this is dep2"
+ }
+ "#,
+ )
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep1]
+
+ version = "0.5.0"
+ git = '{}'
+
+ [dependencies.dep2]
+
+ version = "0.5.0"
+ git = '{}'
+
+ [[bin]]
+
+ name = "foo"
+ "#,
+ git_project.url(),
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/foo.rs",
+ &main_file(
+ r#""{} {}", dep1::hello(), dep2::hello()"#,
+ &["dep1", "dep2"],
+ ),
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo"))
+ .with_stdout("this is dep1 this is dep2\n")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_short_ssh_git() {
+ let url = "git@github.com:a/dep";
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep]
+
+ git = "{}"
+
+ [[bin]]
+
+ name = "foo"
+ "#,
+ url
+ ),
+ )
+ .file(
+ "src/foo.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stdout("")
+ .with_stderr(&format!(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ invalid url `{}`: relative URL without a base
+",
+ url
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn two_revs_same_deps() {
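+    // Two crates in the same build can depend on the same git repository at
+    // different revs, and each should build against its own pinned revision.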
+ let bar = git::new("meta-dep", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.0.0"))
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ });
+
+ let repo = git2::Repository::open(&bar.root()).unwrap();
+ let rev1 = repo.revparse_single("HEAD").unwrap().id();
+
+ // Commit the changes and make sure we trigger a recompile
+ bar.change_file("src/lib.rs", "pub fn bar() -> i32 { 2 }");
+ git::add(&repo);
+ let rev2 = git::commit(&repo);
+
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.bar]
+ git = '{}'
+ rev = "{}"
+
+ [dependencies.baz]
+ path = "../baz"
+ "#,
+ bar.url(),
+ rev1
+ ),
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ extern crate bar;
+ extern crate baz;
+
+ fn main() {
+ assert_eq!(bar::bar(), 1);
+ assert_eq!(baz::baz(), 2);
+ }
+ "#,
+ )
+ .build();
+
+ let _baz = project()
+ .at("baz")
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "baz"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.bar]
+ git = '{}'
+ rev = "{}"
+ "#,
+ bar.url(),
+ rev2
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate bar;
+ pub fn baz() -> i32 { bar::bar() }
+ "#,
+ )
+ .build();
+
+ foo.cargo("build -v").run();
+ assert!(foo.bin("foo").is_file());
+ foo.process(&foo.bin("foo")).run();
+}
+
+#[cargo_test]
+fn recompilation() {
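+    // A git dependency is only recompiled when its locked revision actually changes.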
+ let git_project = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("bar"))
+ .file("src/bar.rs", "pub fn bar() {}")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+
+ version = "0.5.0"
+ git = '{}'
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/main.rs", &main_file(r#""{:?}", bar::bar()"#, &["bar"]))
+ .build();
+
+ // First time around we should compile both foo and bar
+ p.cargo("check")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [CHECKING] bar v0.5.0 ({}#[..])\n\
+ [CHECKING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ git_project.url(),
+ git_project.url(),
+ ))
+ .run();
+
+ // Don't recompile the second time
+ p.cargo("check").with_stdout("").run();
+
+    // Modify a file manually; this shouldn't trigger a recompile
+ git_project.change_file("src/bar.rs", r#"pub fn bar() { println!("hello!"); }"#);
+
+ p.cargo("check").with_stdout("").run();
+
+ p.cargo("update")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`",
+ git_project.url()
+ ))
+ .run();
+
+ p.cargo("check").with_stdout("").run();
+
+    // Commit the changes and make sure we don't trigger a recompile, because the
+    // lock file still pins the old revision
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ git::add(&repo);
+ git::commit(&repo);
+
+ println!("compile after commit");
+ p.cargo("check").with_stdout("").run();
+ p.root().move_into_the_past();
+
+ // Update the dependency and carry on!
+ p.cargo("update")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\
+ ",
+ git_project.url()
+ ))
+ .run();
+ println!("going for the last compile");
+ p.cargo("check")
+ .with_stderr(&format!(
+ "[CHECKING] bar v0.5.0 ({}#[..])\n\
+ [CHECKING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ git_project.url(),
+ ))
+ .run();
+
+ // Make sure clean only cleans one dep
+ p.cargo("clean -p foo").with_stdout("").run();
+ p.cargo("check")
+ .with_stderr(
+ "[CHECKING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_with_shared_deps() {
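+    // Two path dependencies share one git dependency; exercise `update` with `--precise` and `--aggressive`.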
+ let git_project = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("bar"))
+ .file("src/bar.rs", "pub fn bar() {}")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep1]
+ path = "dep1"
+ [dependencies.dep2]
+ path = "dep2"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate dep1;
+ #[allow(unused_extern_crates)]
+ extern crate dep2;
+ fn main() {}
+ "#,
+ )
+ .file(
+ "dep1/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "dep1"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ version = "0.5.0"
+ git = '{}'
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("dep1/src/lib.rs", "")
+ .file(
+ "dep2/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "dep2"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ version = "0.5.0"
+ git = '{}'
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("dep2/src/lib.rs", "")
+ .build();
+
+ // First time around we should compile both foo and bar
+ p.cargo("check")
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `{git}`
+[CHECKING] bar v0.5.0 ({git}#[..])
+[CHECKING] [..] v0.5.0 ([..])
+[CHECKING] [..] v0.5.0 ([..])
+[CHECKING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ git = git_project.url(),
+ ))
+ .run();
+
+ // Modify a file manually, and commit it
+ git_project.change_file("src/bar.rs", r#"pub fn bar() { println!("hello!"); }"#);
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ let old_head = repo.head().unwrap().target().unwrap();
+ git::add(&repo);
+ git::commit(&repo);
+
+ sleep_ms(1000);
+
+    // By default, updates are not transitive
+ println!("dep1 update");
+ p.cargo("update -p dep1").with_stdout("").run();
+
+ // Don't do anything bad on a weird --precise argument
+ println!("bar bad precise update");
+ p.cargo("update -p bar --precise 0.1.2")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] Unable to update [..]
+
+Caused by:
+ precise value for git is not a git revision: 0.1.2
+
+Caused by:
+ unable to parse OID - contains invalid characters; class=Invalid (3)
+",
+ )
+ .run();
+
+    // Specifying --precise with the old rev shouldn't actually update
+    // anything, because we already have that rev in the db.
+ println!("bar precise update");
+ p.cargo("update -p bar --precise")
+ .arg(&old_head.to_string())
+ .with_stdout("")
+ .run();
+
+ // Updating aggressively should, however, update the repo.
+ println!("dep1 aggressive update");
+ p.cargo("update -p dep1 --aggressive")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\
+ ",
+ git_project.url()
+ ))
+ .run();
+
+ // Make sure we still only compile one version of the git repo
+ println!("build");
+ p.cargo("check")
+ .with_stderr(&format!(
+ "\
+[CHECKING] bar v0.5.0 ({git}#[..])
+[CHECKING] [..] v0.5.0 ([CWD][..]dep[..])
+[CHECKING] [..] v0.5.0 ([CWD][..]dep[..])
+[CHECKING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ git = git_project.url(),
+ ))
+ .run();
+
+ // We should be able to update transitive deps
+ p.cargo("update -p bar")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`",
+ git_project.url()
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn dep_with_submodule() {
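+    // A git dependency whose source code lives in a git submodule should build.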
+ let project = project();
+ let git_project = git::new("dep1", |project| {
+ project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+ });
+ let git_project2 = git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}"));
+
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ let url = path2url(git_project2.root()).to_string();
+ git::add_submodule(&repo, &url, Path::new("src"));
+ git::commit(&repo);
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep1]
+
+ git = '{}'
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate dep1; pub fn foo() { dep1::dep() }",
+ )
+ .build();
+
+ project
+ .cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository [..]
+[UPDATING] git submodule `file://[..]/dep2`
+[CHECKING] dep1 [..]
+[CHECKING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dep_with_relative_submodule() {
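+    // A submodule registered with a relative URL is resolved against the parent repository's URL.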
+ let foo = project();
+ let base = git::new("base", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "base"
+ version = "0.5.0"
+
+ [dependencies]
+ deployment.path = "deployment"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn dep() {
+ deployment::deployment_func();
+ }
+ "#,
+ )
+ });
+ let _deployment = git::new("deployment", |project| {
+ project
+ .file("src/lib.rs", "pub fn deployment_func() {}")
+ .file("Cargo.toml", &basic_lib_manifest("deployment"))
+ });
+
+ let base_repo = git2::Repository::open(&base.root()).unwrap();
+ git::add_submodule(&base_repo, "../deployment", Path::new("deployment"));
+ git::commit(&base_repo);
+
+ let project = foo
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+
+ [dependencies.base]
+ git = '{}'
+ "#,
+ base.url()
+ ),
+ )
+ .file("src/lib.rs", "pub fn foo() { }")
+ .build();
+
+ project
+ .cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository [..]
+[UPDATING] git submodule `file://[..]/deployment`
+[CHECKING] deployment [..]
+[CHECKING] base [..]
+[CHECKING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dep_with_bad_submodule() {
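+    // A submodule pointing at a commit that cannot be found should produce a clear error.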
+ let project = project();
+ let git_project = git::new("dep1", |project| {
+ project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+ });
+ let git_project2 = git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}"));
+
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ let url = path2url(git_project2.root()).to_string();
+ git::add_submodule(&repo, &url, Path::new("src"));
+ git::commit(&repo);
+
+    // Now amend the first commit on git_project2 so that the submodule ref points to a
+    // commit that cannot be found
+ let repo = git2::Repository::open(&git_project2.root()).unwrap();
+ let original_submodule_ref = repo.refname_to_id("refs/heads/master").unwrap();
+ let commit = repo.find_commit(original_submodule_ref).unwrap();
+ commit
+ .amend(
+ Some("refs/heads/master"),
+ None,
+ None,
+ None,
+ Some("something something"),
+ None,
+ )
+ .unwrap();
+
+ let p = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep1]
+
+ git = '{}'
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate dep1; pub fn foo() { dep1::dep() }",
+ )
+ .build();
+
+ let expected = format!(
+ "\
+[UPDATING] git repository [..]
+[UPDATING] git submodule `file://[..]/dep2`
+[ERROR] failed to get `dep1` as a dependency of package `foo v0.5.0 [..]`
+
+Caused by:
+ failed to load source for dependency `dep1`
+
+Caused by:
+ Unable to update {}
+
+Caused by:
+ failed to update submodule `src`
+
+Caused by:
+ object not found - no match for id [..]
+",
+ path2url(git_project.root())
+ );
+
+ p.cargo("check")
+ .with_stderr(expected)
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn dep_with_skipped_submodule() {
+ // Ensure we skip dependency submodules if their update strategy is `none`.
+ let qux = git::new("qux", |project| {
+ project.no_manifest().file("README", "skip me")
+ });
+
+ let bar = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.0.0"))
+ .file("src/lib.rs", "")
+ });
+
+ // `qux` is a submodule of `bar`, but we don't want to update it.
+ let repo = git2::Repository::open(&bar.root()).unwrap();
+ git::add_submodule(&repo, qux.url().as_str(), Path::new("qux"));
+
+ let mut conf = git2::Config::open(&bar.root().join(".gitmodules")).unwrap();
+ conf.set_str("submodule.qux.update", "none").unwrap();
+
+ git::add(&repo);
+ git::commit(&repo);
+
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.bar]
+ git = "{}"
+ "#,
+ bar.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ foo.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `file://[..]/bar`
+[SKIPPING] git submodule `file://[..]/qux` [..]
+[CHECKING] bar [..]
+[CHECKING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn ambiguous_published_deps() {
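+    // The git repository contains two packages with the same name; cargo warns and skips the duplicate.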
+ let project = project();
+ let git_project = git::new("dep", |project| {
+ project
+ .file(
+ "aaa/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ publish = true
+ "#
+ ),
+ )
+ .file("aaa/src/lib.rs", "")
+ .file(
+ "bbb/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ publish = true
+ "#
+ ),
+ )
+ .file("bbb/src/lib.rs", "")
+ });
+
+ let p = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ git = '{}'
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() { }")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("run")
+ .with_stderr(
+ "\
+[WARNING] skipping duplicate package `bar` found at `[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn two_deps_only_update_one() {
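+    // `cargo update -p dep1` must only update dep1's repository, not dep2's.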
+ let project = project();
+ let git1 = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+ let git2 = git::new("dep2", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep2", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+
+ let p = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep1]
+ git = '{}'
+ [dependencies.dep2]
+ git = '{}'
+ "#,
+ git1.url(),
+ git2.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ fn oid_to_short_sha(oid: git2::Oid) -> String {
+ oid.to_string()[..8].to_string()
+ }
+ fn git_repo_head_sha(p: &Project) -> String {
+ let repo = git2::Repository::open(p.root()).unwrap();
+ let head = repo.head().unwrap().target().unwrap();
+ oid_to_short_sha(head)
+ }
+
+ println!("dep1 head sha: {}", git_repo_head_sha(&git1));
+ println!("dep2 head sha: {}", git_repo_head_sha(&git2));
+
+ p.cargo("check")
+ .with_stderr(
+ "[UPDATING] git repository `[..]`\n\
+ [UPDATING] git repository `[..]`\n\
+ [CHECKING] [..] v0.5.0 ([..])\n\
+ [CHECKING] [..] v0.5.0 ([..])\n\
+ [CHECKING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+
+ git1.change_file("src/lib.rs", "pub fn foo() {}");
+ let repo = git2::Repository::open(&git1.root()).unwrap();
+ git::add(&repo);
+ let oid = git::commit(&repo);
+ println!("dep1 head sha: {}", oid_to_short_sha(oid));
+
+ p.cargo("update -p dep1")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\
+ ",
+ git1.url()
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn stale_cached_version() {
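+    // The lock file references a revision that is not yet in the local git database, so cargo must re-fetch.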
+ let bar = git::new("meta-dep", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.0.0"))
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ });
+
+ // Update the git database in the cache with the current state of the git
+ // repo
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies.bar]
+ git = '{}'
+ "#,
+ bar.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ extern crate bar;
+
+ fn main() { assert_eq!(bar::bar(), 1) }
+ "#,
+ )
+ .build();
+
+ foo.cargo("build").run();
+ foo.process(&foo.bin("foo")).run();
+
+ // Update the repo, and simulate someone else updating the lock file and then
+ // us pulling it down.
+ bar.change_file("src/lib.rs", "pub fn bar() -> i32 { 1 + 0 }");
+ let repo = git2::Repository::open(&bar.root()).unwrap();
+ git::add(&repo);
+ git::commit(&repo);
+
+ sleep_ms(1000);
+
+ let rev = repo.revparse_single("HEAD").unwrap().id();
+
+ foo.change_file(
+ "Cargo.lock",
+ &format!(
+ r#"
+ [[package]]
+ name = "foo"
+ version = "0.0.0"
+ dependencies = [
+ 'bar 0.0.0 (git+{url}#{hash})'
+ ]
+
+ [[package]]
+ name = "bar"
+ version = "0.0.0"
+ source = 'git+{url}#{hash}'
+ "#,
+ url = bar.url(),
+ hash = rev
+ ),
+ );
+
+ // Now build!
+ foo.cargo("build")
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `{bar}`
+[COMPILING] bar v0.0.0 ({bar}#[..])
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ bar = bar.url(),
+ ))
+ .run();
+ foo.process(&foo.bin("foo")).run();
+}
+
+#[cargo_test]
+fn dep_with_changed_submodule() {
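+    // Re-pointing a submodule at a different repository should be picked up by `cargo update`.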
+ let project = project();
+ let git_project = git::new("dep1", |project| {
+ project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+ });
+
+ let git_project2 = git::new("dep2", |project| {
+ project.file("lib.rs", "pub fn dep() -> &'static str { \"project2\" }")
+ });
+
+ let git_project3 = git::new("dep3", |project| {
+ project.file("lib.rs", "pub fn dep() -> &'static str { \"project3\" }")
+ });
+
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), Path::new("src"));
+ git::commit(&repo);
+
+ let p = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ [dependencies.dep1]
+ git = '{}'
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ "
+ extern crate dep1;
+ pub fn main() { println!(\"{}\", dep1::dep()) }
+ ",
+ )
+ .build();
+
+ println!("first run");
+ p.cargo("run")
+ .with_stderr(
+ "[UPDATING] git repository `[..]`\n\
+ [UPDATING] git submodule `file://[..]/dep2`\n\
+ [COMPILING] dep1 v0.5.0 ([..])\n\
+ [COMPILING] foo v0.5.0 ([..])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+ [..]\n\
+ [RUNNING] `target/debug/foo[EXE]`\n",
+ )
+ .with_stdout("project2\n")
+ .run();
+
+ git_project.change_file(
+ ".gitmodules",
+ &format!(
+ "[submodule \"src\"]\n\tpath = src\n\turl={}",
+ git_project3.url()
+ ),
+ );
+
+ // Sync the submodule and reset it to the new remote.
+ sub.sync().unwrap();
+ {
+ let subrepo = sub.open().unwrap();
+ subrepo
+ .remote_add_fetch("origin", "refs/heads/*:refs/heads/*")
+ .unwrap();
+ subrepo
+ .remote_set_url("origin", &git_project3.url().to_string())
+ .unwrap();
+ let mut origin = subrepo.find_remote("origin").unwrap();
+ origin.fetch(&Vec::<String>::new(), None, None).unwrap();
+ let id = subrepo.refname_to_id("refs/remotes/origin/master").unwrap();
+ let obj = subrepo.find_object(id, None).unwrap();
+ subrepo.reset(&obj, git2::ResetType::Hard, None).unwrap();
+ }
+ sub.add_to_index(true).unwrap();
+ git::add(&repo);
+ git::commit(&repo);
+
+ sleep_ms(1000);
+ // Update the dependency and carry on!
+ println!("update");
+ p.cargo("update -v")
+ .with_stderr("")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [UPDATING] git submodule `file://[..]/dep3`\n\
+ [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\
+ ",
+ git_project.url()
+ ))
+ .run();
+
+ println!("last run");
+ p.cargo("run")
+ .with_stderr(
+ "[COMPILING] dep1 v0.5.0 ([..])\n\
+ [COMPILING] foo v0.5.0 ([..])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+ [..]\n\
+ [RUNNING] `target/debug/foo[EXE]`\n",
+ )
+ .with_stdout("project3\n")
+ .run();
+}
+
+#[cargo_test]
+fn dev_deps_with_testing() {
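+    // A git dev-dependency is resolved for the lock file but only compiled when tests are built.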
+ let p2 = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn gimme() -> &'static str { "zoidberg" }
+ "#,
+ )
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dev-dependencies.bar]
+ version = "0.5.0"
+ git = '{}'
+ "#,
+ p2.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {}
+
+ #[cfg(test)]
+ mod tests {
+ extern crate bar;
+ #[test] fn foo() { bar::gimme(); }
+ }
+ "#,
+ )
+ .build();
+
+    // Generate a lock file: compiling doesn't need `bar` (it's a dev-dependency),
+    // but cargo still has to fetch `bar` to produce the lock file
+ p.cargo("check")
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `{bar}`
+[CHECKING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ bar = p2.url()
+ ))
+ .run();
+
+ // Make sure we use the previous resolution of `bar` instead of updating it
+ // a second time.
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] [..] v0.5.0 ([..])
+[COMPILING] [..] v0.5.0 ([..]
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test tests::foo ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn git_build_cmd_freshness() {
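+    // In a git project with a build script, modifying a git-ignored file must not trigger a rebuild.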
+ let foo = git::new("foo", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ .file(".gitignore", "src/bar.rs")
+ });
+ foo.root().move_into_the_past();
+
+ sleep_ms(1000);
+
+ foo.cargo("check")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ // Smoke test to make sure it doesn't compile again
+ println!("first pass");
+ foo.cargo("check").with_stdout("").run();
+
+ // Modify an ignored file and make sure we don't rebuild
+ println!("second pass");
+ foo.change_file("src/bar.rs", "");
+ foo.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn git_name_not_always_needed() {
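+    // Fetching a git dependency must not require `user.name`/`user.email` to be configured in the dependency's repo.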
+ let p2 = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn gimme() -> &'static str { "zoidberg" }
+ "#,
+ )
+ });
+
+ let repo = git2::Repository::open(&p2.root()).unwrap();
+ let mut cfg = repo.config().unwrap();
+ let _ = cfg.remove("user.name");
+ let _ = cfg.remove("user.email");
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dev-dependencies.bar]
+ git = '{}'
+ "#,
+ p2.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+    // Generate a lock file: compiling doesn't need `bar` (it's a dev-dependency),
+    // but cargo still has to fetch `bar` to produce the lock file
+ p.cargo("check")
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `{bar}`
+[CHECKING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ bar = p2.url()
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn git_repo_changing_no_rebuild() {
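+    // New commits in the git repository must not cause a rebuild of a project locked to an older revision.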
+ let bar = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ });
+
+ // Lock p1 to the first rev in the git repo
+ let p1 = project()
+ .at("p1")
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "p1"
+ version = "0.5.0"
+ authors = []
+ build = 'build.rs'
+ [dependencies.bar]
+ git = '{}'
+ "#,
+ bar.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("build.rs", "fn main() {}")
+ .build();
+ p1.root().move_into_the_past();
+ p1.cargo("check")
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `{bar}`
+[COMPILING] [..]
+[CHECKING] [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ bar = bar.url()
+ ))
+ .run();
+
+ // Make a commit to lock p2 to a different rev
+ bar.change_file("src/lib.rs", "pub fn bar() -> i32 { 2 }");
+ let repo = git2::Repository::open(&bar.root()).unwrap();
+ git::add(&repo);
+ git::commit(&repo);
+
+ // Lock p2 to the second rev
+ let p2 = project()
+ .at("p2")
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "p2"
+ version = "0.5.0"
+ authors = []
+ [dependencies.bar]
+ git = '{}'
+ "#,
+ bar.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ p2.cargo("check")
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `{bar}`
+[CHECKING] [..]
+[CHECKING] [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ bar = bar.url()
+ ))
+ .run();
+
+ // And now for the real test! Make sure that p1 doesn't get rebuilt
+ // even though the git repo has changed.
+ p1.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn git_dep_build_cmd() {
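+    // A path dependency inside a git project uses a build script to generate a source file; touching the input re-runs it.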
+ let p = git::new("foo", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+
+ version = "0.5.0"
+ path = "bar"
+
+ [[bin]]
+
+ name = "foo"
+ "#,
+ )
+ .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+
+ [lib]
+ name = "bar"
+ path = "src/bar.rs"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs.in",
+ r#"
+ pub fn gimme() -> i32 { 0 }
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"
+ use std::fs;
+ fn main() {
+ fs::copy("src/bar.rs.in", "src/bar.rs").unwrap();
+ }
+ "#,
+ )
+ });
+
+ p.root().join("bar").move_into_the_past();
+
+ p.cargo("build").run();
+
+ p.process(&p.bin("foo")).with_stdout("0\n").run();
+
+ // Touching bar.rs.in should cause the `build` command to run again.
+ p.change_file("bar/src/bar.rs.in", "pub fn gimme() -> i32 { 1 }");
+
+ p.cargo("build").run();
+
+ p.process(&p.bin("foo")).with_stdout("1\n").run();
+}
+
+#[cargo_test]
+fn fetch_downloads() {
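+    // `cargo fetch` downloads the git dependency; a second fetch does nothing.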
+ let bar = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.bar]
+ git = '{}'
+ "#,
+ bar.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("fetch")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{url}`",
+ url = bar.url()
+ ))
+ .run();
+
+ p.cargo("fetch").with_stdout("").run();
+}
+
+#[cargo_test]
+fn fetch_downloads_with_git2_first_then_with_gitoxide_and_vice_versa() {
+ let bar = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ });
+ let feature_configuration = if cargo_uses_gitoxide() {
+ // When we are always using `gitoxide` by default, create the registry with git2 as well as the download…
+ "-Zgitoxide=internal-use-git2"
+ } else {
+ // …otherwise create the registry and the git download with `gitoxide`.
+ "-Zgitoxide=fetch"
+ };
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.bar]
+ git = '{url}'
+ "#,
+ url = bar.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("fetch")
+ .arg(feature_configuration)
+ .masquerade_as_nightly_cargo(&["unstable features must be available for -Z gitoxide"])
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{url}`",
+ url = bar.url()
+ ))
+ .run();
+
+ Package::new("bar", "1.0.0").publish(); // trigger a crates-index change.
+ p.cargo("fetch").with_stdout("").run();
+}
+
+#[cargo_test]
+fn warnings_in_git_dep() {
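+    // Compiler warnings produced by a git dependency should not be shown.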
+ let bar = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("src/lib.rs", "fn unused() {}")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.bar]
+ git = '{}'
+ "#,
+ bar.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [CHECKING] bar v0.5.0 ({}#[..])\n\
+ [CHECKING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ bar.url(),
+ bar.url(),
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn update_ambiguous() {
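+    // `cargo update -p bar` with two distinct `bar` packages in the graph is ambiguous and must error.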
+ let bar1 = git::new("bar1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+ let bar2 = git::new("bar2", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.6.0"))
+ .file("src/lib.rs", "")
+ });
+ let baz = git::new("baz", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "baz"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ git = '{}'
+ "#,
+ bar2.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.bar]
+ git = '{}'
+ [dependencies.baz]
+ git = '{}'
+ "#,
+ bar1.url(),
+ baz.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+ p.cargo("update -p bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] There are multiple `bar` packages in your project, and the specification `bar` \
+is ambiguous.
+Please re-run this command with `-p <spec>` where `<spec>` is one of the \
+following:
+ bar@0.[..].0
+ bar@0.[..].0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_one_dep_in_repo_with_many_deps() {
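+    // Updating a single package from a repository that contains several packages should work.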
+ let bar = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.5.0"))
+ .file("a/src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.bar]
+ git = '{}'
+ [dependencies.a]
+ git = '{}'
+ "#,
+ bar.url(),
+ bar.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+ p.cargo("update -p bar")
+ .with_stderr(&format!("[UPDATING] git repository `{}`", bar.url()))
+ .run();
+}
+
+#[cargo_test]
+fn switch_deps_does_not_update_transitive() {
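+    // Switching a dependency to a different git repository must not re-update an unchanged transitive git dependency.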
+ let transitive = git::new("transitive", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("transitive", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+ let dep1 = git::new("dep1", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "dep"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.transitive]
+ git = '{}'
+ "#,
+ transitive.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ });
+ let dep2 = git::new("dep2", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "dep"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.transitive]
+ git = '{}'
+ "#,
+ transitive.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.dep]
+ git = '{}'
+ "#,
+ dep1.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `{}`
+[UPDATING] git repository `{}`
+[CHECKING] transitive [..]
+[CHECKING] dep [..]
+[CHECKING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ dep1.url(),
+ transitive.url()
+ ))
+ .run();
+
+ // Update the dependency to point to the second repository, but this
+ // shouldn't update the transitive dependency which is the same.
+ p.change_file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.dep]
+ git = '{}'
+ "#,
+ dep2.url()
+ ),
+ );
+
+ p.cargo("check")
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `{}`
+[CHECKING] dep [..]
+[CHECKING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ dep2.url()
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn update_one_source_updates_all_packages_in_that_git_source() {
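+    // Updating one package from a git source updates every package coming from that source.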
+ let dep = git::new("dep", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "dep"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.5.0"))
+ .file("a/src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.dep]
+ git = '{}'
+ "#,
+ dep.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").run();
+
+ let repo = git2::Repository::open(&dep.root()).unwrap();
+ let rev1 = repo.revparse_single("HEAD").unwrap().id();
+
+ // Just be sure to change a file
+ dep.change_file("src/lib.rs", "pub fn bar() -> i32 { 2 }");
+ git::add(&repo);
+ git::commit(&repo);
+
+ p.cargo("update -p dep").run();
+ let lockfile = p.read_lockfile();
+ assert!(
+ !lockfile.contains(&rev1.to_string()),
+ "{} in {}",
+ rev1,
+ lockfile
+ );
+}
+
+#[cargo_test]
+fn switch_sources() {
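+    // Changing a dependency's git URL switches to the new source on the next check.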
+ let a1 = git::new("a1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("a", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+ let a2 = git::new("a2", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("a", "0.5.1"))
+ .file("src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies.b]
+ path = "b"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "b/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+ [dependencies.a]
+ git = '{}'
+ "#,
+ a1.url()
+ ),
+ )
+ .file("b/src/lib.rs", "pub fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `file://[..]a1`
+[CHECKING] a v0.5.0 ([..]a1#[..]
+[CHECKING] b v0.5.0 ([..])
+[CHECKING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.change_file(
+ "b/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+ [dependencies.a]
+ git = '{}'
+ "#,
+ a2.url()
+ ),
+ );
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `file://[..]a2`
+[CHECKING] a v0.5.1 ([..]a2#[..]
+[CHECKING] b v0.5.0 ([..])
+[CHECKING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dont_require_submodules_are_checked_out() {
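+    // Building from a clone whose submodules were never checked out should still work.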
+ let p = project().build();
+ let git1 = git::new("dep1", |p| {
+ p.file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .file("a/foo", "")
+ });
+ let git2 = git::new("dep2", |p| p);
+
+ let repo = git2::Repository::open(&git1.root()).unwrap();
+ let url = path2url(git2.root()).to_string();
+ git::add_submodule(&repo, &url, Path::new("a/submodule"));
+ git::commit(&repo);
+
+ git2::Repository::init(&p.root()).unwrap();
+ let url = path2url(git1.root()).to_string();
+ let dst = paths::home().join("foo");
+ git2::Repository::clone(&url, &dst).unwrap();
+
+ git1.cargo("check -v").cwd(&dst).run();
+}
+
+#[cargo_test]
+fn doctest_same_name() {
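+    // Two crates named `a` appear in the dependency graph; `cargo test` (including doctests) should still work.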
+ let a2 = git::new("a2", |p| {
+ p.file("Cargo.toml", &basic_manifest("a", "0.5.0"))
+ .file("src/lib.rs", "pub fn a2() {}")
+ });
+
+ let a1 = git::new("a1", |p| {
+ p.file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ [dependencies]
+ a = {{ git = '{}' }}
+ "#,
+ a2.url()
+ ),
+ )
+ .file("src/lib.rs", "extern crate a; pub fn a1() {}")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = {{ git = '{}' }}
+ "#,
+ a1.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[macro_use]
+ extern crate a;
+ "#,
+ )
+ .build();
+
+ p.cargo("test -v").run();
+}
+
+#[cargo_test]
+fn lints_are_suppressed() {
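+    // Lint warnings inside a git dependency should not be emitted when building a downstream crate.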
+ let a = git::new("a", |p| {
+ p.file("Cargo.toml", &basic_manifest("a", "0.5.0")).file(
+ "src/lib.rs",
+ "
+ use std::option;
+ ",
+ )
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = {{ git = '{}' }}
+ "#,
+ a.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `[..]`
+[CHECKING] a v0.5.0 ([..])
+[CHECKING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn denied_lints_are_allowed() {
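+    // A git dependency with `#![deny(warnings)]` and a warning in it should still build.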
+ let a = git::new("a", |p| {
+ p.file("Cargo.toml", &basic_manifest("a", "0.5.0")).file(
+ "src/lib.rs",
+ "
+ #![deny(warnings)]
+ use std::option;
+ ",
+ )
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = {{ git = '{}' }}
+ "#,
+ a.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `[..]`
+[CHECKING] a v0.5.0 ([..])
+[CHECKING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn add_a_git_dep() {
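+    // A git dependency can be added to the project, and later to a path dependency, without breaking the build.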
+ let git = git::new("git", |p| {
+ p.file("Cargo.toml", &basic_manifest("git", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = {{ path = 'a' }}
+ git = {{ git = '{}' }}
+ "#,
+ git.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ assert!(paths::home().join(".cargo/git/CACHEDIR.TAG").is_file());
+
+ p.change_file(
+ "a/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ git = {{ git = '{}' }}
+ "#,
+ git.url()
+ ),
+ );
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn two_at_rev_instead_of_tag() {
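+    // `rev = "v0.1.0"` naming a tag works, including for two packages from the same repository.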
+ let git = git::new("git", |p| {
+ p.file("Cargo.toml", &basic_manifest("git1", "0.5.0"))
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("git2", "0.5.0"))
+ .file("a/src/lib.rs", "")
+ });
+
+ // Make a tag corresponding to the current HEAD
+ let repo = git2::Repository::open(&git.root()).unwrap();
+ let head = repo.head().unwrap().target().unwrap();
+ repo.tag(
+ "v0.1.0",
+ &repo.find_object(head, None).unwrap(),
+ &repo.signature().unwrap(),
+ "make a new tag",
+ false,
+ )
+ .unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ git1 = {{ git = '{0}', rev = 'v0.1.0' }}
+ git2 = {{ git = '{0}', rev = 'v0.1.0' }}
+ "#,
+ git.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+ p.cargo("check -v").run();
+}
+
+#[cargo_test]
+fn include_overrides_gitignore() {
+ // Make sure that `package.include` takes precedence over .gitignore.
+ let p = git::new("foo", |repo| {
+ repo.file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ include = ["src/lib.rs", "ignored.txt", "Cargo.toml"]
+ "#,
+ )
+ .file(
+ ".gitignore",
+ r#"
+ /target
+ Cargo.lock
+ ignored.txt
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("ignored.txt", "")
+ .file("build.rs", "fn main() {}")
+ });
+
+ p.cargo("check").run();
+ p.change_file("ignored.txt", "Trigger rebuild.");
+ p.cargo("check -v")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.5.0 ([..]): the precalculated components changed
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]build-script-build[..]`
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("package --list --allow-dirty")
+ .with_stdout(
+ "\
+Cargo.toml
+Cargo.toml.orig
+ignored.txt
+src/lib.rs
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_git_dependency_manifest() {
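+    // A git dependency whose manifest fails to parse should report the underlying TOML error.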
+ let project = project();
+ let git_project = git::new("dep1", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "dep1"
+ version = "0.5.0"
+ authors = ["carlhuda@example.com"]
+ categories = ["algorithms"]
+ categories = ["algorithms"]
+
+ [lib]
+
+ name = "dep1"
+ "#,
+ )
+ .file(
+ "src/dep1.rs",
+ r#"
+ pub fn hello() -> &'static str {
+ "hello world"
+ }
+ "#,
+ )
+ });
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.dep1]
+
+ git = '{}'
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
+ .build();
+
+ let git_root = git_project.root();
+
+ project
+ .cargo("check")
+ .with_status(101)
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `{}`
+[ERROR] failed to get `dep1` as a dependency of package `foo v0.5.0 ([..])`
+
+Caused by:
+ failed to load source for dependency `dep1`
+
+Caused by:
+ Unable to update {}
+
+Caused by:
+ failed to parse manifest at `[..]`
+
+Caused by:
+ could not parse input as TOML
+
+Caused by:
+ TOML parse error at line 8, column 21
+ |
+ 8 | categories = [\"algorithms\"]
+ | ^
+ duplicate key `categories` in table `package`
+",
+ path2url(&git_root),
+ path2url(&git_root),
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn failed_submodule_checkout() {
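+    // A submodule whose remote cannot be fetched fails to update, and fails the same way on a second attempt.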
+ let project = project();
+ let git_project = git::new("dep1", |project| {
+ project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+ });
+
+ let git_project2 = git::new("dep2", |project| project.file("lib.rs", ""));
+
+ let listener = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = listener.local_addr().unwrap();
+ let done = Arc::new(AtomicBool::new(false));
+ let done2 = done.clone();
+
+ let t = thread::spawn(move || {
+ while !done2.load(Ordering::SeqCst) {
+ if let Ok((mut socket, _)) = listener.accept() {
+ drop(socket.write_all(b"foo\r\n"));
+ }
+ }
+ });
+
+ let repo = git2::Repository::open(&git_project2.root()).unwrap();
+ let url = format!("https://{}:{}/", addr.ip(), addr.port());
+ {
+ let mut s = repo.submodule(&url, Path::new("bar"), false).unwrap();
+ let subrepo = s.open().unwrap();
+ let mut cfg = subrepo.config().unwrap();
+ cfg.set_str("user.email", "foo@bar.com").unwrap();
+ cfg.set_str("user.name", "Foo Bar").unwrap();
+ git::commit(&subrepo);
+ s.add_finalize().unwrap();
+ }
+ git::commit(&repo);
+ drop((repo, url));
+
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ let url = path2url(git_project2.root()).to_string();
+ git::add_submodule(&repo, &url, Path::new("src"));
+ git::commit(&repo);
+ drop(repo);
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ dep1 = {{ git = '{}' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ project
+ .cargo("check")
+ .with_status(101)
+ .with_stderr_contains(" failed to update submodule `src`")
+ .with_stderr_contains(" failed to update submodule `bar`")
+ .run();
+ project
+ .cargo("check")
+ .with_status(101)
+ .with_stderr_contains(" failed to update submodule `src`")
+ .with_stderr_contains(" failed to update submodule `bar`")
+ .run();
+
+ done.store(true, Ordering::SeqCst);
+ drop(TcpStream::connect(&addr));
+ t.join().unwrap();
+}
+
+#[cargo_test(requires_git)]
+fn use_the_cli() {
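+    // With `net.git-fetch-with-cli = true`, cargo shells out to the `git` CLI to perform the fetch.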
+ let project = project();
+ let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ dep1 = {{ git = '{}' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ "
+ [net]
+ git-fetch-with-cli = true
+ ",
+ )
+ .build();
+
+ let stderr = "\
+[UPDATING] git repository `[..]`
+[RUNNING] `git fetch [..]`
+From [..]
+ * [new ref] -> origin/HEAD
+[CHECKING] dep1 [..]
+[RUNNING] `rustc [..]`
+[CHECKING] foo [..]
+[RUNNING] `rustc [..]`
+[FINISHED] [..]
+";
+
+ project.cargo("check -v").with_stderr(stderr).run();
+ assert!(paths::home().join(".cargo/git/CACHEDIR.TAG").is_file());
+}
+
+#[cargo_test]
+fn templatedir_doesnt_cause_problems() {
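+    // A custom `init.templatedir` in the user's gitconfig must not break fetching git dependencies.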
+ let git_project2 = git::new("dep2", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep2", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+ let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "fo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ dep1 = {{ git = '{}' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ fs::write(
+ paths::home().join(".gitconfig"),
+ format!(
+ r#"
+ [init]
+ templatedir = {}
+ "#,
+ git_project2
+ .url()
+ .to_file_path()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ .replace("\\", "/")
+ ),
+ )
+ .unwrap();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test(requires_git)]
+fn git_with_cli_force() {
+    // Checks that a force-pushed repo is handled correctly when using git-fetch-with-cli.
+ let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file("src/lib.rs", r#"pub fn f() { println!("one"); }"#)
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ edition = "2018"
+
+ [dependencies]
+ dep1 = {{ git = "{}" }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() { dep1::f(); }")
+ .file(
+ ".cargo/config",
+ "
+ [net]
+ git-fetch-with-cli = true
+ ",
+ )
+ .build();
+ p.cargo("build").run();
+ p.rename_run("foo", "foo1").with_stdout("one").run();
+
+ // commit --amend a change that will require a force fetch.
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ git_project.change_file("src/lib.rs", r#"pub fn f() { println!("two"); }"#);
+ git::add(&repo);
+ let id = repo.refname_to_id("HEAD").unwrap();
+ let commit = repo.find_commit(id).unwrap();
+ let tree_id = t!(t!(repo.index()).write_tree());
+ t!(commit.amend(
+ Some("HEAD"),
+ None,
+ None,
+ None,
+ None,
+ Some(&t!(repo.find_tree(tree_id)))
+ ));
+ // Perform the fetch.
+ p.cargo("update").run();
+ p.cargo("build").run();
+ p.rename_run("foo", "foo2").with_stdout("two").run();
+}
+
+#[cargo_test(requires_git)]
+fn git_fetch_cli_env_clean() {
+    // This tests that git-fetch-with-cli works when the GIT_DIR environment
+ // variable is set (for whatever reason).
+ let git_dep = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+
+ let git_proj = git::new("foo", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [dependencies]
+ dep1 = {{ git = '{}' }}
+ "#,
+ git_dep.url()
+ ),
+ )
+ .file("src/lib.rs", "pub extern crate dep1;")
+ .file(
+ ".cargo/config",
+ "
+ [net]
+ git-fetch-with-cli = true
+ ",
+ )
+ });
+
+ // The directory set here isn't too important. Pointing to our own git
+    // directory causes git to be confused and fail. It can also point to an
+ // empty directory, or a nonexistent one.
+ git_proj
+ .cargo("fetch")
+ .env("GIT_DIR", git_proj.root().join(".git"))
+ .run();
+}
+
+#[cargo_test]
+fn dirty_submodule() {
+    // `cargo package` warns about a dirty file in a submodule.
+ let (git_project, repo) = git::new_repo("foo", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("foo", "0.5.0"))
+ // This is necessary because `git::add` is too eager.
+ .file(".gitignore", "/target")
+ });
+ let git_project2 = git::new("src", |project| {
+ project.no_manifest().file("lib.rs", "pub fn f() {}")
+ });
+
+ let url = path2url(git_project2.root()).to_string();
+ git::add_submodule(&repo, &url, Path::new("src"));
+
+ // Submodule added, but not committed.
+ git_project
+ .cargo("package --no-verify")
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] manifest has no [..]
+See [..]
+[ERROR] 1 files in the working directory contain changes that were not yet committed into git:
+
+.gitmodules
+
+to proceed despite [..]
+",
+ )
+ .run();
+
+ git::commit(&repo);
+ git_project.cargo("package --no-verify").run();
+
+ // Modify file, check for warning.
+ git_project.change_file("src/lib.rs", "");
+ git_project
+ .cargo("package --no-verify")
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] manifest has no [..]
+See [..]
+[ERROR] 1 files in the working directory contain changes that were not yet committed into git:
+
+src/lib.rs
+
+to proceed despite [..]
+",
+ )
+ .run();
+ // Commit the change.
+ let sub_repo = git2::Repository::open(git_project.root().join("src")).unwrap();
+ git::add(&sub_repo);
+ git::commit(&sub_repo);
+ git::add(&repo);
+ git::commit(&repo);
+ git_project.cargo("package --no-verify").run();
+
+ // Try with a nested submodule.
+ let git_project3 = git::new("bar", |project| project.no_manifest().file("mod.rs", ""));
+ let url = path2url(git_project3.root()).to_string();
+ git::add_submodule(&sub_repo, &url, Path::new("bar"));
+ git_project
+ .cargo("package --no-verify")
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] manifest has no [..]
+See [..]
+[ERROR] 1 files in the working directory contain changes that were not yet committed into git:
+
+src/.gitmodules
+
+to proceed despite [..]
+",
+ )
+ .run();
+
+ // Commit the submodule addition.
+ git::commit(&sub_repo);
+ git::add(&repo);
+ git::commit(&repo);
+ git_project.cargo("package --no-verify").run();
+ // Modify within nested submodule.
+ git_project.change_file("src/bar/new_file.rs", "//test");
+ git_project
+ .cargo("package --no-verify")
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] manifest has no [..]
+See [..]
+[ERROR] 1 files in the working directory contain changes that were not yet committed into git:
+
+src/bar/new_file.rs
+
+to proceed despite [..]
+",
+ )
+ .run();
+ // And commit the change.
+ let sub_sub_repo = git2::Repository::open(git_project.root().join("src/bar")).unwrap();
+ git::add(&sub_sub_repo);
+ git::commit(&sub_sub_repo);
+ git::add(&sub_repo);
+ git::commit(&sub_repo);
+ git::add(&repo);
+ git::commit(&repo);
+ git_project.cargo("package --no-verify").run();
+}
+
+#[cargo_test]
+fn default_not_master() {
+ let project = project();
+
+ // Create a repository with a `master` branch, but switch the head to a
+ // branch called `main` at the same time.
+ let (git_project, repo) = git::new_repo("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file("src/lib.rs", "pub fn foo() {}")
+ });
+ let head_id = repo.head().unwrap().target().unwrap();
+ let head = repo.find_commit(head_id).unwrap();
+ repo.branch("main", &head, false).unwrap();
+ repo.set_head("refs/heads/main").unwrap();
+
+ // Then create a commit on the new `main` branch so `master` and `main`
+ // differ.
+ git_project.change_file("src/lib.rs", "pub fn bar() {}");
+ git::add(&repo);
+ git::commit(&repo);
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ [dependencies]
+ dep1 = {{ git = '{}' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "pub fn foo() { dep1::bar() }")
+ .build();
+
+ project
+ .cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `[..]`
+[CHECKING] dep1 v0.5.0 ([..])
+[CHECKING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn historical_lockfile_works() {
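+    // A lock file written by an older cargo (git source URL without `?branch=master`) should still work.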
+ let project = project();
+
+ let (git_project, repo) = git::new_repo("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file("src/lib.rs", "")
+ });
+ let head_id = repo.head().unwrap().target().unwrap();
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+
+ [dependencies]
+ dep1 = {{ git = '{}', branch = 'master' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ project.cargo("check").run();
+ project.change_file(
+ "Cargo.lock",
+ &format!(
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "dep1"
+version = "0.5.0"
+source = "git+{}#{}"
+
+[[package]]
+name = "foo"
+version = "0.5.0"
+dependencies = [
+ "dep1",
+]
+"#,
+ git_project.url(),
+ head_id
+ ),
+ );
+ project
+ .cargo("check")
+ .with_stderr("[FINISHED] [..]\n")
+ .run();
+}
+
+#[cargo_test]
+fn historical_lockfile_works_with_vendor() {
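+    // An old-format lock file should also work when the git dependency has been vendored.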
+ let project = project();
+
+ let (git_project, repo) = git::new_repo("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file("src/lib.rs", "")
+ });
+ let head_id = repo.head().unwrap().target().unwrap();
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+
+ [dependencies]
+ dep1 = {{ git = '{}', branch = 'master' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let output = project.cargo("vendor").exec_with_output().unwrap();
+ project.change_file(".cargo/config", str::from_utf8(&output.stdout).unwrap());
+ project.change_file(
+ "Cargo.lock",
+ &format!(
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "dep1"
+version = "0.5.0"
+source = "git+{}#{}"
+
+[[package]]
+name = "foo"
+version = "0.5.0"
+dependencies = [
+ "dep1",
+]
+"#,
+ git_project.url(),
+ head_id
+ ),
+ );
+ project.cargo("check").run();
+}
+
+#[cargo_test]
+fn two_dep_forms() {
+ let project = project();
+
+ let (git_project, _repo) = git::new_repo("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file("src/lib.rs", "")
+ });
+
+ let project = project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ [dependencies]
+ dep1 = {{ git = '{}', branch = 'master' }}
+ a = {{ path = 'a' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ [dependencies]
+ dep1 = {{ git = '{}' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+    // This'll download the git repository twice, once with HEAD and once with
+    // the master branch. Then it'll compile 4 crates: the two copies of the git
+    // dep, then the two local packages.
+ project
+ .cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[UPDATING] [..]
+[CHECKING] [..]
+[CHECKING] [..]
+[CHECKING] [..]
+[CHECKING] [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn metadata_master_consistency() {
+    // Checks SourceId consistency in the `cargo metadata` output when `master` is
+ // explicit or implicit, using new or old Cargo.lock.
+ let (git_project, git_repo) = git::new_repo("bar", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "1.0.0"))
+ .file("src/lib.rs", "")
+ });
+ let bar_hash = git_repo.head().unwrap().target().unwrap().to_string();
+
+ // Explicit branch="master" with a lock file created before 1.47 (does not contain ?branch=master).
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {{ git = "{}", branch = "master" }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "Cargo.lock",
+ &format!(
+ r#"
+ [[package]]
+ name = "bar"
+ version = "1.0.0"
+ source = "git+{}#{}"
+
+ [[package]]
+ name = "foo"
+ version = "0.1.0"
+ dependencies = [
+ "bar",
+ ]
+ "#,
+ git_project.url(),
+ bar_hash,
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let metadata = |bar_source| -> String {
+ r#"
+ {
+ "packages": [
+ {
+ "name": "bar",
+ "version": "1.0.0",
+ "id": "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": "__BAR_SOURCE__#__BAR_HASH__",
+ "dependencies": [],
+ "targets": "{...}",
+ "features": {},
+ "manifest_path": "[..]",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ },
+ {
+ "name": "foo",
+ "version": "0.1.0",
+ "id": "foo 0.1.0 [..]",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [
+ {
+ "name": "bar",
+ "source": "__BAR_SOURCE__",
+ "req": "*",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null
+ }
+ ],
+ "targets": "{...}",
+ "features": {},
+ "manifest_path": "[..]",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ }
+ ],
+ "workspace_members": [
+ "foo 0.1.0 [..]"
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "id": "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ },
+ {
+ "id": "foo 0.1.0 [..]",
+ "dependencies": [
+ "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)"
+ ],
+ "deps": [
+ {
+ "name": "bar",
+ "pkg": "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ]
+ }
+ ],
+ "features": []
+ }
+ ],
+ "root": "foo 0.1.0 [..]"
+ },
+ "target_directory": "[..]",
+ "version": 1,
+ "workspace_root": "[..]",
+ "metadata": null
+ }
+ "#
+ .replace("__BAR_SOURCE__", bar_source)
+ .replace("__BAR_HASH__", &bar_hash)
+ };
+
+ let bar_source = format!("git+{}?branch=master", git_project.url());
+ p.cargo("metadata").with_json(&metadata(&bar_source)).run();
+
+ // Conversely, remove branch="master" from Cargo.toml, but use a new Cargo.lock that has ?branch=master.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {{ git = "{}" }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "Cargo.lock",
+ &format!(
+ r#"
+ [[package]]
+ name = "bar"
+ version = "1.0.0"
+ source = "git+{}?branch=master#{}"
+
+ [[package]]
+ name = "foo"
+ version = "0.1.0"
+ dependencies = [
+ "bar",
+ ]
+ "#,
+ git_project.url(),
+ bar_hash
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // No ?branch=master!
+ let bar_source = format!("git+{}", git_project.url());
+ p.cargo("metadata").with_json(&metadata(&bar_source)).run();
+}
+
+#[cargo_test]
+fn git_with_force_push() {
+ // Checks that cargo can handle force-pushes to git repos.
+ // This works by having a git dependency that is updated with an amend
+ // commit; the dependency is then re-fetched and verified using various
+ // forms (default branch, branch, rev, tag).
+ let main = |text| format!(r#"pub fn f() {{ println!("{}"); }}"#, text);
+ let (git_project, repo) = git::new_repo("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file("src/lib.rs", &main("one"))
+ });
+ let manifest = |extra| {
+ format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ edition = "2018"
+
+ [dependencies]
+ dep1 = {{ git = "{}"{} }}
+ "#,
+ git_project.url(),
+ extra
+ )
+ };
+ let p = project()
+ .file("Cargo.toml", &manifest(""))
+ .file("src/main.rs", "fn main() { dep1::f(); }")
+ .build();
+ // Download the original and make sure it is OK.
+ p.cargo("build").run();
+ p.rename_run("foo", "foo1").with_stdout("one").run();
+
+ let find_head = || t!(t!(repo.head()).peel_to_commit());
+
+ let amend_commit = |text| {
+ // commit --amend a change that will require a force fetch.
+ git_project.change_file("src/lib.rs", &main(text));
+ git::add(&repo);
+ let commit = find_head();
+ let tree_id = t!(t!(repo.index()).write_tree());
+ t!(commit.amend(
+ Some("HEAD"),
+ None,
+ None,
+ None,
+ None,
+ Some(&t!(repo.find_tree(tree_id)))
+ ));
+ };
+
+ let mut rename_annoyance = 1;
+
+ let mut verify = |text: &str| {
+ // Perform the fetch.
+ p.cargo("update").run();
+ p.cargo("build").run();
+ rename_annoyance += 1;
+ p.rename_run("foo", &format!("foo{}", rename_annoyance))
+ .with_stdout(text)
+ .run();
+ };
+
+ amend_commit("two");
+ verify("two");
+
+ // Try with a rev.
+ let head1 = find_head().id().to_string();
+ let extra = format!(", rev = \"{}\"", head1);
+ p.change_file("Cargo.toml", &manifest(&extra));
+ verify("two");
+ amend_commit("three");
+ let head2 = find_head().id().to_string();
+ assert_ne!(&head1, &head2);
+ let extra = format!(", rev = \"{}\"", head2);
+ p.change_file("Cargo.toml", &manifest(&extra));
+ verify("three");
+
+ // Try with a tag.
+ git::tag(&repo, "my-tag");
+ p.change_file("Cargo.toml", &manifest(", tag = \"my-tag\""));
+ verify("three");
+ amend_commit("tag-three");
+ let head = t!(t!(repo.head()).peel(git2::ObjectType::Commit));
+ t!(repo.tag("my-tag", &head, &t!(repo.signature()), "move tag", true));
+ verify("tag-three");
+
+ // Try with a branch.
+ let br = t!(repo.branch("awesome-stuff", &find_head(), false));
+ t!(repo.checkout_tree(&t!(br.get().peel(git2::ObjectType::Tree)), None));
+ t!(repo.set_head("refs/heads/awesome-stuff"));
+ git_project.change_file("src/lib.rs", &main("awesome-three"));
+ git::add(&repo);
+ git::commit(&repo);
+ p.change_file("Cargo.toml", &manifest(", branch = \"awesome-stuff\""));
+ verify("awesome-three");
+ amend_commit("awesome-four");
+ verify("awesome-four");
+}
+
+#[cargo_test]
+fn corrupted_checkout() {
+ // Test what happens if the checkout is corrupted somehow.
+ _corrupted_checkout(false);
+}
+
+#[cargo_test]
+fn corrupted_checkout_with_cli() {
+ // Test what happens if the checkout is corrupted somehow with git cli.
+ _corrupted_checkout(true);
+}
+
+fn _corrupted_checkout(with_cli: bool) {
+ let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+ .file("src/lib.rs", "")
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep1 = {{ git = "{}" }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("fetch").run();
+
+ let mut paths = t!(glob::glob(
+ paths::home()
+ .join(".cargo/git/checkouts/dep1-*/*")
+ .to_str()
+ .unwrap()
+ ));
+ let path = paths.next().unwrap().unwrap();
+ let ok = path.join(".cargo-ok");
+
+ // Deleting this file simulates an interrupted checkout.
+ t!(fs::remove_file(&ok));
+
+ // This should refresh the checkout.
+ let mut e = p.cargo("fetch");
+ if with_cli {
+ e.env("CARGO_NET_GIT_FETCH_WITH_CLI", "true");
+ }
+ e.run();
+ assert!(ok.exists());
+}
+
+#[cargo_test]
+fn cleans_temp_pack_files() {
+ // Checks that cargo removes temp files left by libgit2 when it is
+ // interrupted (see clean_repo_temp_files).
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch").run();
+ // Simulate what happens when libgit2 is interrupted while indexing a pack file.
+ let tmp_path = super::git_gc::find_index().join(".git/objects/pack/pack_git2_91ab40da04fdc2e7");
+ fs::write(&tmp_path, "test").unwrap();
+ let mut perms = fs::metadata(&tmp_path).unwrap().permissions();
+ perms.set_readonly(true);
+ fs::set_permissions(&tmp_path, perms).unwrap();
+
+ // Trigger an index update.
+ p.cargo("generate-lockfile").run();
+ assert!(!tmp_path.exists());
+}
diff --git a/src/tools/cargo/tests/testsuite/git_auth.rs b/src/tools/cargo/tests/testsuite/git_auth.rs
new file mode 100644
index 000000000..b6e68fa3d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/git_auth.rs
@@ -0,0 +1,437 @@
+//! Tests for git authentication.
+
+use std::collections::HashSet;
+use std::io::prelude::*;
+use std::io::BufReader;
+use std::net::{SocketAddr, TcpListener};
+use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
+use std::sync::Arc;
+use std::thread::{self, JoinHandle};
+
+use cargo_test_support::git::cargo_uses_gitoxide;
+use cargo_test_support::paths;
+use cargo_test_support::{basic_manifest, project};
+
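+// Starts a TCP server that answers the anonymous and the authenticated
+// `info/refs` requests with 401, and installs a credential helper that
+// supplies `foo`/`bar`. Returns the address, server thread, and request count.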
+fn setup_failed_auth_test() -> (SocketAddr, JoinHandle<()>, Arc<AtomicUsize>) {
+ let server = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = server.local_addr().unwrap();
+
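+ // Collects the request line plus the `Authorization` and `Accept` headers
+ // from a single HTTP request.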
+ fn headers(rdr: &mut dyn BufRead) -> HashSet<String> {
+ let valid = ["GET", "Authorization", "Accept"];
+ rdr.lines()
+ .map(|s| s.unwrap())
+ .take_while(|s| s.len() > 2)
+ .map(|s| s.trim().to_string())
+ .filter(|s| valid.iter().any(|prefix| s.starts_with(*prefix)))
+ .collect()
+ }
+
+ let connections = Arc::new(AtomicUsize::new(0));
+ let connections2 = connections.clone();
+ let t = thread::spawn(move || {
+ let mut conn = BufReader::new(server.accept().unwrap().0);
+ let req = headers(&mut conn);
+ connections2.fetch_add(1, SeqCst);
+ conn.get_mut()
+ .write_all(
+ b"HTTP/1.1 401 Unauthorized\r\n\
+ WWW-Authenticate: Basic realm=\"wheee\"\r\n\
+ Content-Length: 0\r\n\
+ \r\n",
+ )
+ .unwrap();
+ assert_eq!(
+ req,
+ vec![
+ "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
+ "Accept: */*",
+ ]
+ .into_iter()
+ .map(|s| s.to_string())
+ .collect()
+ );
+
+ let req = headers(&mut conn);
+ connections2.fetch_add(1, SeqCst);
+ conn.get_mut()
+ .write_all(
+ b"HTTP/1.1 401 Unauthorized\r\n\
+ WWW-Authenticate: Basic realm=\"wheee\"\r\n\
+ \r\n",
+ )
+ .unwrap();
+ assert_eq!(
+ req,
+ vec![
+ "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
+ "Authorization: Basic Zm9vOmJhcg==",
+ "Accept: */*",
+ ]
+ .into_iter()
+ .map(|s| s.to_string())
+ .collect()
+ );
+ });
+
+ let script = project()
+ .at("script")
+ .file("Cargo.toml", &basic_manifest("script", "0.1.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ println!("username=foo");
+ println!("password=bar");
+ }
+ "#,
+ )
+ .build();
+
+ script.cargo("build -v").run();
+ let script = script.bin("script");
+
+ let config = paths::home().join(".gitconfig");
+ let mut config = git2::Config::open(&config).unwrap();
+ config
+ .set_str(
+ "credential.helper",
+ // This is a bash script so replace `\` with `/` for Windows
+ &script.display().to_string().replace("\\", "/"),
+ )
+ .unwrap();
+ (addr, t, connections)
+}
+
+// Tests that HTTP auth is offered from `credential.helper`.
+#[cargo_test]
+fn http_auth_offered() {
+ // TODO(Seb): remove this once possible.
+ if cargo_uses_gitoxide() {
+ // Without the fixes in https://github.com/Byron/gitoxide/releases/tag/gix-v0.41.0 this test is flaky.
+ return;
+ }
+ let (addr, t, connections) = setup_failed_auth_test();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ git = "http://127.0.0.1:{}/foo/bar"
+ "#,
+ addr.port()
+ ),
+ )
+ .file("src/main.rs", "")
+ .file(
+ ".cargo/config",
+ "[net]
+ retry = 0
+ ",
+ )
+ .build();
+
+ // This is a "contains" check because the last error differs by platform,
+ // may span multiple lines, and isn't relevant to this test.
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains(&format!(
+ "\
+[UPDATING] git repository `http://{addr}/foo/bar`
+[ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 [..]`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update http://{addr}/foo/bar
+
+Caused by:
+ failed to clone into: [..]
+
+Caused by:
+ failed to authenticate when downloading repository
+
+ * attempted to find username/password via `credential.helper`, but [..]
+
+ if the git CLI succeeds then `net.git-fetch-with-cli` may help here
+ https://[..]
+
+Caused by:
+"
+ ))
+ .run();
+
+ assert_eq!(connections.load(SeqCst), 2);
+ t.join().ok().unwrap();
+}
+
+// Boy, sure would be nice to have a TLS implementation in Rust!
+#[cargo_test]
+fn https_something_happens() {
+ let server = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = server.local_addr().unwrap();
+ let t = thread::spawn(move || {
+ let mut conn = server.accept().unwrap().0;
+ drop(conn.write(b"1234"));
+ drop(conn.shutdown(std::net::Shutdown::Write));
+ drop(conn.read(&mut [0; 16]));
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ git = "https://127.0.0.1:{}/foo/bar"
+ "#,
+ addr.port()
+ ),
+ )
+ .file("src/main.rs", "")
+ .file(
+ ".cargo/config",
+ "[net]
+ retry = 0
+ ",
+ )
+ .build();
+
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr_contains(&format!(
+ "[UPDATING] git repository `https://{addr}/foo/bar`"
+ ))
+ .with_stderr_contains(&format!(
+ "\
+Caused by:
+ {errmsg}
+",
+ errmsg = if cargo_uses_gitoxide() {
+ "[..]SSL connect error [..]"
+ } else if cfg!(windows) {
+ "[..]failed to send request: [..]"
+ } else if cfg!(target_os = "macos") {
+ // macOS is difficult to test, as some builds may use Security.framework,
+ // while others may use OpenSSL. In that case, let's just not verify the error
+ // message here.
+ "[..]"
+ } else {
+ "[..]SSL error: [..]"
+ }
+ ))
+ .run();
+
+ t.join().ok().unwrap();
+}
+
+// It would sure be nice to have an SSH implementation in Rust!
+#[cargo_test]
+fn ssh_something_happens() {
+ let server = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = server.local_addr().unwrap();
+ let t = thread::spawn(move || {
+ drop(server.accept().unwrap());
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ git = "ssh://127.0.0.1:{}/foo/bar"
+ "#,
+ addr.port()
+ ),
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ let (expected_ssh_message, expected_update) = if cargo_uses_gitoxide() {
+ // Due to the usage of `ssh` and `ssh.exe` respectively, the messages change.
+ // This will be adjusted to use `ssh2` to get rid of this dependency and have uniform messaging.
+ let message = if cfg!(windows) {
+ // The order of multiple possible messages isn't deterministic within `ssh`, and `gitoxide` detects both
+ // but gets to report only the first. Thus this test can flip-flop from one version of the error to the other
+ // and we can't test for that.
+ // We'd want to test for:
+ // "[..]ssh: connect to host 127.0.0.1 [..]"
+ // ssh: connect to host example.org port 22: No route to host
+ // "[..]banner exchange: Connection to 127.0.0.1 [..]"
+ // banner exchange: Connection to 127.0.0.1 port 62250: Software caused connection abort
+ // But since there is no common meaningful sequence or word, we can only match a small telling sequence of characters.
+ "[..]onnect[..]"
+ } else {
+ "[..]Connection [..] by [..]"
+ };
+ (
+ message,
+ format!("[..]Unable to update ssh://{addr}/foo/bar"),
+ )
+ } else {
+ (
+ "\
+Caused by:
+ [..]failed to start SSH session: Failed getting banner[..]
+",
+ format!("[UPDATING] git repository `ssh://{addr}/foo/bar`"),
+ )
+ };
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr_contains(&expected_update)
+ .with_stderr_contains(expected_ssh_message)
+ .run();
+ t.join().ok().unwrap();
+}
+
+#[cargo_test]
+fn net_err_suggests_fetch_with_cli() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [dependencies]
+ foo = { git = "ssh://needs-proxy.invalid/git" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr(format!(
+ "\
+[UPDATING] git repository `ssh://needs-proxy.invalid/git`
+warning: spurious network error[..]
+warning: spurious network error[..]
+warning: spurious network error[..]
+[ERROR] failed to get `foo` as a dependency of package `foo v0.0.0 [..]`
+
+Caused by:
+ failed to load source for dependency `foo`
+
+Caused by:
+ Unable to update ssh://needs-proxy.invalid/git
+
+Caused by:
+ failed to clone into: [..]
+
+Caused by:
+ network failure seems to have happened
+ if a proxy or similar is necessary `net.git-fetch-with-cli` may help here
+ https://[..]
+
+Caused by:
+ {trailer}
+",
+ trailer = if cargo_uses_gitoxide() {
+ "An IO error occurred when talking to the server\n\nCaused by:\n ssh: Could not resolve hostname needs-proxy.invalid[..]"
+ } else {
+ "failed to resolve address for needs-proxy.invalid[..]"
+ }
+ ))
+ .run();
+
+ p.change_file(
+ ".cargo/config",
+ "
+ [net]
+ git-fetch-with-cli = true
+ ",
+ );
+
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr_contains("[..]Unable to update[..]")
+ .with_stderr_does_not_contain("[..]try enabling `git-fetch-with-cli`[..]")
+ .run();
+}
+
+#[cargo_test]
+fn instead_of_url_printed() {
+ // TODO(Seb): remove this once possible.
+ if cargo_uses_gitoxide() {
+ // Without the fixes in https://github.com/Byron/gitoxide/releases/tag/gix-v0.41.0 this test is flaky.
+ return;
+ }
+ let (addr, t, _connections) = setup_failed_auth_test();
+ let config = paths::home().join(".gitconfig");
+ let mut config = git2::Config::open(&config).unwrap();
+ config
+ .set_str(
+ &format!("url.http://{}/.insteadOf", addr),
+ "https://foo.bar/",
+ )
+ .unwrap();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ git = "https://foo.bar/foo/bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `https://foo.bar/foo/bar`
+[ERROR] failed to get `bar` as a dependency of package `foo [..]`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update https://foo.bar/foo/bar
+
+Caused by:
+ failed to clone into: [..]
+
+Caused by:
+ failed to authenticate when downloading repository: http://{addr}/foo/bar
+
+ * attempted to find username/password via `credential.helper`, but maybe the found credentials were incorrect
+
+ if the git CLI succeeds then `net.git-fetch-with-cli` may help here
+ https://[..]
+
+Caused by:
+ [..]
+{trailer}",
+ trailer = if cargo_uses_gitoxide() { "\nCaused by:\n [..]" } else { "" }
+ ))
+ .run();
+
+ t.join().ok().unwrap();
+}
diff --git a/src/tools/cargo/tests/testsuite/git_gc.rs b/src/tools/cargo/tests/testsuite/git_gc.rs
new file mode 100644
index 000000000..fd4fe30a9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/git_gc.rs
@@ -0,0 +1,117 @@
+//! Tests for git garbage collection.
+
+use std::env;
+use std::ffi::OsStr;
+use std::path::PathBuf;
+
+use cargo_test_support::git;
+use cargo_test_support::git::cargo_uses_gitoxide;
+use cargo_test_support::paths;
+use cargo_test_support::project;
+use cargo_test_support::registry::Package;
+
+use url::Url;
+
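+// Returns the path of the registry index clone under the test home directory.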
+pub fn find_index() -> PathBuf {
+ let dir = paths::home().join(".cargo/registry/index");
+ dir.read_dir().unwrap().next().unwrap().unwrap().path()
+}
+
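+// Generates many packfiles in the registry index via repeated fetches, then
+// checks that `cargo update` (with a low `__CARGO_PACKFILE_LIMIT`) garbage
+// collects them. `path_env` optionally overrides PATH for the update.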
+fn run_test(path_env: Option<&OsStr>) {
+ const N: usize = 50;
+
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ Package::new("bar", "0.1.0").publish();
+
+ foo.cargo("check").run();
+
+ let index = find_index();
+ let path = paths::home().join("tmp");
+ let url = Url::from_file_path(&path).unwrap().to_string();
+ let repo = git2::Repository::init(&path).unwrap();
+ let index = git2::Repository::open(&index).unwrap();
+ let mut cfg = repo.config().unwrap();
+ cfg.set_str("user.email", "foo@bar.com").unwrap();
+ cfg.set_str("user.name", "Foo Bar").unwrap();
+ let mut cfg = index.config().unwrap();
+ cfg.set_str("user.email", "foo@bar.com").unwrap();
+ cfg.set_str("user.name", "Foo Bar").unwrap();
+
+ for _ in 0..N {
+ git::commit(&repo);
+ index
+ .remote_anonymous(&url)
+ .unwrap()
+ .fetch(&["refs/heads/master:refs/remotes/foo/master"], None, None)
+ .unwrap();
+ }
+ drop((repo, index));
+ Package::new("bar", "0.1.1").publish();
+
+ let before = find_index()
+ .join(".git/objects/pack")
+ .read_dir()
+ .unwrap()
+ .count();
+ assert!(before > N);
+
+ let mut cmd = foo.cargo("update");
+ cmd.env("__CARGO_PACKFILE_LIMIT", "10");
+ if let Some(path) = path_env {
+ cmd.env("PATH", path);
+ }
+ cmd.env("CARGO_LOG", "trace");
+ cmd.run();
+ let after = find_index()
+ .join(".git/objects/pack")
+ .read_dir()
+ .unwrap()
+ .count();
+ assert!(
+ after < before,
+ "packfiles before: {}\n\
+ packfiles after: {}",
+ before,
+ after
+ );
+}
+
+#[cargo_test(requires_git)]
+fn use_git_gc() {
+ run_test(None);
+}
+
+#[cargo_test]
+fn avoid_using_git() {
+ if cargo_uses_gitoxide() {
+ // The file protocol without the git binary is currently not possible - it needs a built-in upload-pack.
+ // See https://github.com/Byron/gitoxide/issues/734 (support for the file protocol) for progress updates.
+ return;
+ }
+ let path = env::var_os("PATH").unwrap_or_default();
+ let mut paths = env::split_paths(&path).collect::<Vec<_>>();
+ let idx = paths
+ .iter()
+ .position(|p| p.join("git").exists() || p.join("git.exe").exists());
+ match idx {
+ Some(i) => {
+ paths.remove(i);
+ }
+ None => return,
+ }
+ run_test(Some(&env::join_paths(&paths).unwrap()));
+}
diff --git a/src/tools/cargo/tests/testsuite/glob_targets.rs b/src/tools/cargo/tests/testsuite/glob_targets.rs
new file mode 100644
index 000000000..8021dffa9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/glob_targets.rs
@@ -0,0 +1,539 @@
+//! Tests for target filter flags with glob patterns.
+
+use cargo_test_support::{project, Project};
+
+#[cargo_test]
+fn build_example() {
+ full_project()
+ .cargo("build -v --example 'ex*1'")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name example1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_bin() {
+ full_project()
+ .cargo("build -v --bin 'bi*1'")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name bin1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_bench() {
+ full_project()
+ .cargo("build -v --bench 'be*1'")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_test() {
+ full_project()
+ .cargo("build -v --test 'te*1'")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn check_example() {
+ full_project()
+ .cargo("check -v --example 'ex*1'")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name example1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn check_bin() {
+ full_project()
+ .cargo("check -v --bin 'bi*1'")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name bin1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn check_bench() {
+ full_project()
+ .cargo("check -v --bench 'be*1'")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name bench1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn check_test() {
+ full_project()
+ .cargo("check -v --test 'te*1'")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name test1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doc_bin() {
+ full_project()
+ .cargo("doc -v --bin 'bi*1'")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc --crate-type bin --crate-name bin1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fix_example() {
+ full_project()
+ .cargo("fix -v --example 'ex*1' --allow-no-vcs")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[RUNNING] `[..] rustc --crate-name example1 [..]`
+[FIXING] examples/example1.rs
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fix_bin() {
+ full_project()
+ .cargo("fix -v --bin 'bi*1' --allow-no-vcs")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[RUNNING] `[..] rustc --crate-name bin1 [..]`
+[FIXING] src/bin/bin1.rs
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fix_bench() {
+ full_project()
+ .cargo("fix -v --bench 'be*1' --allow-no-vcs")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[RUNNING] `[..] rustc --crate-name bench1 [..]`
+[FIXING] benches/bench1.rs
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fix_test() {
+ full_project()
+ .cargo("fix -v --test 'te*1' --allow-no-vcs")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[RUNNING] `[..] rustc --crate-name test1 [..]`
+[FIXING] tests/test1.rs
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn run_example_and_bin() {
+ let p = full_project();
+ p.cargo("run -v --bin 'bi*1'")
+ .with_status(101)
+ .with_stderr("[ERROR] `cargo run` does not support glob patterns on target selection")
+ .run();
+
+ p.cargo("run -v --example 'ex*1'")
+ .with_status(101)
+ .with_stderr("[ERROR] `cargo run` does not support glob patterns on target selection")
+ .run();
+}
+
+#[cargo_test]
+fn test_example() {
+ full_project()
+ .cargo("test -v --example 'ex*1'")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name example1 [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..]example1[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_bin() {
+ full_project()
+ .cargo("test -v --bin 'bi*1'")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name bin1 [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..]bin1[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_bench() {
+ full_project()
+ .cargo("test -v --bench 'be*1'")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..]bench1[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_test() {
+ full_project()
+ .cargo("test -v --test 'te*1'")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..]test1[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bench_example() {
+ full_project()
+ .cargo("bench -v --example 'ex*1'")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name example1 [..]`
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] `[..]example1[..] --bench`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bench_bin() {
+ full_project()
+ .cargo("bench -v --bin 'bi*1'")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name bin1 [..]`
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] `[..]bin1[..] --bench`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bench_bench() {
+ full_project()
+ .cargo("bench -v --bench 'be*1'")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] `[..]bench1[..] --bench`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bench_test() {
+ full_project()
+ .cargo("bench -v --test 'te*1'")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] `[..]test1[..] --bench`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn install_example() {
+ full_project()
+ .cargo("install --path . --example 'ex*1'")
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]/home/.cargo/bin/example1[EXE]
+[INSTALLED] package `foo v0.0.1 ([CWD])` (executable `example1[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn install_bin() {
+ full_project()
+ .cargo("install --path . --bin 'bi*1'")
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]/home/.cargo/bin/bin1[EXE]
+[INSTALLED] package `foo v0.0.1 ([CWD])` (executable `bin1[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustdoc_example() {
+ full_project()
+ .cargo("rustdoc -v --example 'ex*1'")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc --crate-type bin --crate-name example1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustdoc_bin() {
+ full_project()
+ .cargo("rustdoc -v --bin 'bi*1'")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc --crate-type bin --crate-name bin1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustdoc_bench() {
+ full_project()
+ .cargo("rustdoc -v --bench 'be*1'")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc --crate-type bin --crate-name bench1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustdoc_test() {
+ full_project()
+ .cargo("rustdoc -v --test 'te*1'")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc --crate-type bin --crate-name test1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustc_example() {
+ full_project()
+ .cargo("rustc -v --example 'ex*1'")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name example1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustc_bin() {
+ full_project()
+ .cargo("rustc -v --bin 'bi*1'")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name bin1 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustc_bench() {
+ full_project()
+ .cargo("rustc -v --bench 'be*1'")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustc_test() {
+ full_project()
+ .cargo("rustc -v --test 'te*1'")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[RUNNING] `rustc --crate-name [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
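+// Creates a project with two examples, two benches, two tests, and two extra
+// bins, used to exercise glob target selection.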
+fn full_project() -> Project {
+ project()
+ .file("examples/example1.rs", "fn main() { }")
+ .file("examples/example2.rs", "fn main() { }")
+ .file("benches/bench1.rs", "")
+ .file("benches/bench2.rs", "")
+ .file("tests/test1.rs", "")
+ .file("tests/test2.rs", "")
+ .file("src/main.rs", "fn main() { }")
+ .file("src/bin/bin1.rs", "fn main() { }")
+ .file("src/bin/bin2.rs", "fn main() { }")
+ .build()
+}
diff --git a/src/tools/cargo/tests/testsuite/help.rs b/src/tools/cargo/tests/testsuite/help.rs
new file mode 100644
index 000000000..fdb527e76
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/help.rs
@@ -0,0 +1,219 @@
+//! Tests for cargo's help output.
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_manifest, cargo_exe, cargo_process, paths, process, project};
+use std::fs;
+use std::path::Path;
+use std::str::from_utf8;
+
+#[cargo_test]
+fn help() {
+ cargo_process("").run();
+ cargo_process("help").run();
+ cargo_process("-h").run();
+ cargo_process("help build").run();
+ cargo_process("build -h").run();
+ cargo_process("help help").run();
+ // Ensure that help output goes to stdout, not stderr.
+ cargo_process("search --help").with_stderr("").run();
+ cargo_process("search --help")
+ .with_stdout_contains("[..] --frozen [..]")
+ .run();
+}
+
+#[cargo_test]
+fn help_external_subcommand() {
+ // Check that `help external-subcommand` forwards the --help flag to the
+ // given subcommand.
+ Package::new("cargo-fake-help", "1.0.0")
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if ::std::env::args().nth(2) == Some(String::from("--help")) {
+ println!("fancy help output");
+ }
+ }
+ "#,
+ )
+ .publish();
+ cargo_process("install cargo-fake-help").run();
+ cargo_process("help fake-help")
+ .with_stdout("fancy help output\n")
+ .run();
+}
+
+#[cargo_test]
+fn z_flags_help() {
+ // Test that the output of `cargo -Z help` shows a different help screen with
+ // all the `-Z` flags.
+ cargo_process("-Z help")
+ .with_stdout_contains(
+ " -Z allow-features[..]-- Allow *only* the listed unstable features",
+ )
+ .run();
+}
+
+fn help_with_man(display_command: &str) {
+ // Build a "man" process that just echoes the contents.
+ let p = project()
+ .at(display_command)
+ .file("Cargo.toml", &basic_manifest(display_command, "1.0.0"))
+ .file(
+ "src/main.rs",
+ &r#"
+ fn main() {
+ eprintln!("custom __COMMAND__");
+ let path = std::env::args().skip(1).next().unwrap();
+ let mut f = std::fs::File::open(path).unwrap();
+ std::io::copy(&mut f, &mut std::io::stdout()).unwrap();
+ }
+ "#
+ .replace("__COMMAND__", display_command),
+ )
+ .build();
+ p.cargo("build").run();
+
+ help_with_man_and_path(display_command, "build", "build", &p.target_debug_dir());
+}
+
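+// Runs `cargo help <subcommand>` with the given PATH and checks that stdout
+// matches the man/txt page for `actual_subcommand` and that stderr names the
+// display command (if any).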
+fn help_with_man_and_path(
+ display_command: &str,
+ subcommand: &str,
+ actual_subcommand: &str,
+ path: &Path,
+) {
+ let contents = if display_command == "man" {
+ fs::read_to_string(format!("src/etc/man/cargo-{}.1", actual_subcommand)).unwrap()
+ } else {
+ fs::read_to_string(format!(
+ "src/doc/man/generated_txt/cargo-{}.txt",
+ actual_subcommand
+ ))
+ .unwrap()
+ };
+
+ let output = process(&cargo_exe())
+ .arg("help")
+ .arg(subcommand)
+ .env("PATH", path)
+ .exec_with_output()
+ .unwrap();
+ assert!(output.status.success());
+ let stderr = from_utf8(&output.stderr).unwrap();
+ if display_command.is_empty() {
+ assert_eq!(stderr, "");
+ } else {
+ assert_eq!(stderr, format!("custom {}\n", display_command));
+ }
+ let stdout = from_utf8(&output.stdout).unwrap();
+ assert_eq!(stdout, contents);
+}
+
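+// Runs `cargo help <subcommand>` with the given PATH and returns its stdout.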
+fn help_with_stdout_and_path(subcommand: &str, path: &Path) -> String {
+ let output = process(&cargo_exe())
+ .arg("help")
+ .arg(subcommand)
+ .env("PATH", path)
+ .exec_with_output()
+ .unwrap();
+ assert!(output.status.success());
+ let stderr = from_utf8(&output.stderr).unwrap();
+ assert_eq!(stderr, "");
+ let stdout = from_utf8(&output.stdout).unwrap();
+ stdout.to_string()
+}
+
+#[cargo_test]
+fn help_man() {
+ // Checks that `help command` displays the man page using the given command.
+ help_with_man("man");
+ help_with_man("less");
+ help_with_man("more");
+
+ // Check with no commands in PATH.
+ help_with_man_and_path("", "build", "build", Path::new(""));
+}
+
+#[cargo_test]
+fn help_alias() {
+ // Check that `help some_alias` will resolve.
+ help_with_man_and_path("", "b", "build", Path::new(""));
+
+ let config = paths::root().join(".cargo/config");
+ fs::create_dir_all(config.parent().unwrap()).unwrap();
+ fs::write(
+ config,
+ r#"
+ [alias]
+ empty-alias = ""
+ simple-alias = "build"
+ complex-alias = ["build", "--release"]
+ "#,
+ )
+ .unwrap();
+
+ // The `empty-alias` returns an error.
+ cargo_process("help empty-alias")
+ .env("PATH", Path::new(""))
+ .with_stderr_contains("[..]The subcommand 'empty-alias' wasn't recognized[..]")
+ .run_expect_error();
+
+ // Because `simple-alias` aliases a subcommand with no arguments, help shows the manpage.
+ help_with_man_and_path("", "simple-alias", "build", Path::new(""));
+
+ // Help for `complex-alias` displays the full alias command.
+ let out = help_with_stdout_and_path("complex-alias", Path::new(""));
+ assert_eq!(out, "`complex-alias` is aliased to `build --release`\n");
+}
+
+#[cargo_test]
+fn alias_z_flag_help() {
+ cargo_process("build -Z help")
+ .with_stdout_contains(
+ " -Z allow-features[..]-- Allow *only* the listed unstable features",
+ )
+ .run();
+
+ cargo_process("run -Z help")
+ .with_stdout_contains(
+ " -Z allow-features[..]-- Allow *only* the listed unstable features",
+ )
+ .run();
+
+ cargo_process("check -Z help")
+ .with_stdout_contains(
+ " -Z allow-features[..]-- Allow *only* the listed unstable features",
+ )
+ .run();
+
+ cargo_process("test -Z help")
+ .with_stdout_contains(
+ " -Z allow-features[..]-- Allow *only* the listed unstable features",
+ )
+ .run();
+
+ cargo_process("b -Z help")
+ .with_stdout_contains(
+ " -Z allow-features[..]-- Allow *only* the listed unstable features",
+ )
+ .run();
+
+ cargo_process("r -Z help")
+ .with_stdout_contains(
+ " -Z allow-features[..]-- Allow *only* the listed unstable features",
+ )
+ .run();
+
+ cargo_process("c -Z help")
+ .with_stdout_contains(
+ " -Z allow-features[..]-- Allow *only* the listed unstable features",
+ )
+ .run();
+
+ cargo_process("t -Z help")
+ .with_stdout_contains(
+ " -Z allow-features[..]-- Allow *only* the listed unstable features",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/https.rs b/src/tools/cargo/tests/testsuite/https.rs
new file mode 100644
index 000000000..501eeae05
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/https.rs
@@ -0,0 +1,152 @@
+//! Network tests for https transport.
+//!
+//! Note that these tests will generally require setting CARGO_CONTAINER_TESTS
+//! or CARGO_PUBLIC_NETWORK_TESTS.
+
+use cargo_test_support::containers::Container;
+use cargo_test_support::project;
+
+#[cargo_test(container_test)]
+fn self_signed_should_fail() {
+ // Cargo should not allow a connection to a self-signed certificate.
+ let apache = Container::new("apache").launch();
+ let port = apache.port_mappings[&443];
+ let url = format!("https://127.0.0.1:{port}/repos/bar.git");
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {{ git = "{url}" }}
+ "#
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ // I think the text here depends on the curl backend.
+ let err_msg = if cfg!(target_os = "macos") {
+ "unexpected return value from ssl handshake -9806; class=Ssl (16)"
+ } else if cfg!(unix) {
+ "the SSL certificate is invalid; class=Ssl (16); code=Certificate (-17)"
+ } else if cfg!(windows) {
+ "user cancelled certificate check; class=Http (34); code=Certificate (-17)"
+ } else {
+ panic!("target not supported");
+ };
+ p.cargo("fetch")
+ .with_status(101)
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `https://127.0.0.1:[..]/repos/bar.git`
+error: failed to get `bar` as a dependency of package `foo v0.1.0 ([ROOT]/foo)`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update https://127.0.0.1:[..]/repos/bar.git
+
+Caused by:
+ failed to clone into: [ROOT]/home/.cargo/git/db/bar-[..]
+
+Caused by:
+ network failure seems to have happened
+ if a proxy or similar is necessary `net.git-fetch-with-cli` may help here
+ https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli
+
+Caused by:
+ {err_msg}
+"
+ ))
+ .run();
+}
+
+#[cargo_test(container_test)]
+fn self_signed_with_cacert() {
+ // When using cainfo, a connection to a self-signed cert should be allowed.
+
+ if cfg!(target_os = "macos") {
+ // This test only seems to work with the
+ // curl-sys/force-system-lib-on-osx feature enabled. For some reason
+ // SecureTransport doesn't seem to like the self-signed certificate.
+ // It works if the certificate is manually approved via Keychain
+ // Access. The system libcurl is built with a LibreSSL fallback that is
+ // used when CAINFO is set, and that path seems to work correctly. This
+ // could use some more investigation. The official Rust binaries use
+ // curl-sys/force-system-lib-on-osx so it is mostly an issue for local
+ // testing.
+ //
+ // The error is:
+ // [60] SSL peer certificate or SSH remote key was not OK (SSL:
+ // certificate verification failed (result: 5)); class=Net (12)
+ let curl_v = curl::Version::get();
+ if curl_v.vendored() {
+ eprintln!(
+ "vendored curl not supported on macOS, \
+ set curl-sys/force-system-lib-on-osx to enable"
+ );
+ return;
+ }
+ }
+
+ let apache = Container::new("apache").launch();
+ let port = apache.port_mappings[&443];
+ let url = format!("https://127.0.0.1:{port}/repos/bar.git");
+ let server_crt = apache.read_file("/usr/local/apache2/conf/server.crt");
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {{ git = "{url}" }}
+ "#
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config.toml",
+ &format!(
+ r#"
+ [http]
+ cainfo = "server.crt"
+ "#
+ ),
+ )
+ .file("server.crt", &server_crt)
+ .build();
+ p.cargo("fetch")
+ .with_stderr("[UPDATING] git repository `https://127.0.0.1:[..]/repos/bar.git`")
+ .run();
+}
+
+#[cargo_test(public_network_test)]
+fn github_works() {
+ // Check that an https connection to github.com works.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = { git = "https://github.com/rust-lang/bitflags.git", tag="1.3.2" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch")
+ .with_stderr("[UPDATING] git repository `https://github.com/rust-lang/bitflags.git`")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/inheritable_workspace_fields.rs b/src/tools/cargo/tests/testsuite/inheritable_workspace_fields.rs
new file mode 100644
index 000000000..92c96b985
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/inheritable_workspace_fields.rs
@@ -0,0 +1,1717 @@
+//! Tests for inheriting Cargo.toml fields with field.workspace = true
+use cargo_test_support::registry::{Dependency, Package, RegistryBuilder};
+use cargo_test_support::{
+ basic_lib_manifest, basic_manifest, git, path2url, paths, project, publish, registry,
+};
+
+#[cargo_test]
+fn permit_additional_workspace_fields() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ [workspace.package]
+ version = "1.2.3"
+ authors = ["Rustaceans"]
+ description = "This is a crate"
+ documentation = "https://www.rust-lang.org/learn"
+ readme = "README.md"
+ homepage = "https://www.rust-lang.org"
+ repository = "https://github.com/example/example"
+ license = "MIT"
+ license-file = "LICENSE"
+ keywords = ["cli"]
+ categories = ["development-tools"]
+ publish = false
+ edition = "2018"
+ rust-version = "1.60"
+ exclude = ["foo.txt"]
+ include = ["bar.txt", "**/*.rs", "Cargo.toml", "LICENSE", "README.md"]
+
+ [workspace.package.badges]
+ gitlab = { repository = "https://gitlab.com/rust-lang/rust", branch = "master" }
+
+ [workspace.dependencies]
+ dep = "0.1"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ // Should not warn about unused fields.
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0 ([CWD]/bar)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("check").run();
+ let lockfile = p.read_lockfile();
+ assert!(!lockfile.contains("dep"));
+}
+
+#[cargo_test]
+fn deny_optional_dependencies() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+
+ [workspace.dependencies]
+ dep1 = { version = "0.1", optional = true }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]foo/Cargo.toml`
+
+Caused by:
+ dep1 is optional, but workspace dependencies cannot be optional
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn inherit_own_workspace_fields() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+
+ let p = project().build();
+
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ badges.workspace = true
+
+ [package]
+ name = "foo"
+ version.workspace = true
+ authors.workspace = true
+ description.workspace = true
+ documentation.workspace = true
+ homepage.workspace = true
+ repository.workspace = true
+ license.workspace = true
+ keywords.workspace = true
+ categories.workspace = true
+ publish.workspace = true
+ edition.workspace = true
+ rust-version.workspace = true
+ exclude.workspace = true
+ include.workspace = true
+
+ [workspace]
+ members = []
+ [workspace.package]
+ version = "1.2.3"
+ authors = ["Rustaceans"]
+ description = "This is a crate"
+ documentation = "https://www.rust-lang.org/learn"
+ homepage = "https://www.rust-lang.org"
+ repository = "https://github.com/example/example"
+ license = "MIT"
+ keywords = ["cli"]
+ categories = ["development-tools"]
+ publish = true
+ edition = "2018"
+ rust-version = "1.60"
+ exclude = ["foo.txt"]
+ include = ["bar.txt", "**/*.rs", "Cargo.toml"]
+ [workspace.package.badges]
+ gitlab = { repository = "https://gitlab.com/rust-lang/rust", branch = "master" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("foo.txt", "") // should be ignored when packaging
+ .file("bar.txt", "") // should be included when packaging
+ .build();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[WARNING] [..]
+[..]
+[VERIFYING] foo v1.2.3 [..]
+[COMPILING] foo v1.2.3 [..]
+[FINISHED] [..]
+[PACKAGED] [..]
+[UPLOADING] foo v1.2.3 [..]
+[UPLOADED] foo v1.2.3 to registry `crates-io`
+note: Waiting for `foo v1.2.3` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] foo v1.2.3 at registry `crates-io`
+",
+ )
+ .run();
+
+ publish::validate_upload_with_contents(
+ r#"
+ {
+ "authors": ["Rustaceans"],
+ "badges": {
+ "gitlab": { "branch": "master", "repository": "https://gitlab.com/rust-lang/rust" }
+ },
+ "categories": ["development-tools"],
+ "deps": [],
+ "description": "This is a crate",
+ "documentation": "https://www.rust-lang.org/learn",
+ "features": {},
+ "homepage": "https://www.rust-lang.org",
+ "keywords": ["cli"],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": "https://github.com/example/example",
+ "vers": "1.2.3"
+ }
+ "#,
+ "foo-1.2.3.crate",
+ &[
+ "Cargo.lock",
+ "Cargo.toml",
+ "Cargo.toml.orig",
+ "src/main.rs",
+ ".cargo_vcs_info.json",
+ "bar.txt",
+ ],
+ &[(
+ "Cargo.toml",
+ &format!(
+ r#"{}
+[package]
+edition = "2018"
+rust-version = "1.60"
+name = "foo"
+version = "1.2.3"
+authors = ["Rustaceans"]
+exclude = ["foo.txt"]
+include = [
+ "bar.txt",
+ "**/*.rs",
+ "Cargo.toml",
+]
+publish = true
+description = "This is a crate"
+homepage = "https://www.rust-lang.org"
+documentation = "https://www.rust-lang.org/learn"
+keywords = ["cli"]
+categories = ["development-tools"]
+license = "MIT"
+repository = "https://github.com/example/example"
+
+[badges.gitlab]
+branch = "master"
+repository = "https://gitlab.com/rust-lang/rust"
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ ),
+ )],
+ );
+}
+
+#[cargo_test]
+fn inherit_own_dependencies() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+
+ [dependencies]
+ dep.workspace = true
+
+ [build-dependencies]
+ dep-build.workspace = true
+
+ [dev-dependencies]
+ dep-dev.workspace = true
+
+ [workspace]
+ members = []
+
+ [workspace.dependencies]
+ dep = "0.1"
+ dep-build = "0.8"
+ dep-dev = "0.5.2"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("dep", "0.1.2").publish();
+ Package::new("dep-build", "0.8.2").publish();
+ Package::new("dep-dev", "0.5.2").publish();
+
+ p.cargo("check")
+ // Unordered because the download order is nondeterministic.
+ .with_stderr_unordered(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep v0.1.2 ([..])
+[DOWNLOADED] dep-build v0.8.2 ([..])
+[CHECKING] dep v0.1.2
+[CHECKING] bar v0.2.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("check").run();
+ let lockfile = p.read_lockfile();
+ assert!(lockfile.contains("dep"));
+ assert!(lockfile.contains("dep-dev"));
+ assert!(lockfile.contains("dep-build"));
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[WARNING] [..]
+[..]
+[PACKAGING] bar v0.2.0 [..]
+[UPDATING] [..]
+[VERIFYING] bar v0.2.0 [..]
+[COMPILING] dep v0.1.2
+[COMPILING] bar v0.2.0 [..]
+[FINISHED] [..]
+[PACKAGED] [..]
+[UPLOADING] bar v0.2.0 [..]
+[UPLOADED] bar v0.2.0 to registry `crates-io`
+note: Waiting for `bar v0.2.0` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] bar v0.2.0 at registry `crates-io`
+",
+ )
+ .run();
+
+ publish::validate_upload_with_contents(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "normal",
+ "name": "dep",
+ "optional": false,
+ "target": null,
+ "version_req": "^0.1"
+ },
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "dev",
+ "name": "dep-dev",
+ "optional": false,
+ "target": null,
+ "version_req": "^0.5.2"
+ },
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "build",
+ "name": "dep-build",
+ "optional": false,
+ "target": null,
+ "version_req": "^0.8"
+ }
+ ],
+ "description": null,
+ "documentation": null,
+ "features": {},
+ "homepage": null,
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "name": "bar",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "vers": "0.2.0"
+ }
+ "#,
+ "bar-0.2.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "Cargo.lock", "src/main.rs"],
+ &[(
+ "Cargo.toml",
+ &format!(
+ r#"{}
+[package]
+name = "bar"
+version = "0.2.0"
+authors = []
+
+[dependencies.dep]
+version = "0.1"
+
+[dev-dependencies.dep-dev]
+version = "0.5.2"
+
+[build-dependencies.dep-build]
+version = "0.8"
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ ),
+ )],
+ );
+}
+
+#[cargo_test]
+fn inherit_own_detailed_dependencies() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+
+ [dependencies]
+ dep.workspace = true
+
+ [workspace]
+ members = []
+
+ [workspace.dependencies]
+ dep = { version = "0.1.2", features = ["testing"] }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("dep", "0.1.2")
+ .feature("testing", &vec![])
+ .publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep v0.1.2 ([..])
+[CHECKING] dep v0.1.2
+[CHECKING] bar v0.2.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("check").run();
+ let lockfile = p.read_lockfile();
+ assert!(lockfile.contains("dep"));
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[WARNING] [..]
+[..]
+[PACKAGING] bar v0.2.0 [..]
+[UPDATING] [..]
+[VERIFYING] bar v0.2.0 [..]
+[COMPILING] dep v0.1.2
+[COMPILING] bar v0.2.0 [..]
+[FINISHED] [..]
+[PACKAGED] [..]
+[UPLOADING] bar v0.2.0 [..]
+[UPLOADED] bar v0.2.0 to registry `crates-io`
+note: Waiting for `bar v0.2.0` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] bar v0.2.0 at registry `crates-io`
+",
+ )
+ .run();
+
+ publish::validate_upload_with_contents(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [
+ {
+ "default_features": true,
+ "features": ["testing"],
+ "kind": "normal",
+ "name": "dep",
+ "optional": false,
+ "target": null,
+ "version_req": "^0.1.2"
+ }
+ ],
+ "description": null,
+ "documentation": null,
+ "features": {},
+ "homepage": null,
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "name": "bar",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "vers": "0.2.0"
+ }
+ "#,
+ "bar-0.2.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "Cargo.lock", "src/main.rs"],
+ &[(
+ "Cargo.toml",
+ &format!(
+ r#"{}
+[package]
+name = "bar"
+version = "0.2.0"
+authors = []
+
+[dependencies.dep]
+version = "0.1.2"
+features = ["testing"]
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ ),
+ )],
+ );
+}
+
+#[cargo_test]
+fn inherit_from_own_undefined_field() {
+ registry::init();
+
+ let p = project().build();
+
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.2.5"
+ authors = ["rustaceans"]
+ description.workspace = true
+
+ [workspace]
+ members = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ error inheriting `description` from workspace root manifest's `workspace.package.description`
+
+Caused by:
+ `workspace.package.description` was not defined
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn inherited_dependencies_union_features() {
+ Package::new("dep", "0.1.0")
+ .feature("fancy", &["fancy_dep"])
+ .feature("dancy", &["dancy_dep"])
+ .add_dep(Dependency::new("fancy_dep", "0.2").optional(true))
+ .add_dep(Dependency::new("dancy_dep", "0.6").optional(true))
+ .file("src/lib.rs", "")
+ .publish();
+
+ Package::new("fancy_dep", "0.2.4").publish();
+ Package::new("dancy_dep", "0.6.8").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+ [dependencies]
+ dep = { workspace = true, features = ["dancy"] }
+
+ [workspace]
+ members = []
+ [workspace.dependencies]
+ dep = { version = "0.1", features = ["fancy"] }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] fancy_dep v0.2.4 ([..])
+[DOWNLOADED] dep v0.1.0 ([..])
+[DOWNLOADED] dancy_dep v0.6.8 ([..])
+[CHECKING] [..]
+[CHECKING] [..]
+[CHECKING] dep v0.1.0
+[CHECKING] bar v0.2.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ let lockfile = p.read_lockfile();
+ assert!(lockfile.contains("dep"));
+ assert!(lockfile.contains("fancy_dep"));
+ assert!(lockfile.contains("dancy_dep"));
+}
+
+#[cargo_test]
+fn inherit_workspace_fields() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+
+ let p = project().build();
+
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ [workspace.package]
+ version = "1.2.3"
+ authors = ["Rustaceans"]
+ description = "This is a crate"
+ documentation = "https://www.rust-lang.org/learn"
+ readme = "README.md"
+ homepage = "https://www.rust-lang.org"
+ repository = "https://github.com/example/example"
+ license = "MIT"
+ license-file = "LICENSE"
+ keywords = ["cli"]
+ categories = ["development-tools"]
+ publish = true
+ edition = "2018"
+ rust-version = "1.60"
+ exclude = ["foo.txt"]
+ include = ["bar.txt", "**/*.rs", "Cargo.toml", "LICENSE", "README.md"]
+ [workspace.package.badges]
+ gitlab = { repository = "https://gitlab.com/rust-lang/rust", branch = "master" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ badges.workspace = true
+ [package]
+ name = "bar"
+ workspace = ".."
+ version.workspace = true
+ authors.workspace = true
+ description.workspace = true
+ documentation.workspace = true
+ readme.workspace = true
+ homepage.workspace = true
+ repository.workspace = true
+ license.workspace = true
+ license-file.workspace = true
+ keywords.workspace = true
+ categories.workspace = true
+ publish.workspace = true
+ edition.workspace = true
+ rust-version.workspace = true
+ exclude.workspace = true
+ include.workspace = true
+ "#,
+ )
+ .file("LICENSE", "license")
+ .file("README.md", "README.md")
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/foo.txt", "") // should be ignored when packaging
+ .file("bar/bar.txt", "") // should be included when packaging
+ .build();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .cwd("bar")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[WARNING] [..]
+[..]
+[VERIFYING] bar v1.2.3 [..]
+[WARNING] [..]
+[..]
+[..]
+[..]
+[COMPILING] bar v1.2.3 [..]
+[FINISHED] [..]
+[PACKAGED] [..]
+[UPLOADING] bar v1.2.3 [..]
+[UPLOADED] bar v1.2.3 to registry `crates-io`
+note: Waiting for `bar v1.2.3` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] bar v1.2.3 at registry `crates-io`
+",
+ )
+ .run();
+
+ publish::validate_upload_with_contents(
+ r#"
+ {
+ "authors": ["Rustaceans"],
+ "badges": {
+ "gitlab": { "branch": "master", "repository": "https://gitlab.com/rust-lang/rust" }
+ },
+ "categories": ["development-tools"],
+ "deps": [],
+ "description": "This is a crate",
+ "documentation": "https://www.rust-lang.org/learn",
+ "features": {},
+ "homepage": "https://www.rust-lang.org",
+ "keywords": ["cli"],
+ "license": "MIT",
+ "license_file": "../LICENSE",
+ "links": null,
+ "name": "bar",
+ "readme": "README.md",
+ "readme_file": "../README.md",
+ "repository": "https://github.com/example/example",
+ "vers": "1.2.3"
+ }
+ "#,
+ "bar-1.2.3.crate",
+ &[
+ "Cargo.lock",
+ "Cargo.toml",
+ "Cargo.toml.orig",
+ "src/main.rs",
+ "README.md",
+ "LICENSE",
+ ".cargo_vcs_info.json",
+ "bar.txt",
+ ],
+ &[(
+ "Cargo.toml",
+ &format!(
+ r#"{}
+[package]
+edition = "2018"
+rust-version = "1.60"
+name = "bar"
+version = "1.2.3"
+authors = ["Rustaceans"]
+exclude = ["foo.txt"]
+include = [
+ "bar.txt",
+ "**/*.rs",
+ "Cargo.toml",
+ "LICENSE",
+ "README.md",
+]
+publish = true
+description = "This is a crate"
+homepage = "https://www.rust-lang.org"
+documentation = "https://www.rust-lang.org/learn"
+readme = "README.md"
+keywords = ["cli"]
+categories = ["development-tools"]
+license = "MIT"
+license-file = "LICENSE"
+repository = "https://github.com/example/example"
+
+[badges.gitlab]
+branch = "master"
+repository = "https://gitlab.com/rust-lang/rust"
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ ),
+ )],
+ );
+}
+
+#[cargo_test]
+fn inherit_dependencies() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ [workspace.dependencies]
+ dep = "0.1"
+ dep-build = "0.8"
+ dep-dev = "0.5.2"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ workspace = ".."
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+ [dependencies]
+ dep.workspace = true
+ [build-dependencies]
+ dep-build.workspace = true
+ [dev-dependencies]
+ dep-dev.workspace = true
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("dep", "0.1.2").publish();
+ Package::new("dep-build", "0.8.2").publish();
+ Package::new("dep-dev", "0.5.2").publish();
+
+ p.cargo("check")
+ // Unordered because the download order is nondeterministic.
+ .with_stderr_unordered(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep v0.1.2 ([..])
+[DOWNLOADED] dep-build v0.8.2 ([..])
+[CHECKING] dep v0.1.2
+[CHECKING] bar v0.2.0 ([CWD]/bar)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("check").run();
+ let lockfile = p.read_lockfile();
+ assert!(lockfile.contains("dep"));
+ assert!(lockfile.contains("dep-dev"));
+ assert!(lockfile.contains("dep-build"));
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .cwd("bar")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[WARNING] [..]
+[..]
+[PACKAGING] bar v0.2.0 [..]
+[UPDATING] [..]
+[VERIFYING] bar v0.2.0 [..]
+[COMPILING] dep v0.1.2
+[COMPILING] bar v0.2.0 [..]
+[FINISHED] [..]
+[PACKAGED] [..]
+[UPLOADING] bar v0.2.0 [..]
+[UPLOADED] bar v0.2.0 to registry `crates-io`
+note: Waiting for `bar v0.2.0` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] bar v0.2.0 at registry `crates-io`
+",
+ )
+ .run();
+
+ publish::validate_upload_with_contents(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "normal",
+ "name": "dep",
+ "optional": false,
+ "target": null,
+ "version_req": "^0.1"
+ },
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "dev",
+ "name": "dep-dev",
+ "optional": false,
+ "target": null,
+ "version_req": "^0.5.2"
+ },
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "build",
+ "name": "dep-build",
+ "optional": false,
+ "target": null,
+ "version_req": "^0.8"
+ }
+ ],
+ "description": null,
+ "documentation": null,
+ "features": {},
+ "homepage": null,
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "name": "bar",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "vers": "0.2.0"
+ }
+ "#,
+ "bar-0.2.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "Cargo.lock", "src/main.rs"],
+ &[(
+ "Cargo.toml",
+ &format!(
+ r#"{}
+[package]
+name = "bar"
+version = "0.2.0"
+authors = []
+
+[dependencies.dep]
+version = "0.1"
+
+[dev-dependencies.dep-dev]
+version = "0.5.2"
+
+[build-dependencies.dep-build]
+version = "0.8"
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ ),
+ )],
+ );
+}
+
+#[cargo_test]
+fn inherit_target_dependencies() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ [workspace.dependencies]
+ dep = "0.1"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ workspace = ".."
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+ [target.'cfg(unix)'.dependencies]
+ dep.workspace = true
+ [target.'cfg(windows)'.dependencies]
+ dep.workspace = true
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("dep", "0.1.2").publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep v0.1.2 ([..])
+[CHECKING] dep v0.1.2
+[CHECKING] bar v0.2.0 ([CWD]/bar)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ let lockfile = p.read_lockfile();
+ assert!(lockfile.contains("dep"));
+}
+
+#[cargo_test]
+fn inherit_dependency_override_optional() {
+ Package::new("dep", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ [workspace.dependencies]
+ dep = "0.1.0"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ workspace = ".."
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+ [dependencies]
+ dep = { workspace = true, optional = true }
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[CHECKING] bar v0.2.0 ([CWD]/bar)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn inherit_dependency_features() {
+ Package::new("dep", "0.1.0")
+ .feature("fancy", &["fancy_dep"])
+ .add_dep(Dependency::new("fancy_dep", "0.2").optional(true))
+ .file("src/lib.rs", "")
+ .publish();
+
+ Package::new("fancy_dep", "0.2.4").publish();
+ Package::new("dancy_dep", "0.6.8").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+ [dependencies]
+ dep = { workspace = true, features = ["fancy"] }
+
+ [workspace]
+ members = []
+ [workspace.dependencies]
+ dep = "0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] fancy_dep v0.2.4 ([..])
+[DOWNLOADED] dep v0.1.0 ([..])
+[CHECKING] fancy_dep v0.2.4
+[CHECKING] dep v0.1.0
+[CHECKING] bar v0.2.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ let lockfile = p.read_lockfile();
+ assert!(lockfile.contains("dep"));
+ assert!(lockfile.contains("fancy_dep"));
+}
+
+#[cargo_test]
+fn inherit_detailed_dependencies() {
+ let git_project = git::new("detailed", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("detailed"))
+ .file(
+ "src/detailed.rs",
+ r#"
+ pub fn hello() -> &'static str {
+ "hello world"
+ }
+ "#,
+ )
+ });
+
+ // Make a new branch based on the current HEAD commit
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ let head = repo.head().unwrap().target().unwrap();
+ let head = repo.find_commit(head).unwrap();
+ repo.branch("branchy", &head, true).unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [workspace]
+ members = ["bar"]
+ [workspace.dependencies]
+ detailed = {{ git = '{}', branch = "branchy" }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ workspace = ".."
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+ [dependencies]
+ detailed.workspace = true
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ let git_root = git_project.root();
+
+ p.cargo("check")
+ .with_stderr(&format!(
+ "\
+[UPDATING] git repository `{}`\n\
+[CHECKING] detailed v0.5.0 ({}?branch=branchy#[..])\n\
+[CHECKING] bar v0.2.0 ([CWD]/bar)\n\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ path2url(&git_root),
+ path2url(&git_root),
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn inherit_path_dependencies() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ [workspace.dependencies]
+ dep = { path = "dep" }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ workspace = ".."
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+ [dependencies]
+ dep.workspace = true
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("dep/Cargo.toml", &basic_manifest("dep", "0.9.0"))
+ .file("dep/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] dep v0.9.0 ([CWD]/dep)
+[CHECKING] bar v0.2.0 ([CWD]/bar)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ let lockfile = p.read_lockfile();
+ assert!(lockfile.contains("dep"));
+}
+
+#[cargo_test]
+fn error_workspace_false() {
+ registry::init();
+
+ let p = project().build();
+
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ workspace = ".."
+ version = "1.2.3"
+ authors = ["rustaceans"]
+ description = { workspace = false }
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .cwd("bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ `workspace` cannot be false
+ in `package.description.workspace`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn error_workspace_dependency_looked_for_workspace_itself() {
+ registry::init();
+
+ let p = project().build();
+
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "1.2.3"
+
+ [dependencies]
+ dep.workspace = true
+
+ [workspace]
+ members = []
+
+ [workspace.dependencies]
+ dep.workspace = true
+
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] [CWD]/Cargo.toml: unused manifest key: workspace.dependencies.dep.workspace
+[WARNING] [CWD]/Cargo.toml: dependency (dep) specified without providing a local path, Git repository, or version to use. This will be considered an error in future versions
+[UPDATING] `dummy-registry` index
+[ERROR] no matching package named `dep` found
+location searched: registry `crates-io`
+required by package `bar v1.2.3 ([CWD])`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn error_malformed_workspace_root() {
+ registry::init();
+
+ let p = project().build();
+
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = [invalid toml
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ workspace = ".."
+ version = "1.2.3"
+ authors = ["rustaceans"]
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .cwd("bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+ [..]
+
+Caused by:
+ [..]
+ |
+ 3 | members = [invalid toml
+ | ^
+ invalid array
+ expected `]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn error_no_root_workspace() {
+ registry::init();
+
+ let p = project().build();
+
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ workspace = ".."
+ version = "1.2.3"
+ authors = ["rustaceans"]
+ description.workspace = true
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .cwd("bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]/Cargo.toml`
+
+Caused by:
+ error inheriting `description` from workspace root manifest's `workspace.package.description`
+
+Caused by:
+ root of a workspace inferred but wasn't a root: [..]/Cargo.toml
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn error_inherit_unspecified_dependency() {
+ let p = project().build();
+
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ workspace = ".."
+ version = "1.2.3"
+ authors = ["rustaceans"]
+ [dependencies]
+ foo.workspace = true
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .cwd("bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ error inheriting `foo` from workspace root manifest's `workspace.dependencies.foo`
+
+Caused by:
+ `workspace.dependencies` was not defined
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warn_inherit_def_feat_true_member_def_feat_false() {
+ Package::new("dep", "0.1.0")
+ .feature("default", &["fancy_dep"])
+ .add_dep(Dependency::new("fancy_dep", "0.2").optional(true))
+ .file("src/lib.rs", "")
+ .publish();
+
+ Package::new("fancy_dep", "0.2.4").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+ [dependencies]
+ dep = { workspace = true, default-features = false }
+
+ [workspace]
+ members = []
+ [workspace.dependencies]
+ dep = { version = "0.1.0", default-features = true }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] [CWD]/Cargo.toml: `default-features` is ignored for dep, since `default-features` was \
+true for `workspace.dependencies.dep`, this could become a hard error in the future
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] fancy_dep v0.2.4 ([..])
+[DOWNLOADED] dep v0.1.0 ([..])
+[CHECKING] fancy_dep v0.2.4
+[CHECKING] dep v0.1.0
+[CHECKING] bar v0.2.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warn_inherit_simple_member_def_feat_false() {
+ Package::new("dep", "0.1.0")
+ .feature("default", &["fancy_dep"])
+ .add_dep(Dependency::new("fancy_dep", "0.2").optional(true))
+ .file("src/lib.rs", "")
+ .publish();
+
+ Package::new("fancy_dep", "0.2.4").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+ [dependencies]
+ dep = { workspace = true, default-features = false }
+
+ [workspace]
+ members = []
+ [workspace.dependencies]
+ dep = "0.1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] [CWD]/Cargo.toml: `default-features` is ignored for dep, since `default-features` was \
+not specified for `workspace.dependencies.dep`, this could become a hard error in the future
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] fancy_dep v0.2.4 ([..])
+[DOWNLOADED] dep v0.1.0 ([..])
+[CHECKING] fancy_dep v0.2.4
+[CHECKING] dep v0.1.0
+[CHECKING] bar v0.2.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn inherit_def_feat_false_member_def_feat_true() {
+ Package::new("dep", "0.1.0")
+ .feature("default", &["fancy_dep"])
+ .add_dep(Dependency::new("fancy_dep", "0.2").optional(true))
+ .file("src/lib.rs", "")
+ .publish();
+
+ Package::new("fancy_dep", "0.2.4").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+ [dependencies]
+ dep = { workspace = true, default-features = true }
+
+ [workspace]
+ members = []
+ [workspace.dependencies]
+ dep = { version = "0.1.0", default-features = false }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] fancy_dep v0.2.4 ([..])
+[DOWNLOADED] dep v0.1.0 ([..])
+[CHECKING] fancy_dep v0.2.4
+[CHECKING] dep v0.1.0
+[CHECKING] bar v0.2.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cannot_inherit_in_patch() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = []
+
+ [workspace.dependencies]
+ bar = { path = "bar" }
+
+ [package]
+ name = "foo"
+ version = "0.2.0"
+
+ [patch.crates-io]
+ bar.workspace = true
+
+ [dependencies]
+ bar = "0.1.0"
+
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] [CWD]/Cargo.toml: unused manifest key: patch.crates-io.bar.workspace
+[WARNING] [CWD]/Cargo.toml: dependency (bar) specified without providing a local path, Git repository, or version to use. This will be considered an error in future versions
+[UPDATING] `dummy-registry` index
+[ERROR] failed to resolve patches for `https://github.com/rust-lang/crates.io-index`
+
+Caused by:
+ patch for `bar` in `https://github.com/rust-lang/crates.io-index` points to the same source, but patches must point to different sources
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warn_inherit_unused_manifest_key_dep() {
+ Package::new("dep", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = []
+ [workspace.dependencies]
+ dep = { version = "0.1", wxz = "wxz" }
+
+ [package]
+ name = "bar"
+ version = "0.2.0"
+ authors = []
+
+ [dependencies]
+ dep = { workspace = true, wxz = "wxz" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] [CWD]/Cargo.toml: unused manifest key: workspace.dependencies.dep.wxz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: dependencies.dep.wxz
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep v0.1.0 ([..])
+[CHECKING] [..]
+[CHECKING] bar v0.2.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warn_inherit_unused_manifest_key_package() {
+ Package::new("dep", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ badges = { workspace = true, xyz = "abc"}
+
+ [workspace]
+ members = []
+ [workspace.package]
+ version = "1.2.3"
+ authors = ["Rustaceans"]
+ description = "This is a crate"
+ documentation = "https://www.rust-lang.org/learn"
+ homepage = "https://www.rust-lang.org"
+ repository = "https://github.com/example/example"
+ license = "MIT"
+ keywords = ["cli"]
+ categories = ["development-tools"]
+ publish = true
+ edition = "2018"
+ rust-version = "1.60"
+ exclude = ["foo.txt"]
+ include = ["bar.txt", "**/*.rs", "Cargo.toml"]
+ [workspace.package.badges]
+ gitlab = { repository = "https://gitlab.com/rust-lang/rust", branch = "master" }
+
+ [package]
+ name = "bar"
+ version = { workspace = true, xyz = "abc"}
+ authors = { workspace = true, xyz = "abc"}
+ description = { workspace = true, xyz = "abc"}
+ documentation = { workspace = true, xyz = "abc"}
+ homepage = { workspace = true, xyz = "abc"}
+ repository = { workspace = true, xyz = "abc"}
+ license = { workspace = true, xyz = "abc"}
+ keywords = { workspace = true, xyz = "abc"}
+ categories = { workspace = true, xyz = "abc"}
+ publish = { workspace = true, xyz = "abc"}
+ edition = { workspace = true, xyz = "abc"}
+ rust-version = { workspace = true, xyz = "abc"}
+ exclude = { workspace = true, xyz = "abc"}
+ include = { workspace = true, xyz = "abc"}
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.authors.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.categories.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.description.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.documentation.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.edition.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.exclude.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.homepage.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.include.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.keywords.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.license.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.publish.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.repository.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.rust-version.xyz
+[WARNING] [CWD]/Cargo.toml: unused manifest key: package.version.xyz
+[CHECKING] bar v1.2.3 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/in b/src/tools/cargo/tests/testsuite/init/auto_git/in
new file mode 120000
index 000000000..1202506b6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/auto_git/in
@@ -0,0 +1 @@
+../empty_dir \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/mod.rs b/src/tools/cargo/tests/testsuite/init/auto_git/mod.rs
new file mode 100644
index 000000000..68c217520
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/auto_git/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+ assert!(project_root.join(".git").is_dir());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/auto_git/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/auto_git/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/auto_git/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/auto_git/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/stderr.log b/src/tools/cargo/tests/testsuite/init/auto_git/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/auto_git/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/stdout.log b/src/tools/cargo/tests/testsuite/init/auto_git/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/auto_git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/in/src/main.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/in/src/main.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/in/src/main.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/mod.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/mod.rs
new file mode 100644
index 000000000..326bd218a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/mod.rs
@@ -0,0 +1,21 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --bin --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/src/main.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/src/main.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/src/main.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stderr.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stderr.log
new file mode 100644
index 000000000..3847e4e4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stderr.log
@@ -0,0 +1 @@
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stdout.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/in/main.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/in/main.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/in/main.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/mod.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/mod.rs
new file mode 100644
index 000000000..1f16fb659
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --bin --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join("src").is_dir());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/Cargo.toml
new file mode 100644
index 000000000..5c6c9158c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+
+[[bin]]
+name = "case"
+path = "main.rs"
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/main.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/main.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/main.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stderr.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stderr.log
new file mode 100644
index 000000000..3847e4e4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stderr.log
@@ -0,0 +1 @@
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stdout.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/in/src/main.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/in/src/main.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/in/src/main.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/mod.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/mod.rs
new file mode 100644
index 000000000..12349a09b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/mod.rs
@@ -0,0 +1,21 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/src/main.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/src/main.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/src/main.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stderr.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stderr.log
new file mode 100644
index 000000000..3847e4e4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stderr.log
@@ -0,0 +1 @@
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stdout.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/in/case.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/in/case.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/in/case.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/mod.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/mod.rs
new file mode 100644
index 000000000..fe65940db
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join("src").is_dir());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/Cargo.toml
new file mode 100644
index 000000000..8da5fe778
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+
+[[bin]]
+name = "case"
+path = "case.rs"
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/case.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/case.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/case.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stderr.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stderr.log
new file mode 100644
index 000000000..3847e4e4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stderr.log
@@ -0,0 +1 @@
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stdout.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/in/src/case.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/in/src/case.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/in/src/case.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/mod.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/mod.rs
new file mode 100644
index 000000000..d3e8e66df
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join("src/main.rs").is_file());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/Cargo.toml
new file mode 100644
index 000000000..dec0aaea9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+
+[[bin]]
+name = "case"
+path = "src/case.rs"
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/src/case.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/src/case.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/src/case.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stderr.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stderr.log
new file mode 100644
index 000000000..3847e4e4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stderr.log
@@ -0,0 +1 @@
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stdout.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/in/main.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/in/main.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/in/main.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/mod.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/mod.rs
new file mode 100644
index 000000000..fe65940db
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join("src").is_dir());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/Cargo.toml
new file mode 100644
index 000000000..5c6c9158c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+
+[[bin]]
+name = "case"
+path = "main.rs"
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/main.rs b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/main.rs
new file mode 100644
index 000000000..65fdcf8da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/main.rs
@@ -0,0 +1,4 @@
+fn main() {
+ println!("Check that our file is not overwritten")
+}
+
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stderr.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stderr.log
new file mode 100644
index 000000000..3847e4e4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stderr.log
@@ -0,0 +1 @@
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stdout.log b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/mod.rs b/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/mod.rs
new file mode 100644
index 000000000..c9232320a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/mod.rs
@@ -0,0 +1,19 @@
+use cargo_test_support::paths;
+use cargo_test_support::prelude::*;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let cwd = paths::root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib --bin")
+ .current_dir(&cwd)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert!(!cwd.join("Cargo.toml").is_file());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stderr.log b/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stderr.log
new file mode 100644
index 000000000..9d635a427
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stderr.log
@@ -0,0 +1 @@
+error: can't specify both lib and binary outputs
diff --git a/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stdout.log b/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/case.rs b/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/case.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/case.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/lib.rs b/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/lib.rs
new file mode 100644
index 000000000..59760b549
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/lib.rs
@@ -0,0 +1 @@
+fn f() {}
diff --git a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/mod.rs b/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/mod.rs
new file mode 100644
index 000000000..5e9e1b94c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/mod.rs
@@ -0,0 +1,18 @@
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib")
+ .current_dir(project_root)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stderr.log b/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stderr.log
new file mode 100644
index 000000000..c08dce96b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stderr.log
@@ -0,0 +1 @@
+error: cannot have a package with multiple libraries, found both `case.rs` and `lib.rs`
diff --git a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stdout.log b/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/lib.rs b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/lib.rs
new file mode 100644
index 000000000..321163744
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/lib.rs
@@ -0,0 +1 @@
+fn f() { println!("lib.rs"); }
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/src/lib.rs b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/src/lib.rs
new file mode 100644
index 000000000..f71455a1a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/src/lib.rs
@@ -0,0 +1 @@
+fn f() { println!("src/lib.rs"); }
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/mod.rs b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/mod.rs
new file mode 100644
index 000000000..d1cba2ff7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join("Cargo.toml").is_file());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/lib.rs b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/lib.rs
new file mode 100644
index 000000000..321163744
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/lib.rs
@@ -0,0 +1 @@
+fn f() { println!("lib.rs"); }
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/src/lib.rs
new file mode 100644
index 000000000..f71455a1a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/src/lib.rs
@@ -0,0 +1 @@
+fn f() { println!("src/lib.rs"); }
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stderr.log b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stderr.log
new file mode 100644
index 000000000..8dbd2aaf0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stderr.log
@@ -0,0 +1 @@
+error: cannot have a package with multiple libraries, found both `src/lib.rs` and `lib.rs`
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stdout.log b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/case.rs b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/case.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/case.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/lib.rs b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/lib.rs
new file mode 100644
index 000000000..59760b549
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/lib.rs
@@ -0,0 +1 @@
+fn f() {}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/mod.rs b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/mod.rs
new file mode 100644
index 000000000..326bd218a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/mod.rs
@@ -0,0 +1,21 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --bin --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/Cargo.toml
new file mode 100644
index 000000000..675c888a5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+
+[[bin]]
+name = "case"
+path = "case.rs"
+
+[lib]
+name = "case"
+path = "lib.rs"
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/case.rs b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/case.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/case.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/lib.rs b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/lib.rs
new file mode 100644
index 000000000..59760b549
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/lib.rs
@@ -0,0 +1 @@
+fn f() {}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stderr.log b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stderr.log
new file mode 100644
index 000000000..3847e4e4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stderr.log
@@ -0,0 +1 @@
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stdout.log b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/in/case.rs b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/in/case.rs
new file mode 100644
index 000000000..59760b549
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/in/case.rs
@@ -0,0 +1 @@
+fn f() {}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/mod.rs b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/mod.rs
new file mode 100644
index 000000000..326bd218a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/mod.rs
@@ -0,0 +1,21 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --bin --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/Cargo.toml
new file mode 100644
index 000000000..8da5fe778
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+
+[[bin]]
+name = "case"
+path = "case.rs"
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/case.rs b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/case.rs
new file mode 100644
index 000000000..59760b549
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/case.rs
@@ -0,0 +1 @@
+fn f() {}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stderr.log b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stderr.log
new file mode 100644
index 000000000..ec428f31c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stderr.log
@@ -0,0 +1,2 @@
+warning: file `case.rs` seems to be a library file
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stdout.log b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/in/case.rs b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/in/case.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/in/case.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/mod.rs b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/mod.rs
new file mode 100644
index 000000000..59c192cb9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/mod.rs
@@ -0,0 +1,21 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/Cargo.toml
new file mode 100644
index 000000000..2c0464468
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+
+[lib]
+name = "case"
+path = "case.rs"
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/case.rs b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/case.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/case.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stderr.log b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stderr.log
new file mode 100644
index 000000000..bf070e2da
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stderr.log
@@ -0,0 +1,2 @@
+warning: file `case.rs` seems to be a binary (application) file
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stdout.log b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/empty_dir/.keep b/src/tools/cargo/tests/testsuite/init/empty_dir/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/empty_dir/.keep
diff --git a/src/tools/cargo/tests/testsuite/init/empty_dir/mod.rs b/src/tools/cargo/tests/testsuite/init/empty_dir/mod.rs
new file mode 100644
index 000000000..074954f01
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/empty_dir/mod.rs
@@ -0,0 +1,7 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::{command_is_available, paths, Project};
+use std::fs;
+use std::process::Command;
+
+use crate::test_root;
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/in b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/in
new file mode 120000
index 000000000..1202506b6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/in
@@ -0,0 +1 @@
+../empty_dir \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/mod.rs b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/mod.rs
new file mode 100644
index 000000000..7314e955c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/mod.rs
@@ -0,0 +1,21 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --vcs git --bin")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/src/main.rs b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/src/main.rs
new file mode 100644
index 000000000..e7a11a969
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stderr.log b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stderr.log
new file mode 100644
index 000000000..3847e4e4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stderr.log
@@ -0,0 +1 @@
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stdout.log b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/in/rustfmt.toml b/src/tools/cargo/tests/testsuite/init/formats_source/in/rustfmt.toml
new file mode 100644
index 000000000..b196eaa2d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/formats_source/in/rustfmt.toml
@@ -0,0 +1 @@
+tab_spaces = 2
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/mod.rs b/src/tools/cargo/tests/testsuite/init/formats_source/mod.rs
new file mode 100644
index 000000000..ac1fb6271
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/formats_source/mod.rs
@@ -0,0 +1,29 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::{process, Project};
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ // This cannot use `requires_rustfmt` because rustfmt is not available in
+ // the rust-lang/rust environment. Additionally, if running cargo without
+ // rustup (but with rustup installed), this test also fails due to HOME
+ // preventing the proxy from choosing a toolchain.
+ if let Err(e) = process("rustfmt").arg("-V").exec_with_output() {
+ eprintln!("skipping test, rustfmt not available:\n{e:?}");
+ return;
+ }
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+}
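
The guard at the top of this test probes for `rustfmt` at runtime rather than gating on a compile-time `cfg`, because availability depends on the environment the suite runs in. The same probe, pulled out into a standalone sketch (the helper name is hypothetical):

    // Sketch of the runtime probe used above; `rustfmt_is_available` is a
    // hypothetical name, not a helper that exists in cargo-test-support.
    use cargo_test_support::process;

    fn rustfmt_is_available() -> bool {
        match process("rustfmt").arg("-V").exec_with_output() {
            Ok(_) => true,
            Err(e) => {
                eprintln!("skipping test, rustfmt not available:\n{e:?}");
                false
            }
        }
    }
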
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/formats_source/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/formats_source/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/out/rustfmt.toml b/src/tools/cargo/tests/testsuite/init/formats_source/out/rustfmt.toml
new file mode 100644
index 000000000..b196eaa2d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/formats_source/out/rustfmt.toml
@@ -0,0 +1 @@
+tab_spaces = 2
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/formats_source/out/src/lib.rs
new file mode 100644
index 000000000..3b9acffd5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/formats_source/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/stderr.log b/src/tools/cargo/tests/testsuite/init/formats_source/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/formats_source/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/stdout.log b/src/tools/cargo/tests/testsuite/init/formats_source/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/formats_source/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/in/.fossil/.keep b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/in/.fossil/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/in/.fossil/.keep
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/mod.rs b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/mod.rs
new file mode 100644
index 000000000..d45ba868a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join(".git").is_dir());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/clean-glob b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/clean-glob
new file mode 100644
index 000000000..a9d37c560
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/clean-glob
@@ -0,0 +1,2 @@
+target
+Cargo.lock
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/ignore-glob b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/ignore-glob
new file mode 100644
index 000000000..a9d37c560
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/ignore-glob
@@ -0,0 +1,2 @@
+target
+Cargo.lock
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/stderr.log b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/stdout.log b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/fossil_autodetect/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/git_autodetect/mod.rs b/src/tools/cargo/tests/testsuite/init/git_autodetect/mod.rs
new file mode 100644
index 000000000..aef47bc7d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/git_autodetect/mod.rs
@@ -0,0 +1,24 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::paths;
+use cargo_test_support::prelude::*;
+use std::fs;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project_root = &paths::root().join("foo");
+ // Need to create `.git` dir manually because it cannot be tracked under a git repo
+ fs::create_dir_all(project_root.join(".git")).unwrap();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(project_root.join(".git").is_dir());
+}
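
Unlike the fossil and pijul cases, which ship their VCS marker as an `in/.fossil/.keep` or `in/.pijul/.keep` fixture, a `.git` directory cannot be committed inside this repository, so this test seeds it at runtime. A sketch of that setup on its own (the function name is hypothetical):

    // Sketch: create the project directory with a bare `.git` marker so
    // `cargo init` treats it as an existing git checkout.
    use cargo_test_support::paths;
    use std::fs;
    use std::path::PathBuf;

    fn seed_fake_git_checkout() -> PathBuf {
        let project_root = paths::root().join("foo");
        fs::create_dir_all(project_root.join(".git")).unwrap();
        project_root
    }
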
diff --git a/src/tools/cargo/tests/testsuite/init/git_autodetect/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/git_autodetect/out/Cargo.toml
new file mode 100644
index 000000000..1d9cfe317
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/git_autodetect/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "foo"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/git_autodetect/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/git_autodetect/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/git_autodetect/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/git_autodetect/stderr.log b/src/tools/cargo/tests/testsuite/init/git_autodetect/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/git_autodetect/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/git_autodetect/stdout.log b/src/tools/cargo/tests/testsuite/init/git_autodetect/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/git_autodetect/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/mod.rs b/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/mod.rs
new file mode 100644
index 000000000..cd4437c65
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib --edition 2015")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(project_root.join(".git").is_dir());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/Cargo.toml
new file mode 100644
index 000000000..a6269fdcd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2015"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stderr.log b/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stdout.log b/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/in/rustfmt.toml b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/in/rustfmt.toml
new file mode 100644
index 000000000..b196eaa2d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/in/rustfmt.toml
@@ -0,0 +1 @@
+tab_spaces = 2
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/mod.rs b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/mod.rs
new file mode 100644
index 000000000..fd9394049
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib --vcs none")
+ .env("PATH", "") // pretend that `rustfmt` is missing
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+}
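
Where `formats_source` skips when `rustfmt` is missing, this test forces it to be missing: clearing `PATH` means the formatter cannot be spawned, and the assertion is that `cargo init` still succeeds and leaves the default template in place (note the differing `out/src/lib.rs` hashes between the two cases). A minimal sketch of that trick as a reusable call (helper name hypothetical):

    // Sketch: run `cargo init` with an empty PATH so any attempt to spawn
    // `rustfmt` fails; the command itself must still succeed.
    use cargo_test_support::prelude::*;
    use std::path::Path;

    fn init_without_rustfmt(project_root: &Path) {
        snapbox::cmd::Command::cargo_ui()
            .arg_line("init --lib --vcs none")
            .env("PATH", "") // pretend that `rustfmt` is missing
            .current_dir(project_root)
            .assert()
            .success();
    }
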
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/rustfmt.toml b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/rustfmt.toml
new file mode 100644
index 000000000..b196eaa2d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/rustfmt.toml
@@ -0,0 +1 @@
+tab_spaces = 2
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stderr.log b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stdout.log b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/in/main.rs b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/in/main.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/in/main.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/mod.rs b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/mod.rs
new file mode 100644
index 000000000..80bec8893
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/mod.rs
@@ -0,0 +1,21 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --vcs git")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/Cargo.toml
new file mode 100644
index 000000000..5c6c9158c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+
+[[bin]]
+name = "case"
+path = "main.rs"
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/main.rs b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/main.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/main.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stderr.log b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stderr.log
new file mode 100644
index 000000000..3847e4e4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stderr.log
@@ -0,0 +1 @@
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stdout.log b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/in/lib.rs b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/in/lib.rs
new file mode 100644
index 000000000..59760b549
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/in/lib.rs
@@ -0,0 +1 @@
+fn f() {}
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/mod.rs b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/mod.rs
new file mode 100644
index 000000000..80bec8893
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/mod.rs
@@ -0,0 +1,21 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --vcs git")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/Cargo.toml
new file mode 100644
index 000000000..39e95fe94
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+
+[lib]
+name = "case"
+path = "lib.rs"
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/lib.rs b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/lib.rs
new file mode 100644
index 000000000..59760b549
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/lib.rs
@@ -0,0 +1 @@
+fn f() {}
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stderr.log b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stdout.log b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/invalid_dir_name/mod.rs b/src/tools/cargo/tests/testsuite/init/invalid_dir_name/mod.rs
new file mode 100644
index 000000000..2b1be9022
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/invalid_dir_name/mod.rs
@@ -0,0 +1,21 @@
+use cargo_test_support::paths;
+use cargo_test_support::prelude::*;
+use std::fs;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let foo = &paths::root().join("foo.bar");
+ fs::create_dir_all(foo).unwrap();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init")
+ .current_dir(foo)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert!(!foo.join("Cargo.toml").is_file());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/invalid_dir_name/stderr.log b/src/tools/cargo/tests/testsuite/init/invalid_dir_name/stderr.log
new file mode 100644
index 000000000..86d2c665f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/invalid_dir_name/stderr.log
@@ -0,0 +1,8 @@
+error: invalid character `.` in package name: `foo.bar`, characters must be Unicode XID characters (numbers, `-`, `_`, or most letters)
+If you need a package name to not match the directory name, consider using --name flag.
+If you need a binary with the name "foo.bar", use a valid package name, and set the binary name to be different from the package. This can be done by setting the binary filename to `src/bin/foo.bar.rs` or change the name in Cargo.toml with:
+
+ [[bin]]
+ name = "foo.bar"
+ path = "src/main.rs"
+
diff --git a/src/tools/cargo/tests/testsuite/init/invalid_dir_name/stdout.log b/src/tools/cargo/tests/testsuite/init/invalid_dir_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/invalid_dir_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/in/lib.rs b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/in/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/in/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/mod.rs b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/mod.rs
new file mode 100644
index 000000000..d3e8e66df
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join("src/main.rs").is_file());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/Cargo.toml
new file mode 100644
index 000000000..39e95fe94
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+
+[lib]
+name = "case"
+path = "lib.rs"
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/lib.rs b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stderr.log b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stdout.log b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/in/src/lib.rs b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/in/src/lib.rs
new file mode 100644
index 000000000..59760b549
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/in/src/lib.rs
@@ -0,0 +1 @@
+fn f() {}
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/mod.rs b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/mod.rs
new file mode 100644
index 000000000..d3e8e66df
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join("src/main.rs").is_file());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/src/lib.rs
new file mode 100644
index 000000000..59760b549
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/src/lib.rs
@@ -0,0 +1 @@
+fn f() {}
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stderr.log b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stdout.log b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/mod.rs b/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/mod.rs
new file mode 100644
index 000000000..d45ba868a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join(".git").is_dir());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stderr.log b/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stdout.log b/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/mod.rs b/src/tools/cargo/tests/testsuite/init/mod.rs
new file mode 100644
index 000000000..99df9d39d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/mod.rs
@@ -0,0 +1,42 @@
+//! Tests for the `cargo init` command.
+
+mod auto_git;
+mod bin_already_exists_explicit;
+mod bin_already_exists_explicit_nosrc;
+mod bin_already_exists_implicit;
+mod bin_already_exists_implicit_namenosrc;
+mod bin_already_exists_implicit_namesrc;
+mod bin_already_exists_implicit_nosrc;
+mod both_lib_and_bin;
+mod cant_create_library_when_both_binlib_present;
+mod confused_by_multiple_lib_files;
+mod creates_binary_when_both_binlib_present;
+mod creates_binary_when_instructed_and_has_lib_file;
+mod creates_library_when_instructed_and_has_bin_file;
+mod explicit_bin_with_git;
+mod formats_source;
+mod fossil_autodetect;
+mod git_autodetect;
+mod git_ignore_exists_no_conflicting_entries;
+mod ignores_failure_to_format_source;
+mod inferred_bin_with_git;
+mod inferred_lib_with_git;
+mod invalid_dir_name;
+mod lib_already_exists_nosrc;
+mod lib_already_exists_src;
+mod mercurial_autodetect;
+mod multibin_project_name_clash;
+#[cfg(not(windows))]
+mod no_filename;
+#[cfg(unix)]
+mod path_contains_separator;
+mod pijul_autodetect;
+mod reserved_name;
+mod simple_bin;
+mod simple_git;
+mod simple_git_ignore_exists;
+mod simple_hg;
+mod simple_hg_ignore_exists;
+mod simple_lib;
+mod unknown_flags;
+mod with_argument;
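
This `mod.rs` is the only glue the suite needs: every case is a directory alongside it containing the test body plus its fixtures and snapshots, wired in with a single `mod` line. A sketch of what adding a hypothetical new case would involve:

    // Sketch (hypothetical case name): one new module declaration plus a
    // matching directory under tests/testsuite/init/.
    //
    //   my_new_case/
    //   ├── mod.rs        #[cargo_test] fn case() following the pattern used above
    //   ├── in/           fixture copied into a scratch dir (may be a symlink to
    //   │                 ../empty_dir; cases that build their directory at runtime omit it)
    //   ├── out/          files expected to exist after `cargo init`
    //   ├── stdout.log    expected stdout
    //   └── stderr.log    expected stderr
    mod my_new_case;
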
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/case.rs b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/case.rs
new file mode 100644
index 000000000..b31221118
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/case.rs
@@ -0,0 +1 @@
+fn main() { println!("foo.rs"); }
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/main.rs b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/main.rs
new file mode 100644
index 000000000..7937627b9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/main.rs
@@ -0,0 +1 @@
+fn main() { println!("main.rs"); }
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/mod.rs b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/mod.rs
new file mode 100644
index 000000000..fdd4476d9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join("Cargo.toml").is_file());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/case.rs b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/case.rs
new file mode 100644
index 000000000..b31221118
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/case.rs
@@ -0,0 +1 @@
+fn main() { println!("foo.rs"); }
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/main.rs b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/main.rs
new file mode 100644
index 000000000..7937627b9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/main.rs
@@ -0,0 +1 @@
+fn main() { println!("main.rs"); }
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stderr.log b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stderr.log
new file mode 100644
index 000000000..21a1dabee
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stderr.log
@@ -0,0 +1,4 @@
+error: multiple possible binary sources found:
+ main.rs
+ case.rs
+cannot automatically generate Cargo.toml as the main target would be ambiguous
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stdout.log b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/no_filename/mod.rs b/src/tools/cargo/tests/testsuite/init/no_filename/mod.rs
new file mode 100644
index 000000000..8edfd2823
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/no_filename/mod.rs
@@ -0,0 +1,16 @@
+use cargo_test_support::paths;
+use cargo_test_support::prelude::*;
+
+use cargo_test_support::curr_dir;
+
+#[cfg(not(windows))]
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init /")
+ .current_dir(paths::root())
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/no_filename/stderr.log b/src/tools/cargo/tests/testsuite/init/no_filename/stderr.log
new file mode 100644
index 000000000..bd087ec90
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/no_filename/stderr.log
@@ -0,0 +1 @@
+error: cannot auto-detect package name from path "/" ; use --name to override
diff --git a/src/tools/cargo/tests/testsuite/init/no_filename/stdout.log b/src/tools/cargo/tests/testsuite/init/no_filename/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/no_filename/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/in/.keep b/src/tools/cargo/tests/testsuite/init/path_contains_separator/in/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/path_contains_separator/in/.keep
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/mod.rs b/src/tools/cargo/tests/testsuite/init/path_contains_separator/mod.rs
new file mode 100644
index 000000000..0a12f4269
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/path_contains_separator/mod.rs
@@ -0,0 +1,26 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::{t, Project};
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root().join("test:ing");
+
+ if !project_root.exists() {
+ t!(std::fs::create_dir(&project_root));
+ }
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --bin --vcs none --edition 2015 --name testing")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join(".gitignore").is_file());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/path_contains_separator/out/Cargo.toml
new file mode 100644
index 000000000..11465f1fc
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/path_contains_separator/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "testing"
+version = "0.1.0"
+edition = "2015"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/out/src/main.rs b/src/tools/cargo/tests/testsuite/init/path_contains_separator/out/src/main.rs
new file mode 100644
index 000000000..e7a11a969
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/path_contains_separator/out/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/stderr.log b/src/tools/cargo/tests/testsuite/init/path_contains_separator/stderr.log
new file mode 100644
index 000000000..d7947aea2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/path_contains_separator/stderr.log
@@ -0,0 +1,3 @@
+warning: the path `[ROOT]/case/test:ing/.` contains invalid PATH characters (usually `:`, `;`, or `"`)
+It is recommended to use a different name to avoid problems.
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/stdout.log b/src/tools/cargo/tests/testsuite/init/path_contains_separator/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/path_contains_separator/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/in/.pijul/.keep b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/in/.pijul/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/in/.pijul/.keep
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/mod.rs b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/mod.rs
new file mode 100644
index 000000000..d45ba868a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join(".git").is_dir());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/.ignore b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/.ignore
new file mode 100644
index 000000000..4fffb2f89
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/.ignore
@@ -0,0 +1,2 @@
+/target
+/Cargo.lock
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/stderr.log b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/stdout.log b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/pijul_autodetect/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/reserved_name/mod.rs b/src/tools/cargo/tests/testsuite/init/reserved_name/mod.rs
new file mode 100644
index 000000000..cc65fd0a1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/reserved_name/mod.rs
@@ -0,0 +1,21 @@
+use cargo_test_support::paths;
+use cargo_test_support::prelude::*;
+use std::fs;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project_root = &paths::root().join("test");
+ fs::create_dir_all(project_root).unwrap();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init")
+ .current_dir(project_root)
+ .assert()
+ .code(101)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert!(!project_root.join("Cargo.toml").is_file());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/reserved_name/stderr.log b/src/tools/cargo/tests/testsuite/init/reserved_name/stderr.log
new file mode 100644
index 000000000..748971bdf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/reserved_name/stderr.log
@@ -0,0 +1,8 @@
+error: the name `test` cannot be used as a package name, it conflicts with Rust's built-in test library
+If you need a package name to not match the directory name, consider using --name flag.
+If you need a binary with the name "test", use a valid package name, and set the binary name to be different from the package. This can be done by setting the binary filename to `src/bin/test.rs` or change the name in Cargo.toml with:
+
+ [[bin]]
+ name = "test"
+ path = "src/main.rs"
+
diff --git a/src/tools/cargo/tests/testsuite/init/reserved_name/stdout.log b/src/tools/cargo/tests/testsuite/init/reserved_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/reserved_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/in b/src/tools/cargo/tests/testsuite/init/simple_bin/in
new file mode 120000
index 000000000..1202506b6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_bin/in
@@ -0,0 +1 @@
+../empty_dir \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/mod.rs b/src/tools/cargo/tests/testsuite/init/simple_bin/mod.rs
new file mode 100644
index 000000000..eaf0955f9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_bin/mod.rs
@@ -0,0 +1,29 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --bin --vcs none --edition 2015")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join(".gitignore").is_file());
+
+ snapbox::cmd::Command::cargo_ui()
+ .current_dir(project_root)
+ .arg("build")
+ .assert()
+ .success();
+ assert!(project.bin("case").is_file());
+}
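
Beyond the snapshot comparison, `simple_bin` (and `simple_git_ignore_exists` below) also compiles the generated package as a smoke test. A sketch of that follow-up step as a standalone helper, assuming `Project::bin` resolves the compiled executable under the target directory (helper name hypothetical):

    // Sketch: after `cargo init`, build the package and check that the
    // expected binary was produced.
    use cargo_test_support::prelude::*;
    use cargo_test_support::Project;

    fn build_and_check_bin(project: &Project, name: &str) {
        snapbox::cmd::Command::cargo_ui()
            .current_dir(project.root())
            .arg("build")
            .assert()
            .success();
        assert!(project.bin(name).is_file());
    }
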
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/simple_bin/out/Cargo.toml
new file mode 100644
index 000000000..a6269fdcd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_bin/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2015"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/out/src/main.rs b/src/tools/cargo/tests/testsuite/init/simple_bin/out/src/main.rs
new file mode 100644
index 000000000..e7a11a969
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_bin/out/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/stderr.log b/src/tools/cargo/tests/testsuite/init/simple_bin/stderr.log
new file mode 100644
index 000000000..3847e4e4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_bin/stderr.log
@@ -0,0 +1 @@
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/stdout.log b/src/tools/cargo/tests/testsuite/init/simple_bin/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_bin/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/in b/src/tools/cargo/tests/testsuite/init/simple_git/in
new file mode 120000
index 000000000..1202506b6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_git/in
@@ -0,0 +1 @@
+../empty_dir \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/mod.rs b/src/tools/cargo/tests/testsuite/init/simple_git/mod.rs
new file mode 100644
index 000000000..c373fe2a2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_git/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib --vcs git")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(project_root.join(".git").is_dir());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/simple_git/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_git/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/simple_git/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_git/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/stderr.log b/src/tools/cargo/tests/testsuite/init/simple_git/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_git/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/stdout.log b/src/tools/cargo/tests/testsuite/init/simple_git/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/mod.rs b/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/mod.rs
new file mode 100644
index 000000000..142e86efd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/mod.rs
@@ -0,0 +1,28 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib --edition 2015")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(project_root.join(".git").is_dir());
+
+ snapbox::cmd::Command::cargo_ui()
+ .current_dir(project_root)
+ .arg("build")
+ .assert()
+ .success();
+}
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/Cargo.toml
new file mode 100644
index 000000000..a6269fdcd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2015"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stderr.log b/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stdout.log b/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/in b/src/tools/cargo/tests/testsuite/init/simple_hg/in
new file mode 120000
index 000000000..1202506b6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_hg/in
@@ -0,0 +1 @@
+../empty_dir
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/mod.rs b/src/tools/cargo/tests/testsuite/init/simple_hg/mod.rs
new file mode 100644
index 000000000..1d6765453
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_hg/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test(requires_hg)]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib --vcs hg")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join(".git").is_dir());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/simple_hg/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_hg/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/simple_hg/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_hg/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/stderr.log b/src/tools/cargo/tests/testsuite/init/simple_hg/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_hg/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/stdout.log b/src/tools/cargo/tests/testsuite/init/simple_hg/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_hg/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/mod.rs b/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/mod.rs
new file mode 100644
index 000000000..d45ba868a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join(".git").is_dir());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/Cargo.toml
new file mode 100644
index 000000000..dcdb8da2c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stderr.log b/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stdout.log b/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/in b/src/tools/cargo/tests/testsuite/init/simple_lib/in
new file mode 120000
index 000000000..1202506b6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_lib/in
@@ -0,0 +1 @@
+../empty_dir
\ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/mod.rs b/src/tools/cargo/tests/testsuite/init/simple_lib/mod.rs
new file mode 100644
index 000000000..d6bae5167
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_lib/mod.rs
@@ -0,0 +1,29 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init --lib --vcs none --edition 2015")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+ assert!(!project_root.join(".gitignore").is_file());
+
+ snapbox::cmd::Command::cargo_ui()
+ .current_dir(project_root)
+ .arg("build")
+ .assert()
+ .success();
+ assert!(!project.bin("foo").is_file());
+}
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/out/Cargo.toml b/src/tools/cargo/tests/testsuite/init/simple_lib/out/Cargo.toml
new file mode 100644
index 000000000..a6269fdcd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_lib/out/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "case"
+version = "0.1.0"
+edition = "2015"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/out/src/lib.rs b/src/tools/cargo/tests/testsuite/init/simple_lib/out/src/lib.rs
new file mode 100644
index 000000000..7d12d9af8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_lib/out/src/lib.rs
@@ -0,0 +1,14 @@
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/stderr.log b/src/tools/cargo/tests/testsuite/init/simple_lib/stderr.log
new file mode 100644
index 000000000..f459bf226
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_lib/stderr.log
@@ -0,0 +1 @@
+ Created library package
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/stdout.log b/src/tools/cargo/tests/testsuite/init/simple_lib/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/simple_lib/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/unknown_flags/mod.rs b/src/tools/cargo/tests/testsuite/init/unknown_flags/mod.rs
new file mode 100644
index 000000000..4289b4b9e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/unknown_flags/mod.rs
@@ -0,0 +1,15 @@
+use cargo_test_support::paths;
+use cargo_test_support::prelude::*;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init foo --flag")
+ .current_dir(paths::root())
+ .assert()
+ .code(1)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/unknown_flags/stderr.log b/src/tools/cargo/tests/testsuite/init/unknown_flags/stderr.log
new file mode 100644
index 000000000..980e8acd8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/unknown_flags/stderr.log
@@ -0,0 +1,7 @@
+error: unexpected argument '--flag' found
+
+ tip: to pass '--flag' as a value, use '-- --flag'
+
+Usage: cargo[EXE] init <path>
+
+For more information, try '--help'.
diff --git a/src/tools/cargo/tests/testsuite/init/unknown_flags/stdout.log b/src/tools/cargo/tests/testsuite/init/unknown_flags/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/unknown_flags/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/in/foo/.keep b/src/tools/cargo/tests/testsuite/init/with_argument/in/foo/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/with_argument/in/foo/.keep
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/mod.rs b/src/tools/cargo/tests/testsuite/init/with_argument/mod.rs
new file mode 100644
index 000000000..0b5e342a1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/with_argument/mod.rs
@@ -0,0 +1,21 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = &project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg_line("init foo --vcs none")
+ .current_dir(project_root)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/out/foo/Cargo.toml b/src/tools/cargo/tests/testsuite/init/with_argument/out/foo/Cargo.toml
new file mode 100644
index 000000000..1d9cfe317
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/with_argument/out/foo/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "foo"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/out/foo/src/main.rs b/src/tools/cargo/tests/testsuite/init/with_argument/out/foo/src/main.rs
new file mode 100644
index 000000000..e7a11a969
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/with_argument/out/foo/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/stderr.log b/src/tools/cargo/tests/testsuite/init/with_argument/stderr.log
new file mode 100644
index 000000000..3847e4e4a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/with_argument/stderr.log
@@ -0,0 +1 @@
+ Created binary (application) package
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/stdout.log b/src/tools/cargo/tests/testsuite/init/with_argument/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/init/with_argument/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/install.rs b/src/tools/cargo/tests/testsuite/install.rs
new file mode 100644
index 000000000..dd9844f17
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/install.rs
@@ -0,0 +1,2289 @@
+//! Tests for the `cargo install` command.
+
+use std::fs::{self, OpenOptions};
+use std::io::prelude::*;
+use std::path::Path;
+
+use cargo_test_support::compare;
+use cargo_test_support::cross_compile;
+use cargo_test_support::git;
+use cargo_test_support::registry::{self, registry_path, Package};
+use cargo_test_support::{
+ basic_manifest, cargo_process, no_such_file_err_msg, project, project_in, symlink_supported, t,
+};
+use cargo_util::ProcessError;
+
+use cargo_test_support::install::{
+ assert_has_installed_exe, assert_has_not_installed_exe, cargo_home,
+};
+use cargo_test_support::paths::{self, CargoPathExt};
+use std::env;
+use std::path::PathBuf;
+
+fn pkg(name: &str, vers: &str) {
+ Package::new(name, vers)
+ .file("src/lib.rs", "")
+ .file(
+ "src/main.rs",
+ &format!("extern crate {}; fn main() {{}}", name),
+ )
+ .publish();
+}
+
+#[cargo_test]
+fn simple() {
+ pkg("foo", "0.0.1");
+
+ cargo_process("install foo")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.0.1 (registry [..])
+[INSTALLING] foo v0.0.1
+[COMPILING] foo v0.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+
+ cargo_process("uninstall foo")
+ .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]")
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn simple_with_message_format() {
+ pkg("foo", "0.0.1");
+
+ cargo_process("install foo --message-format=json")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.0.1 (registry [..])
+[INSTALLING] foo v0.0.1
+[COMPILING] foo v0.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .with_json(
+ r#"
+ {
+ "reason": "compiler-artifact",
+ "package_id": "foo 0.0.1 ([..])",
+ "manifest_path": "[..]",
+ "target": {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "foo",
+ "src_path": "[..]/foo-0.0.1/src/lib.rs",
+ "edition": "2015",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ },
+ "profile": "{...}",
+ "features": [],
+ "filenames": "{...}",
+ "executable": null,
+ "fresh": false
+ }
+
+ {
+ "reason": "compiler-artifact",
+ "package_id": "foo 0.0.1 ([..])",
+ "manifest_path": "[..]",
+ "target": {
+ "kind": [
+ "bin"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "name": "foo",
+ "src_path": "[..]/foo-0.0.1/src/main.rs",
+ "edition": "2015",
+ "doc": true,
+ "doctest": false,
+ "test": true
+ },
+ "profile": "{...}",
+ "features": [],
+ "filenames": "{...}",
+ "executable": "[..]",
+ "fresh": false
+ }
+
+ {"reason":"build-finished","success":true}
+ "#,
+ )
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn with_index() {
+ let registry = registry::init();
+ pkg("foo", "0.0.1");
+
+ cargo_process("install foo --index")
+ .arg(registry.index_url().as_str())
+ .with_stderr(&format!(
+ "\
+[UPDATING] `{reg}` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.0.1 (registry `{reg}`)
+[INSTALLING] foo v0.0.1 (registry `{reg}`)
+[COMPILING] foo v0.0.1 (registry `{reg}`)
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v0.0.1 (registry `{reg}`)` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ reg = registry_path().to_str().unwrap()
+ ))
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+
+ cargo_process("uninstall foo")
+ .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]")
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn multiple_pkgs() {
+ pkg("foo", "0.0.1");
+ pkg("bar", "0.0.2");
+
+ cargo_process("install foo bar baz")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.0.1 (registry `dummy-registry`)
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.2 (registry `dummy-registry`)
+[ERROR] could not find `baz` in registry `[..]` with version `*`
+[INSTALLING] foo v0.0.1
+[COMPILING] foo v0.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`)
+[INSTALLING] bar v0.0.2
+[COMPILING] bar v0.0.2
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/bar[EXE]
+[INSTALLED] package `bar v0.0.2` (executable `bar[EXE]`)
+[SUMMARY] Successfully installed foo, bar! Failed to install baz (see error(s) above).
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+[ERROR] some crates failed to install
+",
+ )
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ assert_has_installed_exe(cargo_home(), "bar");
+
+ cargo_process("uninstall foo bar")
+ .with_stderr(
+ "\
+[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]
+[REMOVING] [CWD]/home/.cargo/bin/bar[EXE]
+[SUMMARY] Successfully uninstalled foo, bar!
+",
+ )
+ .run();
+
+ assert_has_not_installed_exe(cargo_home(), "foo");
+ assert_has_not_installed_exe(cargo_home(), "bar");
+}
+
+fn path() -> Vec<PathBuf> {
+ env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect()
+}
+
+#[cargo_test]
+fn multiple_pkgs_path_set() {
+ // confirm that a partial failure exits with status code 101 and does not emit the
+ // '[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries'
+ // message even when CARGO_HOME/bin is already in the PATH
+ pkg("foo", "0.0.1");
+ pkg("bar", "0.0.2");
+
+ // add CARGO_HOME/bin to path
+ let mut path = path();
+ path.push(cargo_home().join("bin"));
+ let new_path = env::join_paths(path).unwrap();
+ cargo_process("install foo bar baz")
+ .env("PATH", new_path)
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.0.1 (registry `dummy-registry`)
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.2 (registry `dummy-registry`)
+[ERROR] could not find `baz` in registry `[..]` with version `*`
+[INSTALLING] foo v0.0.1
+[COMPILING] foo v0.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`)
+[INSTALLING] bar v0.0.2
+[COMPILING] bar v0.0.2
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/bar[EXE]
+[INSTALLED] package `bar v0.0.2` (executable `bar[EXE]`)
+[SUMMARY] Successfully installed foo, bar! Failed to install baz (see error(s) above).
+[ERROR] some crates failed to install
+",
+ )
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ assert_has_installed_exe(cargo_home(), "bar");
+
+ cargo_process("uninstall foo bar")
+ .with_stderr(
+ "\
+[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]
+[REMOVING] [CWD]/home/.cargo/bin/bar[EXE]
+[SUMMARY] Successfully uninstalled foo, bar!
+",
+ )
+ .run();
+
+ assert_has_not_installed_exe(cargo_home(), "foo");
+ assert_has_not_installed_exe(cargo_home(), "bar");
+}
+
+#[cargo_test]
+fn pick_max_version() {
+ pkg("foo", "0.1.0");
+ pkg("foo", "0.2.0");
+ pkg("foo", "0.2.1");
+ pkg("foo", "0.2.1-pre.1");
+ pkg("foo", "0.3.0-pre.2");
+
+ cargo_process("install foo")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.2.1 (registry [..])
+[INSTALLING] foo v0.2.1
+[COMPILING] foo v0.2.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v0.2.1` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn installs_beta_version_by_explicit_name_from_git() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file("Cargo.toml", &basic_manifest("foo", "0.3.0-beta.1"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --git")
+ .arg(p.url().to_string())
+ .arg("foo")
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn missing() {
+ pkg("foo", "0.0.1");
+ cargo_process("install bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[ERROR] could not find `bar` in registry `[..]` with version `*`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn missing_current_working_directory() {
+ cargo_process("install .")
+ .with_status(101)
+ .with_stderr(
+ "error: To install the binaries for the package in current working \
+ directory use `cargo install --path .`. \n\
+ Use `cargo build` if you want to simply build the package.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_version() {
+ pkg("foo", "0.0.1");
+ cargo_process("install foo --version=0.2.0")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[ERROR] could not find `foo` in registry `[..]` with version `=0.2.0`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_paths() {
+ cargo_process("install")
+ .with_status(101)
+ .with_stderr("[ERROR] `[CWD]` is not a crate root; specify a crate to install [..]")
+ .run();
+
+ cargo_process("install --path .")
+ .with_status(101)
+ .with_stderr("[ERROR] `[CWD]` does not contain a Cargo.toml file[..]")
+ .run();
+
+ let toml = paths::root().join("Cargo.toml");
+ fs::write(toml, "").unwrap();
+ cargo_process("install --path Cargo.toml")
+ .with_status(101)
+ .with_stderr("[ERROR] `[CWD]/Cargo.toml` is not a directory[..]")
+ .run();
+
+ cargo_process("install --path .")
+ .with_status(101)
+ .with_stderr_contains("[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`")
+ .run();
+}
+
+#[cargo_test]
+fn install_location_precedence() {
+ pkg("foo", "0.0.1");
+
+ let root = paths::root();
+ let t1 = root.join("t1");
+ let t2 = root.join("t2");
+ let t3 = root.join("t3");
+ let t4 = cargo_home();
+
+ fs::create_dir(root.join(".cargo")).unwrap();
+ fs::write(
+ root.join(".cargo/config"),
+ &format!(
+ "[install]
+ root = '{}'
+ ",
+ t3.display()
+ ),
+ )
+ .unwrap();
+
+ println!("install --root");
+
+ cargo_process("install foo --root")
+ .arg(&t1)
+ .env("CARGO_INSTALL_ROOT", &t2)
+ .run();
+ assert_has_installed_exe(&t1, "foo");
+ assert_has_not_installed_exe(&t2, "foo");
+
+ println!("install CARGO_INSTALL_ROOT");
+
+ cargo_process("install foo")
+ .env("CARGO_INSTALL_ROOT", &t2)
+ .run();
+ assert_has_installed_exe(&t2, "foo");
+ assert_has_not_installed_exe(&t3, "foo");
+
+ println!("install install.root");
+
+ cargo_process("install foo").run();
+ assert_has_installed_exe(&t3, "foo");
+ assert_has_not_installed_exe(&t4, "foo");
+
+ fs::remove_file(root.join(".cargo/config")).unwrap();
+
+ println!("install cargo home");
+
+ cargo_process("install foo").run();
+ assert_has_installed_exe(&t4, "foo");
+}
+
+#[cargo_test]
+fn install_path() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ cargo_process("install --path").arg(p.root()).run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ // path-style installs force a reinstall
+ p.cargo("install --path .")
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.0.1 [..]
+[FINISHED] release [..]
+[REPLACING] [..]/.cargo/bin/foo[EXE]
+[REPLACED] package `foo v0.0.1 [..]` with `foo v0.0.1 [..]` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn install_target_dir() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ p.cargo("install --target-dir td_test")
+ .with_stderr(
+ "\
+[WARNING] Using `cargo install` [..]
+[INSTALLING] foo v0.0.1 [..]
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] release [..]
+[INSTALLING] [..]foo[EXE]
+[INSTALLED] package `foo v0.0.1 [..]foo[..]` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+
+ let mut path = p.root();
+ path.push("td_test");
+ assert!(path.exists());
+
+ #[cfg(not(windows))]
+ path.push("release/foo");
+ #[cfg(windows)]
+ path.push("release/foo.exe");
+ assert!(path.exists());
+}
+
+#[cargo_test]
+#[cfg(target_os = "linux")]
+fn install_path_with_lowercase_cargo_toml() {
+ let toml = paths::root().join("cargo.toml");
+ fs::write(toml, "").unwrap();
+
+ cargo_process("install --path .")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] `[CWD]` does not contain a Cargo.toml file, \
+but found cargo.toml please try to rename it to Cargo.toml. --path must point to a directory containing a Cargo.toml file.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn install_relative_path_outside_current_ws() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["baz"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+ edition = "2021"
+
+ [dependencies]
+ foo = "1"
+ "#,
+ )
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ let _bin_project = project_in("bar")
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("install --path ../bar/foo")
+ .with_stderr(&format!(
+ "\
+[INSTALLING] foo v0.0.1 ([..]/bar/foo)
+[COMPILING] foo v0.0.1 ([..]/bar/foo)
+[FINISHED] release [..]
+[INSTALLING] {home}/bin/foo[EXE]
+[INSTALLED] package `foo v0.0.1 ([..]/bar/foo)` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+ home = cargo_home().display(),
+ ))
+ .run();
+
+ // Validate that the workspace error message lists the available binary targets.
+ p.cargo("install --path ../bar/foo --bin")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] \"--bin\" takes one argument.
+Available binaries:
+ foo
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn multiple_packages_containing_binaries() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("a/src/main.rs", "fn main() {}")
+ .build();
+
+ let git_url = p.url().to_string();
+ cargo_process("install --git")
+ .arg(p.url().to_string())
+ .with_status(101)
+ .with_stderr(format!(
+ "\
+[UPDATING] git repository [..]
+[ERROR] multiple packages with binaries found: bar, foo. \
+When installing a git repository, cargo will always search the entire repo for any Cargo.toml.
+Please specify a package, e.g. `cargo install --git {git_url} bar`.
+"
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn multiple_packages_matching_example() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/lib.rs", "")
+ .file("examples/ex1.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .file("bar/examples/ex1.rs", "fn main() {}")
+ .build();
+
+ let git_url = p.url().to_string();
+ cargo_process("install --example ex1 --git")
+ .arg(p.url().to_string())
+ .with_status(101)
+ .with_stderr(format!(
+ "\
+[UPDATING] git repository [..]
+[ERROR] multiple packages with examples found: bar, foo. \
+When installing a git repository, cargo will always search the entire repo for any Cargo.toml.
+Please specify a package, e.g. `cargo install --git {git_url} bar`."
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn multiple_binaries_deep_select_uses_package_name() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("bar/baz/src/main.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --git")
+ .arg(p.url().to_string())
+ .arg("baz")
+ .run();
+}
+
+#[cargo_test]
+fn multiple_binaries_in_selected_package_installs_all() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/bin/bin1.rs", "fn main() {}")
+ .file("bar/src/bin/bin2.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --git")
+ .arg(p.url().to_string())
+ .arg("bar")
+ .run();
+
+ let cargo_home = cargo_home();
+ assert_has_installed_exe(&cargo_home, "bin1");
+ assert_has_installed_exe(&cargo_home, "bin2");
+}
+
+#[cargo_test]
+fn multiple_binaries_in_selected_package_with_bin_option_installs_only_one() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/bin/bin1.rs", "fn main() {}")
+ .file("bar/src/bin/bin2.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --bin bin1 --git")
+ .arg(p.url().to_string())
+ .arg("bar")
+ .run();
+
+ let cargo_home = cargo_home();
+ assert_has_installed_exe(&cargo_home, "bin1");
+ assert_has_not_installed_exe(&cargo_home, "bin2");
+}
+
+#[cargo_test]
+fn multiple_crates_select() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("a/src/main.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --git")
+ .arg(p.url().to_string())
+ .arg("foo")
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ assert_has_not_installed_exe(cargo_home(), "bar");
+
+ cargo_process("install --git")
+ .arg(p.url().to_string())
+ .arg("bar")
+ .run();
+ assert_has_installed_exe(cargo_home(), "bar");
+}
+
+#[cargo_test]
+fn multiple_crates_git_all() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bin1", "bin2"]
+ "#,
+ )
+ .file("bin1/Cargo.toml", &basic_manifest("bin1", "0.1.0"))
+ .file("bin2/Cargo.toml", &basic_manifest("bin2", "0.1.0"))
+ .file(
+ "bin1/src/main.rs",
+ r#"fn main() { println!("Hello, world!"); }"#,
+ )
+ .file(
+ "bin2/src/main.rs",
+ r#"fn main() { println!("Hello, world!"); }"#,
+ )
+ .build();
+
+ cargo_process(&format!("install --git {} bin1 bin2", p.url().to_string())).run();
+}
+
+#[cargo_test]
+fn multiple_crates_auto_binaries() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "a" }
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() {}")
+ .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ cargo_process("install --path").arg(p.root()).run();
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn multiple_crates_auto_examples() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;")
+ .file(
+ "examples/foo.rs",
+ "
+ extern crate bar;
+ extern crate foo;
+ fn main() {}
+ ",
+ )
+ .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ cargo_process("install --path")
+ .arg(p.root())
+ .arg("--example=foo")
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn no_binaries_or_examples() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ cargo_process("install --path")
+ .arg(p.root())
+ .with_status(101)
+ .with_stderr("[ERROR] no packages found with binaries or examples")
+ .run();
+}
+
+#[cargo_test]
+fn no_binaries() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("examples/foo.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --path")
+ .arg(p.root())
+ .arg("foo")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] there is nothing to install in `foo v0.0.1 ([..])`, because it has no binaries[..]
+[..]
+To use a library crate, add it as a dependency to a Cargo project with `cargo add`.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn examples() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("examples/foo.rs", "extern crate foo; fn main() {}")
+ .build();
+
+ cargo_process("install --path")
+ .arg(p.root())
+ .arg("--example=foo")
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn install_force() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ cargo_process("install --path").arg(p.root()).run();
+
+ let p = project()
+ .at("foo2")
+ .file("Cargo.toml", &basic_manifest("foo", "0.2.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --force --path")
+ .arg(p.root())
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.2.0 ([..])
+[COMPILING] foo v0.2.0 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[REPLACING] [CWD]/home/.cargo/bin/foo[EXE]
+[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+
+ cargo_process("install --list")
+ .with_stdout(
+ "\
+foo v0.2.0 ([..]):
+ foo[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn install_force_partial_overlap() {
+ let p = project()
+ .file("src/bin/foo-bin1.rs", "fn main() {}")
+ .file("src/bin/foo-bin2.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --path").arg(p.root()).run();
+
+ let p = project()
+ .at("foo2")
+ .file("Cargo.toml", &basic_manifest("foo", "0.2.0"))
+ .file("src/bin/foo-bin2.rs", "fn main() {}")
+ .file("src/bin/foo-bin3.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --force --path")
+ .arg(p.root())
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.2.0 ([..])
+[COMPILING] foo v0.2.0 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo-bin3[EXE]
+[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE]
+[REMOVING] executable `[..]/bin/foo-bin1[EXE]` from previous version foo v0.0.1 [..]
+[INSTALLED] package `foo v0.2.0 ([..]/foo2)` (executable `foo-bin3[EXE]`)
+[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo-bin2[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+
+ cargo_process("install --list")
+ .with_stdout(
+ "\
+foo v0.2.0 ([..]):
+ foo-bin2[..]
+ foo-bin3[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn install_force_bin() {
+ let p = project()
+ .file("src/bin/foo-bin1.rs", "fn main() {}")
+ .file("src/bin/foo-bin2.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --path").arg(p.root()).run();
+
+ let p = project()
+ .at("foo2")
+ .file("Cargo.toml", &basic_manifest("foo", "0.2.0"))
+ .file("src/bin/foo-bin1.rs", "fn main() {}")
+ .file("src/bin/foo-bin2.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --force --bin foo-bin2 --path")
+ .arg(p.root())
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.2.0 ([..])
+[COMPILING] foo v0.2.0 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE]
+[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo-bin2[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+
+ cargo_process("install --list")
+ .with_stdout(
+ "\
+foo v0.0.1 ([..]):
+ foo-bin1[..]
+foo v0.2.0 ([..]):
+ foo-bin2[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn compile_failure() {
+ let p = project().file("src/main.rs", "").build();
+
+ cargo_process("install --path")
+ .arg(p.root())
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[ERROR] could not compile `foo` (bin \"foo\") due to previous error
+[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be \
+ found at `[..]target`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn git_repo() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ // Use `--locked` to test that we don't even try to write a lock file.
+ cargo_process("install --locked --git")
+ .arg(p.url().to_string())
+ .with_stderr(
+ "\
+[UPDATING] git repository `[..]`
+[WARNING] no Cargo.lock file published in foo v0.1.0 ([..])
+[INSTALLING] foo v0.1.0 ([..])
+[COMPILING] foo v0.1.0 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v0.1.0 ([..]/foo#[..])` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+#[cfg(target_os = "linux")]
+fn git_repo_with_lowercase_cargo_toml() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file("cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --git")
+ .arg(p.url().to_string())
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] git repository [..]
+[ERROR] Could not find Cargo.toml in `[..]`, but found cargo.toml please try to rename it to Cargo.toml
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn list() {
+ pkg("foo", "0.0.1");
+ pkg("bar", "0.2.1");
+ pkg("bar", "0.2.2");
+
+ cargo_process("install --list").with_stdout("").run();
+
+ cargo_process("install bar --version =0.2.1").run();
+ cargo_process("install foo").run();
+ cargo_process("install --list")
+ .with_stdout(
+ "\
+bar v0.2.1:
+ bar[..]
+foo v0.0.1:
+ foo[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn list_error() {
+ pkg("foo", "0.0.1");
+ cargo_process("install foo").run();
+ cargo_process("install --list")
+ .with_stdout(
+ "\
+foo v0.0.1:
+ foo[..]
+",
+ )
+ .run();
+ let mut worldfile_path = cargo_home();
+ worldfile_path.push(".crates.toml");
+ let mut worldfile = OpenOptions::new()
+ .write(true)
+ .open(worldfile_path)
+ .expect(".crates.toml should be there");
+ worldfile.write_all(b"\x00").unwrap();
+ drop(worldfile);
+ cargo_process("install --list --verbose")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse crate metadata at `[..]`
+
+Caused by:
+ invalid TOML found for metadata
+
+Caused by:
+ TOML parse error at line 1, column 1
+ |
+ 1 | [..]
+ | ^
+ invalid key
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn uninstall_pkg_does_not_exist() {
+ cargo_process("uninstall foo")
+ .with_status(101)
+ .with_stderr("[ERROR] package ID specification `foo` did not match any packages")
+ .run();
+}
+
+#[cargo_test]
+fn uninstall_bin_does_not_exist() {
+ pkg("foo", "0.0.1");
+
+ cargo_process("install foo").run();
+ cargo_process("uninstall foo --bin=bar")
+ .with_status(101)
+ .with_stderr("[ERROR] binary `bar[..]` not installed as part of `foo v0.0.1`")
+ .run();
+}
+
+#[cargo_test]
+fn uninstall_piecemeal() {
+ let p = project()
+ .file("src/bin/foo.rs", "fn main() {}")
+ .file("src/bin/bar.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --path").arg(p.root()).run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ assert_has_installed_exe(cargo_home(), "bar");
+
+ cargo_process("uninstall foo --bin=bar")
+ .with_stderr("[REMOVING] [..]bar[..]")
+ .run();
+
+ assert_has_installed_exe(cargo_home(), "foo");
+ assert_has_not_installed_exe(cargo_home(), "bar");
+
+ cargo_process("uninstall foo --bin=foo")
+ .with_stderr("[REMOVING] [..]foo[..]")
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "foo");
+
+ cargo_process("uninstall foo")
+ .with_status(101)
+ .with_stderr("[ERROR] package ID specification `foo` did not match any packages")
+ .run();
+}
+
+#[cargo_test]
+fn subcommand_works_out_of_the_box() {
+ Package::new("cargo-foo", "1.0.0")
+ .file("src/main.rs", r#"fn main() { println!("bar"); }"#)
+ .publish();
+ cargo_process("install cargo-foo").run();
+ cargo_process("foo").with_stdout("bar\n").run();
+ cargo_process("--list")
+ .with_stdout_contains(" foo\n")
+ .run();
+}
+
+#[cargo_test]
+fn installs_from_cwd_by_default() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ p.cargo("install")
+ .with_stderr_contains(
+ "warning: Using `cargo install` to install the binaries for the \
+ package in current working directory is deprecated, \
+ use `cargo install --path .` instead. \
+ Use `cargo build` if you want to simply build the package.",
+ )
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn installs_from_cwd_with_2018_warnings() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ edition = "2018"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("install")
+ .with_status(101)
+ .with_stderr_contains(
+ "error: Using `cargo install` to install the binaries for the \
+ package in current working directory is no longer supported, \
+ use `cargo install --path .` instead. \
+ Use `cargo build` if you want to simply build the package.",
+ )
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn uninstall_cwd() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("install --path .")
+ .with_stderr(&format!(
+ "\
+[INSTALLING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] {home}/bin/foo[EXE]
+[INSTALLED] package `foo v0.0.1 ([..]/foo)` (executable `foo[EXE]`)
+[WARNING] be sure to add `{home}/bin` to your PATH to be able to run the installed binaries",
+ home = cargo_home().display(),
+ ))
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+
+ p.cargo("uninstall")
+ .with_stdout("")
+ .with_stderr(&format!(
+ "[REMOVING] {home}/bin/foo[EXE]",
+ home = cargo_home().display()
+ ))
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn uninstall_cwd_not_installed() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("uninstall")
+ .with_status(101)
+ .with_stdout("")
+ .with_stderr("error: package `foo v0.0.1 ([CWD])` is not installed")
+ .run();
+}
+
+#[cargo_test]
+fn uninstall_cwd_no_project() {
+ cargo_process("uninstall")
+ .with_status(101)
+ .with_stdout("")
+ .with_stderr(format!(
+ "\
+[ERROR] failed to read `[CWD]/Cargo.toml`
+
+Caused by:
+ {err_msg}",
+ err_msg = no_such_file_err_msg(),
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn do_not_rebuilds_on_local_install() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ p.cargo("build --release").run();
+ cargo_process("install --path")
+ .arg(p.root())
+ .with_stderr(
+ "\
+[INSTALLING] [..]
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]
+[INSTALLED] package `foo v0.0.1 ([..]/foo)` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+
+ assert!(p.build_dir().exists());
+ assert!(p.release_bin("foo").exists());
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn reports_unsuccessful_subcommand_result() {
+ Package::new("cargo-fail", "1.0.0")
+ .file("src/main.rs", "fn main() { panic!(); }")
+ .publish();
+ cargo_process("install cargo-fail").run();
+ cargo_process("--list")
+ .with_stdout_contains(" fail\n")
+ .run();
+ cargo_process("fail")
+ .with_status(101)
+ .with_stderr_contains("thread '[..]' panicked at 'explicit panic', [..]")
+ .run();
+}
+
+#[cargo_test]
+fn git_with_lockfile() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "fn main() {}")
+ .file(
+ "Cargo.lock",
+ r#"
+ [[package]]
+ name = "foo"
+ version = "0.1.0"
+ dependencies = [ "bar 0.1.0" ]
+
+ [[package]]
+ name = "bar"
+ version = "0.1.0"
+ "#,
+ )
+ .build();
+
+ cargo_process("install --git")
+ .arg(p.url().to_string())
+ .run();
+}
+
+#[cargo_test]
+fn q_silences_warnings() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ cargo_process("install -q --path")
+ .arg(p.root())
+ .with_stderr("")
+ .run();
+}
+
+#[cargo_test]
+fn readonly_dir() {
+ pkg("foo", "0.0.1");
+
+ let root = paths::root();
+ let dir = &root.join("readonly");
+ fs::create_dir(root.join("readonly")).unwrap();
+ let mut perms = fs::metadata(dir).unwrap().permissions();
+ perms.set_readonly(true);
+ fs::set_permissions(dir, perms).unwrap();
+
+ cargo_process("install foo").cwd(dir).run();
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn use_path_workspace() {
+ Package::new("foo", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["baz"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = "1"
+ "#,
+ )
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ let lock = p.read_lockfile();
+ p.cargo("install").run();
+ let lock2 = p.read_lockfile();
+ assert_eq!(lock, lock2, "different lockfiles");
+}
+
+#[cargo_test]
+fn path_install_workspace_root_despite_default_members() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "ws-root"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["ws-member"]
+ default-members = ["ws-member"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "ws-member/Cargo.toml",
+ r#"
+ [package]
+ name = "ws-member"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file("ws-member/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("install --path")
+ .arg(p.root())
+ .arg("ws-root")
+ .with_stderr_contains(
+ "[INSTALLED] package `ws-root v0.1.0 ([..])` (executable `ws-root[EXE]`)",
+ )
+ // Particularly avoid "Installed package `ws-root v0.1.0 ([..])` (executable `ws-member`)":
+ .with_stderr_does_not_contain("ws-member")
+ .run();
+}
+
+#[cargo_test]
+fn dev_dependencies_no_check() {
+ Package::new("foo", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dev-dependencies]
+ baz = "1.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains("[..] no matching package named `baz` found")
+ .run();
+ p.cargo("install").run();
+}
+
+#[cargo_test]
+fn dev_dependencies_lock_file_untouched() {
+ Package::new("foo", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dev-dependencies]
+ bar = { path = "a" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ let lock = p.read_lockfile();
+ p.cargo("install").run();
+ let lock2 = p.read_lockfile();
+ assert!(lock == lock2, "different lockfiles");
+}
+
+#[cargo_test]
+fn install_target_native() {
+ pkg("foo", "0.1.0");
+
+ cargo_process("install foo --target")
+ .arg(cargo_test_support::rustc_host())
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn install_target_foreign() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ pkg("foo", "0.1.0");
+
+ cargo_process("install foo --target")
+ .arg(cross_compile::alternate())
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn vers_precise() {
+ pkg("foo", "0.1.1");
+ pkg("foo", "0.1.2");
+
+ cargo_process("install foo --vers 0.1.1")
+ .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
+ .run();
+}
+
+#[cargo_test]
+fn version_precise() {
+ pkg("foo", "0.1.1");
+ pkg("foo", "0.1.2");
+
+ cargo_process("install foo --version 0.1.1")
+ .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
+ .run();
+}
+
+#[cargo_test]
+fn inline_version_precise() {
+ pkg("foo", "0.1.1");
+ pkg("foo", "0.1.2");
+
+ cargo_process("install foo@0.1.1")
+ .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
+ .run();
+}
+
+#[cargo_test]
+fn inline_version_multiple() {
+ pkg("foo", "0.1.0");
+ pkg("foo", "0.1.1");
+ pkg("foo", "0.1.2");
+ pkg("bar", "0.2.0");
+ pkg("bar", "0.2.1");
+ pkg("bar", "0.2.2");
+
+ cargo_process("install foo@0.1.1 bar@0.2.1")
+ .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
+ .with_stderr_contains("[DOWNLOADED] bar v0.2.1 (registry [..])")
+ .run();
+}
+
+#[cargo_test]
+fn inline_version_without_name() {
+ pkg("foo", "0.1.1");
+ pkg("foo", "0.1.2");
+
+ cargo_process("install @0.1.1")
+ .with_status(101)
+ .with_stderr("error: missing crate name for `@0.1.1`")
+ .run();
+}
+
+#[cargo_test]
+fn inline_and_explicit_version() {
+ pkg("foo", "0.1.1");
+ pkg("foo", "0.1.2");
+
+ cargo_process("install foo@0.1.1 --version 0.1.1")
+ .with_status(101)
+ .with_stderr("error: cannot specify both `@0.1.1` and `--version`")
+ .run();
+}
+
+#[cargo_test]
+fn not_both_vers_and_version() {
+ pkg("foo", "0.1.1");
+ pkg("foo", "0.1.2");
+
+ cargo_process("install foo --version 0.1.1 --vers 0.1.2")
+ .with_status(1)
+ .with_stderr_contains(
+ "\
+[ERROR] the argument '--version <VERSION>' cannot be used multiple times
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_install_git_cannot_be_a_base_url() {
+ cargo_process("install --git github.com:rust-lang/rustfmt.git")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] invalid url `github.com:rust-lang/rustfmt.git`: cannot-be-a-base-URLs are not supported",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn uninstall_multiple_and_specifying_bin() {
+ cargo_process("uninstall foo bar --bin baz")
+ .with_status(101)
+ .with_stderr("\
+[ERROR] A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant.")
+ .run();
+}
+
+#[cargo_test]
+fn uninstall_with_empty_package_option() {
+ cargo_process("uninstall -p")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] \"--package <SPEC>\" requires a SPEC format value.
+Run `cargo help pkgid` for more information about SPEC format.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn uninstall_multiple_and_some_pkg_does_not_exist() {
+ pkg("foo", "0.0.1");
+
+ cargo_process("install foo").run();
+
+ cargo_process("uninstall foo bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]
+error: package ID specification `bar` did not match any packages
+[SUMMARY] Successfully uninstalled foo! Failed to uninstall bar (see error(s) above).
+error: some packages failed to uninstall
+",
+ )
+ .run();
+
+ assert_has_not_installed_exe(cargo_home(), "foo");
+ assert_has_not_installed_exe(cargo_home(), "bar");
+}
+
+#[cargo_test]
+fn custom_target_dir_for_git_source() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ cargo_process("install --git")
+ .arg(p.url().to_string())
+ .run();
+ assert!(!paths::root().join("target/release").is_dir());
+
+ cargo_process("install --force --git")
+ .arg(p.url().to_string())
+ .env("CARGO_TARGET_DIR", "target")
+ .run();
+ assert!(paths::root().join("target/release").is_dir());
+}
+
+#[cargo_test]
+fn install_respects_lock_file() {
+ // `cargo install` now requires --locked to use a Cargo.lock.
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.1.1")
+ .file("src/lib.rs", "not rust")
+ .publish();
+ Package::new("foo", "0.1.0")
+ .dep("bar", "0.1")
+ .file("src/lib.rs", "")
+ .file(
+ "src/main.rs",
+ "extern crate foo; extern crate bar; fn main() {}",
+ )
+ .file(
+ "Cargo.lock",
+ r#"
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+"#,
+ )
+ .publish();
+
+ cargo_process("install foo")
+ .with_stderr_contains("[..]not rust[..]")
+ .with_status(101)
+ .run();
+ cargo_process("install --locked foo").run();
+}
+
+#[cargo_test]
+fn install_path_respects_lock_file() {
+ // --path version of install_respects_lock_file; --locked is required
+ // to use Cargo.lock.
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.1.1")
+ .file("src/lib.rs", "not rust")
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() {}")
+ .file(
+ "Cargo.lock",
+ r#"
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+"#,
+ )
+ .build();
+
+ p.cargo("install --path .")
+ .with_stderr_contains("[..]not rust[..]")
+ .with_status(101)
+ .run();
+ p.cargo("install --path . --locked").run();
+}
+
+#[cargo_test]
+fn lock_file_path_deps_ok() {
+ Package::new("bar", "0.1.0").publish();
+
+ Package::new("foo", "0.1.0")
+ .dep("bar", "0.1")
+ .file("src/lib.rs", "")
+ .file(
+ "src/main.rs",
+ "extern crate foo; extern crate bar; fn main() {}",
+ )
+ .file(
+ "Cargo.lock",
+ r#"
+ [[package]]
+ name = "bar"
+ version = "0.1.0"
+
+ [[package]]
+ name = "foo"
+ version = "0.1.0"
+ dependencies = [
+ "bar 0.1.0",
+ ]
+ "#,
+ )
+ .publish();
+
+ cargo_process("install foo").run();
+}
+
+#[cargo_test]
+fn install_empty_argument() {
+ // Bug 5229
+ cargo_process("install")
+ .arg("")
+ .with_status(1)
+ .with_stderr_contains("[ERROR] a value is required for '[crate]...' but none was supplied")
+ .run();
+}
+
+#[cargo_test]
+fn git_repo_replace() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ let repo = git2::Repository::open(&p.root()).unwrap();
+ let old_rev = repo.revparse_single("HEAD").unwrap().id();
+ cargo_process("install --git")
+ .arg(p.url().to_string())
+ .run();
+ git::commit(&repo);
+ let new_rev = repo.revparse_single("HEAD").unwrap().id();
+ let mut path = paths::home();
+ path.push(".cargo/.crates.toml");
+
+ assert_ne!(old_rev, new_rev);
+ assert!(fs::read_to_string(path.clone())
+ .unwrap()
+ .contains(&format!("{}", old_rev)));
+ cargo_process("install --force --git")
+ .arg(p.url().to_string())
+ .run();
+ assert!(fs::read_to_string(path)
+ .unwrap()
+ .contains(&format!("{}", new_rev)));
+}
+
+#[cargo_test]
+fn workspace_uses_workspace_target_dir() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+
+ [dependencies]
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --release").cwd("bar").run();
+ cargo_process("install --path")
+ .arg(p.root().join("bar"))
+ .with_stderr(
+ "[INSTALLING] [..]
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]
+[INSTALLED] package `bar v0.1.0 ([..]/bar)` (executable `bar[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn install_ignores_local_cargo_config() {
+ pkg("bar", "0.0.1");
+
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ target = "non-existing-target"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("install bar").run();
+ assert_has_installed_exe(cargo_home(), "bar");
+}
+
+#[cargo_test]
+fn install_ignores_unstable_table_in_local_cargo_config() {
+ pkg("bar", "0.0.1");
+
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [unstable]
+ build-std = ["core"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("install bar")
+ .masquerade_as_nightly_cargo(&["build-std"])
+ .run();
+ assert_has_installed_exe(cargo_home(), "bar");
+}
+
+#[cargo_test]
+fn install_global_cargo_config() {
+ pkg("bar", "0.0.1");
+
+ let config = cargo_home().join("config");
+ let mut toml = fs::read_to_string(&config).unwrap_or_default();
+
+ toml.push_str(
+ r#"
+ [build]
+ target = 'nonexistent'
+ "#,
+ );
+ fs::write(&config, toml).unwrap();
+
+ cargo_process("install bar")
+ .with_status(101)
+ .with_stderr_contains("[..]--target nonexistent[..]")
+ .run();
+}
+
+#[cargo_test]
+fn install_path_config() {
+ project()
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ target = 'nonexistent'
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ cargo_process("install --path foo")
+ .with_status(101)
+ .with_stderr_contains("[..]--target nonexistent[..]")
+ .run();
+}
+
+#[cargo_test]
+fn install_version_req() {
+ // Try using a few versionreq styles.
+ pkg("foo", "0.0.3");
+ pkg("foo", "1.0.4");
+ pkg("foo", "1.0.5");
+ cargo_process("install foo --version=*")
+ .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]")
+ .with_stderr_contains("[INSTALLING] foo v1.0.5")
+ .run();
+ cargo_process("uninstall foo").run();
+ cargo_process("install foo --version=^1.0")
+ .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]")
+ .with_stderr_contains("[INSTALLING] foo v1.0.5")
+ .run();
+ cargo_process("uninstall foo").run();
+ cargo_process("install foo --version=0.0.*")
+ .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]")
+ .with_stderr_contains("[INSTALLING] foo v0.0.3")
+ .run();
+}
+
+#[cargo_test]
+fn git_install_reads_workspace_manifest() {
+ let p = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bin1"]
+
+ [profile.release]
+ incremental = 3
+ "#,
+ )
+ .file("bin1/Cargo.toml", &basic_manifest("bin1", "0.1.0"))
+ .file(
+ "bin1/src/main.rs",
+ r#"fn main() { println!("Hello, world!"); }"#,
+ )
+ .build();
+
+ cargo_process(&format!("install --git {}", p.url().to_string()))
+ .with_status(101)
+ .with_stderr_contains(" invalid type: integer `3`[..]")
+ .run();
+}
+
+#[cargo_test]
+fn install_git_with_symlink_home() {
+ // Ensure that `cargo install` with a git repo is OK when CARGO_HOME is a
+ // symlink, and uses a build script.
+ if !symlink_supported() {
+ return;
+ }
+ let p = git::new("foo", |p| {
+ p.file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/main.rs", "fn main() {}")
+ // This triggers discover_git_and_list_files for detecting changed files.
+ .file("build.rs", "fn main() {}")
+ });
+ #[cfg(unix)]
+ use std::os::unix::fs::symlink;
+ #[cfg(windows)]
+ use std::os::windows::fs::symlink_dir as symlink;
+
+ let actual = paths::root().join("actual-home");
+ t!(std::fs::create_dir(&actual));
+ t!(symlink(&actual, paths::home().join(".cargo")));
+ cargo_process("install --git")
+ .arg(p.url().to_string())
+ .with_stderr(
+ "\
+[UPDATING] git repository [..]
+[INSTALLING] foo v1.0.0 [..]
+[COMPILING] foo v1.0.0 [..]
+[FINISHED] [..]
+[INSTALLING] [..]home/.cargo/bin/foo[..]
+[INSTALLED] package `foo [..]
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn install_yanked_cargo_package() {
+ Package::new("baz", "0.0.1").yanked(true).publish();
+ cargo_process("install baz --version 0.0.1")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[ERROR] cannot install package `baz`, it has been yanked from registry `crates-io`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn install_cargo_package_in_a_patched_workspace() {
+ pkg("foo", "0.1.0");
+ pkg("fizz", "1.0.0");
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["baz"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ fizz = "1"
+
+ [patch.crates-io]
+ fizz = { version = "=1.0.0" }
+ "#,
+ )
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ let stderr = "\
+[WARNING] patch for the non root package will be ignored, specify patch at the workspace root:
+package: [..]/foo/baz/Cargo.toml
+workspace: [..]/foo/Cargo.toml
+";
+ p.cargo("check").with_stderr_contains(&stderr).run();
+
+ // A crate installation must not emit any message from a workspace under
+ // the current working directory.
+ // See https://github.com/rust-lang/cargo/issues/8619
+ p.cargo("install foo")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.1.0 (registry [..])
+[INSTALLING] foo v0.1.0
+[COMPILING] foo v0.1.0
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]foo[EXE]
+[INSTALLED] package `foo v0.1.0` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn locked_install_without_published_lockfile() {
+ Package::new("foo", "0.1.0")
+ .file("src/main.rs", "//! Some docs\nfn main() {}")
+ .publish();
+
+ cargo_process("install foo --locked")
+ .with_stderr_contains("[WARNING] no Cargo.lock file published in foo v0.1.0")
+ .run();
+}
+
+#[cargo_test]
+fn install_semver_metadata() {
+ // Check trying to install a package that uses semver metadata.
+ // This uses alt registry because the bug this is exercising doesn't
+ // trigger with a replaced source.
+ registry::alt_init();
+ Package::new("foo", "1.0.0+abc")
+ .alternative(true)
+ .file("src/main.rs", "fn main() {}")
+ .publish();
+
+ cargo_process("install foo --registry alternative --version 1.0.0+abc").run();
+ cargo_process("install foo --registry alternative")
+ .with_stderr("\
+[UPDATING] `alternative` index
+[IGNORED] package `foo v1.0.0+abc (registry `alternative`)` is already installed, use --force to override
+[WARNING] be sure to add [..]
+")
+ .run();
+ // "Updating" is not displayed here due to the --version fast-path.
+ cargo_process("install foo --registry alternative --version 1.0.0+abc")
+ .with_stderr("\
+[IGNORED] package `foo v1.0.0+abc (registry `alternative`)` is already installed, use --force to override
+[WARNING] be sure to add [..]
+")
+ .run();
+ cargo_process("install foo --registry alternative --version 1.0.0 --force")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[INSTALLING] foo v1.0.0+abc (registry `alternative`)
+[COMPILING] foo v1.0.0+abc (registry `alternative`)
+[FINISHED] [..]
+[REPLACING] [ROOT]/home/.cargo/bin/foo[EXE]
+[REPLACED] package [..]
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+ // Check that installing from a fresh cache works without metadata, too.
+ paths::home().join(".cargo/registry").rm_rf();
+ paths::home().join(".cargo/bin").rm_rf();
+ cargo_process("install foo --registry alternative --version 1.0.0")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v1.0.0+abc (registry `alternative`)
+[INSTALLING] foo v1.0.0+abc (registry `alternative`)
+[COMPILING] foo v1.0.0+abc (registry `alternative`)
+[FINISHED] [..]
+[INSTALLING] [ROOT]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v1.0.0+abc (registry `alternative`)` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_auto_fix_note() {
+ Package::new("auto_fix", "0.0.1")
+ .file("src/lib.rs", "use std::io;")
+ .file(
+ "src/main.rs",
+ &format!("extern crate {}; use std::io; fn main() {{}}", "auto_fix"),
+ )
+ .publish();
+
+ // This should not contain a suggestion to run `cargo fix`
+ //
+ // This is checked by matching the full output as `with_stderr_does_not_contain`
+ // can be brittle
+ cargo_process("install auto_fix")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] auto_fix v0.0.1 (registry [..])
+[INSTALLING] auto_fix v0.0.1
+[COMPILING] auto_fix v0.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/auto_fix[EXE]
+[INSTALLED] package `auto_fix v0.0.1` (executable `auto_fix[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+ assert_has_installed_exe(cargo_home(), "auto_fix");
+
+ cargo_process("uninstall auto_fix")
+ .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/auto_fix[EXE]")
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "auto_fix");
+}
+
+#[cargo_test]
+fn failed_install_retains_temp_directory() {
+ // Verifies that the temporary directory persists after a build failure.
+ Package::new("foo", "0.0.1")
+ .file("src/main.rs", "x")
+ .publish();
+ let err = cargo_process("install foo").exec_with_output().unwrap_err();
+ let err = err.downcast::<ProcessError>().unwrap();
+ let stderr = String::from_utf8(err.stderr.unwrap()).unwrap();
+ compare::match_contains(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.0.1 (registry `dummy-registry`)
+[INSTALLING] foo v0.0.1
+[COMPILING] foo v0.0.1
+",
+ &stderr,
+ None,
+ )
+ .unwrap();
+ compare::match_contains(
+ "error: failed to compile `foo v0.0.1`, intermediate artifacts can be found at `[..]`",
+ &stderr,
+ None,
+ )
+ .unwrap();
+
+ // Find the path in the output.
+ let start = stderr.find("found at `").unwrap() + 10;
+ let end = stderr[start..].find('\n').unwrap() - 1;
+ let path = Path::new(&stderr[start..(end + start)]);
+ assert!(path.exists());
+ assert!(path.join("release/deps").exists());
+}
+
+#[cargo_test]
+fn sparse_install() {
+ // Checks for an issue where uninstalling something corrupted
+ // the SourceIds of sparse registries.
+ // See https://github.com/rust-lang/cargo/issues/11751
+ let _registry = registry::RegistryBuilder::new().http_index().build();
+
+ pkg("foo", "0.0.1");
+ pkg("bar", "0.0.1");
+
+ cargo_process("install foo --registry dummy-registry")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.0.1 (registry `dummy-registry`)
+[INSTALLING] foo v0.0.1 (registry `dummy-registry`)
+[UPDATING] `dummy-registry` index
+[COMPILING] foo v0.0.1 (registry `dummy-registry`)
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [ROOT]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v0.0.1 (registry `dummy-registry`)` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ let assert_v1 = |expected| {
+ let v1 = fs::read_to_string(paths::home().join(".cargo/.crates.toml")).unwrap();
+ compare::assert_match_exact(expected, &v1);
+ };
+ assert_v1(
+ r#"[v1]
+"foo 0.0.1 (sparse+http://127.0.0.1:[..]/index/)" = ["foo[EXE]"]
+"#,
+ );
+ cargo_process("install bar").run();
+ assert_has_installed_exe(cargo_home(), "bar");
+ assert_v1(
+ r#"[v1]
+"bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = ["bar[EXE]"]
+"foo 0.0.1 (sparse+http://127.0.0.1:[..]/index/)" = ["foo[EXE]"]
+"#,
+ );
+
+ cargo_process("uninstall bar")
+ .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/bar[EXE]")
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "bar");
+ assert_v1(
+ r#"[v1]
+"foo 0.0.1 (sparse+http://127.0.0.1:[..]/index/)" = ["foo[EXE]"]
+"#,
+ );
+ cargo_process("uninstall foo")
+ .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]")
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "foo");
+ assert_v1(
+ r#"[v1]
+"#,
+ );
+}
diff --git a/src/tools/cargo/tests/testsuite/install_upgrade.rs b/src/tools/cargo/tests/testsuite/install_upgrade.rs
new file mode 100644
index 000000000..ae641ba98
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/install_upgrade.rs
@@ -0,0 +1,862 @@
+//! Tests for `cargo install` where it upgrades a package if it is out-of-date.
+
+use cargo::core::PackageId;
+use std::collections::BTreeSet;
+use std::env;
+use std::fs;
+use std::path::PathBuf;
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+use cargo_test_support::install::{cargo_home, exe};
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::registry::{self, Package};
+use cargo_test_support::{
+ basic_manifest, cargo_process, cross_compile, execs, git, process, project, Execs,
+};
+
+fn pkg_maybe_yanked(name: &str, vers: &str, yanked: bool) {
+ Package::new(name, vers)
+ .yanked(yanked)
+ .file(
+ "src/main.rs",
+ r#"fn main() { println!("{}", env!("CARGO_PKG_VERSION")) }"#,
+ )
+ .publish();
+}
+
+// Helper for publishing a package.
+fn pkg(name: &str, vers: &str) {
+ pkg_maybe_yanked(name, vers, false)
+}
+
+fn v1_path() -> PathBuf {
+ cargo_home().join(".crates.toml")
+}
+
+fn v2_path() -> PathBuf {
+ cargo_home().join(".crates2.json")
+}
+
+fn load_crates1() -> toml::Value {
+ toml::from_str(&fs::read_to_string(v1_path()).unwrap()).unwrap()
+}
+
+fn load_crates2() -> serde_json::Value {
+ serde_json::from_str(&fs::read_to_string(v2_path()).unwrap()).unwrap()
+}
+
+fn installed_exe(name: &str) -> PathBuf {
+ cargo_home().join("bin").join(exe(name))
+}
+
+/// Helper for executing binaries installed by cargo.
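+///
+/// For example, `installed_process("foo").with_stdout("1.0.0").run()` runs
+/// the installed `foo` binary from the test CARGO_HOME and checks its output.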
+fn installed_process(name: &str) -> Execs {
+ static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
+ thread_local!(static UNIQUE_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst));
+
+ // This copies the executable to a unique name so that it may be safely
+ // replaced on Windows. See Project::rename_run for details.
+ let src = installed_exe(name);
+ let dst = installed_exe(&UNIQUE_ID.with(|my_id| format!("{}-{}", name, my_id)));
+ // Note: Cannot use copy. On Linux, file descriptors may be left open to
+ // the executable as other tests in other threads are constantly spawning
+ // new processes (see https://github.com/rust-lang/cargo/pull/5557 for
+ // more).
+ fs::rename(&src, &dst)
+ .unwrap_or_else(|e| panic!("Failed to rename `{:?}` to `{:?}`: {}", src, dst, e));
+ // Leave behind a fake file so that the reinstall duplicate check works.
+ fs::write(src, "").unwrap();
+ let p = process(dst);
+ execs().with_process_builder(p)
+}
+
+/// Check that the given package name/version has the given bins listed in
+/// the trackers. Also verifies that both trackers are in sync and valid.
+/// Pass in an empty `bins` list to assert that the package is *not* installed.
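+///
+/// For reference: `.crates.toml` (v1) is a TOML table under `[v1]` mapping a
+/// package ID string such as
+/// `"foo 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)"` to
+/// the list of binaries it installed, while `.crates2.json` (v2) keeps the
+/// same package IDs under an `installs` object whose entries carry a `bins`
+/// array (plus other metadata) that must stay in sync with v1.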
+fn validate_trackers(name: &str, version: &str, bins: &[&str]) {
+ let v1 = load_crates1();
+ let v1_table = v1.get("v1").unwrap().as_table().unwrap();
+ let v2 = load_crates2();
+ let v2_table = v2["installs"].as_object().unwrap();
+ assert_eq!(v1_table.len(), v2_table.len());
+ // Convert `bins` to a BTreeSet.
+ let bins: BTreeSet<String> = bins
+ .iter()
+ .map(|b| format!("{}{}", b, env::consts::EXE_SUFFIX))
+ .collect();
+ // Check every entry matches between v1 and v2.
+ for (pkg_id_str, v1_bins) in v1_table {
+ let pkg_id: PackageId = toml::Value::from(pkg_id_str.to_string())
+ .try_into()
+ .unwrap();
+ let v1_bins: BTreeSet<String> = v1_bins
+ .as_array()
+ .unwrap()
+ .iter()
+ .map(|b| b.as_str().unwrap().to_string())
+ .collect();
+ if pkg_id.name().as_str() == name && pkg_id.version().to_string() == version {
+ if bins.is_empty() {
+ panic!(
+ "Expected {} to not be installed, but found: {:?}",
+ name, v1_bins
+ );
+ } else {
+ assert_eq!(bins, v1_bins);
+ }
+ }
+ let pkg_id_value = serde_json::to_value(&pkg_id).unwrap();
+ let pkg_id_str = pkg_id_value.as_str().unwrap();
+ let v2_info = v2_table
+ .get(pkg_id_str)
+ .expect("v2 missing v1 pkg")
+ .as_object()
+ .unwrap();
+ let v2_bins = v2_info["bins"].as_array().unwrap();
+ let v2_bins: BTreeSet<String> = v2_bins
+ .iter()
+ .map(|b| b.as_str().unwrap().to_string())
+ .collect();
+ assert_eq!(v1_bins, v2_bins);
+ }
+}
+
+#[cargo_test]
+fn registry_upgrade() {
+ // Installing and upgrading from a registry.
+ pkg("foo", "1.0.0");
+ cargo_process("install foo")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v1.0.0 (registry [..])
+[INSTALLING] foo v1.0.0
+[COMPILING] foo v1.0.0
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v1.0.0` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+ installed_process("foo").with_stdout("1.0.0").run();
+ validate_trackers("foo", "1.0.0", &["foo"]);
+
+ cargo_process("install foo")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[IGNORED] package `foo v1.0.0` is already installed[..]
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+
+ pkg("foo", "1.0.1");
+
+ cargo_process("install foo")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v1.0.1 (registry [..])
+[INSTALLING] foo v1.0.1
+[COMPILING] foo v1.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[REPLACING] [CWD]/home/.cargo/bin/foo[EXE]
+[REPLACED] package `foo v1.0.0` with `foo v1.0.1` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+
+ installed_process("foo").with_stdout("1.0.1").run();
+ validate_trackers("foo", "1.0.1", &["foo"]);
+
+ cargo_process("install foo --version=1.0.0")
+ .with_stderr_contains("[COMPILING] foo v1.0.0")
+ .run();
+ installed_process("foo").with_stdout("1.0.0").run();
+ validate_trackers("foo", "1.0.0", &["foo"]);
+
+ cargo_process("install foo --version=^1.0")
+ .with_stderr_contains("[COMPILING] foo v1.0.1")
+ .run();
+ installed_process("foo").with_stdout("1.0.1").run();
+ validate_trackers("foo", "1.0.1", &["foo"]);
+
+ cargo_process("install foo --version=^1.0")
+ .with_stderr_contains("[IGNORED] package `foo v1.0.1` is already installed[..]")
+ .run();
+}
+
+#[cargo_test]
+fn uninstall() {
+ // Basic uninstall test.
+ pkg("foo", "1.0.0");
+ cargo_process("install foo").run();
+ cargo_process("uninstall foo").run();
+ let data = load_crates2();
+ assert_eq!(data["installs"].as_object().unwrap().len(), 0);
+ let v1_table = load_crates1();
+ assert_eq!(v1_table.get("v1").unwrap().as_table().unwrap().len(), 0);
+}
+
+#[cargo_test]
+fn upgrade_force() {
+ pkg("foo", "1.0.0");
+ cargo_process("install foo").run();
+ cargo_process("install foo --force")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[INSTALLING] foo v1.0.0
+[COMPILING] foo v1.0.0
+[FINISHED] release [optimized] target(s) in [..]
+[REPLACING] [..]/.cargo/bin/foo[EXE]
+[REPLACED] package `foo v1.0.0` with `foo v1.0.0` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..]
+",
+ )
+ .run();
+ validate_trackers("foo", "1.0.0", &["foo"]);
+}
+
+#[cargo_test]
+fn ambiguous_version_no_longer_allowed() {
+ // Non-semver-requirement is not allowed for `--version`.
+ pkg("foo", "1.0.0");
+ cargo_process("install foo --version=1.0")
+ .with_stderr(
+ "\
+[ERROR] the `--version` provided, `1.0`, is not a valid semver version: cannot parse '1.0' as a semver
+
+if you want to specify semver range, add an explicit qualifier, like ^1.0
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn path_is_always_dirty() {
+ // --path should always reinstall.
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("install --path .").run();
+ p.cargo("install --path .")
+ .with_stderr_contains("[REPLACING] [..]/foo[EXE]")
+ .run();
+}
+
+#[cargo_test]
+fn fails_for_conflicts_unknown() {
+ // If an untracked file is in the way, it should fail.
+ pkg("foo", "1.0.0");
+ let exe = installed_exe("foo");
+ exe.parent().unwrap().mkdir_p();
+ fs::write(exe, "").unwrap();
+ cargo_process("install foo")
+ .with_stderr_contains("[ERROR] binary `foo[EXE]` already exists in destination")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn fails_for_conflicts_known() {
+ // If the same binary exists in another package, it should fail.
+ pkg("foo", "1.0.0");
+ Package::new("bar", "1.0.0")
+ .file("src/bin/foo.rs", "fn main() {}")
+ .publish();
+ cargo_process("install foo").run();
+ cargo_process("install bar")
+ .with_stderr_contains(
+ "[ERROR] binary `foo[EXE]` already exists in destination as part of `foo v1.0.0`",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn supports_multiple_binary_names() {
+ // Can individually install with --bin or --example
+ Package::new("foo", "1.0.0")
+ .file("src/main.rs", r#"fn main() { println!("foo"); }"#)
+ .file("src/bin/a.rs", r#"fn main() { println!("a"); }"#)
+ .file("examples/ex1.rs", r#"fn main() { println!("ex1"); }"#)
+ .publish();
+ cargo_process("install foo --bin foo").run();
+ installed_process("foo").with_stdout("foo").run();
+ assert!(!installed_exe("a").exists());
+ assert!(!installed_exe("ex1").exists());
+ validate_trackers("foo", "1.0.0", &["foo"]);
+ cargo_process("install foo --bin a").run();
+ installed_process("a").with_stdout("a").run();
+ assert!(!installed_exe("ex1").exists());
+ validate_trackers("foo", "1.0.0", &["a", "foo"]);
+ cargo_process("install foo --example ex1").run();
+ installed_process("ex1").with_stdout("ex1").run();
+ validate_trackers("foo", "1.0.0", &["a", "ex1", "foo"]);
+ cargo_process("uninstall foo --bin foo").run();
+ assert!(!installed_exe("foo").exists());
+ assert!(installed_exe("ex1").exists());
+ validate_trackers("foo", "1.0.0", &["a", "ex1"]);
+ cargo_process("uninstall foo").run();
+ assert!(!installed_exe("ex1").exists());
+ assert!(!installed_exe("a").exists());
+}
+
+#[cargo_test]
+fn v1_already_installed_fresh() {
+ // Install with v1, then try to install again with v2.
+ pkg("foo", "1.0.0");
+ cargo_process("install foo").run();
+ cargo_process("install foo")
+ .with_stderr_contains("[IGNORED] package `foo v1.0.0` is already installed[..]")
+ .run();
+}
+
+#[cargo_test]
+fn v1_already_installed_dirty() {
+ // Install with v1, then install a new version with v2.
+ pkg("foo", "1.0.0");
+ cargo_process("install foo").run();
+ pkg("foo", "1.0.1");
+ cargo_process("install foo")
+ .with_stderr_contains("[COMPILING] foo v1.0.1")
+ .with_stderr_contains("[REPLACING] [..]/foo[EXE]")
+ .run();
+ validate_trackers("foo", "1.0.1", &["foo"]);
+}
+
+#[cargo_test]
+fn change_features_rebuilds() {
+ Package::new("foo", "1.0.0")
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(feature = "f1") {
+ println!("f1");
+ }
+ if cfg!(feature = "f2") {
+ println!("f2");
+ }
+ }
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [features]
+ f1 = []
+ f2 = []
+ default = ["f1"]
+ "#,
+ )
+ .publish();
+ cargo_process("install foo").run();
+ installed_process("foo").with_stdout("f1").run();
+ cargo_process("install foo --no-default-features").run();
+ installed_process("foo").with_stdout("").run();
+ cargo_process("install foo --all-features").run();
+ installed_process("foo").with_stdout("f1\nf2").run();
+ cargo_process("install foo --no-default-features --features=f1").run();
+ installed_process("foo").with_stdout("f1").run();
+}
+
+#[cargo_test]
+fn change_profile_rebuilds() {
+ pkg("foo", "1.0.0");
+ cargo_process("install foo").run();
+ cargo_process("install foo --debug")
+ .with_stderr_contains("[COMPILING] foo v1.0.0")
+ .with_stderr_contains("[REPLACING] [..]foo[EXE]")
+ .run();
+ cargo_process("install foo --debug")
+ .with_stderr_contains("[IGNORED] package `foo v1.0.0` is already installed[..]")
+ .run();
+}
+
+#[cargo_test]
+fn change_target_rebuilds() {
+ if cross_compile::disabled() {
+ return;
+ }
+ pkg("foo", "1.0.0");
+ cargo_process("install foo").run();
+ let target = cross_compile::alternate();
+ cargo_process("install foo -v --target")
+ .arg(&target)
+ .with_stderr_contains("[COMPILING] foo v1.0.0")
+ .with_stderr_contains("[REPLACING] [..]foo[EXE]")
+ .with_stderr_contains(&format!("[..]--target {}[..]", target))
+ .run();
+}
+
+#[cargo_test]
+fn change_bin_sets_rebuilds() {
+ // Changing which bins are installed in a multi-bin project should reinstall.
+ Package::new("foo", "1.0.0")
+ .file("src/main.rs", "fn main() { }")
+ .file("src/bin/x.rs", "fn main() { }")
+ .file("src/bin/y.rs", "fn main() { }")
+ .publish();
+ cargo_process("install foo --bin x").run();
+ assert!(installed_exe("x").exists());
+ assert!(!installed_exe("y").exists());
+ assert!(!installed_exe("foo").exists());
+ validate_trackers("foo", "1.0.0", &["x"]);
+ cargo_process("install foo --bin y")
+ .with_stderr_contains("[INSTALLED] package `foo v1.0.0` (executable `y[EXE]`)")
+ .run();
+ assert!(installed_exe("x").exists());
+ assert!(installed_exe("y").exists());
+ assert!(!installed_exe("foo").exists());
+ validate_trackers("foo", "1.0.0", &["x", "y"]);
+ cargo_process("install foo")
+ .with_stderr_contains("[INSTALLED] package `foo v1.0.0` (executable `foo[EXE]`)")
+ .with_stderr_contains(
+ "[REPLACED] package `foo v1.0.0` with `foo v1.0.0` (executables `x[EXE]`, `y[EXE]`)",
+ )
+ .run();
+ assert!(installed_exe("x").exists());
+ assert!(installed_exe("y").exists());
+ assert!(installed_exe("foo").exists());
+ validate_trackers("foo", "1.0.0", &["foo", "x", "y"]);
+}
+
+#[cargo_test]
+fn forwards_compatible() {
+ // Unknown fields should be preserved.
+ pkg("foo", "1.0.0");
+ pkg("bar", "1.0.0");
+ cargo_process("install foo").run();
+ let key = "foo 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)";
+ let v2 = cargo_home().join(".crates2.json");
+ let mut data = load_crates2();
+ data["newfield"] = serde_json::Value::Bool(true);
+ data["installs"][key]["moreinfo"] = serde_json::Value::String("shazam".to_string());
+ fs::write(&v2, serde_json::to_string(&data).unwrap()).unwrap();
+ cargo_process("install bar").run();
+ let data: serde_json::Value = serde_json::from_str(&fs::read_to_string(&v2).unwrap()).unwrap();
+ assert_eq!(data["newfield"].as_bool().unwrap(), true);
+ assert_eq!(
+ data["installs"][key]["moreinfo"].as_str().unwrap(),
+ "shazam"
+ );
+}
+
+#[cargo_test]
+fn v2_syncs() {
+ // V2 inherits the installs from V1.
+ pkg("one", "1.0.0");
+ pkg("two", "1.0.0");
+ pkg("three", "1.0.0");
+ let p = project()
+ .file("src/bin/x.rs", "fn main() {}")
+ .file("src/bin/y.rs", "fn main() {}")
+ .build();
+ cargo_process("install one").run();
+ validate_trackers("one", "1.0.0", &["one"]);
+ p.cargo("install --path .").run();
+ validate_trackers("foo", "1.0.0", &["x", "y"]);
+ // v1 add/remove
+ cargo_process("install two").run();
+ cargo_process("uninstall one").run();
+ // This should pick up that `two` was added and `one` was removed.
+ cargo_process("install three").run();
+ validate_trackers("three", "1.0.0", &["three"]);
+ cargo_process("install --list")
+ .with_stdout(
+ "\
+foo v0.0.1 ([..]/foo):
+ x[EXE]
+ y[EXE]
+three v1.0.0:
+ three[EXE]
+two v1.0.0:
+ two[EXE]
+",
+ )
+ .run();
+ cargo_process("install one").run();
+ installed_process("one").with_stdout("1.0.0").run();
+ validate_trackers("one", "1.0.0", &["one"]);
+ cargo_process("install two")
+ .with_stderr_contains("[IGNORED] package `two v1.0.0` is already installed[..]")
+ .run();
+ // v1 remove
+ p.cargo("uninstall --bin x").run();
+ pkg("x", "1.0.0");
+ pkg("y", "1.0.0");
+ // This should succeed because `x` was removed in V1.
+ cargo_process("install x").run();
+ validate_trackers("x", "1.0.0", &["x"]);
+ // This should fail because `y` still exists in a different package.
+ cargo_process("install y")
+ .with_stderr_contains(
+ "[ERROR] binary `y[EXE]` already exists in destination \
+ as part of `foo v0.0.1 ([..])`",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn upgrade_git() {
+ let git_project = git::new("foo", |project| project.file("src/main.rs", "fn main() {}"));
+ // install
+ cargo_process("install --git")
+ .arg(git_project.url().to_string())
+ .run();
+ // Check install stays fresh.
+ cargo_process("install --git")
+ .arg(git_project.url().to_string())
+ .with_stderr_contains(
+ "[IGNORED] package `foo v0.0.1 (file://[..]/foo#[..])` is \
+ already installed,[..]",
+ )
+ .run();
+ // Modify a file.
+ let repo = git2::Repository::open(git_project.root()).unwrap();
+ git_project.change_file("src/main.rs", r#"fn main() {println!("onomatopoeia");}"#);
+ git::add(&repo);
+ git::commit(&repo);
+ // Install should reinstall.
+ cargo_process("install --git")
+ .arg(git_project.url().to_string())
+ .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])")
+ .with_stderr_contains("[REPLACING] [..]/foo[EXE]")
+ .run();
+ installed_process("foo").with_stdout("onomatopoeia").run();
+ // Check install stays fresh.
+ cargo_process("install --git")
+ .arg(git_project.url().to_string())
+ .with_stderr_contains(
+ "[IGNORED] package `foo v0.0.1 (file://[..]/foo#[..])` is \
+ already installed,[..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn switch_sources() {
+ // Installing what appears to be the same thing, but from different
+ // sources should reinstall.
+ registry::alt_init();
+ pkg("foo", "1.0.0");
+ Package::new("foo", "1.0.0")
+ .file("src/main.rs", r#"fn main() { println!("alt"); }"#)
+ .alternative(true)
+ .publish();
+ let p = project()
+ .at("foo-local") // so it doesn't use the same directory as the git project
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/main.rs", r#"fn main() { println!("local"); }"#)
+ .build();
+ let git_project = git::new("foo", |project| {
+ project.file("src/main.rs", r#"fn main() { println!("git"); }"#)
+ });
+
+ cargo_process("install foo").run();
+ installed_process("foo").with_stdout("1.0.0").run();
+ cargo_process("install foo --registry alternative").run();
+ installed_process("foo").with_stdout("alt").run();
+ p.cargo("install --path .").run();
+ installed_process("foo").with_stdout("local").run();
+ cargo_process("install --git")
+ .arg(git_project.url().to_string())
+ .run();
+ installed_process("foo").with_stdout("git").run();
+}
+
+#[cargo_test]
+fn multiple_report() {
+ // Testing the full output that indicates installed/ignored/replaced/summary.
+ pkg("one", "1.0.0");
+ pkg("two", "1.0.0");
+ fn three(vers: &str) {
+ Package::new("three", vers)
+ .file("src/main.rs", "fn main() { }")
+ .file("src/bin/x.rs", "fn main() { }")
+ .file("src/bin/y.rs", "fn main() { }")
+ .publish();
+ }
+ three("1.0.0");
+ cargo_process("install one two three")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] one v1.0.0 (registry `[..]`)
+[DOWNLOADING] crates ...
+[DOWNLOADED] two v1.0.0 (registry `[..]`)
+[DOWNLOADING] crates ...
+[DOWNLOADED] three v1.0.0 (registry `[..]`)
+[INSTALLING] one v1.0.0
+[COMPILING] one v1.0.0
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]/.cargo/bin/one[EXE]
+[INSTALLED] package `one v1.0.0` (executable `one[EXE]`)
+[INSTALLING] two v1.0.0
+[COMPILING] two v1.0.0
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]/.cargo/bin/two[EXE]
+[INSTALLED] package `two v1.0.0` (executable `two[EXE]`)
+[INSTALLING] three v1.0.0
+[COMPILING] three v1.0.0
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]/.cargo/bin/three[EXE]
+[INSTALLING] [..]/.cargo/bin/x[EXE]
+[INSTALLING] [..]/.cargo/bin/y[EXE]
+[INSTALLED] package `three v1.0.0` (executables `three[EXE]`, `x[EXE]`, `y[EXE]`)
+[SUMMARY] Successfully installed one, two, three!
+[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..]
+",
+ )
+ .run();
+ pkg("foo", "1.0.1");
+ pkg("bar", "1.0.1");
+ three("1.0.1");
+ cargo_process("install one two three")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[IGNORED] package `one v1.0.0` is already installed, use --force to override
+[IGNORED] package `two v1.0.0` is already installed, use --force to override
+[DOWNLOADING] crates ...
+[DOWNLOADED] three v1.0.1 (registry `[..]`)
+[INSTALLING] three v1.0.1
+[COMPILING] three v1.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[REPLACING] [..]/.cargo/bin/three[EXE]
+[REPLACING] [..]/.cargo/bin/x[EXE]
+[REPLACING] [..]/.cargo/bin/y[EXE]
+[REPLACED] package `three v1.0.0` with `three v1.0.1` (executables `three[EXE]`, `x[EXE]`, `y[EXE]`)
+[SUMMARY] Successfully installed one, two, three!
+[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..]
+",
+ )
+ .run();
+ cargo_process("uninstall three")
+ .with_stderr(
+ "\
+[REMOVING] [..]/.cargo/bin/three[EXE]
+[REMOVING] [..]/.cargo/bin/x[EXE]
+[REMOVING] [..]/.cargo/bin/y[EXE]
+",
+ )
+ .run();
+ cargo_process("install three --bin x")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[INSTALLING] three v1.0.1
+[COMPILING] three v1.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]/.cargo/bin/x[EXE]
+[INSTALLED] package `three v1.0.1` (executable `x[EXE]`)
+[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..]
+",
+ )
+ .run();
+ cargo_process("install three")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[INSTALLING] three v1.0.1
+[COMPILING] three v1.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]/.cargo/bin/three[EXE]
+[INSTALLING] [..]/.cargo/bin/y[EXE]
+[REPLACING] [..]/.cargo/bin/x[EXE]
+[INSTALLED] package `three v1.0.1` (executables `three[EXE]`, `y[EXE]`)
+[REPLACED] package `three v1.0.1` with `three v1.0.1` (executable `x[EXE]`)
+[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_track() {
+ pkg("foo", "1.0.0");
+ cargo_process("install --no-track foo").run();
+ assert!(!v1_path().exists());
+ assert!(!v2_path().exists());
+ cargo_process("install --no-track foo")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[ERROR] binary `foo[EXE]` already exists in destination `[..]/.cargo/bin/foo[EXE]`
+Add --force to overwrite
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn deletes_orphaned() {
+ // When an executable is removed from a project, upgrading should remove it.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("src/bin/other.rs", "fn main() {}")
+ .file("examples/ex1.rs", "fn main() {}")
+ .build();
+ p.cargo("install --path . --bins --examples").run();
+ assert!(installed_exe("other").exists());
+
+ // Remove a binary, add a new one, and bump the version.
+ fs::remove_file(p.root().join("src/bin/other.rs")).unwrap();
+ p.change_file("examples/ex2.rs", "fn main() {}");
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.2.0"
+ "#,
+ );
+ p.cargo("install --path . --bins --examples")
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.2.0 [..]
+[COMPILING] foo v0.2.0 [..]
+[FINISHED] release [..]
+[INSTALLING] [..]/.cargo/bin/ex2[EXE]
+[REPLACING] [..]/.cargo/bin/ex1[EXE]
+[REPLACING] [..]/.cargo/bin/foo[EXE]
+[REMOVING] executable `[..]/.cargo/bin/other[EXE]` from previous version foo v0.1.0 [..]
+[INSTALLED] package `foo v0.2.0 [..]` (executable `ex2[EXE]`)
+[REPLACED] package `foo v0.1.0 [..]` with `foo v0.2.0 [..]` (executables `ex1[EXE]`, `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+ assert!(!installed_exe("other").exists());
+ validate_trackers("foo", "0.2.0", &["foo", "ex1", "ex2"]);
+ // 0.1.0 should not have any entries.
+ validate_trackers("foo", "0.1.0", &[]);
+}
+
+#[cargo_test]
+fn already_installed_exact_does_not_update() {
+ pkg("foo", "1.0.0");
+ cargo_process("install foo --version=1.0.0").run();
+ cargo_process("install foo --version=1.0.0")
+ .with_stderr(
+ "\
+[IGNORED] package `foo v1.0.0` is already installed[..]
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+
+ cargo_process("install foo --version=>=1.0.0")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[IGNORED] package `foo v1.0.0` is already installed[..]
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+ pkg("foo", "1.0.1");
+ cargo_process("install foo --version=>=1.0.0")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v1.0.1 (registry [..])
+[INSTALLING] foo v1.0.1
+[COMPILING] foo v1.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[REPLACING] [CWD]/home/.cargo/bin/foo[EXE]
+[REPLACED] package `foo v1.0.0` with `foo v1.0.1` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn already_installed_updates_yank_status_on_upgrade() {
+ pkg("foo", "1.0.0");
+ pkg_maybe_yanked("foo", "1.0.1", true);
+ cargo_process("install foo --version=1.0.0").run();
+
+ cargo_process("install foo --version=1.0.1")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[ERROR] cannot install package `foo`, it has been yanked from registry `crates-io`
+",
+ )
+ .run();
+
+ pkg_maybe_yanked("foo", "1.0.1", false);
+
+ pkg("foo", "1.0.1");
+ cargo_process("install foo --version=1.0.1")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v1.0.1 (registry [..])
+[INSTALLING] foo v1.0.1
+[COMPILING] foo v1.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[REPLACING] [CWD]/home/.cargo/bin/foo[EXE]
+[REPLACED] package `foo v1.0.0` with `foo v1.0.1` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn partially_already_installed_does_one_update() {
+ pkg("foo", "1.0.0");
+ cargo_process("install foo --version=1.0.0").run();
+ pkg("bar", "1.0.0");
+ pkg("baz", "1.0.0");
+ cargo_process("install foo bar baz --version=1.0.0")
+ .with_stderr(
+ "\
+[IGNORED] package `foo v1.0.0` is already installed[..]
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.0 (registry [..])
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v1.0.0 (registry [..])
+[INSTALLING] bar v1.0.0
+[COMPILING] bar v1.0.0
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/bar[EXE]
+[INSTALLED] package `bar v1.0.0` (executable `bar[EXE]`)
+[INSTALLING] baz v1.0.0
+[COMPILING] baz v1.0.0
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/baz[EXE]
+[INSTALLED] package `baz v1.0.0` (executable `baz[EXE]`)
+[SUMMARY] Successfully installed foo, bar, baz!
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/jobserver.rs b/src/tools/cargo/tests/testsuite/jobserver.rs
new file mode 100644
index 000000000..9ccff141e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/jobserver.rs
@@ -0,0 +1,250 @@
+//! Tests for the jobserver protocol.
+
+use cargo_util::is_ci;
+use std::net::TcpListener;
+use std::process::Command;
+use std::thread;
+
+use cargo_test_support::install::{assert_has_installed_exe, cargo_home};
+use cargo_test_support::{cargo_exe, project};
+
+const EXE_CONTENT: &str = r#"
+use std::env;
+
+fn main() {
+ let var = env::var("CARGO_MAKEFLAGS").unwrap();
+ let arg = var.split(' ')
+ .find(|p| p.starts_with("--jobserver"))
+ .unwrap();
+ let val = &arg[arg.find('=').unwrap() + 1..];
+ validate(val);
+}
+
+#[cfg(unix)]
+fn validate(s: &str) {
+ use std::fs::{self, File};
+ use std::io::*;
+ use std::os::unix::prelude::*;
+
+ if let Some((r, w)) = s.split_once(',') {
+ // `--jobserver-auth=R,W`
+ unsafe {
+ let mut read = File::from_raw_fd(r.parse().unwrap());
+ let mut write = File::from_raw_fd(w.parse().unwrap());
+
+ let mut buf = [0];
+ assert_eq!(read.read(&mut buf).unwrap(), 1);
+ assert_eq!(write.write(&buf).unwrap(), 1);
+ }
+ } else {
+ // `--jobserver-auth=fifo:PATH` is the default since GNU Make 4.4
+ let (_, path) = s.split_once(':').expect("fifo:PATH");
+ assert!(fs::metadata(path).unwrap().file_type().is_fifo());
+ }
+}
+
+#[cfg(windows)]
+fn validate(_: &str) {
+ // a little too complicated for a test...
+}
+"#;
+
+#[cargo_test]
+fn jobserver_exists() {
+ let p = project()
+ .file("build.rs", EXE_CONTENT)
+ .file("src/lib.rs", "")
+ .build();
+
+ // Explicitly use `-j2` to ensure that there's eventually going to be a
+ // token to read from `validate` above, since running the build script
+ // itself consumes a token.
+ p.cargo("check -j2").run();
+}
+
+#[cargo_test]
+fn external_subcommand_inherits_jobserver() {
+ let make = if cfg!(windows) {
+ "mingw32-make"
+ } else {
+ "make"
+ };
+ if Command::new(make).arg("--version").output().is_err() {
+ return;
+ }
+
+ let name = "cargo-jobserver-check";
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "{name}"
+ version = "0.0.1"
+ "#
+ ),
+ )
+ .file("src/main.rs", EXE_CONTENT)
+ .file(
+ "Makefile",
+ "\
+all:
+\t+$(CARGO) jobserver-check
+",
+ )
+ .build();
+
+ p.cargo("install --path .").run();
+ assert_has_installed_exe(cargo_home(), name);
+
+ p.process(make).env("CARGO", cargo_exe()).arg("-j2").run();
+}
+
+#[cargo_test]
+fn makes_jobserver_used() {
+ let make = if cfg!(windows) {
+ "mingw32-make"
+ } else {
+ "make"
+ };
+ if !is_ci() && Command::new(make).arg("--version").output().is_err() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ d1 = { path = "d1" }
+ d2 = { path = "d2" }
+ d3 = { path = "d3" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ authors = []
+ build = "../dbuild.rs"
+ "#,
+ )
+ .file("d1/src/lib.rs", "")
+ .file(
+ "d2/Cargo.toml",
+ r#"
+ [package]
+ name = "d2"
+ version = "0.0.1"
+ authors = []
+ build = "../dbuild.rs"
+ "#,
+ )
+ .file("d2/src/lib.rs", "")
+ .file(
+ "d3/Cargo.toml",
+ r#"
+ [package]
+ name = "d3"
+ version = "0.0.1"
+ authors = []
+ build = "../dbuild.rs"
+ "#,
+ )
+ .file("d3/src/lib.rs", "")
+ .file(
+ "dbuild.rs",
+ r#"
+ use std::net::TcpStream;
+ use std::env;
+ use std::io::Read;
+
+ fn main() {
+ let addr = env::var("ADDR").unwrap();
+ let mut stream = TcpStream::connect(addr).unwrap();
+ let mut v = Vec::new();
+ stream.read_to_end(&mut v).unwrap();
+ }
+ "#,
+ )
+ .file(
+ "Makefile",
+ "\
+all:
+\t+$(CARGO) build
+",
+ )
+ .build();
+
+ let l = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = l.local_addr().unwrap();
+
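+ // With `-j2`, only two build scripts (each holding a jobserver token) can
+ // run at once, so the listener should see two connections and no third
+ // until one of the first two is dropped.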
+ let child = thread::spawn(move || {
+ let a1 = l.accept().unwrap();
+ let a2 = l.accept().unwrap();
+ l.set_nonblocking(true).unwrap();
+
+ for _ in 0..1000 {
+ assert!(l.accept().is_err());
+ thread::yield_now();
+ }
+
+ drop(a1);
+ l.set_nonblocking(false).unwrap();
+ let a3 = l.accept().unwrap();
+
+ drop((a2, a3));
+ });
+
+ p.process(make)
+ .env("CARGO", cargo_exe())
+ .env("ADDR", addr.to_string())
+ .arg("-j2")
+ .run();
+ child.join().unwrap();
+}
+
+#[cargo_test]
+fn jobserver_and_j() {
+ let make = if cfg!(windows) {
+ "mingw32-make"
+ } else {
+ "make"
+ };
+ if !is_ci() && Command::new(make).arg("--version").output().is_err() {
+ return;
+ }
+
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "Makefile",
+ "\
+all:
+\t+$(CARGO) build -j2
+",
+ )
+ .build();
+
+ p.process(make)
+ .env("CARGO", cargo_exe())
+ .arg("-j2")
+ .with_stderr(
+ "\
+warning: a `-j` argument was passed to Cargo but Cargo is also configured \
+with an external jobserver in its environment, ignoring the `-j` parameter
+[COMPILING] [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/list_availables.rs b/src/tools/cargo/tests/testsuite/list_availables.rs
new file mode 100644
index 000000000..6bbbeb160
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/list_availables.rs
@@ -0,0 +1,232 @@
+//! Tests for package/target filter flags giving suggestions on which
+//! packages/targets are available.
+
+use cargo_test_support::project;
+
+const EXAMPLE: u8 = 1 << 0;
+const BIN: u8 = 1 << 1;
+const TEST: u8 = 1 << 2;
+const BENCH: u8 = 1 << 3;
+const PACKAGE: u8 = 1 << 4;
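+
+// Each command under test passes a bitmask of these flags (e.g.
+// `EXAMPLE | BIN`) describing which filter options it should suggest values for.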
+
+fn list_availables_test(command: &str, targets: u8) {
+ let full_project = project()
+ .file("examples/a.rs", "fn main() { }")
+ .file("examples/b.rs", "fn main() { }")
+ .file("benches/bench1.rs", "")
+ .file("benches/bench2.rs", "")
+ .file("tests/test1.rs", "")
+ .file("tests/test2.rs", "")
+ .file("src/main.rs", "fn main() { }")
+ .file("Cargo.lock", "") // for `cargo pkgid`
+ .build();
+
+ if targets & EXAMPLE != 0 {
+ full_project
+ .cargo(&format!("{} --example", command))
+ .with_stderr(
+ "\
+error: \"--example\" takes one argument.
+Available examples:
+ a
+ b
+
+",
+ )
+ .with_status(101)
+ .run();
+ }
+
+ if targets & BIN != 0 {
+ full_project
+ .cargo(&format!("{} --bin", command))
+ .with_stderr(
+ "\
+error: \"--bin\" takes one argument.
+Available binaries:
+ foo
+
+",
+ )
+ .with_status(101)
+ .run();
+ }
+
+ if targets & BENCH != 0 {
+ full_project
+ .cargo(&format!("{} --bench", command))
+ .with_stderr(
+ "\
+error: \"--bench\" takes one argument.
+Available benches:
+ bench1
+ bench2
+
+",
+ )
+ .with_status(101)
+ .run();
+ }
+
+ if targets & TEST != 0 {
+ full_project
+ .cargo(&format!("{} --test", command))
+ .with_stderr(
+ "\
+error: \"--test\" takes one argument.
+Available tests:
+ test1
+ test2
+
+",
+ )
+ .with_status(101)
+ .run();
+ }
+
+ if targets & PACKAGE != 0 {
+ full_project
+ .cargo(&format!("{} -p", command))
+ .with_stderr(
+ "\
+[ERROR] \"--package <SPEC>\" requires a SPEC format value, \
+which can be any package ID specifier in the dependency graph.
+Run `cargo help pkgid` for more information about SPEC format.
+
+Possible packages/workspace members:
+ foo
+
+",
+ )
+ .with_status(101)
+ .run();
+ }
+
+ let empty_project = project().file("src/lib.rs", "").build();
+
+ if targets & EXAMPLE != 0 {
+ empty_project
+ .cargo(&format!("{} --example", command))
+ .with_stderr(
+ "\
+error: \"--example\" takes one argument.
+No examples available.
+
+",
+ )
+ .with_status(101)
+ .run();
+ }
+
+ if targets & BIN != 0 {
+ empty_project
+ .cargo(&format!("{} --bin", command))
+ .with_stderr(
+ "\
+error: \"--bin\" takes one argument.
+No binaries available.
+
+",
+ )
+ .with_status(101)
+ .run();
+ }
+
+ if targets & BENCH != 0 {
+ empty_project
+ .cargo(&format!("{} --bench", command))
+ .with_stderr(
+ "\
+error: \"--bench\" takes one argument.
+No benches available.
+
+",
+ )
+ .with_status(101)
+ .run();
+ }
+
+ if targets & TEST != 0 {
+ empty_project
+ .cargo(&format!("{} --test", command))
+ .with_stderr(
+ "\
+error: \"--test\" takes one argument.
+No tests available.
+
+",
+ )
+ .with_status(101)
+ .run();
+ }
+}
+
+#[cargo_test]
+fn build_list_availables() {
+ list_availables_test("build", EXAMPLE | BIN | TEST | BENCH | PACKAGE);
+}
+
+#[cargo_test]
+fn check_list_availables() {
+ list_availables_test("check", EXAMPLE | BIN | TEST | BENCH | PACKAGE);
+}
+
+#[cargo_test]
+fn doc_list_availables() {
+ list_availables_test("doc", BIN | PACKAGE);
+}
+
+#[cargo_test]
+fn fix_list_availables() {
+ list_availables_test("fix", EXAMPLE | BIN | TEST | BENCH | PACKAGE);
+}
+
+#[cargo_test]
+fn run_list_availables() {
+ list_availables_test("run", EXAMPLE | BIN | PACKAGE);
+}
+
+#[cargo_test]
+fn test_list_availables() {
+ list_availables_test("test", EXAMPLE | BIN | TEST | BENCH | PACKAGE);
+}
+
+#[cargo_test]
+fn bench_list_availables() {
+ list_availables_test("bench", EXAMPLE | BIN | TEST | BENCH | PACKAGE);
+}
+
+#[cargo_test]
+fn install_list_availables() {
+ list_availables_test("install", EXAMPLE | BIN);
+}
+
+#[cargo_test]
+fn rustdoc_list_availables() {
+ list_availables_test("rustdoc", EXAMPLE | BIN | TEST | BENCH | PACKAGE);
+}
+
+#[cargo_test]
+fn rustc_list_availables() {
+ list_availables_test("rustc", EXAMPLE | BIN | TEST | BENCH | PACKAGE);
+}
+
+#[cargo_test]
+fn pkgid_list_availables() {
+ list_availables_test("pkgid", PACKAGE);
+}
+
+#[cargo_test]
+fn tree_list_availables() {
+ list_availables_test("tree", PACKAGE);
+}
+
+#[cargo_test]
+fn clean_list_availables() {
+ list_availables_test("clean", PACKAGE);
+}
+
+#[cargo_test]
+fn update_list_availables() {
+ list_availables_test("update", PACKAGE);
+}
diff --git a/src/tools/cargo/tests/testsuite/local_registry.rs b/src/tools/cargo/tests/testsuite/local_registry.rs
new file mode 100644
index 000000000..374ea9370
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/local_registry.rs
@@ -0,0 +1,528 @@
+//! Tests for local-registry sources.
+
+use cargo_test_support::paths::{self, CargoPathExt};
+use cargo_test_support::registry::{registry_path, Package};
+use cargo_test_support::{basic_manifest, project, t};
+use std::fs;
+
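+// `setup` points crates-io at a local `registry` directory under the test
+// root; packages published with `.local(true)` end up in that registry.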
+fn setup() {
+ let root = paths::root();
+ t!(fs::create_dir(&root.join(".cargo")));
+ t!(fs::write(
+ root.join(".cargo/config"),
+ r#"
+ [source.crates-io]
+ registry = 'https://wut'
+ replace-with = 'my-awesome-local-registry'
+
+ [source.my-awesome-local-registry]
+ local-registry = 'registry'
+ "#
+ ));
+}
+
+#[cargo_test]
+fn simple() {
+ setup();
+ Package::new("bar", "0.0.1")
+ .local(true)
+ .file("src/lib.rs", "pub fn bar() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.0.1"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[UNPACKING] bar v0.0.1 ([..])
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn not_found() {
+ setup();
+ // Publish a package so that the directory hierarchy is created.
+ // Note, however, that we declare a dependency on baZ.
+ Package::new("bar", "0.0.1").local(true).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ baz = "0.0.1"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate baz; pub fn foo() { baz::bar(); }",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no matching package named `baz` found
+location searched: registry `crates-io`
+required by package `foo v0.0.1 ([..]/foo)`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn depend_on_yanked() {
+ setup();
+ Package::new("bar", "0.0.1").local(true).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Run cargo to create lock file.
+ p.cargo("check").run();
+
+ registry_path().join("index").join("3").rm_rf();
+ Package::new("bar", "0.0.1")
+ .local(true)
+ .yanked(true)
+ .publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn multiple_versions() {
+ setup();
+ Package::new("bar", "0.0.1").local(true).publish();
+ Package::new("bar", "0.1.0")
+ .local(true)
+ .file("src/lib.rs", "pub fn bar() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UNPACKING] bar v0.1.0 ([..])
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ Package::new("bar", "0.2.0")
+ .local(true)
+ .file("src/lib.rs", "pub fn bar() {}")
+ .publish();
+
+ p.cargo("update -v")
+ .with_stderr("[UPDATING] bar v0.1.0 -> v0.2.0")
+ .run();
+}
+
+#[cargo_test]
+fn multiple_names() {
+ setup();
+ Package::new("bar", "0.0.1")
+ .local(true)
+ .file("src/lib.rs", "pub fn bar() {}")
+ .publish();
+ Package::new("baz", "0.1.0")
+ .local(true)
+ .file("src/lib.rs", "pub fn baz() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ baz = "*"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate bar;
+ extern crate baz;
+ pub fn foo() {
+ bar::bar();
+ baz::baz();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UNPACKING] [..]
+[UNPACKING] [..]
+[CHECKING] [..]
+[CHECKING] [..]
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn interdependent() {
+ setup();
+ Package::new("bar", "0.0.1")
+ .local(true)
+ .file("src/lib.rs", "pub fn bar() {}")
+ .publish();
+ Package::new("baz", "0.1.0")
+ .local(true)
+ .dep("bar", "*")
+ .file("src/lib.rs", "extern crate bar; pub fn baz() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ baz = "*"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate bar;
+ extern crate baz;
+ pub fn foo() {
+ bar::bar();
+ baz::baz();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UNPACKING] [..]
+[UNPACKING] [..]
+[CHECKING] bar v0.0.1
+[CHECKING] baz v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn path_dep_rewritten() {
+ setup();
+ Package::new("bar", "0.0.1")
+ .local(true)
+ .file("src/lib.rs", "pub fn bar() {}")
+ .publish();
+ Package::new("baz", "0.1.0")
+ .local(true)
+ .dep("bar", "*")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar", version = "*" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar; pub fn baz() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ baz = "*"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate bar;
+ extern crate baz;
+ pub fn foo() {
+ bar::bar();
+ baz::baz();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UNPACKING] [..]
+[UNPACKING] [..]
+[CHECKING] bar v0.0.1
+[CHECKING] baz v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_dir_bad() {
+ setup();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [source.crates-io]
+ registry = 'https://wut'
+ replace-with = 'my-awesome-local-directory'
+
+ [source.my-awesome-local-directory]
+ local-registry = '/path/to/nowhere'
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 [..]`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update registry `crates-io`
+
+Caused by:
+ failed to update replaced source registry `crates-io`
+
+Caused by:
+ local registry path is not a directory: [..]path[..]to[..]nowhere
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn different_directory_replacing_the_registry_is_bad() {
+ setup();
+
+ // Move our test's .cargo/config to a temporary location and publish a
+ // registry package we're going to use first.
+ let config = paths::root().join(".cargo");
+ let config_tmp = paths::root().join(".cargo-old");
+ t!(fs::rename(&config, &config_tmp));
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Generate a lock file against the crates.io registry
+ Package::new("bar", "0.0.1").publish();
+ p.cargo("check").run();
+
+ // Switch back to our directory source. Now that we're replacing
+ // crates.io, make sure this fails because the replacement has a
+ // different checksum.
+ config.rm_rf();
+ t!(fs::rename(&config_tmp, &config));
+ Package::new("bar", "0.0.1")
+ .file("src/lib.rs", "invalid")
+ .local(true)
+ .publish();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] checksum for `bar v0.0.1` changed between lock files
+
+this could be indicative of a few possible errors:
+
+ * the lock file is corrupt
+ * a replacement source in use (e.g., a mirror) returned a different checksum
+ * the source itself may be corrupt in one way or another
+
+unable to verify that `bar v0.0.1` is the same as when the lockfile was generated
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn crates_io_registry_url_is_optional() {
+ let root = paths::root();
+ t!(fs::create_dir(&root.join(".cargo")));
+ t!(fs::write(
+ root.join(".cargo/config"),
+ r#"
+ [source.crates-io]
+ replace-with = 'my-awesome-local-registry'
+
+ [source.my-awesome-local-registry]
+ local-registry = 'registry'
+ "#
+ ));
+
+ Package::new("bar", "0.0.1")
+ .local(true)
+ .file("src/lib.rs", "pub fn bar() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.0.1"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "\
+[UNPACKING] bar v0.0.1 ([..])
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
+ p.cargo("test").run();
+}
diff --git a/src/tools/cargo/tests/testsuite/locate_project.rs b/src/tools/cargo/tests/testsuite/locate_project.rs
new file mode 100644
index 000000000..7e8ceb4c6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/locate_project.rs
@@ -0,0 +1,76 @@
+//! Tests for the `cargo locate-project` command.
+
+use cargo_test_support::project;
+
+#[cargo_test]
+fn simple() {
+ let p = project().build();
+
+ p.cargo("locate-project")
+ .with_json(r#"{"root": "[ROOT]/foo/Cargo.toml"}"#)
+ .run();
+}
+
+#[cargo_test]
+fn message_format() {
+ let p = project().build();
+
+ p.cargo("locate-project --message-format plain")
+ .with_stdout("[ROOT]/foo/Cargo.toml")
+ .run();
+
+ p.cargo("locate-project --message-format json")
+ .with_json(r#"{"root": "[ROOT]/foo/Cargo.toml"}"#)
+ .run();
+
+ p.cargo("locate-project --message-format cryptic")
+ .with_stderr("error: invalid message format specifier: `cryptic`")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "outer"
+ version = "0.0.0"
+
+ [workspace]
+ members = ["inner"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "inner/Cargo.toml",
+ r#"
+ [package]
+ name = "inner"
+ version = "0.0.0"
+ "#,
+ )
+ .file("inner/src/lib.rs", "")
+ .build();
+
+ let outer_manifest = r#"{"root": "[ROOT]/foo/Cargo.toml"}"#;
+ let inner_manifest = r#"{"root": "[ROOT]/foo/inner/Cargo.toml"}"#;
+
+ p.cargo("locate-project").with_json(outer_manifest).run();
+
+ p.cargo("locate-project")
+ .cwd("inner")
+ .with_json(inner_manifest)
+ .run();
+
+ p.cargo("locate-project --workspace")
+ .with_json(outer_manifest)
+ .run();
+
+ p.cargo("locate-project --workspace")
+ .cwd("inner")
+ .with_json(outer_manifest)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/lockfile_compat.rs b/src/tools/cargo/tests/testsuite/lockfile_compat.rs
new file mode 100644
index 000000000..aad8723c3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/lockfile_compat.rs
@@ -0,0 +1,890 @@
+//! Tests for supporting older versions of the Cargo.lock file format.
+
+use cargo_test_support::compare::assert_match_exact;
+use cargo_test_support::git;
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_lib_manifest, basic_manifest, project};
+
+#[cargo_test]
+fn oldest_lockfile_still_works() {
+ let cargo_commands = vec!["build", "update"];
+ for cargo_command in cargo_commands {
+ oldest_lockfile_still_works_with_command(cargo_command);
+ }
+}
+
+fn oldest_lockfile_still_works_with_command(cargo_command: &str) {
+ Package::new("bar", "0.1.0").publish();
+
+ let expected_lockfile = r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "[..]"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar",
+]
+"#;
+
+ let old_lockfile = r#"
+[root]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+"#;
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("Cargo.lock", old_lockfile)
+ .build();
+
+ p.cargo(cargo_command).run();
+
+ let lock = p.read_lockfile();
+ assert_match_exact(expected_lockfile, &lock);
+}
+
+#[cargo_test]
+fn frozen_flag_preserves_old_lockfile() {
+ let cksum = Package::new("bar", "0.1.0").publish();
+
+ let old_lockfile = format!(
+ r#"[root]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}"
+"#,
+ cksum,
+ );
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("Cargo.lock", &old_lockfile)
+ .build();
+
+ p.cargo("check --locked").run();
+
+ let lock = p.read_lockfile();
+ assert_match_exact(&old_lockfile, &lock);
+}
+
+#[cargo_test]
+fn totally_wild_checksums_works() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.lock",
+ r#"
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"checksum bar 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"#,
+ );
+
+ let p = p.build();
+
+ p.cargo("check").run();
+
+ let lock = p.read_lockfile();
+ assert_match_exact(
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "[..]"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar",
+]
+"#,
+ &lock,
+ );
+}
+
+#[cargo_test]
+fn wrong_checksum_is_an_error() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.lock",
+ r#"
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"#,
+ );
+
+ let p = p.build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+error: checksum for `bar v0.1.0` changed between lock files
+
+this could be indicative of a few possible errors:
+
+ * the lock file is corrupt
+ * a replacement source in use (e.g., a mirror) returned a different checksum
+ * the source itself may be corrupt in one way or another
+
+unable to verify that `bar v0.1.0` is the same as when the lockfile was generated
+
+",
+ )
+ .run();
+}
+
+// If the checksum is unlisted in the lock file (e.g., <none>) yet we can
+// calculate it (e.g., it's a registry dep), then in theory we could just fill
+// it in, but Cargo instead reports an error, as this test verifies.
+#[cargo_test]
+fn unlisted_checksum_is_bad_if_we_calculate() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.lock",
+ r#"
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "<none>"
+"#,
+ );
+ let p = p.build();
+
+ p.cargo("fetch")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+error: checksum for `bar v0.1.0` was not previously calculated, but a checksum \
+could now be calculated
+
+this could be indicative of a few possible situations:
+
+ * the source `[..]` did not previously support checksums,
+ but was replaced with one that does
+ * newer Cargo implementations know how to checksum this source, but this
+ older implementation does not
+ * the lock file is corrupt
+
+",
+ )
+ .run();
+}
+
+// If the checksum is listed in the lock file yet we cannot calculate it (e.g.,
+// Git dependencies as of today), then make sure we choke.
+#[cargo_test]
+fn listed_checksum_bad_if_we_cannot_compute() {
+ let git = git::new("bar", |p| {
+ p.file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = {{ git = '{}' }}
+ "#,
+ git.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.lock",
+ &format!(
+ r#"
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (git+{0})"
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "git+{0}"
+
+[metadata]
+"checksum bar 0.1.0 (git+{0})" = "checksum"
+"#,
+ git.url()
+ ),
+ );
+
+ let p = p.build();
+
+ p.cargo("fetch")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] git repository `[..]`
+error: checksum for `bar v0.1.0 ([..])` could not be calculated, but a \
+checksum is listed in the existing lock file[..]
+
+this could be indicative of a few possible situations:
+
+ * the source `[..]` supports checksums,
+ but was replaced with one that doesn't
+ * the lock file is corrupt
+
+unable to verify that `bar v0.1.0 ([..])` is the same as when the lockfile was generated
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn current_lockfile_format() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("check").run();
+
+ let actual = p.read_lockfile();
+
+ let expected = "\
+# This file is automatically @generated by Cargo.\n# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = \"bar\"
+version = \"0.1.0\"
+source = \"registry+https://github.com/rust-lang/crates.io-index\"
+checksum = \"[..]\"
+
+[[package]]
+name = \"foo\"
+version = \"0.0.1\"
+dependencies = [
+ \"bar\",
+]
+";
+ assert_match_exact(expected, &actual);
+}
+
+#[cargo_test]
+fn lockfile_without_root() {
+ Package::new("bar", "0.1.0").publish();
+
+ let lockfile = r#"
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar",
+]
+"#;
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("Cargo.lock", lockfile);
+
+ let p = p.build();
+
+ p.cargo("check").run();
+
+ let lock = p.read_lockfile();
+ assert_match_exact(
+ r#"# [..]
+# [..]
+version = 3
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "[..]"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar",
+]
+"#,
+ &lock,
+ );
+}
+
+#[cargo_test]
+fn locked_correct_error() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("check --locked")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+error: the lock file [CWD]/Cargo.lock needs to be updated but --locked was passed to prevent this
+If you want to try to generate the lock file without accessing the network, \
+remove the --locked flag and use --offline instead.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn v2_format_preserved() {
+ let cksum = Package::new("bar", "0.1.0").publish();
+
+ let lockfile = format!(
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "{}"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar",
+]
+"#,
+ cksum
+ );
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("Cargo.lock", &lockfile)
+ .build();
+
+ p.cargo("fetch").run();
+
+ let lock = p.read_lockfile();
+ assert_match_exact(&lockfile, &lock);
+}
+
+#[cargo_test]
+fn v2_path_and_crates_io() {
+ let cksum010 = Package::new("a", "0.1.0").publish();
+ let cksum020 = Package::new("a", "0.2.0").publish();
+
+ let lockfile = format!(
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "a"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "{}"
+
+[[package]]
+name = "a"
+version = "0.2.0"
+
+[[package]]
+name = "a"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "{}"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "a 0.1.0",
+ "a 0.2.0",
+ "a 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+"#,
+ cksum010, cksum020,
+ );
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = 'a' }
+ b = { version = "0.1", package = 'a' }
+ c = { version = "0.2", package = 'a' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.2.0"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file("Cargo.lock", &lockfile)
+ .build();
+
+ p.cargo("fetch").run();
+ p.cargo("fetch").run();
+
+ let lock = p.read_lockfile();
+ assert_match_exact(&lockfile, &lock);
+}
+
+#[cargo_test]
+fn v3_and_git() {
+ let (git_project, repo) = git::new_repo("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file("src/lib.rs", "")
+ });
+ let head_id = repo.head().unwrap().target().unwrap();
+
+ let lockfile = format!(
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "dep1"
+version = "0.5.0"
+source = "git+{}?branch=master#{}"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "dep1",
+]
+"#,
+ git_project.url(),
+ head_id,
+ );
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ dep1 = {{ git = '{}', branch = 'master' }}
+ "#,
+ git_project.url(),
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("Cargo.lock", "version = 3")
+ .build();
+
+ p.cargo("fetch").run();
+
+ let lock = p.read_lockfile();
+ assert_match_exact(&lockfile, &lock);
+}
+
+#[cargo_test]
+fn lock_from_the_future() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("Cargo.lock", "version = 10000000")
+ .build();
+
+ p.cargo("fetch")
+ .with_stderr(
+ "\
+error: failed to parse lock file at: [..]
+
+Caused by:
+ lock file version `10000000` was found, but this version of Cargo does not \
+ understand this lock file, perhaps Cargo needs to be updated?
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn preserve_old_format_if_no_update_needed() {
+ let cksum = Package::new("bar", "0.1.0").publish();
+ let lockfile = format!(
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[metadata]
+"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}"
+"#,
+ cksum
+ );
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("Cargo.lock", &lockfile)
+ .build();
+
+ p.cargo("check --locked").run();
+}
+
+#[cargo_test]
+fn same_name_version_different_sources() {
+ let cksum = Package::new("foo", "0.1.0").publish();
+ let (git_project, repo) = git::new_repo("dep1", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ });
+ let head_id = repo.head().unwrap().target().unwrap();
+
+ // Lockfile was generated with Rust 1.51
+ let lockfile = format!(
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+dependencies = [
+ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "foo 0.1.0 (git+{url})",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "{cksum}"
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "git+{url}#{sha}"
+"#,
+ sha = head_id,
+ url = git_project.url(),
+ cksum = cksum
+ );
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ foo = "0.1.0"
+ foo2 = {{ git = '{}', package = 'foo' }}
+ "#,
+ git_project.url(),
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("Cargo.lock", &lockfile)
+ .build();
+
+ p.cargo("check").run();
+
+ assert_eq!(p.read_file("Cargo.lock"), lockfile);
+}
+
+#[cargo_test]
+fn bad_data_in_lockfile_error_msg() {
+ Package::new("bar", "0.0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.0.0"
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "Cargo.lock",
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e1b9346248cf3391ead604c4407258d327c28e37209f6d56127598165165dda"
+
+[[package]]
+name = "test"
+version = "0.0.0"
+dependencies = [
+ "bar",
+]"#,
+ )
+ .build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[..]
+[ERROR] failed to select a version for the requirement `bar = \"*\"` (locked to 0.1.0)
+candidate versions found which didn't match: 0.0.1
+location searched: `dummy-registry` index (which is replacing registry `crates-io`)
+required by package `test v0.0.0 ([..])`
+perhaps a crate was updated and forgotten to be re-vendored?
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/login.rs b/src/tools/cargo/tests/testsuite/login.rs
new file mode 100644
index 000000000..85b299f28
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/login.rs
@@ -0,0 +1,404 @@
+//! Tests for the `cargo login` command.
+
+use cargo_test_support::cargo_process;
+use cargo_test_support::paths::{self, CargoPathExt};
+use cargo_test_support::registry::{self, RegistryBuilder};
+use cargo_test_support::t;
+use std::fs;
+use std::path::PathBuf;
+
+const TOKEN: &str = "test-token";
+const TOKEN2: &str = "test-token2";
+const ORIGINAL_TOKEN: &str = "api-token";
+
+fn credentials_toml() -> PathBuf {
+ paths::home().join(".cargo/credentials.toml")
+}
+
+fn setup_new_credentials() {
+ setup_new_credentials_at(credentials_toml());
+}
+
+fn setup_new_credentials_at(config: PathBuf) {
+ t!(fs::create_dir_all(config.parent().unwrap()));
+ t!(fs::write(
+ &config,
+ format!(r#"token = "{token}""#, token = ORIGINAL_TOKEN)
+ ));
+}
+
+/// Asserts whether or not the token is set to the given value for the given registry.
+pub fn check_token(expected_token: Option<&str>, registry: Option<&str>) {
+ let credentials = credentials_toml();
+ assert!(credentials.is_file());
+
+ let contents = fs::read_to_string(&credentials).unwrap();
+ let toml: toml::Table = contents.parse().unwrap();
+
+ let actual_token = match registry {
+ // A registry has been provided, so check that the token exists in a
+ // table for the registry.
+ Some(registry) => toml
+ .get("registries")
+ .and_then(|registries_table| registries_table.get(registry))
+ .and_then(|registry_table| match registry_table.get("token") {
+ Some(&toml::Value::String(ref token)) => Some(token.as_str().to_string()),
+ _ => None,
+ }),
+ // There is no registry provided, so check the global token instead.
+ None => toml
+ .get("registry")
+ .and_then(|registry_table| registry_table.get("token"))
+ .and_then(|v| match v {
+ toml::Value::String(ref token) => Some(token.as_str().to_string()),
+ _ => None,
+ }),
+ };
+
+ match (actual_token, expected_token) {
+ (None, None) => {}
+ (Some(actual), Some(expected)) => assert_eq!(actual, expected),
+ (None, Some(expected)) => {
+ panic!("expected `{registry:?}` to be `{expected}`, but was not set")
+ }
+ (Some(actual), None) => {
+ panic!("expected `{registry:?}` to be unset, but was set to `{actual}`")
+ }
+ }
+}
+
+#[cargo_test]
+fn registry_credentials() {
+ let _alternative = RegistryBuilder::new().alternative().build();
+ let _alternative2 = RegistryBuilder::new()
+ .alternative_named("alternative2")
+ .build();
+
+ setup_new_credentials();
+
+ let reg = "alternative";
+
+ cargo_process("login --registry").arg(reg).arg(TOKEN).run();
+
+ // Ensure that we have not updated the default token
+ check_token(Some(ORIGINAL_TOKEN), None);
+
+ // Also ensure that we get the new token for the registry
+ check_token(Some(TOKEN), Some(reg));
+
+ let reg2 = "alternative2";
+ cargo_process("login --registry")
+ .arg(reg2)
+ .arg(TOKEN2)
+ .run();
+
+ // Ensure not overwriting 1st alternate registry token with
+ // 2nd alternate registry token (see rust-lang/cargo#7701).
+ check_token(Some(ORIGINAL_TOKEN), None);
+ check_token(Some(TOKEN), Some(reg));
+ check_token(Some(TOKEN2), Some(reg2));
+}
+
+#[cargo_test]
+fn empty_login_token() {
+ let registry = RegistryBuilder::new()
+ .no_configure_registry()
+ .no_configure_token()
+ .build();
+ setup_new_credentials();
+
+ cargo_process("login")
+ .replace_crates_io(registry.index_url())
+ .with_stdout("please paste the token found on [..]/me below")
+ .with_stdin("\t\n")
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[ERROR] please provide a non-empty token
+",
+ )
+ .with_status(101)
+ .run();
+
+ cargo_process("login")
+ .replace_crates_io(registry.index_url())
+ .arg("")
+ .with_stderr(
+ "\
+[ERROR] please provide a non-empty token
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn invalid_login_token() {
+ let registry = RegistryBuilder::new()
+ .no_configure_registry()
+ .no_configure_token()
+ .build();
+ setup_new_credentials();
+
+ let check = |stdin: &str, stderr: &str, status: i32| {
+ cargo_process("login")
+ .replace_crates_io(registry.index_url())
+ .with_stdout("please paste the token found on [..]/me below")
+ .with_stdin(stdin)
+ .with_stderr(stderr)
+ .with_status(status)
+ .run();
+ };
+
+ let invalid = |stdin: &str| {
+ check(
+ stdin,
+ "[ERROR] token contains invalid characters.
+Only printable ISO-8859-1 characters are allowed as it is sent in a HTTPS header.",
+ 101,
+ )
+ };
+ let valid = |stdin: &str| check(stdin, "[LOGIN] token for `crates.io` saved", 0);
+
+ // Update config.json so that the rest of the tests don't need to care
+ // whether or not `Updating` is printed.
+ check(
+ "test",
+ "\
+[UPDATING] crates.io index
+[LOGIN] token for `crates.io` saved
+",
+ 0,
+ );
+
+ invalid("😄");
+ invalid("\u{0016}");
+ invalid("\u{0000}");
+ invalid("你好");
+ valid("foo\tbar");
+ valid("foo bar");
+ valid(
+ r##"!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~"##,
+ );
+}
+
+#[cargo_test]
+fn bad_asymmetric_token_args() {
+ // These cases are kept brief as the implementation is covered by clap, so this is only smoke testing that we have clap configured correctly.
+ cargo_process("login --key-subject=foo tok")
+ .with_stderr_contains(
+ "error: the argument '--key-subject <SUBJECT>' cannot be used with '[token]'",
+ )
+ .with_status(1)
+ .run();
+
+ cargo_process("login --generate-keypair tok")
+ .with_stderr_contains(
+ "error: the argument '--generate-keypair' cannot be used with '[token]'",
+ )
+ .with_status(1)
+ .run();
+
+ cargo_process("login --secret-key tok")
+ .with_stderr_contains("error: the argument '--secret-key' cannot be used with '[token]'")
+ .with_status(1)
+ .run();
+
+ cargo_process("login --generate-keypair --secret-key")
+ .with_stderr_contains(
+ "error: the argument '--generate-keypair' cannot be used with '--secret-key'",
+ )
+ .with_status(1)
+ .run();
+}
+
+#[cargo_test]
+fn asymmetric_requires_nightly() {
+ let registry = registry::init();
+ cargo_process("login --key-subject=foo")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr_contains("[ERROR] the `key-subject` flag is unstable, pass `-Z registry-auth` to enable it\n\
+ See https://github.com/rust-lang/cargo/issues/10519 for more information about the `key-subject` flag.")
+ .run();
+ cargo_process("login --generate-keypair")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr_contains("[ERROR] the `generate-keypair` flag is unstable, pass `-Z registry-auth` to enable it\n\
+ See https://github.com/rust-lang/cargo/issues/10519 for more information about the `generate-keypair` flag.")
+ .run();
+ cargo_process("login --secret-key")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr_contains("[ERROR] the `secret-key` flag is unstable, pass `-Z registry-auth` to enable it\n\
+ See https://github.com/rust-lang/cargo/issues/10519 for more information about the `secret-key` flag.")
+ .run();
+}
+
+#[cargo_test]
+fn login_with_no_cargo_dir() {
+ // Create a config in the root directory because `login` requires the
+ // index to be updated, and we don't want to hit crates.io.
+ let registry = registry::init();
+ fs::rename(paths::home().join(".cargo"), paths::root().join(".cargo")).unwrap();
+ paths::home().rm_rf();
+ cargo_process("login foo -v")
+ .replace_crates_io(registry.index_url())
+ .run();
+ let credentials = fs::read_to_string(credentials_toml()).unwrap();
+ assert_eq!(credentials, "[registry]\ntoken = \"foo\"\n");
+}
+
+#[cargo_test]
+fn login_with_differently_sized_token() {
+ // Verify that the configuration file gets properly truncated.
+ let registry = registry::init();
+ let credentials = credentials_toml();
+ fs::remove_file(&credentials).unwrap();
+ cargo_process("login lmaolmaolmao -v")
+ .replace_crates_io(registry.index_url())
+ .run();
+ cargo_process("login lmao -v")
+ .replace_crates_io(registry.index_url())
+ .run();
+ cargo_process("login lmaolmaolmao -v")
+ .replace_crates_io(registry.index_url())
+ .run();
+ let credentials = fs::read_to_string(&credentials).unwrap();
+ assert_eq!(credentials, "[registry]\ntoken = \"lmaolmaolmao\"\n");
+}
+
+#[cargo_test]
+fn login_with_token_on_stdin() {
+ let registry = registry::init();
+ let credentials = credentials_toml();
+ fs::remove_file(&credentials).unwrap();
+ cargo_process("login lmao -v")
+ .replace_crates_io(registry.index_url())
+ .run();
+ cargo_process("login")
+ .replace_crates_io(registry.index_url())
+ .with_stdout("please paste the token found on [..]/me below")
+ .with_stdin("some token")
+ .run();
+ let credentials = fs::read_to_string(&credentials).unwrap();
+ assert_eq!(credentials, "[registry]\ntoken = \"some token\"\n");
+}
+
+#[cargo_test]
+fn login_with_asymmetric_token_and_subject_on_stdin() {
+ let registry = registry::init();
+ let credentials = credentials_toml();
+ fs::remove_file(&credentials).unwrap();
+ cargo_process("login --key-subject=foo --secret-key -v -Z registry-auth")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .replace_crates_io(registry.index_url())
+ .with_stdout(
+ "\
+ please paste the API secret key below
+k3.public.AmDwjlyf8jAV3gm5Z7Kz9xAOcsKslt_Vwp5v-emjFzBHLCtcANzTaVEghTNEMj9PkQ",
+ )
+ .with_stdin("k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36")
+ .run();
+ let credentials = fs::read_to_string(&credentials).unwrap();
+ assert!(credentials.starts_with("[registry]\n"));
+ assert!(credentials.contains("secret-key-subject = \"foo\"\n"));
+ assert!(credentials.contains("secret-key = \"k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36\"\n"));
+}
+
+#[cargo_test]
+fn login_with_asymmetric_token_on_stdin() {
+ let registry = registry::init();
+ let credentials = credentials_toml();
+ fs::remove_file(&credentials).unwrap();
+ cargo_process("login --secret-key -v -Z registry-auth")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .replace_crates_io(registry.index_url())
+ .with_stdout(
+ "\
+ please paste the API secret key below
+k3.public.AmDwjlyf8jAV3gm5Z7Kz9xAOcsKslt_Vwp5v-emjFzBHLCtcANzTaVEghTNEMj9PkQ",
+ )
+ .with_stdin("k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36")
+ .run();
+ let credentials = fs::read_to_string(&credentials).unwrap();
+ assert_eq!(credentials, "[registry]\nsecret-key = \"k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36\"\n");
+}
+
+#[cargo_test]
+fn login_with_asymmetric_key_subject_without_key() {
+ let registry = registry::init();
+ let credentials = credentials_toml();
+ fs::remove_file(&credentials).unwrap();
+ cargo_process("login --key-subject=foo -Z registry-auth")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .replace_crates_io(registry.index_url())
+ .with_stderr_contains("error: need a secret_key to set a key_subject")
+ .with_status(101)
+ .run();
+
+ // ok so add a secret_key to the credentials
+ cargo_process("login --secret-key -v -Z registry-auth")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .replace_crates_io(registry.index_url())
+ .with_stdout(
+ "please paste the API secret key below
+k3.public.AmDwjlyf8jAV3gm5Z7Kz9xAOcsKslt_Vwp5v-emjFzBHLCtcANzTaVEghTNEMj9PkQ",
+ )
+ .with_stdin("k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36")
+ .run();
+
+ // and then it should work
+ cargo_process("login --key-subject=foo -Z registry-auth")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .replace_crates_io(registry.index_url())
+ .run();
+
+ let credentials = fs::read_to_string(&credentials).unwrap();
+ assert!(credentials.starts_with("[registry]\n"));
+ assert!(credentials.contains("secret-key-subject = \"foo\"\n"));
+ assert!(credentials.contains("secret-key = \"k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36\"\n"));
+}
+
+#[cargo_test]
+fn login_with_generate_asymmetric_token() {
+ let registry = registry::init();
+ let credentials = credentials_toml();
+ fs::remove_file(&credentials).unwrap();
+ cargo_process("login --generate-keypair -Z registry-auth")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .replace_crates_io(registry.index_url())
+ .with_stdout("k3.public.[..]")
+ .run();
+ let credentials = fs::read_to_string(&credentials).unwrap();
+ assert!(credentials.contains("secret-key = \"k3.secret."));
+}
+
+#[cargo_test]
+fn default_registry_configured() {
+ // When registry.default is set, login should use that one when
+ // --registry is not used.
+ let _alternative = RegistryBuilder::new().alternative().build();
+ let cargo_home = paths::home().join(".cargo");
+ cargo_util::paths::append(
+ &cargo_home.join("config"),
+ br#"
+ [registry]
+ default = "alternative"
+ "#,
+ )
+ .unwrap();
+
+ cargo_process("login")
+ .arg("a-new-token")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[LOGIN] token for `alternative` saved
+",
+ )
+ .run();
+
+ check_token(None, None);
+ check_token(Some("a-new-token"), Some("alternative"));
+}
diff --git a/src/tools/cargo/tests/testsuite/logout.rs b/src/tools/cargo/tests/testsuite/logout.rs
new file mode 100644
index 000000000..7b5e10de2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/logout.rs
@@ -0,0 +1,104 @@
+//! Tests for the `cargo logout` command.
+
+use super::login::check_token;
+use cargo_test_support::paths::{self, CargoPathExt};
+use cargo_test_support::registry::TestRegistry;
+use cargo_test_support::{cargo_process, registry};
+
+fn simple_logout_test(registry: &TestRegistry, reg: Option<&str>, flag: &str, note: &str) {
+ let msg = reg.unwrap_or("crates-io");
+ check_token(Some(registry.token()), reg);
+ let mut cargo = cargo_process(&format!("logout {}", flag));
+ if reg.is_none() {
+ cargo.replace_crates_io(registry.index_url());
+ }
+ cargo
+ .with_stderr(&format!(
+ "\
+[LOGOUT] token for `{msg}` has been removed from local storage
+[NOTE] This does not revoke the token on the registry server.\n \
+If you need to revoke the token, visit {note} and follow the instructions there.
+"
+ ))
+ .run();
+ check_token(None, reg);
+
+ let mut cargo = cargo_process(&format!("logout {}", flag));
+ if reg.is_none() {
+ cargo.replace_crates_io(registry.index_url());
+ }
+ cargo
+ .with_stderr(&format!("[LOGOUT] not currently logged in to `{msg}`"))
+ .run();
+ check_token(None, reg);
+}
+
+#[cargo_test]
+fn default_registry_unconfigured() {
+ let registry = registry::init();
+ simple_logout_test(&registry, None, "", "<https://crates.io/me>");
+}
+
+#[cargo_test]
+fn other_registry() {
+ let registry = registry::alt_init();
+ simple_logout_test(
+ &registry,
+ Some("alternative"),
+ "--registry alternative",
+ "the `alternative` website",
+ );
+ // It should not touch crates.io.
+ check_token(Some("sekrit"), None);
+}
+
+#[cargo_test]
+fn default_registry_configured() {
+ // When registry.default is set, logout should use that one when
+ // --registry is not used.
+ let cargo_home = paths::home().join(".cargo");
+ cargo_home.mkdir_p();
+ cargo_util::paths::write(
+ &cargo_home.join("config.toml"),
+ r#"
+ [registry]
+ default = "dummy-registry"
+
+ [registries.dummy-registry]
+ index = "https://127.0.0.1/index"
+ "#,
+ )
+ .unwrap();
+ cargo_util::paths::write(
+ &cargo_home.join("credentials.toml"),
+ r#"
+ [registry]
+ token = "crates-io-token"
+
+ [registries.dummy-registry]
+ token = "dummy-token"
+ "#,
+ )
+ .unwrap();
+ check_token(Some("dummy-token"), Some("dummy-registry"));
+ check_token(Some("crates-io-token"), None);
+
+ cargo_process("logout -Zunstable-options")
+ .masquerade_as_nightly_cargo(&["cargo-logout"])
+ .with_stderr(
+ "\
+[LOGOUT] token for `dummy-registry` has been removed from local storage
+[NOTE] This does not revoke the token on the registry server.
+ If you need to revoke the token, visit the `dummy-registry` website \
+ and follow the instructions there.
+",
+ )
+ .run();
+ check_token(None, Some("dummy-registry"));
+ check_token(Some("crates-io-token"), None);
+
+ cargo_process("logout -Zunstable-options")
+ .masquerade_as_nightly_cargo(&["cargo-logout"])
+ .with_stderr("[LOGOUT] not currently logged in to `dummy-registry`")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/lto.rs b/src/tools/cargo/tests/testsuite/lto.rs
new file mode 100644
index 000000000..40b4f7ca2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/lto.rs
@@ -0,0 +1,850 @@
+use cargo::core::compiler::Lto;
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_manifest, project, Project};
+use std::process::Output;
+
+#[cargo_test]
+fn with_deps() {
+ Package::new("bar", "0.0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.0.0"
+
+ [dependencies]
+ bar = "*"
+
+ [profile.release]
+ lto = true
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() {}")
+ .build();
+ p.cargo("build -v --release")
+ .with_stderr_contains("[..]`rustc[..]--crate-name bar[..]-C linker-plugin-lto[..]`")
+ .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`")
+ .run();
+}
+
+#[cargo_test]
+fn shared_deps() {
+ Package::new("bar", "0.0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.0.0"
+
+ [dependencies]
+ bar = "*"
+
+ [build-dependencies]
+ bar = "*"
+
+ [profile.release]
+ lto = true
+ "#,
+ )
+ .file("build.rs", "extern crate bar; fn main() {}")
+ .file("src/main.rs", "extern crate bar; fn main() {}")
+ .build();
+ p.cargo("build -v --release")
+ .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`")
+ .run();
+}
+
+#[cargo_test]
+fn build_dep_not_ltod() {
+ Package::new("bar", "0.0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.0.0"
+
+ [build-dependencies]
+ bar = "*"
+
+ [profile.release]
+ lto = true
+ "#,
+ )
+ .file("build.rs", "extern crate bar; fn main() {}")
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("build -v --release")
+ .with_stderr_contains("[..]`rustc[..]--crate-name bar[..]-C embed-bitcode=no[..]`")
+ .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`")
+ .run();
+}
+
+#[cargo_test]
+fn complicated() {
+ Package::new("dep-shared", "0.0.1")
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
+ Package::new("dep-normal2", "0.0.1")
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
+ Package::new("dep-normal", "0.0.1")
+ .dep("dep-shared", "*")
+ .dep("dep-normal2", "*")
+ .file(
+ "src/lib.rs",
+ "
+ pub fn foo() {
+ dep_shared::foo();
+ dep_normal2::foo();
+ }
+ ",
+ )
+ .publish();
+ Package::new("dep-build2", "0.0.1")
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
+ Package::new("dep-build", "0.0.1")
+ .dep("dep-shared", "*")
+ .dep("dep-build2", "*")
+ .file(
+ "src/lib.rs",
+ "
+ pub fn foo() {
+ dep_shared::foo();
+ dep_build2::foo();
+ }
+ ",
+ )
+ .publish();
+ Package::new("dep-proc-macro2", "0.0.1")
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
+ Package::new("dep-proc-macro", "0.0.1")
+ .proc_macro(true)
+ .dep("dep-shared", "*")
+ .dep("dep-proc-macro2", "*")
+ .file(
+ "src/lib.rs",
+ "
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[proc_macro_attribute]
+ pub fn foo(_: TokenStream, a: TokenStream) -> TokenStream {
+ dep_shared::foo();
+ dep_proc_macro2::foo();
+ a
+ }
+ ",
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.0.0"
+
+ [lib]
+ crate-type = ['cdylib', 'staticlib']
+
+ [dependencies]
+ dep-normal = "*"
+ dep-proc-macro = "*"
+
+ [build-dependencies]
+ dep-build = "*"
+
+ [profile.release]
+ lto = true
+
+ # force build deps to share an opt-level with the rest of the
+ # graph so they only get built once.
+ [profile.release.build-override]
+ opt-level = 3
+ "#,
+ )
+ .file("build.rs", "fn main() { dep_build::foo() }")
+ .file(
+ "src/bin/foo-bin.rs",
+ "#[dep_proc_macro::foo] fn main() { dep_normal::foo() }",
+ )
+ .file(
+ "src/lib.rs",
+ "#[dep_proc_macro::foo] pub fn foo() { dep_normal::foo() }",
+ )
+ .build();
+ p.cargo("build -v --release")
+ // normal deps and their transitive dependencies do not need object
+ // code, so they should have linker-plugin-lto specified
+ .with_stderr_contains(
+ "[..]`rustc[..]--crate-name dep_normal2 [..]-C linker-plugin-lto[..]`",
+ )
+ .with_stderr_contains("[..]`rustc[..]--crate-name dep_normal [..]-C linker-plugin-lto[..]`")
+ // build dependencies and their transitive deps don't need any bitcode,
+ // so embedding should be turned off
+ .with_stderr_contains("[..]`rustc[..]--crate-name dep_build2 [..]-C embed-bitcode=no[..]`")
+ .with_stderr_contains("[..]`rustc[..]--crate-name dep_build [..]-C embed-bitcode=no[..]`")
+ .with_stderr_contains(
+ "[..]`rustc[..]--crate-name build_script_build [..]-C embed-bitcode=no[..]`",
+ )
+ // proc macro deps are the same as build deps here
+ .with_stderr_contains(
+ "[..]`rustc[..]--crate-name dep_proc_macro2 [..]-C embed-bitcode=no[..]`",
+ )
+ .with_stderr_contains(
+ "[..]`rustc[..]--crate-name dep_proc_macro [..]-C embed-bitcode=no[..]`",
+ )
+ .with_stderr_contains(
+ "[..]`rustc[..]--crate-name foo_bin [..]--crate-type bin[..]-C lto[..]`",
+ )
+ .with_stderr_contains(
+ "[..]`rustc[..]--crate-name test [..]--crate-type cdylib[..]-C lto[..]`",
+ )
+ .with_stderr_contains("[..]`rustc[..]--crate-name dep_shared [..]`")
+ .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-C lto[..]")
+ .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-C linker-plugin-lto[..]")
+ .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-C embed-bitcode[..]")
+ .run();
+}
+
+#[cargo_test]
+fn off_in_manifest_works() {
+ Package::new("bar", "0.0.1")
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.0.0"
+
+ [dependencies]
+ bar = "*"
+
+ [profile.release]
+ lto = "off"
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file(
+ "src/main.rs",
+ "fn main() {
+ test::foo();
+ bar::foo();
+ }",
+ )
+ .build();
+ p.cargo("build -v --release")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] [..]
+[DOWNLOADED] [..]
+[COMPILING] bar v0.0.1
+[RUNNING] `rustc --crate-name bar [..]--crate-type lib [..]-C lto=off -C embed-bitcode=no[..]
+[COMPILING] test [..]
+[RUNNING] `rustc --crate-name test [..]--crate-type lib [..]-C lto=off -C embed-bitcode=no[..]
+[RUNNING] `rustc --crate-name test src/main.rs [..]--crate-type bin [..]-C lto=off[..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn between_builds() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.0.0"
+
+ [profile.release]
+ lto = true
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file("src/main.rs", "fn main() { test::foo() }")
+ .build();
+ p.cargo("build -v --release --lib")
+ .with_stderr(
+ "\
+[COMPILING] test [..]
+[RUNNING] `rustc [..]--crate-type lib[..]-C linker-plugin-lto[..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("build -v --release")
+ .with_stderr_contains(
+ "\
+[COMPILING] test [..]
+[RUNNING] `rustc [..]--crate-type bin[..]-C lto[..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_all() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+
+ [profile.release]
+ lto = true
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("tests/a.rs", "")
+ .file("tests/b.rs", "")
+ .build();
+ p.cargo("test --release -v")
+ .with_stderr_contains("[RUNNING] `rustc[..]--crate-name foo[..]-C lto[..]")
+ .run();
+}
+
+#[cargo_test]
+fn test_all_and_bench() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+
+ [profile.release]
+ lto = true
+ [profile.bench]
+ lto = true
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("tests/a.rs", "")
+ .file("tests/b.rs", "")
+ .build();
+ p.cargo("test --release -v")
+ .with_stderr_contains("[RUNNING] `rustc[..]--crate-name a[..]-C lto[..]")
+ .with_stderr_contains("[RUNNING] `rustc[..]--crate-name b[..]-C lto[..]")
+ .with_stderr_contains("[RUNNING] `rustc[..]--crate-name foo[..]-C lto[..]")
+ .run();
+}
+
+/// Basic setup:
+///
+/// foo v0.0.0
+/// ├── bar v0.0.0
+/// │ ├── registry v0.0.1
+/// │ └── registry-shared v0.0.1
+/// └── registry-shared v0.0.1
+///
+/// Where `bar` will have the given crate types.
+fn project_with_dep(crate_types: &str) -> Project {
+ Package::new("registry", "0.0.1")
+ .file("src/lib.rs", r#"pub fn foo() { println!("registry"); }"#)
+ .publish();
+ Package::new("registry-shared", "0.0.1")
+ .file("src/lib.rs", r#"pub fn foo() { println!("shared"); }"#)
+ .publish();
+
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.0"
+
+ [workspace]
+
+ [dependencies]
+ bar = { path = 'bar' }
+ registry-shared = "*"
+
+ [profile.release]
+ lto = true
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
+ fn main() {
+ bar::foo();
+ registry_shared::foo();
+ }
+ ",
+ )
+ .file(
+ "bar/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.0"
+
+ [dependencies]
+ registry = "*"
+ registry-shared = "*"
+
+ [lib]
+ crate-type = [{}]
+ "#,
+ crate_types
+ ),
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ pub fn foo() {
+ println!("bar");
+ registry::foo();
+ registry_shared::foo();
+ }
+ "#,
+ )
+ .file("tests/a.rs", "")
+ .file("bar/tests/b.rs", "")
+ .build()
+}
+
+/// Helper for checking which LTO behavior is used for a specific crate.
+///
+/// `krate_info` is extra compiler flags used to distinguish the invocation
+/// when the same crate name is built multiple times.
+fn verify_lto(output: &Output, krate: &str, krate_info: &str, expected_lto: Lto) {
+ let stderr = std::str::from_utf8(&output.stderr).unwrap();
+ let mut matches = stderr.lines().filter(|line| {
+ line.contains("Running")
+ && line.contains(&format!("--crate-name {} ", krate))
+ && line.contains(krate_info)
+ });
+ let line = matches.next().unwrap_or_else(|| {
+ panic!(
+ "expected to find crate `{}` info: `{}`, not found in output:\n{}",
+ krate, krate_info, stderr
+ );
+ });
+ if let Some(line2) = matches.next() {
+ panic!(
+ "found multiple lines matching crate `{}` info: `{}`:\nline1:{}\nline2:{}\noutput:\n{}",
+ krate, krate_info, line, line2, stderr
+ );
+ }
+ let actual_lto = if let Some(index) = line.find("-C lto=") {
+ let s = &line[index..];
+ let end = s.find(' ').unwrap();
+ let mode = &line[index + "-C lto=".len()..index + end];
+ if mode == "off" {
+ Lto::Off
+ } else {
+ Lto::Run(Some(mode.into()))
+ }
+ } else if line.contains("-C lto") {
+ Lto::Run(None)
+ } else if line.contains("-C linker-plugin-lto") {
+ Lto::OnlyBitcode
+ } else if line.contains("-C embed-bitcode=no") {
+ Lto::OnlyObject
+ } else {
+ Lto::ObjectAndBitcode
+ };
+ assert_eq!(
+ actual_lto, expected_lto,
+ "did not find expected LTO in line: {}",
+ line
+ );
+}
+
+#[cargo_test]
+fn cdylib_and_rlib() {
+ let p = project_with_dep("'cdylib', 'rlib'");
+ let output = p.cargo("build --release -v").exec_with_output().unwrap();
+ // `registry` is ObjectAndBitcode because it needs Object for the
+ // rlib, and Bitcode for the cdylib (which doesn't support LTO).
+ verify_lto(
+ &output,
+ "registry",
+ "--crate-type lib",
+ Lto::ObjectAndBitcode,
+ );
+ // Same as `registry`
+ verify_lto(
+ &output,
+ "registry_shared",
+ "--crate-type lib",
+ Lto::ObjectAndBitcode,
+ );
+ // Same as `registry`
+ verify_lto(
+ &output,
+ "bar",
+ "--crate-type cdylib --crate-type rlib",
+ Lto::ObjectAndBitcode,
+ );
+ verify_lto(&output, "foo", "--crate-type bin", Lto::Run(None));
+ p.cargo("test --release -v")
+ .with_stderr_unordered(
+ "\
+[FRESH] registry v0.0.1
+[FRESH] registry-shared v0.0.1
+[FRESH] bar v0.0.0 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..]-C lto [..]--test[..]
+[RUNNING] `rustc --crate-name a [..]-C lto [..]--test[..]
+[FINISHED] [..]
+[RUNNING] [..]
+[RUNNING] [..]
+",
+ )
+ .run();
+ p.cargo("build --release -v --manifest-path bar/Cargo.toml")
+ .with_stderr_unordered(
+ "\
+[FRESH] registry-shared v0.0.1
+[FRESH] registry v0.0.1
+[FRESH] bar v0.0.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("test --release -v --manifest-path bar/Cargo.toml")
+ .with_stderr_unordered(
+ "\
+[FRESH] registry-shared v0.0.1
+[FRESH] registry v0.0.1
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar [..]-C lto[..]--test[..]
+[RUNNING] `rustc --crate-name b [..]-C lto[..]--test[..]
+[FINISHED] [..]
+[RUNNING] [..]target/release/deps/bar-[..]
+[RUNNING] [..]target/release/deps/b-[..]
+[DOCTEST] bar
+[RUNNING] `rustdoc --crate-type cdylib --crate-type rlib --crate-name bar --test [..]-C lto[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dylib() {
+ let p = project_with_dep("'dylib'");
+ let output = p.cargo("build --release -v").exec_with_output().unwrap();
+ // `registry` is OnlyObject because rustc doesn't support LTO with dylibs.
+ verify_lto(&output, "registry", "--crate-type lib", Lto::OnlyObject);
+ // `registry_shared` is both because it is needed by both bar (Object) and
+ // foo (Bitcode for LTO).
+ verify_lto(
+ &output,
+ "registry_shared",
+ "--crate-type lib",
+ Lto::ObjectAndBitcode,
+ );
+ // `bar` is OnlyObject because rustc doesn't support LTO with dylibs.
+ verify_lto(&output, "bar", "--crate-type dylib", Lto::OnlyObject);
+ // `foo` is LTO because it is a binary, and the profile specifies `lto=true`.
+ verify_lto(&output, "foo", "--crate-type bin", Lto::Run(None));
+ // `cargo test` should not rebuild dependencies. It builds the test
+ // executables with `lto=true` because the tests are built with the
+ // `--release` flag.
+ p.cargo("test --release -v")
+ .with_stderr_unordered(
+ "\
+[FRESH] registry v0.0.1
+[FRESH] registry-shared v0.0.1
+[FRESH] bar v0.0.0 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..]-C lto [..]--test[..]
+[RUNNING] `rustc --crate-name a [..]-C lto [..]--test[..]
+[FINISHED] [..]
+[RUNNING] [..]
+[RUNNING] [..]
+",
+ )
+ .run();
+ // Building just `bar` causes `registry-shared` to get rebuilt because it
+ // switches to OnlyObject because it is now only being used with a dylib
+ // which does not support LTO.
+ //
+ // `bar` gets rebuilt because `registry_shared` got rebuilt.
+ p.cargo("build --release -v --manifest-path bar/Cargo.toml")
+ .with_stderr_unordered(
+ "\
+[COMPILING] registry-shared v0.0.1
+[FRESH] registry v0.0.1
+[RUNNING] `rustc --crate-name registry_shared [..]-C embed-bitcode=no[..]
+[DIRTY] bar v0.0.0 ([..]): dependency info changed
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar [..]--crate-type dylib [..]-C embed-bitcode=no[..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ // Testing just `bar` causes `registry` to get rebuilt because it switches
+ // to needing both Object (for the `bar` dylib) and Bitcode (for the test
+ // built with LTO).
+ //
+ // `bar` the dylib gets rebuilt because `registry` got rebuilt.
+ p.cargo("test --release -v --manifest-path bar/Cargo.toml")
+ .with_stderr_unordered(
+ "\
+[FRESH] registry-shared v0.0.1
+[COMPILING] registry v0.0.1
+[RUNNING] `rustc --crate-name registry [..]
+[DIRTY] bar v0.0.0 ([..]): dependency info changed
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar [..]--crate-type dylib [..]-C embed-bitcode=no[..]
+[RUNNING] `rustc --crate-name bar [..]-C lto [..]--test[..]
+[RUNNING] `rustc --crate-name b [..]-C lto [..]--test[..]
+[FINISHED] [..]
+[RUNNING] [..]
+[RUNNING] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+// This is currently broken on windows-gnu, see https://github.com/rust-lang/rust/issues/109797
+#[cfg_attr(
+ all(target_os = "windows", target_env = "gnu"),
+ ignore = "windows-gnu not working"
+)]
+fn test_profile() {
+ Package::new("bar", "0.0.1")
+ .file("src/lib.rs", "pub fn foo() -> i32 { 123 } ")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [profile.test]
+ lto = 'thin'
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[test]
+ fn t1() {
+ assert_eq!(123, bar::foo());
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("test -v")
+ // unordered because the two `foo` builds start in parallel
+ .with_stderr_unordered("\
+[UPDATING] [..]
+[DOWNLOADING] [..]
+[DOWNLOADED] [..]
+[COMPILING] bar v0.0.1
+[RUNNING] `rustc --crate-name bar [..]crate-type lib[..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..]--crate-type lib --emit=dep-info,metadata,link -C linker-plugin-lto[..]
+[RUNNING] `rustc --crate-name foo [..]--emit=dep-info,link -C lto=thin [..]--test[..]
+[FINISHED] [..]
+[RUNNING] [..]
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]
+")
+ .run();
+}
+
+#[cargo_test]
+fn doctest() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [profile.release]
+ lto = true
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ /// Foo!
+ ///
+ /// ```
+ /// foo::foo();
+ /// ```
+ pub fn foo() { bar::bar(); }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ pub fn bar() { println!("hi!"); }
+ "#,
+ )
+ .build();
+
+ p.cargo("test --doc --release -v")
+ .with_stderr_contains("[..]`rustc --crate-name bar[..]-C linker-plugin-lto[..]")
+ .with_stderr_contains("[..]`rustc --crate-name foo[..]-C linker-plugin-lto[..]")
+ // embed-bitcode should be harmless here
+ .with_stderr_contains("[..]`rustdoc [..]-C lto[..]")
+ .run();
+
+ // Try with bench profile.
+ p.cargo("test --doc --release -v")
+ .env("CARGO_PROFILE_BENCH_LTO", "true")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar v0.1.0 [..]
+[FRESH] foo v0.1.0 [..]
+[FINISHED] release [..]
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]-C lto[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dylib_rlib_bin() {
+ // dylib+rlib linked with a binary
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [lib]
+ crate-type = ["dylib", "rlib"]
+
+ [profile.release]
+ lto = true
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() { println!(\"hi!\"); }")
+ .file("src/bin/ferret.rs", "fn main() { foo::foo(); }")
+ .build();
+
+ let output = p.cargo("build --release -v").exec_with_output().unwrap();
+ verify_lto(
+ &output,
+ "foo",
+ "--crate-type dylib --crate-type rlib",
+ Lto::ObjectAndBitcode,
+ );
+ verify_lto(&output, "ferret", "--crate-type bin", Lto::Run(None));
+}
+
+#[cargo_test]
+fn fresh_swapping_commands() {
+ // In some rare cases, different commands end up building dependencies
+ // with different LTO settings. This checks that it doesn't cause the
+ // cache to thrash in that scenario.
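+ //
+ // Concretely, `build` and `test` below both want `bar` compiled with
+ // `-C linker-plugin-lto`, so repeated invocations should report it as FRESH
+ // rather than rebuilding it.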
+ Package::new("bar", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+
+ [profile.release]
+ lto = true
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() { println!(\"hi!\"); }")
+ .build();
+
+ p.cargo("build --release -v")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.0 [..]
+[COMPILING] bar v1.0.0
+[RUNNING] `rustc --crate-name bar [..]-C linker-plugin-lto[..]
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C linker-plugin-lto[..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("test --release -v")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar v1.0.0
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C lto[..]--test[..]
+[FINISHED] [..]
+[RUNNING] `[..]/foo[..]`
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]-C lto[..]
+",
+ )
+ .run();
+
+ p.cargo("build --release -v")
+ .with_stderr(
+ "\
+[FRESH] bar v1.0.0
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("test --release -v --no-run -v")
+ .with_stderr(
+ "\
+[FRESH] bar v1.0.0
+[FRESH] foo [..]
+[FINISHED] [..]
+[EXECUTABLE] `[..]/target/release/deps/foo-[..][EXE]`
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/main.rs b/src/tools/cargo/tests/testsuite/main.rs
new file mode 100644
index 000000000..a1e293acd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/main.rs
@@ -0,0 +1,146 @@
+// See src/cargo/lib.rs for notes on these lint settings.
+#![warn(rust_2018_idioms)]
+#![allow(clippy::all)]
+
+#[macro_use]
+extern crate cargo_test_macro;
+
+mod advanced_env;
+mod alt_registry;
+mod artifact_dep;
+mod bad_config;
+mod bad_manifest_path;
+mod bench;
+mod binary_name;
+mod build;
+mod build_plan;
+mod build_script;
+mod build_script_env;
+mod build_script_extra_link_arg;
+mod cache_messages;
+mod cargo_add;
+mod cargo_alias_config;
+mod cargo_command;
+mod cargo_config;
+mod cargo_env_config;
+mod cargo_features;
+mod cargo_remove;
+mod cargo_targets;
+mod cfg;
+mod check;
+mod check_cfg;
+mod clean;
+mod collisions;
+mod concurrent;
+mod config;
+mod config_cli;
+mod config_include;
+mod corrupt_git;
+mod credential_process;
+mod cross_compile;
+mod cross_publish;
+mod custom_target;
+mod death;
+mod dep_info;
+mod direct_minimal_versions;
+mod directory;
+mod doc;
+mod docscrape;
+mod edition;
+mod error;
+mod features;
+mod features2;
+mod features_namespaced;
+mod fetch;
+mod fix;
+mod freshness;
+mod future_incompat_report;
+mod generate_lockfile;
+mod git;
+mod git_auth;
+mod git_gc;
+mod glob_targets;
+mod help;
+mod https;
+mod inheritable_workspace_fields;
+mod init;
+mod install;
+mod install_upgrade;
+mod jobserver;
+mod list_availables;
+mod local_registry;
+mod locate_project;
+mod lockfile_compat;
+mod login;
+mod logout;
+mod lto;
+mod member_discovery;
+mod member_errors;
+mod message_format;
+mod messages;
+mod metabuild;
+mod metadata;
+mod minimal_versions;
+mod multitarget;
+mod net_config;
+mod new;
+mod offline;
+mod old_cargos;
+mod out_dir;
+mod owner;
+mod package;
+mod package_features;
+mod patch;
+mod path;
+mod paths;
+mod pkgid;
+mod plugins;
+mod proc_macro;
+mod profile_config;
+mod profile_custom;
+mod profile_overrides;
+mod profile_targets;
+mod profiles;
+mod progress;
+mod pub_priv;
+mod publish;
+mod publish_lockfile;
+mod read_manifest;
+mod registry;
+mod registry_auth;
+mod rename_deps;
+mod replace;
+mod required_features;
+mod run;
+mod rust_version;
+mod rustc;
+mod rustc_info_cache;
+mod rustdoc;
+mod rustdoc_extern_html;
+mod rustdocflags;
+mod rustflags;
+mod search;
+mod shell_quoting;
+mod source_replacement;
+mod ssh;
+mod standard_lib;
+mod test;
+mod timings;
+mod tool_paths;
+mod tree;
+mod tree_graph_features;
+mod unit_graph;
+mod update;
+mod vendor;
+mod verify_project;
+mod version;
+mod warn_on_failure;
+mod weak_dep_features;
+mod workspaces;
+mod yank;
+
+#[cargo_test]
+fn aaa_trigger_cross_compile_disabled_check() {
+ // This triggers the cross compile disabled check to run ASAP, see #5141
+ cargo_test_support::cross_compile::disabled();
+}
diff --git a/src/tools/cargo/tests/testsuite/member_discovery.rs b/src/tools/cargo/tests/testsuite/member_discovery.rs
new file mode 100644
index 000000000..5377443b6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/member_discovery.rs
@@ -0,0 +1,44 @@
+//! Tests for workspace member discovery.
+
+use cargo::core::{Shell, Workspace};
+use cargo::util::config::Config;
+
+use cargo_test_support::install::cargo_home;
+use cargo_test_support::project;
+use cargo_test_support::registry;
+
+/// Tests exclusion of non-directory files from workspace member discovery using glob `*`.
+#[cargo_test]
+fn bad_file_member_exclusion() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = [ "crates/*" ]
+ "#,
+ )
+ .file("crates/.DS_Store", "PLACEHOLDER")
+ .file(
+ "crates/bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file("crates/bar/src/main.rs", "fn main() {}")
+ .build();
+
+ // Prevent this test from accessing the network by setting up .cargo/config.
+ registry::init();
+ let config = Config::new(
+ Shell::from_write(Box::new(Vec::new())),
+ cargo_home(),
+ cargo_home(),
+ );
+ let ws = Workspace::new(&p.root().join("Cargo.toml"), &config).unwrap();
+ assert_eq!(ws.members().count(), 1);
+ assert_eq!(ws.members().next().unwrap().name(), "bar");
+}
diff --git a/src/tools/cargo/tests/testsuite/member_errors.rs b/src/tools/cargo/tests/testsuite/member_errors.rs
new file mode 100644
index 000000000..c3c340ce0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/member_errors.rs
@@ -0,0 +1,164 @@
+//! Tests for workspace member errors.
+
+use cargo::core::resolver::ResolveError;
+use cargo::core::{compiler::CompileMode, Shell, Workspace};
+use cargo::ops::{self, CompileOptions};
+use cargo::util::{config::Config, errors::ManifestError};
+
+use cargo_test_support::install::cargo_home;
+use cargo_test_support::project;
+use cargo_test_support::registry;
+
+/// Tests inclusion of a `ManifestError` pointing to a member manifest
+/// when that manifest fails to deserialize.
+#[cargo_test]
+fn toml_deserialize_manifest_error() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foobar == "0.55"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ let root_manifest_path = p.root().join("Cargo.toml");
+ let member_manifest_path = p.root().join("bar").join("Cargo.toml");
+
+ let error = Workspace::new(&root_manifest_path, &Config::default().unwrap()).unwrap_err();
+ eprintln!("{:?}", error);
+
+ let manifest_err: &ManifestError = error.downcast_ref().expect("Not a ManifestError");
+ assert_eq!(manifest_err.manifest_path(), &root_manifest_path);
+
+ let causes: Vec<_> = manifest_err.manifest_causes().collect();
+ assert_eq!(causes.len(), 1, "{:?}", causes);
+ assert_eq!(causes[0].manifest_path(), &member_manifest_path);
+}
+
+/// Tests inclusion of a `ManifestError` pointing to a member manifest
+/// when that manifest has an invalid dependency path.
+#[cargo_test]
+fn member_manifest_path_io_error() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foobar = { path = "nosuch" }
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ let root_manifest_path = p.root().join("Cargo.toml");
+ let member_manifest_path = p.root().join("bar").join("Cargo.toml");
+ let missing_manifest_path = p.root().join("bar").join("nosuch").join("Cargo.toml");
+
+ let error = Workspace::new(&root_manifest_path, &Config::default().unwrap()).unwrap_err();
+ eprintln!("{:?}", error);
+
+ let manifest_err: &ManifestError = error.downcast_ref().expect("Not a ManifestError");
+ assert_eq!(manifest_err.manifest_path(), &root_manifest_path);
+
+ let causes: Vec<_> = manifest_err.manifest_causes().collect();
+ assert_eq!(causes.len(), 2, "{:?}", causes);
+ assert_eq!(causes[0].manifest_path(), &member_manifest_path);
+ assert_eq!(causes[1].manifest_path(), &missing_manifest_path);
+}
+
+/// Tests that dependency version errors report which package failed via a `ResolveError`.
+#[cargo_test]
+fn member_manifest_version_error() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ i-dont-exist = "0.55"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ // Prevent this test from accessing the network by setting up .cargo/config.
+ registry::init();
+ let config = Config::new(
+ Shell::from_write(Box::new(Vec::new())),
+ cargo_home(),
+ cargo_home(),
+ );
+ let ws = Workspace::new(&p.root().join("Cargo.toml"), &config).unwrap();
+ let compile_options = CompileOptions::new(&config, CompileMode::Build).unwrap();
+ let member_bar = ws.members().find(|m| &*m.name() == "bar").unwrap();
+
+ let error = ops::compile(&ws, &compile_options).map(|_| ()).unwrap_err();
+ eprintln!("{:?}", error);
+
+ let resolve_err: &ResolveError = error.downcast_ref().expect("Not a ResolveError");
+ let package_path = resolve_err.package_path();
+ assert_eq!(package_path.len(), 1, "package_path: {:?}", package_path);
+ assert_eq!(package_path[0], member_bar.package_id());
+}
diff --git a/src/tools/cargo/tests/testsuite/message_format.rs b/src/tools/cargo/tests/testsuite/message_format.rs
new file mode 100644
index 000000000..e9310b261
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/message_format.rs
@@ -0,0 +1,133 @@
+//! Tests for --message-format flag.
+
+use cargo_test_support::{basic_lib_manifest, basic_manifest, project};
+
+#[cargo_test]
+fn cannot_specify_two() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ let formats = ["human", "json", "short"];
+
+ let two_kinds = "error: cannot specify two kinds of `message-format` arguments\n";
+ for a in formats.iter() {
+ for b in formats.iter() {
+ p.cargo(&format!("build --message-format {},{}", a, b))
+ .with_status(101)
+ .with_stderr(two_kinds)
+ .run();
+ }
+ }
+}
+
+#[cargo_test]
+fn double_json_works() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check --message-format json,json-render-diagnostics")
+ .run();
+ p.cargo("check --message-format json,json-diagnostic-short")
+ .run();
+ p.cargo("check --message-format json,json-diagnostic-rendered-ansi")
+ .run();
+ p.cargo("check --message-format json --message-format json-diagnostic-rendered-ansi")
+ .run();
+ p.cargo("check --message-format json-diagnostic-rendered-ansi")
+ .run();
+ p.cargo("check --message-format json-diagnostic-short,json-diagnostic-rendered-ansi")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_renders() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+
+ [dependencies]
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file("src/main.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check --message-format json-render-diagnostics")
+ .with_status(101)
+ .with_stdout(
+ "{\"reason\":\"compiler-artifact\",[..]\n\
+ {\"reason\":\"build-finished\",\"success\":false}",
+ )
+ .with_stderr_contains(
+ "\
+[CHECKING] bar [..]
+[CHECKING] foo [..]
+error[..]`main`[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_renders_short() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check --message-format json-render-diagnostics,json-diagnostic-short")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[CHECKING] foo [..]
+error[..]`main`[..]
+",
+ )
+ .with_stderr_does_not_contain("note:")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_renders_ansi() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check --message-format json-diagnostic-rendered-ansi")
+ .with_status(101)
+ .with_stdout_contains("[..]\\u001b[38;5;9merror[..]")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_renders_doctests() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file(
+ "src/lib.rs",
+ "\
+ /// ```rust
+ /// bar()
+ /// ```
+ pub fn bar() {}
+ ",
+ )
+ .build();
+
+ p.cargo("test --doc --message-format short")
+ .with_status(101)
+ .with_stdout_contains("src/lib.rs:2:1: error[E0425]:[..]")
+ .with_stdout_contains("[..]src/lib.rs - bar (line 1)[..]")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/messages.rs b/src/tools/cargo/tests/testsuite/messages.rs
new file mode 100644
index 000000000..2c534d8f0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/messages.rs
@@ -0,0 +1,144 @@
+//! General tests specifically about diagnostics and other messages.
+//!
+//! Tests for message caching can be found in `cache_messages`.
+
+use cargo_test_support::{process, project, Project};
+use cargo_util::ProcessError;
+
+/// Captures the actual diagnostics displayed by rustc. This is done to avoid
+/// relying on the exact message formatting in rustc.
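+///
+/// The helper runs `rustc --crate-type=lib <path>` directly in the project
+/// root and strips rustc's trailing summary lines ("warning(s) emitted" /
+/// "aborting due to ...") along with the blank line after each, so the
+/// remaining diagnostics can be spliced into expected Cargo stderr.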
+pub fn raw_rustc_output(project: &Project, path: &str, extra: &[&str]) -> String {
+ let mut proc = process("rustc");
+ if cfg!(windows) {
+ // Sanitize in case the caller wants to do direct string comparison with Cargo's output.
+ proc.arg(path.replace('/', "\\"));
+ } else {
+ proc.arg(path);
+ }
+ let rustc_output = match proc
+ .arg("--crate-type=lib")
+ .args(extra)
+ .cwd(project.root())
+ .exec_with_output()
+ {
+ Ok(output) => output.stderr,
+ Err(e) => e.downcast::<ProcessError>().unwrap().stderr.unwrap(),
+ };
+ // Do a little dance to remove rustc's "warnings emitted" message and the subsequent newline.
+ let stderr = std::str::from_utf8(&rustc_output).expect("utf8");
+ let mut lines = stderr.lines();
+ let mut result = String::new();
+ while let Some(line) = lines.next() {
+ if line.contains("warning emitted")
+ || line.contains("warnings emitted")
+ || line.contains("aborting due to")
+ {
+ // Eat blank line.
+ match lines.next() {
+ None | Some("") => continue,
+ Some(s) => panic!("unexpected str {}", s),
+ }
+ }
+ result.push_str(line);
+ result.push('\n');
+ }
+ result
+}
+
+#[cargo_test]
+fn deduplicate_messages_basic() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() {
+ let x = 1;
+ }
+ "#,
+ )
+ .build();
+ let rustc_message = raw_rustc_output(&p, "src/lib.rs", &[]);
+ let expected_output = format!(
+ "{}\
+warning: `foo` (lib) generated 1 warning (run `cargo fix --lib -p foo` to apply 1 suggestion)
+warning: `foo` (lib test) generated 1 warning (1 duplicate)
+[FINISHED] [..]
+[EXECUTABLE] unittests src/lib.rs (target/debug/deps/foo-[..][EXE])
+",
+ rustc_message
+ );
+ p.cargo("test --no-run -j1")
+ .with_stderr(&format!("[COMPILING] foo [..]\n{}", expected_output))
+ .run();
+ // Run again, to check for caching behavior.
+ p.cargo("test --no-run -j1")
+ .with_stderr(expected_output)
+ .run();
+}
+
+#[cargo_test]
+fn deduplicate_messages_mismatched_warnings() {
+ // The lib build prints 1 warning; the test build prints 2, one of which duplicates it.
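+ // (The shared warning is the unused `x` in `foo()`; the test build also warns
+ // that the local binding `MY_VALUE` should have a snake_case name.)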
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() {
+ let x = 1;
+ }
+
+ #[test]
+ fn t1() {
+ let MY_VALUE = 1;
+ assert_eq!(MY_VALUE, 1);
+ }
+ "#,
+ )
+ .build();
+ let lib_output = raw_rustc_output(&p, "src/lib.rs", &[]);
+ let mut lib_test_output = raw_rustc_output(&p, "src/lib.rs", &["--test"]);
+ // Remove the duplicate warning.
+ let start = lib_test_output.find(&lib_output).expect("same warning");
+ lib_test_output.replace_range(start..start + lib_output.len(), "");
+ let expected_output = format!(
+ "\
+{}\
+warning: `foo` (lib) generated 1 warning (run `cargo fix --lib -p foo` to apply 1 suggestion)
+{}\
+warning: `foo` (lib test) generated 2 warnings (1 duplicate)
+[FINISHED] [..]
+[EXECUTABLE] unittests src/lib.rs (target/debug/deps/foo-[..][EXE])
+",
+ lib_output, lib_test_output
+ );
+ p.cargo("test --no-run -j1")
+ .with_stderr(&format!("[COMPILING] foo v0.0.1 [..]\n{}", expected_output))
+ .run();
+ // Run again, to check for caching behavior.
+ p.cargo("test --no-run -j1")
+ .with_stderr(expected_output)
+ .run();
+}
+
+#[cargo_test]
+fn deduplicate_errors() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ this should not compile
+ "#,
+ )
+ .build();
+ let rustc_message = raw_rustc_output(&p, "src/lib.rs", &[]);
+ p.cargo("test -j1")
+ .with_status(101)
+ .with_stderr(&format!(
+ "\
+[COMPILING] foo v0.0.1 [..]
+{}error: could not compile `foo` (lib) due to previous error
+",
+ rustc_message
+ ))
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/metabuild.rs b/src/tools/cargo/tests/testsuite/metabuild.rs
new file mode 100644
index 000000000..022d0bff0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/metabuild.rs
@@ -0,0 +1,771 @@
+//! Tests for the metabuild feature (declarative build scripts).
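+//!
+//! With metabuild, a package lists build-dependency crates under a `metabuild`
+//! key in its manifest instead of providing a `build.rs`; Cargo generates a
+//! small build script (under `target/.metabuild/`) that calls each listed
+//! crate's `metabuild()` function.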
+
+use cargo_test_support::{
+ basic_lib_manifest, basic_manifest, is_coarse_mtime, project, registry::Package, rustc_host,
+ Project,
+};
+
+use std::str;
+
+#[cargo_test]
+fn metabuild_gated() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ metabuild = ["mb"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ feature `metabuild` is required
+
+ The package requires the Cargo feature called `metabuild`, \
+ but that feature is not stabilized in this version of Cargo (1.[..]).
+ Consider adding `cargo-features = [\"metabuild\"]` to the top of Cargo.toml \
+ (above the [package] table) to tell Cargo you are opting in to use this unstable feature.
+ See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#metabuild \
+ for more information about the status of this feature.
+",
+ )
+ .run();
+}
+
+fn basic_project() -> Project {
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ metabuild = ["mb", "mb-other"]
+
+ [build-dependencies]
+ mb = {path="mb"}
+ mb-other = {path="mb-other"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+ .file(
+ "mb/src/lib.rs",
+ r#"pub fn metabuild() { println!("Hello mb"); }"#,
+ )
+ .file(
+ "mb-other/Cargo.toml",
+ r#"
+ [package]
+ name = "mb-other"
+ version = "0.0.1"
+ "#,
+ )
+ .file(
+ "mb-other/src/lib.rs",
+ r#"pub fn metabuild() { println!("Hello mb-other"); }"#,
+ )
+ .build()
+}
+
+#[cargo_test]
+fn metabuild_basic() {
+ let p = basic_project();
+ p.cargo("check -vv")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_stdout_contains("[foo 0.0.1] Hello mb")
+ .with_stdout_contains("[foo 0.0.1] Hello mb-other")
+ .run();
+}
+
+#[cargo_test]
+fn metabuild_error_both() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ metabuild = "mb"
+
+ [build-dependencies]
+ mb = {path="mb"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", r#"fn main() {}"#)
+ .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+ .file(
+ "mb/src/lib.rs",
+ r#"pub fn metabuild() { println!("Hello mb"); }"#,
+ )
+ .build();
+
+ p.cargo("check -vv")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: failed to parse manifest at [..]
+
+Caused by:
+ cannot specify both `metabuild` and `build`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn metabuild_missing_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ metabuild = "mb"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check -vv")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: failed to parse manifest at [..]
+
+Caused by:
+ metabuild package `mb` must be specified in `build-dependencies`",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn metabuild_optional_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ metabuild = "mb"
+
+ [build-dependencies]
+ mb = {path="mb", optional=true}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+ .file(
+ "mb/src/lib.rs",
+ r#"pub fn metabuild() { println!("Hello mb"); }"#,
+ )
+ .build();
+
+ p.cargo("check -vv")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_stdout_does_not_contain("[foo 0.0.1] Hello mb")
+ .run();
+
+ p.cargo("check -vv --features mb")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_stdout_contains("[foo 0.0.1] Hello mb")
+ .run();
+}
+
+#[cargo_test]
+fn metabuild_lib_name() {
+ // Test when setting `name` on [lib].
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ metabuild = "mb"
+
+ [build-dependencies]
+ mb = {path="mb"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "mb/Cargo.toml",
+ r#"
+ [package]
+ name = "mb"
+ version = "0.0.1"
+ [lib]
+ name = "other"
+ "#,
+ )
+ .file(
+ "mb/src/lib.rs",
+ r#"pub fn metabuild() { println!("Hello mb"); }"#,
+ )
+ .build();
+
+ p.cargo("check -vv")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_stdout_contains("[foo 0.0.1] Hello mb")
+ .run();
+}
+
+#[cargo_test]
+fn metabuild_fresh() {
+ if is_coarse_mtime() {
+ // This test doesn't work well with coarse mtimes. Because the
+ // metabuild script is created at build time, its mtime is almost
+ // always equal to the mtime of the output. The second call to `build`
+ // will then think it needs to be rebuilt when it should be fresh.
+ return;
+ }
+
+ // Check that rebuild is fresh.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ metabuild = "mb"
+
+ [build-dependencies]
+ mb = {path="mb"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+ .file(
+ "mb/src/lib.rs",
+ r#"pub fn metabuild() { println!("Hello mb"); }"#,
+ )
+ .build();
+
+ p.cargo("check -vv")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_stdout_contains("[foo 0.0.1] Hello mb")
+ .run();
+
+ p.cargo("check -vv")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_stdout_does_not_contain("[foo 0.0.1] Hello mb")
+ .with_stderr(
+ "\
+[FRESH] mb [..]
+[FRESH] foo [..]
+[FINISHED] dev [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn metabuild_links() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ links = "cat"
+ metabuild = "mb"
+
+ [build-dependencies]
+ mb = {path="mb"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+ .file(
+ "mb/src/lib.rs",
+ r#"
+ pub fn metabuild() {
+ assert_eq!(std::env::var("CARGO_MANIFEST_LINKS"),
+ Ok("cat".to_string()));
+ println!("Hello mb");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check -vv")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_stdout_contains("[foo 0.0.1] Hello mb")
+ .run();
+}
+
+#[cargo_test]
+fn metabuild_override() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ links = "cat"
+ metabuild = "mb"
+
+ [build-dependencies]
+ mb = {path="mb"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+ .file(
+ "mb/src/lib.rs",
+ r#"pub fn metabuild() { panic!("should not run"); }"#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}.cat]
+ rustc-link-lib = ["a"]
+ "#,
+ rustc_host()
+ ),
+ )
+ .build();
+
+ p.cargo("check -vv")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .run();
+}
+
+#[cargo_test]
+fn metabuild_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["member1", "member2"]
+ "#,
+ )
+ .file(
+ "member1/Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "member1"
+ version = "0.0.1"
+ metabuild = ["mb1", "mb2"]
+
+ [build-dependencies]
+ mb1 = {path="../../mb1"}
+ mb2 = {path="../../mb2"}
+ "#,
+ )
+ .file("member1/src/lib.rs", "")
+ .file(
+ "member2/Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "member2"
+ version = "0.0.1"
+ metabuild = ["mb1"]
+
+ [build-dependencies]
+ mb1 = {path="../../mb1"}
+ "#,
+ )
+ .file("member2/src/lib.rs", "")
+ .build();
+
+ project()
+ .at("mb1")
+ .file("Cargo.toml", &basic_lib_manifest("mb1"))
+ .file(
+ "src/lib.rs",
+ r#"pub fn metabuild() { println!("Hello mb1 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#,
+ )
+ .build();
+
+ project()
+ .at("mb2")
+ .file("Cargo.toml", &basic_lib_manifest("mb2"))
+ .file(
+ "src/lib.rs",
+ r#"pub fn metabuild() { println!("Hello mb2 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#,
+ )
+ .build();
+
+ p.cargo("check -vv --workspace")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_stdout_contains("[member1 0.0.1] Hello mb1 [..]member1")
+ .with_stdout_contains("[member1 0.0.1] Hello mb2 [..]member1")
+ .with_stdout_contains("[member2 0.0.1] Hello mb1 [..]member2")
+ .with_stdout_does_not_contain("[member2 0.0.1] Hello mb2 [..]member2")
+ .run();
+}
+
+#[cargo_test]
+fn metabuild_metadata() {
+ // The metabuild Target is filtered out of the `metadata` results.
+ let p = basic_project();
+
+ let meta = p
+ .cargo("metadata --format-version=1")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .run_json();
+ let mb_info: Vec<&str> = meta["packages"]
+ .as_array()
+ .unwrap()
+ .iter()
+ .find(|p| p["name"].as_str().unwrap() == "foo")
+ .unwrap()["metabuild"]
+ .as_array()
+ .unwrap()
+ .iter()
+ .map(|s| s.as_str().unwrap())
+ .collect();
+ assert_eq!(mb_info, ["mb", "mb-other"]);
+}
+
+#[cargo_test]
+fn metabuild_build_plan() {
+ let p = basic_project();
+
+ p.cargo("build --build-plan -Zunstable-options")
+ .masquerade_as_nightly_cargo(&["metabuild", "build-plan"])
+ .with_json(
+ r#"
+ {
+ "invocations": [
+ {
+ "package_name": "mb",
+ "package_version": "0.5.0",
+ "target_kind": ["lib"],
+ "compile_mode": "build",
+ "kind": null,
+ "deps": [],
+ "outputs": [
+ "[..]/target/debug/deps/libmb-[..].rlib",
+ "[..]/target/debug/deps/libmb-[..].rmeta"
+ ],
+ "links": {},
+ "program": "rustc",
+ "args": "{...}",
+ "env": "{...}",
+ "cwd": "[..]"
+ },
+ {
+ "package_name": "mb-other",
+ "package_version": "0.0.1",
+ "target_kind": ["lib"],
+ "compile_mode": "build",
+ "kind": null,
+ "deps": [],
+ "outputs": [
+ "[..]/target/debug/deps/libmb_other-[..].rlib",
+ "[..]/target/debug/deps/libmb_other-[..].rmeta"
+ ],
+ "links": {},
+ "program": "rustc",
+ "args": "{...}",
+ "env": "{...}",
+ "cwd": "[..]"
+ },
+ {
+ "package_name": "foo",
+ "package_version": "0.0.1",
+ "target_kind": ["custom-build"],
+ "compile_mode": "build",
+ "kind": null,
+ "deps": [0, 1],
+ "outputs": "{...}",
+ "links": "{...}",
+ "program": "rustc",
+ "args": "{...}",
+ "env": "{...}",
+ "cwd": "[..]"
+ },
+ {
+ "package_name": "foo",
+ "package_version": "0.0.1",
+ "target_kind": ["custom-build"],
+ "compile_mode": "run-custom-build",
+ "kind": null,
+ "deps": [2],
+ "outputs": [],
+ "links": {},
+ "program": "[..]/foo/target/debug/build/foo-[..]/metabuild-foo",
+ "args": [],
+ "env": "{...}",
+ "cwd": "[..]"
+ },
+ {
+ "package_name": "foo",
+ "package_version": "0.0.1",
+ "target_kind": ["lib"],
+ "compile_mode": "build",
+ "kind": null,
+ "deps": [3],
+ "outputs": [
+ "[..]/foo/target/debug/deps/libfoo-[..].rlib",
+ "[..]/foo/target/debug/deps/libfoo-[..].rmeta"
+ ],
+ "links": "{...}",
+ "program": "rustc",
+ "args": "{...}",
+ "env": "{...}",
+ "cwd": "[..]"
+ }
+ ],
+ "inputs": [
+ "[..]/foo/Cargo.toml",
+ "[..]/foo/mb/Cargo.toml",
+ "[..]/foo/mb-other/Cargo.toml"
+ ]
+ }
+ "#,
+ )
+ .run();
+
+ assert_eq!(p.glob("target/.metabuild/metabuild-foo-*.rs").count(), 1);
+}
+
+#[cargo_test]
+fn metabuild_two_versions() {
+ // Two versions of a metabuild dep with the same name.
+ let p = project()
+ .at("ws")
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["member1", "member2"]
+ "#,
+ )
+ .file(
+ "member1/Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "member1"
+ version = "0.0.1"
+ metabuild = ["mb"]
+
+ [build-dependencies]
+ mb = {path="../../mb1"}
+ "#,
+ )
+ .file("member1/src/lib.rs", "")
+ .file(
+ "member2/Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "member2"
+ version = "0.0.1"
+ metabuild = ["mb"]
+
+ [build-dependencies]
+ mb = {path="../../mb2"}
+ "#,
+ )
+ .file("member2/src/lib.rs", "")
+ .build();
+
+ project().at("mb1")
+ .file("Cargo.toml", r#"
+ [package]
+ name = "mb"
+ version = "0.0.1"
+ "#)
+ .file(
+ "src/lib.rs",
+ r#"pub fn metabuild() { println!("Hello mb1 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#,
+ )
+ .build();
+
+ project().at("mb2")
+ .file("Cargo.toml", r#"
+ [package]
+ name = "mb"
+ version = "0.0.2"
+ "#)
+ .file(
+ "src/lib.rs",
+ r#"pub fn metabuild() { println!("Hello mb2 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#,
+ )
+ .build();
+
+ p.cargo("check -vv --workspace")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_stdout_contains("[member1 0.0.1] Hello mb1 [..]member1")
+ .with_stdout_contains("[member2 0.0.1] Hello mb2 [..]member2")
+ .run();
+
+ assert_eq!(
+ p.glob("target/.metabuild/metabuild-member?-*.rs").count(),
+ 2
+ );
+}
+
+#[cargo_test]
+fn metabuild_external_dependency() {
+ Package::new("mb", "1.0.0")
+ .file("Cargo.toml", &basic_manifest("mb", "1.0.0"))
+ .file(
+ "src/lib.rs",
+ r#"pub fn metabuild() { println!("Hello mb"); }"#,
+ )
+ .publish();
+ Package::new("dep", "1.0.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["metabuild"]
+ [package]
+ name = "dep"
+ version = "1.0.0"
+ metabuild = ["mb"]
+
+ [build-dependencies]
+ mb = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build_dep("mb", "1.0.0")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ [dependencies]
+ dep = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "extern crate dep;")
+ .build();
+
+ p.cargo("check -vv")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_stdout_contains("[dep 1.0.0] Hello mb")
+ .run();
+
+ assert_eq!(p.glob("target/.metabuild/metabuild-dep-*.rs").count(), 1);
+}
+
+#[cargo_test]
+fn metabuild_json_artifact() {
+ let p = basic_project();
+ p.cargo("check --message-format=json")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_json_contains_unordered(
+ r#"
+ {
+ "executable": null,
+ "features": [],
+ "filenames": "{...}",
+ "fresh": false,
+ "package_id": "foo [..]",
+ "manifest_path": "[..]",
+ "profile": "{...}",
+ "reason": "compiler-artifact",
+ "target": {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": false,
+ "doctest": false,
+ "edition": "2018",
+ "kind": [
+ "custom-build"
+ ],
+ "name": "metabuild-foo",
+ "src_path": "[..]/foo/target/.metabuild/metabuild-foo-[..].rs",
+ "test": false
+ }
+ }
+
+ {
+ "cfgs": [],
+ "env": [],
+ "linked_libs": [],
+ "linked_paths": [],
+ "package_id": "foo [..]",
+ "out_dir": "[..]",
+ "reason": "build-script-executed"
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn metabuild_failed_build_json() {
+ let p = basic_project();
+ // Empty out the metabuild dep so that the generated build script fails to compile.
+ p.change_file("mb/src/lib.rs", "");
+ p.cargo("check --message-format=json")
+ .masquerade_as_nightly_cargo(&["metabuild"])
+ .with_status(101)
+ .with_json_contains_unordered(
+ r#"
+ {
+ "message": {
+ "children": "{...}",
+ "code": "{...}",
+ "level": "error",
+ "message": "cannot find function `metabuild` in [..] `mb`",
+ "rendered": "{...}",
+ "spans": "{...}"
+ },
+ "package_id": "foo [..]",
+ "manifest_path": "[..]",
+ "reason": "compiler-message",
+ "target": {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": false,
+ "doctest": false,
+ "edition": "2018",
+ "kind": [
+ "custom-build"
+ ],
+ "name": "metabuild-foo",
+ "src_path": null,
+ "test": false
+ }
+ }
+ "#,
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/metadata.rs b/src/tools/cargo/tests/testsuite/metadata.rs
new file mode 100644
index 000000000..547916e7a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/metadata.rs
@@ -0,0 +1,4192 @@
+//! Tests for the `cargo metadata` command.
+
+use cargo_test_support::install::cargo_home;
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, main_file, project, rustc_host};
+use serde_json::json;
+
+#[cargo_test]
+fn cargo_metadata_simple() {
+ let p = project()
+ .file("src/foo.rs", "")
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .build();
+
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "name": "foo",
+ "version": "0.5.0",
+ "id": "foo[..]",
+ "keywords": [],
+ "source": null,
+ "dependencies": [],
+ "edition": "2015",
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "targets": [
+ {
+ "kind": [
+ "bin"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "doc": true,
+ "doctest": false,
+ "test": true,
+ "edition": "2015",
+ "name": "foo",
+ "src_path": "[..]/foo/src/foo.rs"
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null
+ }
+ ],
+ "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "foo 0.5.0 (path+file:[..]foo)"
+ }
+ ],
+ "root": "foo 0.5.0 (path+file:[..]foo)"
+ },
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_warns_on_implicit_version() {
+ let p = project()
+ .file("src/foo.rs", "")
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .build();
+
+ p.cargo("metadata").with_stderr("[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems").run();
+
+ p.cargo("metadata --format-version 1").with_stderr("").run();
+}
+
+#[cargo_test]
+fn library_with_several_crate_types() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+[package]
+name = "foo"
+version = "0.5.0"
+
+[lib]
+crate-type = ["lib", "staticlib"]
+ "#,
+ )
+ .build();
+
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "name": "foo",
+ "readme": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "version": "0.5.0",
+ "rust_version": null,
+ "id": "foo[..]",
+ "keywords": [],
+ "source": null,
+ "dependencies": [],
+ "edition": "2015",
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "targets": [
+ {
+ "kind": [
+ "lib",
+ "staticlib"
+ ],
+ "crate_types": [
+ "lib",
+ "staticlib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "name": "foo",
+ "src_path": "[..]/foo/src/lib.rs"
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null
+ }
+ ],
+ "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "foo 0.5.0 (path+file:[..]foo)"
+ }
+ ],
+ "root": "foo 0.5.0 (path+file:[..]foo)"
+ },
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn library_with_features() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+[package]
+name = "foo"
+version = "0.5.0"
+
+[features]
+default = ["default_feat"]
+default_feat = []
+optional_feat = []
+ "#,
+ )
+ .build();
+
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "name": "foo",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "version": "0.5.0",
+ "id": "foo[..]",
+ "keywords": [],
+ "source": null,
+ "dependencies": [],
+ "edition": "2015",
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "name": "foo",
+ "src_path": "[..]/foo/src/lib.rs"
+ }
+ ],
+ "features": {
+ "default": [
+ "default_feat"
+ ],
+ "default_feat": [],
+ "optional_feat": []
+ },
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null
+ }
+ ],
+ "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [
+ "default",
+ "default_feat"
+ ],
+ "id": "foo 0.5.0 (path+file:[..]foo)"
+ }
+ ],
+ "root": "foo 0.5.0 (path+file:[..]foo)"
+ },
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_with_deps_and_version() {
+ let p = project()
+ .file("src/foo.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [[bin]]
+ name = "foo"
+
+ [dependencies]
+ bar = "*"
+ [dev-dependencies]
+ foobar = "*"
+ "#,
+ )
+ .build();
+ Package::new("baz", "0.0.1").publish();
+ Package::new("foobar", "0.0.1").publish();
+ Package::new("bar", "0.0.1").dep("baz", "0.0.1").publish();
+
+ p.cargo("metadata -q --format-version 1")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [
+ {
+ "features": [],
+ "kind": null,
+ "name": "baz",
+ "optional": false,
+ "registry": null,
+ "rename": null,
+ "req": "^0.0.1",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "target": null,
+ "uses_default_features": true
+ }
+ ],
+ "description": null,
+ "edition": "2015",
+ "features": {},
+ "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "name": "bar",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "bar",
+ "src_path": "[..]src/lib.rs"
+ }
+ ],
+ "version": "0.0.1"
+ },
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "edition": "2015",
+ "features": {},
+ "id": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "name": "baz",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "baz",
+ "src_path": "[..]src/lib.rs"
+ }
+ ],
+ "version": "0.0.1"
+ },
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [
+ {
+ "features": [],
+ "kind": null,
+ "name": "bar",
+ "optional": false,
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "features": [],
+ "kind": "dev",
+ "name": "foobar",
+ "optional": false,
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "target": null,
+ "uses_default_features": true
+ }
+ ],
+ "description": "foo",
+ "edition": "2015",
+ "features": {},
+ "id": "foo 0.5.0 (path+file:[..]foo)",
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "name": "foo",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": true,
+ "doctest": false,
+ "test": true,
+ "edition": "2015",
+ "kind": [
+ "bin"
+ ],
+ "name": "foo",
+ "src_path": "[..]src/foo.rs"
+ }
+ ],
+ "version": "0.5.0"
+ },
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "edition": "2015",
+ "features": {},
+ "id": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "name": "foobar",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "foobar",
+ "src_path": "[..]src/lib.rs"
+ }
+ ],
+ "version": "0.0.1"
+ }
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [
+ "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ "deps": [
+ {
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ],
+ "name": "baz",
+ "pkg": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ }
+ ],
+ "features": [],
+ "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ },
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ },
+ {
+ "dependencies": [
+ "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ "deps": [
+ {
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ],
+ "name": "bar",
+ "pkg": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ },
+ {
+ "dep_kinds": [
+ {
+ "kind": "dev",
+ "target": null
+ }
+ ],
+ "name": "foobar",
+ "pkg": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ }
+ ],
+ "features": [],
+ "id": "foo 0.5.0 (path+file:[..]foo)"
+ },
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ }
+ ],
+ "root": "foo 0.5.0 (path+file:[..]foo)"
+ },
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_members": [
+ "foo 0.5.0 (path+file:[..]foo)"
+ ],
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn example() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+[package]
+name = "foo"
+version = "0.1.0"
+
+[[example]]
+name = "ex"
+ "#,
+ )
+ .build();
+
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "name": "foo",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "version": "0.1.0",
+ "id": "foo[..]",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "edition": "2015",
+ "source": null,
+ "dependencies": [],
+ "targets": [
+ {
+ "kind": [ "lib" ],
+ "crate_types": [ "lib" ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "name": "foo",
+ "src_path": "[..]/foo/src/lib.rs"
+ },
+ {
+ "kind": [ "example" ],
+ "crate_types": [ "bin" ],
+ "doc": false,
+ "doctest": false,
+ "test": false,
+ "edition": "2015",
+ "name": "ex",
+ "src_path": "[..]/foo/examples/ex.rs"
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null
+ }
+ ],
+ "workspace_members": [
+ "foo 0.1.0 (path+file:[..]foo)"
+ ],
+ "resolve": {
+ "root": "foo 0.1.0 (path+file://[..]foo)",
+ "nodes": [
+ {
+ "id": "foo 0.1.0 (path+file:[..]foo)",
+ "features": [],
+ "dependencies": [],
+ "deps": []
+ }
+ ]
+ },
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn example_lib() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+[package]
+name = "foo"
+version = "0.1.0"
+
+[[example]]
+name = "ex"
+crate-type = ["rlib", "dylib"]
+ "#,
+ )
+ .build();
+
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "name": "foo",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "version": "0.1.0",
+ "id": "foo[..]",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "edition": "2015",
+ "source": null,
+ "dependencies": [],
+ "targets": [
+ {
+ "kind": [ "lib" ],
+ "crate_types": [ "lib" ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "name": "foo",
+ "src_path": "[..]/foo/src/lib.rs"
+ },
+ {
+ "kind": [ "example" ],
+ "crate_types": [ "rlib", "dylib" ],
+ "doc": false,
+ "doctest": false,
+ "test": false,
+ "edition": "2015",
+ "name": "ex",
+ "src_path": "[..]/foo/examples/ex.rs"
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null
+ }
+ ],
+ "workspace_members": [
+ "foo 0.1.0 (path+file:[..]foo)"
+ ],
+ "resolve": {
+ "root": "foo 0.1.0 (path+file://[..]foo)",
+ "nodes": [
+ {
+ "id": "foo 0.1.0 (path+file:[..]foo)",
+ "features": [],
+ "dependencies": [],
+ "deps": []
+ }
+ ]
+ },
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn workspace_metadata() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+
+ [workspace.metadata]
+ tool1 = "hello"
+ tool2 = [1, 2, 3]
+
+ [workspace.metadata.foo]
+ bar = 3
+
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "name": "bar",
+ "version": "0.5.0",
+ "id": "bar[..]",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "keywords": [],
+ "source": null,
+ "dependencies": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "edition": "2015",
+ "targets": [
+ {
+ "kind": [ "lib" ],
+ "crate_types": [ "lib" ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "name": "bar",
+ "src_path": "[..]bar/src/lib.rs"
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]bar/Cargo.toml",
+ "metadata": null,
+ "publish": null
+ },
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "name": "baz",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "version": "0.5.0",
+ "id": "baz[..]",
+ "keywords": [],
+ "source": null,
+ "dependencies": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "edition": "2015",
+ "targets": [
+ {
+ "kind": [ "lib" ],
+ "crate_types": [ "lib" ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "name": "baz",
+ "src_path": "[..]baz/src/lib.rs"
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]baz/Cargo.toml",
+ "metadata": null,
+ "publish": null
+ }
+ ],
+ "workspace_members": ["bar 0.5.0 (path+file:[..]bar)", "baz 0.5.0 (path+file:[..]baz)"],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "bar 0.5.0 (path+file:[..]bar)"
+ },
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "baz 0.5.0 (path+file:[..]baz)"
+ }
+ ],
+ "root": null
+ },
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": {
+ "tool1": "hello",
+ "tool2": [1, 2, 3],
+ "foo": {
+ "bar": 3
+ }
+ }
+ }"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn workspace_metadata_with_dependencies_no_deps() {
+ let p = project()
+ // NOTE that 'artifact' isn't mentioned in the workspace here, yet it shows up as a member.
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies]
+ baz = { path = "../baz/" }
+ artifact = { path = "../artifact/", artifact = "bin" }
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+ .file("baz/src/lib.rs", "")
+ .file("artifact/Cargo.toml", &basic_bin_manifest("artifact"))
+ .file("artifact/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("metadata --no-deps -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "name": "bar",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "version": "0.5.0",
+ "id": "bar[..]",
+ "keywords": [],
+ "source": null,
+ "license": null,
+ "dependencies": [
+ {
+ "features": [],
+ "kind": null,
+ "name": "artifact",
+ "optional": false,
+ "path": "[..]/foo/artifact",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true,
+ "artifact": {
+ "kinds": [
+ "bin"
+ ],
+ "lib": false,
+ "target": null
+ }
+ },
+ {
+ "features": [],
+ "kind": null,
+ "name": "baz",
+ "optional": false,
+ "path": "[..]/foo/baz",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ }
+ ],
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "edition": "2015",
+ "targets": [
+ {
+ "kind": [ "lib" ],
+ "crate_types": [ "lib" ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "name": "bar",
+ "src_path": "[..]bar/src/lib.rs"
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]bar/Cargo.toml",
+ "metadata": null,
+ "publish": null
+ },
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "documentation": null,
+ "edition": "2015",
+ "features": {},
+ "homepage": null,
+ "id": "artifact 0.5.0 (path+file:[..]/foo/artifact)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]/foo/artifact/Cargo.toml",
+ "metadata": null,
+ "name": "artifact",
+ "publish": null,
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": true,
+ "doctest": false,
+ "edition": "2015",
+ "kind": [
+ "bin"
+ ],
+ "name": "artifact",
+ "src_path": "[..]/foo/artifact/src/main.rs",
+ "test": true
+ }
+ ],
+ "version": "0.5.0"
+ },
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "name": "baz",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "version": "0.5.0",
+ "id": "baz[..]",
+ "keywords": [],
+ "source": null,
+ "dependencies": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "edition": "2015",
+ "targets": [
+ {
+ "kind": [ "lib" ],
+ "crate_types": ["lib"],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "name": "baz",
+ "src_path": "[..]baz/src/lib.rs"
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]baz/Cargo.toml",
+ "metadata": null,
+ "publish": null
+ }
+ ],
+ "workspace_members": [
+ "bar 0.5.0 (path+file:[..]bar)",
+ "artifact 0.5.0 (path+file:[..]/foo/artifact)",
+ "baz 0.5.0 (path+file:[..]baz)"
+ ],
+ "resolve": null,
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn workspace_metadata_with_dependencies_and_resolve() {
+ let alt_target = "wasm32-unknown-unknown";
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "artifact", "non-artifact", "bin-only-artifact"]
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ &r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [build-dependencies]
+ artifact = { path = "../artifact/", artifact = "bin", target = "target" }
+ bin-only-artifact = { path = "../bin-only-artifact/", artifact = "bin", target = "$ALT_TARGET" }
+ non-artifact = { path = "../non-artifact" }
+
+ [dependencies]
+ artifact = { path = "../artifact/", artifact = ["cdylib", "staticlib", "bin:baz-name"], lib = true, target = "$ALT_TARGET" }
+ bin-only-artifact = { path = "../bin-only-artifact/", artifact = "bin:a-name" }
+ non-artifact = { path = "../non-artifact" }
+
+ [dev-dependencies]
+ artifact = { path = "../artifact/" }
+ non-artifact = { path = "../non-artifact" }
+ bin-only-artifact = { path = "../bin-only-artifact/", artifact = "bin:b-name" }
+ "#.replace("$ALT_TARGET", alt_target),
+ )
+ .file("bar/src/lib.rs", "")
+ .file("bar/build.rs", "fn main() {}")
+ .file(
+ "artifact/Cargo.toml",
+ r#"
+ [package]
+ name = "artifact"
+ version = "0.5.0"
+ authors = []
+
+ [lib]
+ crate-type = ["staticlib", "cdylib", "rlib"]
+
+ [[bin]]
+ name = "bar-name"
+
+ [[bin]]
+ name = "baz-name"
+ "#,
+ )
+ .file("artifact/src/main.rs", "fn main() {}")
+ .file("artifact/src/lib.rs", "")
+ .file(
+ "bin-only-artifact/Cargo.toml",
+ r#"
+ [package]
+ name = "bin-only-artifact"
+ version = "0.5.0"
+ authors = []
+
+ [[bin]]
+ name = "a-name"
+
+ [[bin]]
+ name = "b-name"
+ "#,
+ )
+ .file("bin-only-artifact/src/main.rs", "fn main() {}")
+ .file("non-artifact/Cargo.toml",
+ r#"
+ [package]
+
+ name = "non-artifact"
+ version = "0.5.0"
+ authors = []
+ "#,
+ )
+ .file("non-artifact/src/lib.rs", "")
+ .build();
+
+ p.cargo("metadata -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_json(
+ r#"
+ {
+ "metadata": null,
+ "packages": [
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "documentation": null,
+ "edition": "2015",
+ "features": {},
+ "homepage": null,
+ "id": "artifact 0.5.0 (path+file://[..]/foo/artifact)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]/foo/artifact/Cargo.toml",
+ "metadata": null,
+ "name": "artifact",
+ "publish": null,
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "staticlib",
+ "cdylib",
+ "rlib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "kind": [
+ "staticlib",
+ "cdylib",
+ "rlib"
+ ],
+ "name": "artifact",
+ "src_path": "[..]/foo/artifact/src/lib.rs",
+ "test": true
+ },
+ {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": true,
+ "doctest": false,
+ "edition": "2015",
+ "kind": [
+ "bin"
+ ],
+ "name": "bar-name",
+ "src_path": "[..]/foo/artifact/src/main.rs",
+ "test": true
+ },
+ {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": true,
+ "doctest": false,
+ "edition": "2015",
+ "kind": [
+ "bin"
+ ],
+ "name": "baz-name",
+ "src_path": "[..]/foo/artifact/src/main.rs",
+ "test": true
+ }
+ ],
+ "version": "0.5.0"
+ },
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [
+ {
+ "artifact": {
+ "kinds": [
+ "cdylib",
+ "staticlib",
+ "bin:baz-name"
+ ],
+ "lib": true,
+ "target": "wasm32-unknown-unknown"
+ },
+ "features": [],
+ "kind": null,
+ "name": "artifact",
+ "optional": false,
+ "path": "[..]/foo/artifact",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "artifact": {
+ "kinds": [
+ "bin:a-name"
+ ],
+ "lib": false,
+ "target": null
+ },
+ "features": [],
+ "kind": null,
+ "name": "bin-only-artifact",
+ "optional": false,
+ "path": "[..]/foo/bin-only-artifact",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "features": [],
+ "kind": null,
+ "name": "non-artifact",
+ "optional": false,
+ "path": "[..]/foo/non-artifact",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "features": [],
+ "kind": "dev",
+ "name": "artifact",
+ "optional": false,
+ "path": "[..]/foo/artifact",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "artifact": {
+ "kinds": [
+ "bin:b-name"
+ ],
+ "lib": false,
+ "target": null
+ },
+ "features": [],
+ "kind": "dev",
+ "name": "bin-only-artifact",
+ "optional": false,
+ "path": "[..]/foo/bin-only-artifact",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "features": [],
+ "kind": "dev",
+ "name": "non-artifact",
+ "optional": false,
+ "path": "[..]/foo/non-artifact",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "artifact": {
+ "kinds": [
+ "bin"
+ ],
+ "lib": false,
+ "target": "target"
+ },
+ "features": [],
+ "kind": "build",
+ "name": "artifact",
+ "optional": false,
+ "path": "[..]/foo/artifact",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "artifact": {
+ "kinds": [
+ "bin"
+ ],
+ "lib": false,
+ "target": "wasm32-unknown-unknown"
+ },
+ "features": [],
+ "kind": "build",
+ "name": "bin-only-artifact",
+ "optional": false,
+ "path": "[..]/foo/bin-only-artifact",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "features": [],
+ "kind": "build",
+ "name": "non-artifact",
+ "optional": false,
+ "path": "[..]/foo/non-artifact",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ }
+ ],
+ "description": null,
+ "documentation": null,
+ "edition": "2015",
+ "features": {},
+ "homepage": null,
+ "id": "bar 0.5.0 (path+file://[..]/foo/bar)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]/foo/bar/Cargo.toml",
+ "metadata": null,
+ "name": "bar",
+ "publish": null,
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "bar",
+ "src_path": "[..]/foo/bar/src/lib.rs",
+ "test": true
+ },
+ {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": false,
+ "doctest": false,
+ "edition": "2015",
+ "kind": [
+ "custom-build"
+ ],
+ "name": "build-script-build",
+ "src_path": "[..]/foo/bar/build.rs",
+ "test": false
+ }
+ ],
+ "version": "0.5.0"
+ },
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "documentation": null,
+ "edition": "2015",
+ "features": {},
+ "homepage": null,
+ "id": "bin-only-artifact 0.5.0 (path+file://[..]/foo/bin-only-artifact)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]/foo/bin-only-artifact/Cargo.toml",
+ "metadata": null,
+ "name": "bin-only-artifact",
+ "publish": null,
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": true,
+ "doctest": false,
+ "edition": "2015",
+ "kind": [
+ "bin"
+ ],
+ "name": "a-name",
+ "src_path": "[..]/foo/bin-only-artifact/src/main.rs",
+ "test": true
+ },
+ {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": true,
+ "doctest": false,
+ "edition": "2015",
+ "kind": [
+ "bin"
+ ],
+ "name": "b-name",
+ "src_path": "[..]/foo/bin-only-artifact/src/main.rs",
+ "test": true
+ }
+ ],
+ "version": "0.5.0"
+ },
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "documentation": null,
+ "edition": "2015",
+ "features": {},
+ "homepage": null,
+ "id": "non-artifact 0.5.0 (path+file://[..]/foo/non-artifact)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]/foo/non-artifact/Cargo.toml",
+ "metadata": null,
+ "name": "non-artifact",
+ "publish": null,
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "non-artifact",
+ "src_path": "[..]/foo/non-artifact/src/lib.rs",
+ "test": true
+ }
+ ],
+ "version": "0.5.0"
+ }
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "artifact 0.5.0 (path+file://[..]/foo/artifact)"
+ },
+ {
+ "dependencies": [
+ "artifact 0.5.0 (path+file://[..]/foo/artifact)",
+ "bin-only-artifact 0.5.0 (path+file://[..]/foo/bin-only-artifact)",
+ "non-artifact 0.5.0 (path+file://[..]/foo/non-artifact)"
+ ],
+ "deps": [
+ {
+ "dep_kinds": [
+ {
+ "extern_name": "artifact",
+ "kind": null,
+ "target": null
+ },
+ {
+ "artifact": "cdylib",
+ "compile_target": "wasm32-unknown-unknown",
+ "extern_name": "artifact",
+ "kind": null,
+ "target": null
+ },
+ {
+ "artifact": "staticlib",
+ "compile_target": "wasm32-unknown-unknown",
+ "extern_name": "artifact",
+ "kind": null,
+ "target": null
+ },
+ {
+ "artifact": "bin",
+ "bin_name": "baz-name",
+ "compile_target": "wasm32-unknown-unknown",
+ "extern_name": "baz_name",
+ "kind": null,
+ "target": null
+ },
+ {
+ "kind": "dev",
+ "target": null
+ },
+ {
+ "artifact": "bin",
+ "bin_name": "bar-name",
+ "compile_target": "<target>",
+ "extern_name": "bar_name",
+ "kind": "build",
+ "target": null
+ },
+ {
+ "artifact": "bin",
+ "bin_name": "baz-name",
+ "compile_target": "<target>",
+ "extern_name": "baz_name",
+ "kind": "build",
+ "target": null
+ }
+ ],
+ "name": "artifact",
+ "pkg": "artifact 0.5.0 (path+file://[..]/foo/artifact)"
+ },
+ {
+ "dep_kinds": [
+ {
+ "artifact": "bin",
+ "bin_name": "a-name",
+ "extern_name": "a_name",
+ "kind": null,
+ "target": null
+ },
+ {
+ "artifact": "bin",
+ "bin_name": "b-name",
+ "extern_name": "b_name",
+ "kind": "dev",
+ "target": null
+ },
+ {
+ "artifact": "bin",
+ "bin_name": "a-name",
+ "compile_target": "wasm32-unknown-unknown",
+ "extern_name": "a_name",
+ "kind": "build",
+ "target": null
+ },
+ {
+ "artifact": "bin",
+ "bin_name": "b-name",
+ "compile_target": "wasm32-unknown-unknown",
+ "extern_name": "b_name",
+ "kind": "build",
+ "target": null
+ }
+ ],
+ "name": "",
+ "pkg": "bin-only-artifact 0.5.0 (path+file://[..]/foo/bin-only-artifact)"
+ },
+ {
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ },
+ {
+ "kind": "dev",
+ "target": null
+ },
+ {
+ "kind": "build",
+ "target": null
+ }
+ ],
+ "name": "non_artifact",
+ "pkg": "non-artifact 0.5.0 (path+file://[..]/foo/non-artifact)"
+ }
+ ],
+ "features": [],
+ "id": "bar 0.5.0 (path+file://[..]/foo/bar)"
+ },
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "bin-only-artifact 0.5.0 (path+file://[..]/foo/bin-only-artifact)"
+ },
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "non-artifact 0.5.0 (path+file://[..]/foo/non-artifact)"
+ }
+ ],
+ "root": null
+ },
+ "target_directory": "[..]/foo/target",
+ "version": 1,
+ "workspace_members": [
+ "bar 0.5.0 (path+file://[..]/foo/bar)",
+ "artifact 0.5.0 (path+file://[..]/foo/artifact)",
+ "bin-only-artifact 0.5.0 (path+file://[..]/foo/bin-only-artifact)",
+ "non-artifact 0.5.0 (path+file://[..]/foo/non-artifact)"
+ ],
+ "workspace_root": "[..]/foo"
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_with_invalid_manifest() {
+ let p = project().file("Cargo.toml", "").build();
+
+ p.cargo("metadata --format-version 1")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ virtual manifests must be configured with [workspace]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_with_invalid_authors_field() {
+ let p = project()
+ .file("src/foo.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ authors = ""
+ "#,
+ )
+ .build();
+
+ p.cargo("metadata")
+ .with_status(101)
+ .with_stderr(
+ r#"[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ invalid type: string "", expected a vector of strings or workspace
+ in `package.authors`"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_with_invalid_version_field() {
+ let p = project()
+ .file("src/foo.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ version = 1
+ "#,
+ )
+ .build();
+
+ p.cargo("metadata")
+ .with_status(101)
+ .with_stderr(
+ r#"[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ invalid type: integer `1`, expected SemVer version
+ in `package.version`"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_with_invalid_publish_field() {
+ let p = project()
+ .file("src/foo.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ publish = "foo"
+ "#,
+ )
+ .build();
+
+ p.cargo("metadata")
+ .with_status(101)
+ .with_stderr(
+ r#"[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ invalid type: string "foo", expected a boolean, a vector of strings, or workspace
+ in `package.publish`"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_with_invalid_artifact_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+
+ [dependencies]
+ artifact = { path = "artifact", artifact = "bin:notfound" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("artifact/Cargo.toml", &basic_bin_manifest("artifact"))
+ .file("artifact/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("metadata -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems
+[ERROR] dependency `artifact` in package `foo` requires a `bin:notfound` artifact to be present.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_with_invalid_duplicate_renamed_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+ baz = { path = "bar", package = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("metadata")
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems
+[ERROR] the crate `foo v0.5.0 ([..])` depends on crate `bar v0.5.0 ([..])` multiple times with different names",
+ )
+ .run();
+}
+
+const MANIFEST_OUTPUT: &str = r#"
+{
+ "packages": [{
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "name":"foo",
+ "version":"0.5.0",
+ "id":"foo[..]0.5.0[..](path+file://[..]/foo)",
+ "source":null,
+ "dependencies":[],
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "edition": "2015",
+ "targets":[{
+ "kind":["bin"],
+ "crate_types":["bin"],
+ "doc": true,
+ "doctest": false,
+ "test": true,
+ "edition": "2015",
+ "name":"foo",
+ "src_path":"[..]/foo/src/foo.rs"
+ }],
+ "features":{},
+ "manifest_path":"[..]Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null
+ }],
+ "workspace_members": [ "foo 0.5.0 (path+file:[..]foo)" ],
+ "resolve": null,
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+}"#;
+
+#[cargo_test]
+fn cargo_metadata_no_deps_path_to_cargo_toml_relative() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("metadata --no-deps --manifest-path foo/Cargo.toml")
+ .cwd(p.root().parent().unwrap())
+ .with_json(MANIFEST_OUTPUT)
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_no_deps_path_to_cargo_toml_absolute() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("metadata --no-deps --manifest-path")
+ .arg(p.root().join("Cargo.toml"))
+ .cwd(p.root().parent().unwrap())
+ .with_json(MANIFEST_OUTPUT)
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_no_deps_path_to_cargo_toml_parent_relative() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("metadata --no-deps --manifest-path foo")
+ .cwd(p.root().parent().unwrap())
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] the manifest-path must be \
+ a path to a Cargo.toml file",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_no_deps_path_to_cargo_toml_parent_absolute() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("metadata --no-deps --manifest-path")
+ .arg(p.root())
+ .cwd(p.root().parent().unwrap())
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] the manifest-path must be \
+ a path to a Cargo.toml file",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_no_deps_cwd() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("metadata --no-deps")
+ .with_json(MANIFEST_OUTPUT)
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_bad_version() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("metadata --no-deps --format-version 2")
+ .with_status(1)
+ .with_stderr_contains(
+ "\
+error: invalid value '2' for '--format-version <VERSION>'
+ [possible values: 1]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn multiple_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [features]
+ a = []
+ b = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
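+    // Both features are given in one space-separated `--features` value,
+    // which is expected to parse and run successfully.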
+ p.cargo("metadata --features").arg("a b").run();
+}
+
+#[cargo_test]
+fn package_metadata() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = ["wycats@example.com"]
+ categories = ["database"]
+ keywords = ["database"]
+ readme = "README.md"
+ repository = "https://github.com/rust-lang/cargo"
+ homepage = "https://rust-lang.org"
+ documentation = "https://doc.rust-lang.org/stable/std/"
+
+ [package.metadata.bar]
+ baz = "quux"
+ "#,
+ )
+ .file("README.md", "")
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("metadata --no-deps")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": ["wycats@example.com"],
+ "categories": ["database"],
+ "default_run": null,
+ "name": "foo",
+ "readme": "README.md",
+ "repository": "https://github.com/rust-lang/cargo",
+ "rust_version": null,
+ "homepage": "https://rust-lang.org",
+ "documentation": "https://doc.rust-lang.org/stable/std/",
+ "version": "0.1.0",
+ "id": "foo[..]",
+ "keywords": ["database"],
+ "source": null,
+ "dependencies": [],
+ "edition": "2015",
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "targets": [
+ {
+ "kind": [ "lib" ],
+ "crate_types": [ "lib" ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "name": "foo",
+ "src_path": "[..]foo/src/lib.rs"
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]foo/Cargo.toml",
+ "metadata": {
+ "bar": {
+ "baz": "quux"
+ }
+ },
+ "publish": null
+ }
+ ],
+ "workspace_members": ["foo[..]"],
+ "resolve": null,
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn package_publish() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = ["wycats@example.com"]
+ categories = ["database"]
+ keywords = ["database"]
+ readme = "README.md"
+ repository = "https://github.com/rust-lang/cargo"
+ publish = ["my-registry"]
+ "#,
+ )
+ .file("README.md", "")
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("metadata --no-deps")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": ["wycats@example.com"],
+ "categories": ["database"],
+ "default_run": null,
+ "name": "foo",
+ "readme": "README.md",
+ "repository": "https://github.com/rust-lang/cargo",
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "version": "0.1.0",
+ "id": "foo[..]",
+ "keywords": ["database"],
+ "source": null,
+ "dependencies": [],
+ "edition": "2015",
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "targets": [
+ {
+ "kind": [ "lib" ],
+ "crate_types": [ "lib" ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "name": "foo",
+ "src_path": "[..]foo/src/lib.rs"
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]foo/Cargo.toml",
+ "metadata": null,
+ "publish": ["my-registry"]
+ }
+ ],
+ "workspace_members": ["foo[..]"],
+ "resolve": null,
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_metadata_path_to_cargo_toml_project() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("package --manifest-path")
+ .arg(p.root().join("bar/Cargo.toml"))
+ .cwd(p.root().parent().unwrap())
+ .run();
+
+ p.cargo("metadata --manifest-path")
+ .arg(p.root().join("target/package/bar-0.5.0/Cargo.toml"))
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "edition": "2015",
+ "features": {},
+ "id": "bar 0.5.0 ([..])",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "name": "bar",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "bar",
+ "src_path": "[..]src/lib.rs"
+ }
+ ],
+ "version": "0.5.0"
+ }
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "bar 0.5.0 ([..])"
+ }
+ ],
+ "root": "bar 0.5.0 (path+file:[..])"
+ },
+ "target_directory": "[..]",
+ "version": 1,
+ "workspace_members": [
+ "bar 0.5.0 (path+file:[..])"
+ ],
+ "workspace_root": "[..]",
+ "metadata": null
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn package_edition_2018() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = ["wycats@example.com"]
+ edition = "2018"
+ "#,
+ )
+ .build();
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "edition": "2018",
+ "features": {},
+ "id": "foo 0.1.0 (path+file:[..])",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "name": "foo",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2018",
+ "kind": [
+ "lib"
+ ],
+ "name": "foo",
+ "src_path": "[..]src/lib.rs"
+ }
+ ],
+ "version": "0.1.0"
+ }
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "foo 0.1.0 (path+file:[..])"
+ }
+ ],
+ "root": "foo 0.1.0 (path+file:[..])"
+ },
+ "target_directory": "[..]",
+ "version": 1,
+ "workspace_members": [
+ "foo 0.1.0 (path+file:[..])"
+ ],
+ "workspace_root": "[..]",
+ "metadata": null
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn package_default_run() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#)
+ .file("src/bin/b.rs", r#"fn main() { println!("hello B"); }"#)
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = ["wycats@example.com"]
+ edition = "2018"
+ default-run = "a"
+ "#,
+ )
+ .build();
+ let json = p.cargo("metadata").run_json();
+ assert_eq!(json["packages"][0]["default_run"], json!("a"));
+}
+
+#[cargo_test]
+fn package_rust_version() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = ["wycats@example.com"]
+ edition = "2018"
+ rust-version = "1.56"
+ "#,
+ )
+ .build();
+ let json = p.cargo("metadata").run_json();
+ assert_eq!(json["packages"][0]["rust_version"], json!("1.56"));
+}
+
+#[cargo_test]
+fn target_edition_2018() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = ["wycats@example.com"]
+ edition = "2015"
+
+ [lib]
+ edition = "2018"
+ "#,
+ )
+ .build();
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "edition": "2015",
+ "features": {},
+ "id": "foo 0.1.0 (path+file:[..])",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "name": "foo",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2018",
+ "kind": [
+ "lib"
+ ],
+ "name": "foo",
+ "src_path": "[..]src/lib.rs"
+ },
+ {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": true,
+ "doctest": false,
+ "test": true,
+ "edition": "2015",
+ "kind": [
+ "bin"
+ ],
+ "name": "foo",
+ "src_path": "[..]src/main.rs"
+ }
+ ],
+ "version": "0.1.0"
+ }
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "foo 0.1.0 (path+file:[..])"
+ }
+ ],
+ "root": "foo 0.1.0 (path+file:[..])"
+ },
+ "target_directory": "[..]",
+ "version": 1,
+ "workspace_members": [
+ "foo 0.1.0 (path+file:[..])"
+ ],
+ "workspace_root": "[..]",
+ "metadata": null
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rename_dependency() {
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.2.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { version = "0.1.0" }
+ baz = { version = "0.2.0", package = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar; extern crate baz;")
+ .build();
+
+ p.cargo("metadata")
+ .with_json(
+ r#"
+{
+ "packages": [
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "edition": "2015",
+ "features": {},
+ "id": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]",
+ "metadata": null,
+ "publish": null,
+ "name": "bar",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "bar",
+ "src_path": "[..]"
+ }
+ ],
+ "version": "0.1.0"
+ },
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "edition": "2015",
+ "features": {},
+ "id": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]",
+ "metadata": null,
+ "publish": null,
+ "name": "bar",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "bar",
+ "src_path": "[..]"
+ }
+ ],
+ "version": "0.2.0"
+ },
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [
+ {
+ "features": [],
+ "kind": null,
+ "name": "bar",
+ "optional": false,
+ "rename": null,
+ "registry": null,
+ "req": "^0.1.0",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "features": [],
+ "kind": null,
+ "name": "bar",
+ "optional": false,
+ "rename": "baz",
+ "registry": null,
+ "req": "^0.2.0",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "target": null,
+ "uses_default_features": true
+ }
+ ],
+ "description": null,
+ "edition": "2015",
+ "features": {},
+ "id": "foo 0.0.1[..]",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]",
+ "metadata": null,
+ "publish": null,
+ "name": "foo",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "foo",
+ "src_path": "[..]"
+ }
+ ],
+ "version": "0.0.1"
+ }
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)"
+ },
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)"
+ },
+ {
+ "dependencies": [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ "deps": [
+ {
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ],
+ "name": "bar",
+ "pkg": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)"
+ },
+ {
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ],
+ "name": "baz",
+ "pkg": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)"
+ }
+ ],
+ "features": [],
+ "id": "foo 0.0.1[..]"
+ }
+ ],
+ "root": "foo 0.0.1[..]"
+ },
+ "target_directory": "[..]",
+ "version": 1,
+ "workspace_members": [
+ "foo 0.0.1[..]"
+ ],
+ "workspace_root": "[..]",
+ "metadata": null
+}"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn metadata_links() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ links = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "edition": "2015",
+ "features": {},
+ "id": "foo 0.5.0 [..]",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": "a",
+ "manifest_path": "[..]/foo/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "name": "foo",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "foo",
+ "src_path": "[..]/foo/src/lib.rs"
+ },
+ {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": false,
+ "doctest": false,
+ "test": false,
+ "edition": "2015",
+ "kind": [
+ "custom-build"
+ ],
+ "name": "build-script-build",
+ "src_path": "[..]/foo/build.rs"
+ }
+ ],
+ "version": "0.5.0"
+ }
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "foo 0.5.0 [..]"
+ }
+ ],
+ "root": "foo 0.5.0 [..]"
+ },
+ "target_directory": "[..]/foo/target",
+ "version": 1,
+ "workspace_members": [
+ "foo 0.5.0 [..]"
+ ],
+ "workspace_root": "[..]/foo",
+ "metadata": null
+ }
+ "#,
+ )
+ .run()
+}
+
+#[cargo_test]
+fn deps_with_bin_only() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [dependencies]
+ bdep = { path = "bdep" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bdep/Cargo.toml", &basic_bin_manifest("bdep"))
+ .file("bdep/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": [
+ {
+ "name": "foo",
+ "version": "0.1.0",
+ "id": "foo 0.1.0 ([..])",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [
+ {
+ "name": "bdep",
+ "source": null,
+ "req": "*",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "path": "[..]/foo/bdep",
+ "features": [],
+ "target": null,
+ "registry": null
+ }
+ ],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "foo",
+ "src_path": "[..]/foo/src/lib.rs",
+ "edition": "2015",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]/foo/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ }
+ ],
+ "workspace_members": [
+ "foo 0.1.0 ([..])"
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "id": "foo 0.1.0 ([..])",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ }
+ ],
+ "root": "foo 0.1.0 ([..])"
+ },
+ "target_directory": "[..]/foo/target",
+ "version": 1,
+ "workspace_root": "[..]foo",
+ "metadata": null
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn filter_platform() {
+ // Testing the --filter-platform flag.
+ Package::new("normal-dep", "0.0.1").publish();
+ Package::new("host-dep", "0.0.1").publish();
+ Package::new("alt-dep", "0.0.1").publish();
+ Package::new("cfg-dep", "0.0.1").publish();
+    // Just needs to be a valid target that is different from the host.
+    // Presumably nobody runs these tests on wasm. 🙃
+ let alt_target = "wasm32-unknown-unknown";
+ let host_target = rustc_host();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ normal-dep = "0.0.1"
+
+ [target.{}.dependencies]
+ host-dep = "0.0.1"
+
+ [target.{}.dependencies]
+ alt-dep = "0.0.1"
+
+ [target.'cfg(foobar)'.dependencies]
+ cfg-dep = "0.0.1"
+ "#,
+ host_target, alt_target
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let alt_dep = r#"
+ {
+ "name": "alt-dep",
+ "version": "0.0.1",
+ "id": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "dependencies": [],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "alt-dep",
+ "src_path": "[..]/alt-dep-0.0.1/src/lib.rs",
+ "edition": "2015",
+ "test": true,
+ "doc": true,
+ "doctest": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]/alt-dep-0.0.1/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ }
+ "#;
+
+ let cfg_dep = r#"
+ {
+ "name": "cfg-dep",
+ "version": "0.0.1",
+ "id": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "dependencies": [],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "cfg-dep",
+ "src_path": "[..]/cfg-dep-0.0.1/src/lib.rs",
+ "edition": "2015",
+ "test": true,
+ "doc": true,
+ "doctest": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]/cfg-dep-0.0.1/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ }
+ "#;
+
+ let host_dep = r#"
+ {
+ "name": "host-dep",
+ "version": "0.0.1",
+ "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "dependencies": [],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "host-dep",
+ "src_path": "[..]/host-dep-0.0.1/src/lib.rs",
+ "edition": "2015",
+ "test": true,
+ "doc": true,
+ "doctest": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]/host-dep-0.0.1/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ }
+ "#;
+
+ let normal_dep = r#"
+ {
+ "name": "normal-dep",
+ "version": "0.0.1",
+ "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "dependencies": [],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "normal-dep",
+ "src_path": "[..]/normal-dep-0.0.1/src/lib.rs",
+ "edition": "2015",
+ "test": true,
+ "doc": true,
+ "doctest": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]/normal-dep-0.0.1/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ }
+ "#;
+
+    // The dependencies are stored in sorted order by target and then by name.
+    // Since the testsuite may run on different targets, the expected list
+    // needs to be sorted the same way before it can be compared.
+ let mut foo_deps = serde_json::json!([
+ {
+ "name": "normal-dep",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null
+ },
+ {
+ "name": "cfg-dep",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": "cfg(foobar)",
+ "registry": null
+ },
+ {
+ "name": "alt-dep",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": alt_target,
+ "registry": null
+ },
+ {
+ "name": "host-dep",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^0.0.1",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": host_target,
+ "registry": null
+ }
+ ]);
+ foo_deps.as_array_mut().unwrap().sort_by(|a, b| {
+        // This really should be `rename`, but that's not needed here.
+        // Also, sorting on `name` isn't strictly necessary since this test
+        // only has one package per target, but it is kept here to be safe.
+ let a = (a["target"].as_str(), a["name"].as_str());
+ let b = (b["target"].as_str(), b["name"].as_str());
+ a.cmp(&b)
+ });
+
+ let foo = r#"
+ {
+ "name": "foo",
+ "version": "0.1.0",
+ "id": "foo 0.1.0 (path+file:[..]foo)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies":
+ $FOO_DEPS,
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "foo",
+ "src_path": "[..]/foo/src/lib.rs",
+ "edition": "2015",
+ "test": true,
+ "doc": true,
+ "doctest": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "[..]/foo/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2015",
+ "links": null
+ }
+ "#
+ .replace("$ALT_TRIPLE", alt_target)
+ .replace("$HOST_TRIPLE", host_target)
+ .replace("$FOO_DEPS", &foo_deps.to_string());
+
+ // We're going to be checking that we don't download excessively,
+ // so we need to ensure that downloads will happen.
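+    // Clearing the registry cache/src and the build dir forces the next
+    // invocation to download (and report) the dependencies again.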
+ let clear = || {
+ cargo_home().join("registry/cache").rm_rf();
+ cargo_home().join("registry/src").rm_rf();
+ p.build_dir().rm_rf();
+ };
+
+ // Normal metadata, no filtering, returns *everything*.
+ p.cargo("metadata")
+ .with_stderr_unordered(
+ "\
+[UPDATING] [..]
+[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems
+[DOWNLOADING] crates ...
+[DOWNLOADED] normal-dep v0.0.1 [..]
+[DOWNLOADED] host-dep v0.0.1 [..]
+[DOWNLOADED] alt-dep v0.0.1 [..]
+[DOWNLOADED] cfg-dep v0.0.1 [..]
+",
+ )
+ .with_json(
+ &r#"
+{
+ "packages": [
+ $ALT_DEP,
+ $CFG_DEP,
+ $FOO,
+ $HOST_DEP,
+ $NORMAL_DEP
+ ],
+ "workspace_members": [
+ "foo 0.1.0 (path+file:[..]foo)"
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "id": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ },
+ {
+ "id": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ },
+ {
+ "id": "foo 0.1.0 (path+file:[..]foo)",
+ "dependencies": [
+ "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ "deps": [
+ {
+ "name": "alt_dep",
+ "pkg": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": "$ALT_TRIPLE"
+ }
+ ]
+ },
+ {
+ "name": "cfg_dep",
+ "pkg": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": "cfg(foobar)"
+ }
+ ]
+ },
+ {
+ "name": "host_dep",
+ "pkg": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": "$HOST_TRIPLE"
+ }
+ ]
+ },
+ {
+ "name": "normal_dep",
+ "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ]
+ }
+ ],
+ "features": []
+ },
+ {
+ "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ },
+ {
+ "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ }
+ ],
+ "root": "foo 0.1.0 (path+file:[..]foo)"
+ },
+ "target_directory": "[..]/foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+}
+"#
+ .replace("$ALT_TRIPLE", alt_target)
+ .replace("$HOST_TRIPLE", host_target)
+ .replace("$ALT_DEP", alt_dep)
+ .replace("$CFG_DEP", cfg_dep)
+ .replace("$HOST_DEP", host_dep)
+ .replace("$NORMAL_DEP", normal_dep)
+ .replace("$FOO", &foo),
+ )
+ .run();
+ clear();
+
+ // Filter on alternate, removes cfg and host.
+ p.cargo("metadata --filter-platform")
+ .arg(alt_target)
+ .with_stderr_unordered(
+ "\
+[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems
+[DOWNLOADING] crates ...
+[DOWNLOADED] normal-dep v0.0.1 [..]
+[DOWNLOADED] host-dep v0.0.1 [..]
+[DOWNLOADED] alt-dep v0.0.1 [..]
+",
+ )
+ .with_json(
+ &r#"
+{
+ "packages": [
+ $ALT_DEP,
+ $FOO,
+ $NORMAL_DEP
+ ],
+ "workspace_members": "{...}",
+ "resolve": {
+ "nodes": [
+ {
+ "id": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ },
+ {
+ "id": "foo 0.1.0 (path+file:[..]foo)",
+ "dependencies": [
+ "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ "deps": [
+ {
+ "name": "alt_dep",
+ "pkg": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": "$ALT_TRIPLE"
+ }
+ ]
+ },
+ {
+ "name": "normal_dep",
+ "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ]
+ }
+ ],
+ "features": []
+ },
+ {
+ "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ }
+ ],
+ "root": "foo 0.1.0 (path+file:[..]foo)"
+ },
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]foo",
+ "metadata": null
+}
+"#
+ .replace("$ALT_TRIPLE", alt_target)
+ .replace("$ALT_DEP", alt_dep)
+ .replace("$NORMAL_DEP", normal_dep)
+ .replace("$FOO", &foo),
+ )
+ .run();
+ clear();
+
+ // Filter on host, removes alt and cfg.
+ p.cargo("metadata --filter-platform")
+ .arg(&host_target)
+ .with_stderr_unordered(
+ "\
+[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems
+[DOWNLOADING] crates ...
+[DOWNLOADED] normal-dep v0.0.1 [..]
+[DOWNLOADED] host-dep v0.0.1 [..]
+",
+ )
+ .with_json(
+ &r#"
+{
+ "packages": [
+ $FOO,
+ $HOST_DEP,
+ $NORMAL_DEP
+ ],
+ "workspace_members": "{...}",
+ "resolve": {
+ "nodes": [
+ {
+ "id": "foo 0.1.0 (path+file:[..]foo)",
+ "dependencies": [
+ "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ "deps": [
+ {
+ "name": "host_dep",
+ "pkg": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": "$HOST_TRIPLE"
+ }
+ ]
+ },
+ {
+ "name": "normal_dep",
+ "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ]
+ }
+ ],
+ "features": []
+ },
+ {
+ "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ },
+ {
+ "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ }
+ ],
+ "root": "foo 0.1.0 (path+file:[..]foo)"
+ },
+ "target_directory": "[..]foo/target",
+ "version": 1,
+ "workspace_root": "[..]foo",
+ "metadata": null
+}
+"#
+ .replace("$HOST_TRIPLE", host_target)
+ .replace("$HOST_DEP", host_dep)
+ .replace("$NORMAL_DEP", normal_dep)
+ .replace("$FOO", &foo),
+ )
+ .run();
+ clear();
+
+ // Filter host with cfg, removes alt only
+ p.cargo("metadata --filter-platform")
+ .arg(&host_target)
+ .env("RUSTFLAGS", "--cfg=foobar")
+ .with_stderr_unordered(
+ "\
+[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems
+[DOWNLOADING] crates ...
+[DOWNLOADED] normal-dep v0.0.1 [..]
+[DOWNLOADED] host-dep v0.0.1 [..]
+[DOWNLOADED] cfg-dep v0.0.1 [..]
+",
+ )
+ .with_json(
+ &r#"
+{
+ "packages": [
+ $CFG_DEP,
+ $FOO,
+ $HOST_DEP,
+ $NORMAL_DEP
+ ],
+ "workspace_members": "{...}",
+ "resolve": {
+ "nodes": [
+ {
+ "id": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ },
+ {
+ "id": "foo 0.1.0 (path+file:[..]/foo)",
+ "dependencies": [
+ "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ "deps": [
+ {
+ "name": "cfg_dep",
+ "pkg": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": "cfg(foobar)"
+ }
+ ]
+ },
+ {
+ "name": "host_dep",
+ "pkg": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": "$HOST_TRIPLE"
+ }
+ ]
+ },
+ {
+ "name": "normal_dep",
+ "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ]
+ }
+ ],
+ "features": []
+ },
+ {
+ "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ },
+ {
+ "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ }
+ ],
+ "root": "foo 0.1.0 (path+file:[..]/foo)"
+ },
+ "target_directory": "[..]/foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null
+}
+"#
+ .replace("$HOST_TRIPLE", host_target)
+ .replace("$CFG_DEP", cfg_dep)
+ .replace("$HOST_DEP", host_dep)
+ .replace("$NORMAL_DEP", normal_dep)
+ .replace("$FOO", &foo),
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dep_kinds() {
+ Package::new("bar", "0.1.0").publish();
+ Package::new("winapi", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+
+ [dev-dependencies]
+ bar = "0.1"
+
+ [build-dependencies]
+ bar = "0.1"
+
+ [target.'cfg(windows)'.dependencies]
+ winapi = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": "{...}",
+ "workspace_members": "{...}",
+ "target_directory": "{...}",
+ "version": 1,
+ "workspace_root": "{...}",
+ "metadata": null,
+ "resolve": {
+ "nodes": [
+ {
+ "id": "bar 0.1.0 [..]",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ },
+ {
+ "id": "foo 0.1.0 [..]",
+ "dependencies": [
+ "bar 0.1.0 [..]",
+ "winapi 0.1.0 [..]"
+ ],
+ "deps": [
+ {
+ "name": "bar",
+ "pkg": "bar 0.1.0 [..]",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ },
+ {
+ "kind": "dev",
+ "target": null
+ },
+ {
+ "kind": "build",
+ "target": null
+ }
+ ]
+ },
+ {
+ "name": "winapi",
+ "pkg": "winapi 0.1.0 [..]",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": "cfg(windows)"
+ }
+ ]
+ }
+ ],
+ "features": []
+ },
+ {
+ "id": "winapi 0.1.0 [..]",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ }
+ ],
+ "root": "foo 0.1.0 [..]"
+ }
+ }
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dep_kinds_workspace() {
+    // Check for a bug with duplicate dep kinds in a workspace.
+    // If different members select different features for the same package,
+    // the same package can show up multiple times in the resolver's `deps`.
+ //
+ // Here:
+ // foo -> dep
+ // bar -> foo[feat1] -> dep
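+    // In the expected JSON below, `foo` appears only once in `bar`'s `deps`,
+    // and the resolved feature set (`feat1`) is recorded on the `foo` node itself.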
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ feat1 = []
+
+ [dependencies]
+ dep = { path="dep" }
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ foo = { path="..", features=["feat1"] }
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file("dep/Cargo.toml", &basic_lib_manifest("dep"))
+ .file("dep/src/lib.rs", "")
+ .build();
+
+ p.cargo("metadata")
+ .with_json(
+ r#"
+ {
+ "packages": "{...}",
+ "workspace_members": "{...}",
+ "target_directory": "[..]/foo/target",
+ "version": 1,
+ "workspace_root": "[..]/foo",
+ "metadata": null,
+ "resolve": {
+ "nodes": [
+ {
+ "id": "bar 0.1.0 (path+file://[..]/foo/bar)",
+ "dependencies": [
+ "foo 0.1.0 (path+file://[..]/foo)"
+ ],
+ "deps": [
+ {
+ "name": "foo",
+ "pkg": "foo 0.1.0 (path+file://[..]/foo)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ]
+ }
+ ],
+ "features": []
+ },
+ {
+ "id": "dep 0.5.0 (path+file://[..]/foo/dep)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ },
+ {
+ "id": "foo 0.1.0 (path+file://[..]/foo)",
+ "dependencies": [
+ "dep 0.5.0 (path+file://[..]/foo/dep)"
+ ],
+ "deps": [
+ {
+ "name": "dep",
+ "pkg": "dep 0.5.0 (path+file://[..]/foo/dep)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ]
+ }
+ ],
+ "features": [
+ "feat1"
+ ]
+ }
+ ],
+ "root": "foo 0.1.0 (path+file://[..]/foo)"
+ }
+ }
+ "#,
+ )
+ .run();
+}
+
+// Creating a non-UTF-8 path is an OS-specific pain, so let's run this only on
+// Linux, where arbitrary bytes work.
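+// The single `0xFF` byte used below is never valid UTF-8, so `cargo metadata`
+// is expected to reject the path with the error asserted in the test.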
+#[cfg(target_os = "linux")]
+#[cargo_test]
+fn cargo_metadata_non_utf8() {
+ use std::ffi::OsString;
+ use std::os::unix::ffi::OsStringExt;
+ use std::path::PathBuf;
+
+ let base = PathBuf::from(OsString::from_vec(vec![255]));
+
+ let p = project()
+ .no_manifest()
+ .file(base.join("./src/lib.rs"), "")
+ .file(base.join("./Cargo.toml"), &basic_lib_manifest("foo"))
+ .build();
+
+ p.cargo("metadata")
+ .cwd(p.root().join(base))
+ .arg("--format-version")
+ .arg("1")
+ .with_stderr("error: path contains invalid UTF-8 characters")
+ .with_status(101)
+ .run();
+}
+
+// TODO: Consider using this test instead of the version without the 'artifact' suffix, or merge the two, since they should be pretty much the same.
+#[cargo_test]
+fn workspace_metadata_with_dependencies_no_deps_artifact() {
+ let p = project()
+        // NOTE that 'artifact' isn't mentioned in the workspace here, yet it shows up as a member.
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies]
+ baz = { path = "../baz/" }
+ baz-renamed = { path = "../baz/" }
+ artifact = { path = "../artifact/", artifact = "bin" }
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+ .file("baz/src/lib.rs", "")
+ .file("artifact/Cargo.toml", &basic_bin_manifest("artifact"))
+ .file("artifact/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("metadata --no-deps -Z bindeps")
+ .masquerade_as_nightly_cargo(&["bindeps"])
+ .with_json(
+ r#"
+ {
+ "metadata": null,
+ "packages": [
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [
+ {
+ "artifact": {
+ "kinds": [
+ "bin"
+ ],
+ "lib": false,
+ "target": null
+ },
+ "features": [],
+ "kind": null,
+ "name": "artifact",
+ "optional": false,
+ "path": "[..]/foo/artifact",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "features": [],
+ "kind": null,
+ "name": "baz",
+ "optional": false,
+ "path": "[..]/foo/baz",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ },
+ {
+ "features": [],
+ "kind": null,
+ "name": "baz-renamed",
+ "optional": false,
+ "path": "[..]/foo/baz",
+ "registry": null,
+ "rename": null,
+ "req": "*",
+ "source": null,
+ "target": null,
+ "uses_default_features": true
+ }
+ ],
+ "description": null,
+ "documentation": null,
+ "edition": "2015",
+ "features": {},
+ "homepage": null,
+ "id": "bar 0.5.0 (path+file://[..]/foo/bar)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]/foo/bar/Cargo.toml",
+ "metadata": null,
+ "name": "bar",
+ "publish": null,
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "bar",
+ "src_path": "[..]/foo/bar/src/lib.rs",
+ "test": true
+ }
+ ],
+ "version": "0.5.0"
+ },
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "documentation": null,
+ "edition": "2015",
+ "features": {},
+ "homepage": null,
+ "id": "artifact 0.5.0 (path+file://[..]/foo/artifact)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]/foo/artifact/Cargo.toml",
+ "metadata": null,
+ "name": "artifact",
+ "publish": null,
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "bin"
+ ],
+ "doc": true,
+ "doctest": false,
+ "edition": "2015",
+ "kind": [
+ "bin"
+ ],
+ "name": "artifact",
+ "src_path": "[..]/foo/artifact/src/main.rs",
+ "test": true
+ }
+ ],
+ "version": "0.5.0"
+ },
+ {
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "documentation": null,
+ "edition": "2015",
+ "features": {},
+ "homepage": null,
+ "id": "baz 0.5.0 (path+file://[..]/foo/baz)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]/foo/baz/Cargo.toml",
+ "metadata": null,
+ "name": "baz",
+ "publish": null,
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "baz",
+ "src_path": "[..]/foo/baz/src/lib.rs",
+ "test": true
+ }
+ ],
+ "version": "0.5.0"
+ }
+ ],
+ "resolve": null,
+ "target_directory": "[..]/foo/target",
+ "version": 1,
+ "workspace_members": [
+ "bar 0.5.0 (path+file://[..]/foo/bar)",
+ "artifact 0.5.0 (path+file://[..]/foo/artifact)",
+ "baz 0.5.0 (path+file://[..]/foo/baz)"
+ ],
+ "workspace_root": "[..]/foo"
+ }
+"#,
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/minimal_versions.rs b/src/tools/cargo/tests/testsuite/minimal_versions.rs
new file mode 100644
index 000000000..f814dcb70
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/minimal_versions.rs
@@ -0,0 +1,38 @@
+//! Tests for minimal-version resolution.
+//!
+//! Note: Some tests are located in the resolver-tests package.
+
+use cargo_test_support::project;
+use cargo_test_support::registry::Package;
+
+// Ensure that the "-Z minimal-versions" CLI option works and the minimal
+// version of a dependency ends up in the lock file.
+#[cargo_test]
+fn minimal_version_cli() {
+ Package::new("dep", "1.0.0").publish();
+ Package::new("dep", "1.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+
+ [dependencies]
+ dep = "1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("generate-lockfile -Zminimal-versions")
+ .masquerade_as_nightly_cargo(&["minimal-versions"])
+ .run();
+
+ let lock = p.read_lockfile();
+
+ assert!(!lock.contains("1.1.0"));
+}
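The test above only checks that the newer `dep` release is absent from the lock file. As a side note, a slightly stricter sketch of the same assertion, reusing the project `p` and the two `dep` versions published above, could also check for the minimal version positively:

    let lock = p.read_lockfile();
    // With `-Zminimal-versions` the resolver should pick 1.0.0, the lowest
    // release that still satisfies the `dep = "1.0"` requirement.
    assert!(lock.contains("1.0.0"));
    assert!(!lock.contains("1.1.0"));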
diff --git a/src/tools/cargo/tests/testsuite/mock-std/Cargo.toml b/src/tools/cargo/tests/testsuite/mock-std/Cargo.toml
new file mode 100644
index 000000000..a69aa4b88
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/Cargo.toml
@@ -0,0 +1,8 @@
+[workspace]
+members = [
+ "library/alloc",
+ "library/core",
+ "library/proc_macro",
+ "library/std",
+ "library/test",
+]
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/alloc/Cargo.toml b/src/tools/cargo/tests/testsuite/mock-std/library/alloc/Cargo.toml
new file mode 100644
index 000000000..dc965abff
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/alloc/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "alloc"
+version = "0.1.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+edition = "2018"
+
+[dependencies]
+registry-dep-using-core = { version = "*", features = ['mockbuild'] }
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/alloc/src/lib.rs b/src/tools/cargo/tests/testsuite/mock-std/library/alloc/src/lib.rs
new file mode 100644
index 000000000..823716e40
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/alloc/src/lib.rs
@@ -0,0 +1,11 @@
+#![feature(staged_api)]
+#![stable(since = "1.0.0", feature = "dummy")]
+
+extern crate alloc;
+
+#[stable(since = "1.0.0", feature = "dummy")]
+pub use alloc::*;
+
+#[stable(since = "1.0.0", feature = "dummy")]
+pub fn custom_api() {
+}
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/compiler_builtins/Cargo.toml b/src/tools/cargo/tests/testsuite/mock-std/library/compiler_builtins/Cargo.toml
new file mode 100644
index 000000000..d1df281d6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/compiler_builtins/Cargo.toml
@@ -0,0 +1,5 @@
+[package]
+name = "compiler_builtins"
+version = "0.1.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+edition = "2018"
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/compiler_builtins/src/lib.rs b/src/tools/cargo/tests/testsuite/mock-std/library/compiler_builtins/src/lib.rs
new file mode 100644
index 000000000..65e2cc340
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/compiler_builtins/src/lib.rs
@@ -0,0 +1 @@
+// intentionally blank
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/core/Cargo.toml b/src/tools/cargo/tests/testsuite/mock-std/library/core/Cargo.toml
new file mode 100644
index 000000000..3f7de53db
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/core/Cargo.toml
@@ -0,0 +1,5 @@
+[package]
+name = "core"
+version = "0.1.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+edition = "2018"
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/core/src/lib.rs b/src/tools/cargo/tests/testsuite/mock-std/library/core/src/lib.rs
new file mode 100644
index 000000000..b90ed0914
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/core/src/lib.rs
@@ -0,0 +1,9 @@
+#![feature(staged_api)]
+#![stable(since = "1.0.0", feature = "dummy")]
+
+#[stable(since = "1.0.0", feature = "dummy")]
+pub use core::*;
+
+#[stable(since = "1.0.0", feature = "dummy")]
+pub fn custom_api() {
+}
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/panic_unwind/Cargo.toml b/src/tools/cargo/tests/testsuite/mock-std/library/panic_unwind/Cargo.toml
new file mode 100644
index 000000000..e7beb923f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/panic_unwind/Cargo.toml
@@ -0,0 +1,5 @@
+[package]
+name = "panic_unwind"
+version = "0.1.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+edition = "2018"
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/panic_unwind/src/lib.rs b/src/tools/cargo/tests/testsuite/mock-std/library/panic_unwind/src/lib.rs
new file mode 100644
index 000000000..6af65d875
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/panic_unwind/src/lib.rs
@@ -0,0 +1,5 @@
+#![feature(panic_unwind, panic_runtime)]
+#![panic_runtime]
+#![no_std]
+
+extern crate panic_unwind;
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/proc_macro/Cargo.toml b/src/tools/cargo/tests/testsuite/mock-std/library/proc_macro/Cargo.toml
new file mode 100644
index 000000000..939a113b2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/proc_macro/Cargo.toml
@@ -0,0 +1,5 @@
+[package]
+name = "proc_macro"
+version = "0.1.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+edition = "2018"
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/proc_macro/src/lib.rs b/src/tools/cargo/tests/testsuite/mock-std/library/proc_macro/src/lib.rs
new file mode 100644
index 000000000..82a768406
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/proc_macro/src/lib.rs
@@ -0,0 +1,11 @@
+#![feature(staged_api)]
+#![stable(since = "1.0.0", feature = "dummy")]
+
+extern crate proc_macro;
+
+#[stable(since = "1.0.0", feature = "dummy")]
+pub use proc_macro::*;
+
+#[stable(since = "1.0.0", feature = "dummy")]
+pub fn custom_api() {
+}
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-alloc/Cargo.toml b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-alloc/Cargo.toml
new file mode 100644
index 000000000..6b86f22ca
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-alloc/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "rustc-std-workspace-alloc"
+version = "1.9.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+edition = "2018"
+
+[lib]
+path = "lib.rs"
+
+[dependencies]
+alloc = { path = "../alloc" }
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-alloc/lib.rs b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-alloc/lib.rs
new file mode 100644
index 000000000..2bbfa1a49
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-alloc/lib.rs
@@ -0,0 +1,3 @@
+#![no_std]
+
+pub use alloc::*;
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-core/Cargo.toml b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-core/Cargo.toml
new file mode 100644
index 000000000..8d1921600
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-core/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "rustc-std-workspace-core"
+version = "1.9.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+edition = "2018"
+
+[lib]
+path = "lib.rs"
+
+[dependencies]
+core = { path = "../core" }
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-core/lib.rs b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-core/lib.rs
new file mode 100644
index 000000000..816251790
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-core/lib.rs
@@ -0,0 +1,3 @@
+#![no_std]
+
+pub use core::*;
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-std/Cargo.toml b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-std/Cargo.toml
new file mode 100644
index 000000000..91572b815
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-std/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "rustc-std-workspace-std"
+version = "1.9.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+edition = "2018"
+
+[lib]
+path = "lib.rs"
+
+[dependencies]
+std = { path = "../std" }
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-std/lib.rs b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-std/lib.rs
new file mode 100644
index 000000000..f40d09caf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/rustc-std-workspace-std/lib.rs
@@ -0,0 +1 @@
+pub use std::*;
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/std/Cargo.toml b/src/tools/cargo/tests/testsuite/mock-std/library/std/Cargo.toml
new file mode 100644
index 000000000..d2cfdea39
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/std/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "std"
+version = "0.1.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+edition = "2018"
+
+[dependencies]
+registry-dep-using-alloc = { version = "*", features = ['mockbuild'] }
+
+[features]
+feature1 = []
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/std/src/lib.rs b/src/tools/cargo/tests/testsuite/mock-std/library/std/src/lib.rs
new file mode 100644
index 000000000..146d4c42c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/std/src/lib.rs
@@ -0,0 +1,12 @@
+#![feature(staged_api)]
+#![stable(since = "1.0.0", feature = "dummy")]
+
+#[stable(since = "1.0.0", feature = "dummy")]
+pub use std::*;
+
+#[stable(since = "1.0.0", feature = "dummy")]
+pub fn custom_api() {}
+
+#[cfg(feature = "feature1")]
+#[stable(since = "1.0.0", feature = "dummy")]
+pub fn conditional_function() {}
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/test/Cargo.toml b/src/tools/cargo/tests/testsuite/mock-std/library/test/Cargo.toml
new file mode 100644
index 000000000..299db7bfd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/test/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "test"
+version = "0.1.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+edition = "2018"
+
+[dependencies]
+proc_macro = { path = "../proc_macro" }
+std = { path = "../std" }
+panic_unwind = { path = "../panic_unwind" }
+compiler_builtins = { path = "../compiler_builtins" }
+registry-dep-using-std = { version = "*", features = ['mockbuild'] }
+
+[features]
+panic-unwind = []
+backtrace = []
+feature1 = ["std/feature1"]
+default = []
diff --git a/src/tools/cargo/tests/testsuite/mock-std/library/test/src/lib.rs b/src/tools/cargo/tests/testsuite/mock-std/library/test/src/lib.rs
new file mode 100644
index 000000000..a112855f5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/mock-std/library/test/src/lib.rs
@@ -0,0 +1,10 @@
+#![feature(staged_api)]
+#![feature(test)]
+#![unstable(feature = "test", issue = "none")]
+
+extern crate test;
+
+pub use test::*;
+
+pub fn custom_api() {
+}
diff --git a/src/tools/cargo/tests/testsuite/multitarget.rs b/src/tools/cargo/tests/testsuite/multitarget.rs
new file mode 100644
index 000000000..5f3543f01
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/multitarget.rs
@@ -0,0 +1,231 @@
+//! Tests for multiple `--target` flags to subcommands
+
+use cargo_test_support::{basic_manifest, cross_compile, project, rustc_host};
+
+#[cargo_test]
+fn simple_build() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let t1 = cross_compile::alternate();
+ let t2 = rustc_host();
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build")
+ .arg("--target")
+ .arg(&t1)
+ .arg("--target")
+ .arg(&t2)
+ .run();
+
+ assert!(p.target_bin(t1, "foo").is_file());
+ assert!(p.target_bin(t2, "foo").is_file());
+}
+
+#[cargo_test]
+fn simple_build_with_config() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let t1 = cross_compile::alternate();
+ let t2 = rustc_host();
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config.toml",
+ &format!(
+ r#"
+ [build]
+ target = ["{t1}", "{t2}"]
+ "#
+ ),
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.target_bin(t1, "foo").is_file());
+ assert!(p.target_bin(t2, "foo").is_file());
+}
+
+#[cargo_test]
+fn simple_test() {
+ if !cross_compile::can_run_on_host() {
+ return;
+ }
+ let t1 = cross_compile::alternate();
+ let t2 = rustc_host();
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/lib.rs", "fn main() {}")
+ .build();
+
+ p.cargo("test")
+ .arg("--target")
+ .arg(&t1)
+ .arg("--target")
+ .arg(&t2)
+ .with_stderr_contains(&format!("[RUNNING] [..]{}[..]", t1))
+ .with_stderr_contains(&format!("[RUNNING] [..]{}[..]", t2))
+ .run();
+}
+
+#[cargo_test]
+fn simple_run() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("run --target a --target b")
+ .with_stderr("[ERROR] only one `--target` argument is supported")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn simple_doc() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let t1 = cross_compile::alternate();
+ let t2 = rustc_host();
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/lib.rs", "//! empty lib")
+ .build();
+
+ p.cargo("doc")
+ .arg("--target")
+ .arg(&t1)
+ .arg("--target")
+ .arg(&t2)
+ .run();
+
+ assert!(p.build_dir().join(&t1).join("doc/foo/index.html").is_file());
+ assert!(p.build_dir().join(&t2).join("doc/foo/index.html").is_file());
+}
+
+#[cargo_test]
+fn simple_check() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let t1 = cross_compile::alternate();
+ let t2 = rustc_host();
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .arg("--target")
+ .arg(&t1)
+ .arg("--target")
+ .arg(&t2)
+ .run();
+}
+
+#[cargo_test]
+fn same_value_twice() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let t = rustc_host();
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build")
+ .arg("--target")
+ .arg(&t)
+ .arg("--target")
+ .arg(&t)
+ .run();
+
+ assert!(p.target_bin(t, "foo").is_file());
+}
+
+#[cargo_test]
+fn same_value_twice_with_config() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let t = rustc_host();
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config.toml",
+ &format!(
+ r#"
+ [build]
+ target = ["{t}", "{t}"]
+ "#
+ ),
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.target_bin(t, "foo").is_file());
+}
+
+#[cargo_test]
+fn works_with_config_in_both_string_or_list() {
+ if cross_compile::disabled() {
+ return;
+ }
+ let t = rustc_host();
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config.toml",
+ &format!(
+ r#"
+ [build]
+ target = "{t}"
+ "#
+ ),
+ )
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.target_bin(t, "foo").is_file());
+
+ p.cargo("clean").run();
+
+ p.change_file(
+ ".cargo/config.toml",
+ &format!(
+ r#"
+ [build]
+ target = ["{t}"]
+ "#
+ ),
+ );
+
+ p.cargo("build").run();
+
+ assert!(p.target_bin(t, "foo").is_file());
+}
+
+#[cargo_test]
+fn works_with_env() {
+ let t = rustc_host();
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").env("CARGO_BUILD_TARGET", t).run();
+
+ assert!(p.target_bin(t, "foo").is_file());
+}
diff --git a/src/tools/cargo/tests/testsuite/net_config.rs b/src/tools/cargo/tests/testsuite/net_config.rs
new file mode 100644
index 000000000..569ec552c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/net_config.rs
@@ -0,0 +1,74 @@
+//! Tests for network configuration.
+
+use cargo_test_support::project;
+
+#[cargo_test]
+fn net_retry_loads_from_config() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ git = "http://127.0.0.1:11/foo/bar"
+ "#,
+ )
+ .file("src/main.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [net]
+ retry=1
+ [http]
+ timeout=1
+ "#,
+ )
+ .build();
+
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr_contains(
+ "[WARNING] spurious network error \
+ (1 tries remaining): [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn net_retry_git_outputs_warning() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ git = "http://127.0.0.1:11/foo/bar"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [http]
+ timeout=1
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check -v -j 1")
+ .with_status(101)
+ .with_stderr_contains(
+ "[WARNING] spurious network error \
+ (2 tries remaining): [..]",
+ )
+ .with_stderr_contains("[WARNING] spurious network error (1 tries remaining): [..]")
+ .run();
+}
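Both tests above configure `net.retry` and `http.timeout` through `.cargo/config`. A hedged sketch of an equivalent check driven by environment variables instead, assuming the usual `CARGO_NET_RETRY` mapping for `net.retry` and reusing the unreachable git URL from the project above:

    // The retry count comes from the environment rather than .cargo/config;
    // the warning text is the same one asserted above.
    p.cargo("check -v")
        .env("CARGO_NET_RETRY", "1")
        .with_status(101)
        .with_stderr_contains("[WARNING] spurious network error (1 tries remaining): [..]")
        .run();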
diff --git a/src/tools/cargo/tests/testsuite/new.rs b/src/tools/cargo/tests/testsuite/new.rs
new file mode 100644
index 000000000..b9ddcf2d7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/new.rs
@@ -0,0 +1,560 @@
+//! Tests for the `cargo new` command.
+
+use cargo_test_support::cargo_process;
+use cargo_test_support::paths;
+use std::env;
+use std::fs::{self, File};
+
+fn create_default_gitconfig() {
+ // This helps on Windows where libgit2 is very aggressive in attempting to
+ // find a git config file.
+ let gitconfig = paths::home().join(".gitconfig");
+ File::create(gitconfig).unwrap();
+
+ // If we're running this under a user account that has a different default branch set up,
+ // then tests that assume the default branch is master will fail. We set the default branch
+ // to master explicitly so that tests that rely on this behavior still pass.
+ fs::write(
+ paths::home().join(".gitconfig"),
+ r#"
+ [init]
+ defaultBranch = master
+ "#,
+ )
+ .unwrap();
+}
+
+#[cargo_test]
+fn simple_lib() {
+ cargo_process("new --lib foo --vcs none --edition 2015")
+ .with_stderr("[CREATED] library `foo` package")
+ .run();
+
+ assert!(paths::root().join("foo").is_dir());
+ assert!(paths::root().join("foo/Cargo.toml").is_file());
+ assert!(paths::root().join("foo/src/lib.rs").is_file());
+ assert!(!paths::root().join("foo/.gitignore").is_file());
+
+ let lib = paths::root().join("foo/src/lib.rs");
+ let contents = fs::read_to_string(&lib).unwrap();
+ assert_eq!(
+ contents,
+ r#"pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn it_works() {
+ let result = add(2, 2);
+ assert_eq!(result, 4);
+ }
+}
+"#
+ );
+
+ cargo_process("build").cwd(&paths::root().join("foo")).run();
+}
+
+#[cargo_test]
+fn simple_bin() {
+ cargo_process("new --bin foo --edition 2015")
+ .with_stderr("[CREATED] binary (application) `foo` package")
+ .run();
+
+ assert!(paths::root().join("foo").is_dir());
+ assert!(paths::root().join("foo/Cargo.toml").is_file());
+ assert!(paths::root().join("foo/src/main.rs").is_file());
+
+ cargo_process("build").cwd(&paths::root().join("foo")).run();
+ assert!(paths::root()
+ .join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX))
+ .is_file());
+}
+
+#[cargo_test]
+fn both_lib_and_bin() {
+ cargo_process("new --lib --bin foo")
+ .with_status(101)
+ .with_stderr("[ERROR] can't specify both lib and binary outputs")
+ .run();
+}
+
+#[cargo_test]
+fn simple_git() {
+ cargo_process("new --lib foo --edition 2015").run();
+
+ assert!(paths::root().is_dir());
+ assert!(paths::root().join("foo/Cargo.toml").is_file());
+ assert!(paths::root().join("foo/src/lib.rs").is_file());
+ assert!(paths::root().join("foo/.git").is_dir());
+ assert!(paths::root().join("foo/.gitignore").is_file());
+
+ let fp = paths::root().join("foo/.gitignore");
+ let contents = fs::read_to_string(&fp).unwrap();
+ assert_eq!(contents, "/target\n/Cargo.lock\n",);
+
+ cargo_process("build").cwd(&paths::root().join("foo")).run();
+}
+
+#[cargo_test(requires_hg)]
+fn simple_hg() {
+ cargo_process("new --lib foo --edition 2015 --vcs hg").run();
+
+ assert!(paths::root().is_dir());
+ assert!(paths::root().join("foo/Cargo.toml").is_file());
+ assert!(paths::root().join("foo/src/lib.rs").is_file());
+ assert!(paths::root().join("foo/.hg").is_dir());
+ assert!(paths::root().join("foo/.hgignore").is_file());
+
+ let fp = paths::root().join("foo/.hgignore");
+ let contents = fs::read_to_string(&fp).unwrap();
+ assert_eq!(contents, "^target$\n^Cargo.lock$\n",);
+
+ cargo_process("build").cwd(&paths::root().join("foo")).run();
+}
+
+#[cargo_test]
+fn no_argument() {
+ cargo_process("new")
+ .with_status(1)
+ .with_stderr_contains(
+ "\
+error: the following required arguments were not provided:
+ <path>
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn existing() {
+ let dst = paths::root().join("foo");
+ fs::create_dir(&dst).unwrap();
+ cargo_process("new foo")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] destination `[CWD]/foo` already exists\n\n\
+ Use `cargo init` to initialize the directory",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_characters() {
+ cargo_process("new foo.rs")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] invalid character `.` in package name: `foo.rs`, [..]
+If you need a package name to not match the directory name, consider using --name flag.
+If you need a binary with the name \"foo.rs\", use a valid package name, \
+and set the binary name to be different from the package. \
+This can be done by setting the binary filename to `src/bin/foo.rs.rs` \
+or change the name in Cargo.toml with:
+
+ [[bin]]
+ name = \"foo.rs\"
+ path = \"src/main.rs\"
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn reserved_name() {
+ cargo_process("new test")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] the name `test` cannot be used as a package name, it conflicts [..]
+If you need a package name to not match the directory name, consider using --name flag.
+If you need a binary with the name \"test\", use a valid package name, \
+and set the binary name to be different from the package. \
+This can be done by setting the binary filename to `src/bin/test.rs` \
+or change the name in Cargo.toml with:
+
+ [[bin]]
+ name = \"test\"
+ path = \"src/main.rs\"
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn reserved_binary_name() {
+ cargo_process("new --bin incremental")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] the name `incremental` cannot be used as a package name, it conflicts [..]
+If you need a package name to not match the directory name, consider using --name flag.
+",
+ )
+ .run();
+
+ cargo_process("new --lib incremental")
+ .with_stderr(
+ "\
+[WARNING] the name `incremental` will not support binary executables with that name, \
+it conflicts with cargo's build directory names
+[CREATED] library `incremental` package
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn keyword_name() {
+ cargo_process("new pub")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] the name `pub` cannot be used as a package name, it is a Rust keyword
+If you need a package name to not match the directory name, consider using --name flag.
+If you need a binary with the name \"pub\", use a valid package name, \
+and set the binary name to be different from the package. \
+This can be done by setting the binary filename to `src/bin/pub.rs` \
+or change the name in Cargo.toml with:
+
+ [[bin]]
+ name = \"pub\"
+ path = \"src/main.rs\"
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn std_name() {
+ cargo_process("new core")
+ .with_stderr(
+ "\
+[WARNING] the name `core` is part of Rust's standard library
+It is recommended to use a different name to avoid problems.
+If you need a package name to not match the directory name, consider using --name flag.
+If you need a binary with the name \"core\", use a valid package name, \
+and set the binary name to be different from the package. \
+This can be done by setting the binary filename to `src/bin/core.rs` \
+or change the name in Cargo.toml with:
+
+ [[bin]]
+ name = \"core\"
+ path = \"src/main.rs\"
+
+[CREATED] binary (application) `core` package
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn git_prefers_command_line() {
+ let root = paths::root();
+ fs::create_dir(&root.join(".cargo")).unwrap();
+ fs::write(
+ &root.join(".cargo/config"),
+ r#"
+ [cargo-new]
+ vcs = "none"
+ name = "foo"
+ email = "bar"
+ "#,
+ )
+ .unwrap();
+
+ cargo_process("new foo --vcs git").run();
+ assert!(paths::root().join("foo/.gitignore").exists());
+ assert!(!fs::read_to_string(paths::root().join("foo/Cargo.toml"))
+ .unwrap()
+ .contains("authors ="));
+}
+
+#[cargo_test]
+fn subpackage_no_git() {
+ cargo_process("new foo").run();
+
+ assert!(paths::root().join("foo/.git").is_dir());
+ assert!(paths::root().join("foo/.gitignore").is_file());
+
+ let subpackage = paths::root().join("foo").join("components");
+ fs::create_dir(&subpackage).unwrap();
+ cargo_process("new foo/components/subcomponent").run();
+
+ assert!(!paths::root()
+ .join("foo/components/subcomponent/.git")
+ .is_file());
+ assert!(!paths::root()
+ .join("foo/components/subcomponent/.gitignore")
+ .is_file());
+}
+
+#[cargo_test]
+fn subpackage_git_with_gitignore() {
+ cargo_process("new foo").run();
+
+ assert!(paths::root().join("foo/.git").is_dir());
+ assert!(paths::root().join("foo/.gitignore").is_file());
+
+ let gitignore = paths::root().join("foo/.gitignore");
+ fs::write(gitignore, b"components").unwrap();
+
+ let subpackage = paths::root().join("foo/components");
+ fs::create_dir(&subpackage).unwrap();
+ cargo_process("new foo/components/subcomponent").run();
+
+ assert!(paths::root()
+ .join("foo/components/subcomponent/.git")
+ .is_dir());
+ assert!(paths::root()
+ .join("foo/components/subcomponent/.gitignore")
+ .is_file());
+}
+
+#[cargo_test]
+fn subpackage_git_with_vcs_arg() {
+ cargo_process("new foo").run();
+
+ let subpackage = paths::root().join("foo").join("components");
+ fs::create_dir(&subpackage).unwrap();
+ cargo_process("new foo/components/subcomponent --vcs git").run();
+
+ assert!(paths::root()
+ .join("foo/components/subcomponent/.git")
+ .is_dir());
+ assert!(paths::root()
+ .join("foo/components/subcomponent/.gitignore")
+ .is_file());
+}
+
+#[cargo_test]
+fn unknown_flags() {
+ cargo_process("new foo --flag")
+ .with_status(1)
+ .with_stderr_contains("error: unexpected argument '--flag' found")
+ .run();
+}
+
+#[cargo_test]
+fn explicit_invalid_name_not_suggested() {
+ cargo_process("new --name 10-invalid a")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] the name `10-invalid` cannot be used as a package name, \
+the name cannot start with a digit\n\
+If you need a binary with the name \"10-invalid\", use a valid package name, \
+and set the binary name to be different from the package. \
+This can be done by setting the binary filename to `src/bin/10-invalid.rs` \
+or change the name in Cargo.toml with:
+
+ [[bin]]
+ name = \"10-invalid\"
+ path = \"src/main.rs\"
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn explicit_project_name() {
+ cargo_process("new --lib foo --name bar")
+ .with_stderr("[CREATED] library `bar` package")
+ .run();
+}
+
+#[cargo_test]
+fn new_with_edition_2015() {
+ cargo_process("new --edition 2015 foo").run();
+ let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap();
+ assert!(manifest.contains("edition = \"2015\""));
+}
+
+#[cargo_test]
+fn new_with_edition_2018() {
+ cargo_process("new --edition 2018 foo").run();
+ let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap();
+ assert!(manifest.contains("edition = \"2018\""));
+}
+
+#[cargo_test]
+fn new_default_edition() {
+ cargo_process("new foo").run();
+ let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap();
+ assert!(manifest.contains("edition = \"2021\""));
+}
+
+#[cargo_test]
+fn new_with_bad_edition() {
+ cargo_process("new --edition something_else foo")
+ .with_stderr_contains("error: invalid value 'something_else' for '--edition <YEAR>'")
+ .with_status(1)
+ .run();
+}
+
+#[cargo_test]
+fn new_with_reference_link() {
+ cargo_process("new foo").run();
+
+ let contents = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap();
+ assert!(contents.contains("# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html"))
+}
+
+#[cargo_test]
+fn lockfile_constant_during_new() {
+ cargo_process("new foo").run();
+
+ cargo_process("build").cwd(&paths::root().join("foo")).run();
+ let before = fs::read_to_string(paths::root().join("foo/Cargo.lock")).unwrap();
+ cargo_process("build").cwd(&paths::root().join("foo")).run();
+ let after = fs::read_to_string(paths::root().join("foo/Cargo.lock")).unwrap();
+ assert_eq!(before, after);
+}
+
+#[cargo_test]
+fn restricted_windows_name() {
+ if cfg!(windows) {
+ cargo_process("new nul")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] cannot use name `nul`, it is a reserved Windows filename
+If you need a package name to not match the directory name, consider using --name flag.
+",
+ )
+ .run();
+ } else {
+ cargo_process("new nul")
+ .with_stderr(
+ "\
+[WARNING] the name `nul` is a reserved Windows filename
+This package will not work on Windows platforms.
+[CREATED] binary (application) `nul` package
+",
+ )
+ .run();
+ }
+}
+
+#[cargo_test]
+fn non_ascii_name() {
+ cargo_process("new Привет")
+ .with_stderr(
+ "\
+[WARNING] the name `Привет` contains non-ASCII characters
+Non-ASCII crate names are not supported by Rust.
+[CREATED] binary (application) `Привет` package
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn non_ascii_name_invalid() {
+ // These are alphanumeric characters, but not Unicode XID.
+ cargo_process("new ⒶⒷⒸ")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] invalid character `Ⓐ` in package name: `ⒶⒷⒸ`, \
+the first character must be a Unicode XID start character (most letters or `_`)
+If you need a package name to not match the directory name, consider using --name flag.
+If you need a binary with the name \"ⒶⒷⒸ\", use a valid package name, \
+and set the binary name to be different from the package. \
+This can be done by setting the binary filename to `src/bin/ⒶⒷⒸ.rs` \
+or change the name in Cargo.toml with:
+
+ [[bin]]
+ name = \"ⒶⒷⒸ\"
+ path = \"src/main.rs\"
+
+",
+ )
+ .run();
+
+ cargo_process("new a¼")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] invalid character `¼` in package name: `a¼`, \
+characters must be Unicode XID characters (numbers, `-`, `_`, or most letters)
+If you need a package name to not match the directory name, consider using --name flag.
+If you need a binary with the name \"a¼\", use a valid package name, \
+and set the binary name to be different from the package. \
+This can be done by setting the binary filename to `src/bin/a¼.rs` \
+or change the name in Cargo.toml with:
+
+ [[bin]]
+ name = \"a¼\"
+ path = \"src/main.rs\"
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn git_default_branch() {
+ // Check for init.defaultBranch support.
+ create_default_gitconfig();
+
+ cargo_process("new foo").run();
+ let repo = git2::Repository::open(paths::root().join("foo")).unwrap();
+ let head = repo.find_reference("HEAD").unwrap();
+ assert_eq!(head.symbolic_target().unwrap(), "refs/heads/master");
+
+ fs::write(
+ paths::home().join(".gitconfig"),
+ r#"
+ [init]
+ defaultBranch = hello
+ "#,
+ )
+ .unwrap();
+ cargo_process("new bar").run();
+ let repo = git2::Repository::open(paths::root().join("bar")).unwrap();
+ let head = repo.find_reference("HEAD").unwrap();
+ assert_eq!(head.symbolic_target().unwrap(), "refs/heads/hello");
+}
+
+#[cargo_test]
+fn non_utf8_str_in_ignore_file() {
+ let gitignore = paths::home().join(".gitignore");
+ File::create(gitignore).unwrap();
+
+ fs::write(paths::home().join(".gitignore"), &[0xFF, 0xFE]).unwrap();
+
+ cargo_process(&format!("init {} --vcs git", paths::home().display()))
+ .with_status(101)
+ .with_stderr(
+ "\
+error: Failed to create package `home` at `[..]`
+
+Caused by:
+ Character at line 0 is invalid. Cargo only supports UTF-8.
+",
+ )
+ .run();
+}
+
+#[cfg(unix)]
+#[cargo_test]
+fn path_with_invalid_character() {
+ cargo_process("new --name testing test:ing")
+ .with_stderr(
+ "\
+[WARNING] the path `[CWD]/test:ing` contains invalid PATH characters (usually `:`, `;`, or `\"`)
+It is recommended to use a different name to avoid problems.
+[CREATED] binary (application) `testing` package
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/offline.rs b/src/tools/cargo/tests/testsuite/offline.rs
new file mode 100644
index 000000000..fe54fc59d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/offline.rs
@@ -0,0 +1,728 @@
+//! Tests for the `--offline` flag.
+
+use cargo_test_support::{
+ basic_manifest, git, main_file, path2url, project,
+ registry::{Package, RegistryBuilder},
+};
+use std::fs;
+
+#[cargo_test]
+fn offline_unused_target_dep() {
+ // --offline with a target dependency that is not used and not downloaded.
+ Package::new("unused_dep", "1.0.0").publish();
+ Package::new("used_dep", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [dependencies]
+ used_dep = "1.0"
+ [target.'cfg(unused)'.dependencies]
+ unused_dep = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ // Do a build that downloads only what is necessary.
+ p.cargo("check")
+ .with_stderr_contains("[DOWNLOADED] used_dep [..]")
+ .with_stderr_does_not_contain("[DOWNLOADED] unused_dep [..]")
+ .run();
+ p.cargo("clean").run();
+ // Build offline, make sure it works.
+ p.cargo("check --offline").run();
+}
+
+#[cargo_test]
+fn offline_missing_optional() {
+ Package::new("opt_dep", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [dependencies]
+ opt_dep = { version = "1.0", optional = true }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ // Do a build that downloads only what is necessary.
+ p.cargo("check")
+ .with_stderr_does_not_contain("[DOWNLOADED] opt_dep [..]")
+ .run();
+ p.cargo("clean").run();
+ // Build offline, make sure it works.
+ p.cargo("check --offline").run();
+ p.cargo("check --offline --features=opt_dep")
+ .with_stderr(
+ "\
+[ERROR] failed to download `opt_dep v1.0.0`
+
+Caused by:
+ attempting to make an HTTP request, but --offline was specified
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_path_with_offline() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check --offline").run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_downloaded_dependency_with_offline() {
+ Package::new("present_dep", "1.2.3")
+ .file("Cargo.toml", &basic_manifest("present_dep", "1.2.3"))
+ .file("src/lib.rs", "")
+ .publish();
+
+ // Make sure the package gets downloaded into the cache.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ present_dep = "1.2.3"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check").run();
+
+ let p2 = project()
+ .at("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ present_dep = "1.2.3"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p2.cargo("check --offline")
+ .with_stderr(
+ "\
+[CHECKING] present_dep v1.2.3
+[CHECKING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_offline_not_try_update() {
+ // When --offline needs to download the registry, provide a reasonable
+ // error hint to run without --offline.
+ let p = project()
+ .at("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ not_cached_dep = "1.2.5"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let msg = "\
+[ERROR] no matching package named `not_cached_dep` found
+location searched: registry `crates-io`
+required by package `bar v0.1.0 ([..]/bar)`
+As a reminder, you're using offline mode (--offline) which can sometimes cause \
+surprising resolution failures, if this error is too confusing you may wish to \
+retry without the offline flag.
+";
+
+ p.cargo("check --offline")
+ .with_status(101)
+ .with_stderr(msg)
+ .run();
+
+ // While we're here, also check the config works.
+ p.change_file(".cargo/config", "net.offline = true");
+ p.cargo("check").with_status(101).with_stderr(msg).run();
+}
+
+#[cargo_test]
+fn compile_offline_without_maxvers_cached() {
+ Package::new("present_dep", "1.2.1").publish();
+ Package::new("present_dep", "1.2.2").publish();
+
+ Package::new("present_dep", "1.2.3")
+ .file("Cargo.toml", &basic_manifest("present_dep", "1.2.3"))
+ .file(
+ "src/lib.rs",
+ r#"pub fn get_version()->&'static str {"1.2.3"}"#,
+ )
+ .publish();
+
+ Package::new("present_dep", "1.2.5")
+ .file("Cargo.toml", &basic_manifest("present_dep", "1.2.5"))
+ .file("src/lib.rs", r#"pub fn get_version(){"1.2.5"}"#)
+ .publish();
+
+ // Make sure the package is cached.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ present_dep = "=1.2.3"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("build").run();
+
+ let p2 = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ present_dep = "1.2"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "\
+extern crate present_dep;
+fn main(){
+ println!(\"{}\", present_dep::get_version());
+}",
+ )
+ .build();
+
+ p2.cargo("run --offline")
+ .with_stderr(
+ "\
+[COMPILING] present_dep v1.2.3
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+ Running `[..]`",
+ )
+ .with_stdout("1.2.3")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_forbid_git_httpsrepo_offline() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = ["chabapok@example.com"]
+
+ [dependencies.dep1]
+ git = 'https://github.com/some_user/dep1.git'
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("check --offline").with_status(101).with_stderr("\
+[ERROR] failed to get `dep1` as a dependency of package `foo v0.5.0 [..]`
+
+Caused by:
+ failed to load source for dependency `dep1`
+
+Caused by:
+ Unable to update https://github.com/some_user/dep1.git
+
+Caused by:
+ can't checkout from 'https://github.com/some_user/dep1.git': you are in the offline mode (--offline)").run();
+}
+
+#[cargo_test]
+fn compile_offline_while_transitive_dep_not_cached() {
+ let baz = Package::new("baz", "1.0.0");
+ let baz_path = baz.archive_dst();
+ baz.publish();
+
+ let baz_content = fs::read(&baz_path).unwrap();
+ // Truncate the file to simulate a download failure.
+ fs::write(&baz_path, &[]).unwrap();
+
+ Package::new("bar", "0.1.0").dep("baz", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main(){}")
+ .build();
+
+ // Simulate downloading bar, but failing to download baz.
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]failed to verify the checksum of `baz[..]")
+ .run();
+
+ // Restore the file contents.
+ fs::write(&baz_path, &baz_content).unwrap();
+
+ p.cargo("check --offline")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to download `bar v0.1.0`
+
+Caused by:
+ attempting to make an HTTP request, but --offline was specified
+",
+ )
+ .run();
+}
+
+fn update_offline_not_cached() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("update --offline")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no matching package named `bar` found
+location searched: registry `[..]`
+required by package `foo v0.0.1 ([..]/foo)`
+As a reminder, you're using offline mode (--offline) which can sometimes cause \
+surprising resolution failures, if this error is too confusing you may wish to \
+retry without the offline flag.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_offline_not_cached_sparse() {
+ let _registry = RegistryBuilder::new().http_index().build();
+ update_offline_not_cached()
+}
+
+#[cargo_test]
+fn update_offline_not_cached_git() {
+ update_offline_not_cached()
+}
+
+#[cargo_test]
+fn cargo_compile_offline_with_cached_git_dep() {
+ let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+ .file(
+ "src/lib.rs",
+ r#"
+ pub static COOL_STR:&str = "cached git repo rev1";
+ "#,
+ )
+ });
+
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ let rev1 = repo.revparse_single("HEAD").unwrap().id();
+
+ // Commit the changes and make sure we trigger a recompile
+ git_project.change_file(
+ "src/lib.rs",
+ r#"pub static COOL_STR:&str = "cached git repo rev2";"#,
+ );
+ git::add(&repo);
+ let rev2 = git::commit(&repo);
+
+ // Cache both rev1 and rev2 of the git dependency.
+ let prj = project()
+ .at("cache_git_dep")
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "cache_git_dep"
+ version = "0.5.0"
+
+ [dependencies.dep1]
+ git = '{}'
+ rev = "{}"
+ "#,
+ git_project.url(),
+ rev1
+ ),
+ )
+ .file("src/main.rs", "fn main(){}")
+ .build();
+ prj.cargo("build").run();
+
+ prj.change_file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "cache_git_dep"
+ version = "0.5.0"
+
+ [dependencies.dep1]
+ git = '{}'
+ rev = "{}"
+ "#,
+ git_project.url(),
+ rev2
+ ),
+ );
+ prj.cargo("build").run();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+
+ [dependencies.dep1]
+ git = '{}'
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &main_file(r#""hello from {}", dep1::COOL_STR"#, &["dep1"]),
+ )
+ .build();
+
+ let git_root = git_project.root();
+
+ p.cargo("build --offline")
+ .with_stderr(format!(
+ "\
+[COMPILING] dep1 v0.5.0 ({}#[..])
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ path2url(git_root),
+ ))
+ .run();
+
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo"))
+ .with_stdout("hello from cached git repo rev2\n")
+ .run();
+
+ p.change_file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+
+ [dependencies.dep1]
+ git = '{}'
+ rev = "{}"
+ "#,
+ git_project.url(),
+ rev1
+ ),
+ );
+
+ p.cargo("build --offline").run();
+ p.process(&p.bin("foo"))
+ .with_stdout("hello from cached git repo rev1\n")
+ .run();
+}
+
+#[cargo_test]
+fn offline_resolve_optional_fail() {
+ // Example where resolve fails offline.
+ //
+ // This happens if at least 1 version of an optional dependency is
+ // available, but none of them satisfy the requirements. The current logic
+ // that handles this is `RegistryIndex::query_inner`, and it doesn't know
+ // if the package being queried is an optional one. This is not ideal, it
+ // would be best if it just ignored optional (unselected) dependencies.
+ Package::new("dep", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = { version = "1.0", optional = true }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("fetch").run();
+
+ // Change dep to 2.0.
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = { version = "2.0", optional = true }
+ "#,
+ );
+
+ p.cargo("check --offline")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to select a version for the requirement `dep = \"^2.0\"`
+candidate versions found which didn't match: 1.0.0
+location searched: `[..]` index (which is replacing registry `crates-io`)
+required by package `foo v0.1.0 ([..]/foo)`
+perhaps a crate was updated and forgotten to be re-vendored?
+As a reminder, you're using offline mode (--offline) which can sometimes cause \
+surprising resolution failures, if this error is too confusing you may wish to \
+retry without the offline flag.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn offline_with_all_patched() {
+ // Offline works if everything is patched.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = "1.0"
+
+ [patch.crates-io]
+ dep = {path = "dep"}
+ "#,
+ )
+ .file("src/lib.rs", "pub fn f() { dep::foo(); }")
+ .file("dep/Cargo.toml", &basic_manifest("dep", "1.0.0"))
+ .file("dep/src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ p.cargo("check --offline").run();
+}
+
+#[cargo_test]
+fn update_offline_cached() {
+ // Cache a few versions to update against
+ let p = project().file("src/lib.rs", "").build();
+ let versions = ["1.2.3", "1.2.5", "1.2.9"];
+ for vers in versions.iter() {
+ Package::new("present_dep", vers)
+ .file("Cargo.toml", &basic_manifest("present_dep", vers))
+ .file(
+ "src/lib.rs",
+ format!(r#"pub fn get_version()->&'static str {{ "{}" }}"#, vers).as_str(),
+ )
+ .publish();
+ // Make sure this version is cached.
+ p.change_file(
+ "Cargo.toml",
+ format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ present_dep = "={}"
+ "#,
+ vers
+ )
+ .as_str(),
+ );
+ p.cargo("build").run();
+ }
+
+ let p2 = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ present_dep = "1.2"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "\
+extern crate present_dep;
+fn main(){
+ println!(\"{}\", present_dep::get_version());
+}",
+ )
+ .build();
+
+ p2.cargo("build --offline")
+ .with_stderr(
+ "\
+[COMPILING] present_dep v1.2.9
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p2.rename_run("foo", "with_1_2_9")
+ .with_stdout("1.2.9")
+ .run();
+ // updates happen without updating the index
+ p2.cargo("update -p present_dep --precise 1.2.3 --offline")
+ .with_status(0)
+ .with_stderr(
+ "\
+[DOWNGRADING] present_dep v1.2.9 -> v1.2.3
+",
+ )
+ .run();
+
+ p2.cargo("build --offline")
+ .with_stderr(
+ "\
+[COMPILING] present_dep v1.2.3
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p2.rename_run("foo", "with_1_2_3")
+ .with_stdout("1.2.3")
+ .run();
+
+ // An offline update should only print package details, not index-updating output.
+ p2.cargo("update --offline")
+ .with_status(0)
+ .with_stderr(
+ "\
+[UPDATING] present_dep v1.2.3 -> v1.2.9
+",
+ )
+ .run();
+
+ // No v1.2.8 loaded into the cache so expect failure.
+ p2.cargo("update -p present_dep --precise 1.2.8 --offline")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no matching package named `present_dep` found
+location searched: registry `[..]`
+required by package `foo v0.1.0 ([..]/foo)`
+As a reminder, you're using offline mode (--offline) which can sometimes cause \
+surprising resolution failures, if this error is too confusing you may wish to \
+retry without the offline flag.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn offline_and_frozen_and_no_lock() {
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("check --frozen --offline")
+ .with_status(101)
+ .with_stderr("\
+error: the lock file [ROOT]/foo/Cargo.lock needs to be updated but --frozen was passed to prevent this
+If you want to try to generate the lock file without accessing the network, \
+remove the --frozen flag and use --offline instead.
+")
+ .run();
+}
+
+#[cargo_test]
+fn offline_and_locked_and_no_frozen() {
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("check --locked --offline")
+ .with_status(101)
+ .with_stderr("\
+error: the lock file [ROOT]/foo/Cargo.lock needs to be updated but --locked was passed to prevent this
+If you want to try to generate the lock file without accessing the network, \
+remove the --locked flag and use --offline instead.
+")
+ .run();
+}
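All of these offline scenarios depend on the package cache being warmed while the network is still reachable. A minimal sketch of that workflow, assuming a registry package `dep` published the same way as in the tests above:

    Package::new("dep", "1.0.0").publish();
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.1.0"

                [dependencies]
                dep = "1.0"
            "#,
        )
        .file("src/lib.rs", "")
        .build();
    // Warm the cache while the network is still available...
    p.cargo("fetch").run();
    // ...after which builds can run entirely offline.
    p.cargo("check --offline").run();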
diff --git a/src/tools/cargo/tests/testsuite/old_cargos.rs b/src/tools/cargo/tests/testsuite/old_cargos.rs
new file mode 100644
index 000000000..a85e13d3b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/old_cargos.rs
@@ -0,0 +1,679 @@
+//! Tests for checking behavior of old cargos.
+//!
+//! These tests are ignored because it is intended to be run on a developer
+//! system with a bunch of toolchains installed. This requires `rustup` to be
+//! installed. It will iterate over installed toolchains, and run some tests
+//! over each one, producing a report at the end. As of this writing, I have
+//! tested 1.0 to 1.51. Run this with:
+//!
+//! ```console
+//! cargo test --test testsuite -- old_cargos --nocapture --ignored
+//! ```
+
+use cargo::CargoResult;
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::registry::{self, Dependency, Package};
+use cargo_test_support::{cargo_exe, execs, paths, process, project, rustc_host};
+use cargo_util::{ProcessBuilder, ProcessError};
+use semver::Version;
+use std::fs;
+
+fn tc_process(cmd: &str, toolchain: &str) -> ProcessBuilder {
+ let mut p = if toolchain == "this" {
+ if cmd == "cargo" {
+ process(&cargo_exe())
+ } else {
+ process(cmd)
+ }
+ } else {
+ let mut cmd = process(cmd);
+ cmd.arg(format!("+{}", toolchain));
+ cmd
+ };
+ // Reset PATH since `process` modifies it to remove rustup.
+ p.env("PATH", std::env::var_os("PATH").unwrap());
+ p
+}
+
+/// Returns a sorted list of all toolchains.
+///
+/// The returned value includes the parsed version, and the rustup toolchain
+/// name as a string.
+fn collect_all_toolchains() -> Vec<(Version, String)> {
+ let rustc_version = |tc| {
+ let mut cmd = tc_process("rustc", tc);
+ cmd.arg("-V");
+ let output = cmd.exec_with_output().expect("rustc installed");
+ let version = std::str::from_utf8(&output.stdout).unwrap();
+ let parts: Vec<_> = version.split_whitespace().collect();
+ assert_eq!(parts[0], "rustc");
+ assert!(parts[1].starts_with("1."));
+ Version::parse(parts[1]).expect("valid version")
+ };
+
+ // Provide a way to override the list.
+ if let Ok(tcs) = std::env::var("OLD_CARGO") {
+ return tcs
+ .split(',')
+ .map(|tc| (rustc_version(tc), tc.to_string()))
+ .collect();
+ }
+
+ let host = rustc_host();
+ // I tend to have lots of toolchains installed, but I don't want to test
+ // all of them (like dated nightlies, or toolchains for non-host targets).
+ let valid_names = &[
+ format!("stable-{}", host),
+ format!("beta-{}", host),
+ format!("nightly-{}", host),
+ ];
+
+ let output = ProcessBuilder::new("rustup")
+ .args(&["toolchain", "list"])
+ .exec_with_output()
+ .expect("rustup should be installed");
+ let stdout = std::str::from_utf8(&output.stdout).unwrap();
+ let mut toolchains: Vec<_> = stdout
+ .lines()
+ .map(|line| {
+ // Some lines say things like (default), just get the version.
+ line.split_whitespace().next().expect("non-empty line")
+ })
+ .filter(|line| {
+ line.ends_with(&host)
+ && (line.starts_with("1.") || valid_names.iter().any(|name| name == line))
+ })
+ .map(|line| (rustc_version(line), line.to_string()))
+ .collect();
+
+ toolchains.sort_by(|a, b| a.0.cmp(&b.0));
+ toolchains
+}
+
+/// Returns whether the default toolchain is the stable version.
+fn default_toolchain_is_stable() -> bool {
+ let default = tc_process("rustc", "this").arg("-V").exec_with_output();
+ let stable = tc_process("rustc", "stable").arg("-V").exec_with_output();
+ match (default, stable) {
+ (Ok(d), Ok(s)) => d.stdout == s.stdout,
+ _ => false,
+ }
+}
+
+// This is a test for exercising the behavior of older versions of cargo with
+// the new feature syntax.
+//
+// The test involves a few dependencies with different feature requirements:
+//
+// * `bar` 1.0.0 is the base version that does not use the new syntax.
+// * `bar` 1.0.1 has a feature with the new syntax, but the feature is unused.
+// The optional dependency `new-baz-dep` should not be activated.
+// * `bar` 1.0.2 has a dependency on `baz` that *requires* the new feature
+// syntax.
+#[ignore = "must be run manually, requires old cargo installations"]
+#[cargo_test]
+fn new_features() {
+ let registry = registry::init();
+ if std::process::Command::new("rustup").output().is_err() {
+ panic!("old_cargos requires rustup to be installed");
+ }
+ Package::new("new-baz-dep", "1.0.0").publish();
+
+ Package::new("baz", "1.0.0").publish();
+ let baz101_cksum = Package::new("baz", "1.0.1")
+ .add_dep(Dependency::new("new-baz-dep", "1.0").optional(true))
+ .feature("new-feat", &["dep:new-baz-dep"])
+ .publish();
+
+ let bar100_cksum = Package::new("bar", "1.0.0")
+ .add_dep(Dependency::new("baz", "1.0").optional(true))
+ .feature("feat", &["baz"])
+ .publish();
+ let bar101_cksum = Package::new("bar", "1.0.1")
+ .add_dep(Dependency::new("baz", "1.0").optional(true))
+ .feature("feat", &["dep:baz"])
+ .publish();
+ let bar102_cksum = Package::new("bar", "1.0.2")
+ .add_dep(Dependency::new("baz", "1.0").enable_features(&["new-feat"]))
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let lock_bar_to = |toolchain_version: &Version, bar_version| {
+ let lock = if toolchain_version < &Version::new(1, 12, 0) {
+ let url = registry.index_url();
+ match bar_version {
+ 100 => format!(
+ r#"
+ [root]
+ name = "foo"
+ version = "0.1.0"
+ dependencies = [
+ "bar 1.0.0 (registry+{url})",
+ ]
+
+ [[package]]
+ name = "bar"
+ version = "1.0.0"
+ source = "registry+{url}"
+ "#,
+ url = url
+ ),
+ 101 => format!(
+ r#"
+ [root]
+ name = "foo"
+ version = "0.1.0"
+ dependencies = [
+ "bar 1.0.1 (registry+{url})",
+ ]
+
+ [[package]]
+ name = "bar"
+ version = "1.0.1"
+ source = "registry+{url}"
+ "#,
+ url = url
+ ),
+ 102 => format!(
+ r#"
+ [root]
+ name = "foo"
+ version = "0.1.0"
+ dependencies = [
+ "bar 1.0.2 (registry+{url})",
+ ]
+
+ [[package]]
+ name = "bar"
+ version = "1.0.2"
+ source = "registry+{url}"
+ dependencies = [
+ "baz 1.0.1 (registry+{url})",
+ ]
+
+ [[package]]
+ name = "baz"
+ version = "1.0.1"
+ source = "registry+{url}"
+ "#,
+ url = url
+ ),
+ _ => panic!("unexpected version"),
+ }
+ } else {
+ match bar_version {
+ 100 => format!(
+ r#"
+ [root]
+ name = "foo"
+ version = "0.1.0"
+ dependencies = [
+ "bar 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ ]
+
+ [[package]]
+ name = "bar"
+ version = "1.0.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+
+ [metadata]
+ "checksum bar 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}"
+ "#,
+ bar100_cksum
+ ),
+ 101 => format!(
+ r#"
+ [root]
+ name = "foo"
+ version = "0.1.0"
+ dependencies = [
+ "bar 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ ]
+
+ [[package]]
+ name = "bar"
+ version = "1.0.1"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+
+ [metadata]
+ "checksum bar 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "{}"
+ "#,
+ bar101_cksum
+ ),
+ 102 => format!(
+ r#"
+ [root]
+ name = "foo"
+ version = "0.1.0"
+ dependencies = [
+ "bar 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ ]
+
+ [[package]]
+ name = "bar"
+ version = "1.0.2"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ dependencies = [
+ "baz 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ ]
+
+ [[package]]
+ name = "baz"
+ version = "1.0.1"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+
+ [metadata]
+ "checksum bar 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "{bar102_cksum}"
+ "checksum baz 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "{baz101_cksum}"
+ "#,
+ bar102_cksum = bar102_cksum,
+ baz101_cksum = baz101_cksum
+ ),
+ _ => panic!("unexpected version"),
+ }
+ };
+ p.change_file("Cargo.lock", &lock);
+ };
+
+ let toolchains = collect_all_toolchains();
+
+ let config_path = paths::home().join(".cargo/config");
+ let lock_path = p.root().join("Cargo.lock");
+
+ struct ToolchainBehavior {
+ bar: Option<Version>,
+ baz: Option<Version>,
+ new_baz_dep: Option<Version>,
+ }
+
+ // Collect errors to print at the end. One entry per toolchain, a list of
+ // strings to print.
+ let mut unexpected_results: Vec<Vec<String>> = Vec::new();
+
+ for (version, toolchain) in &toolchains {
+ let mut tc_result = Vec::new();
+ // Write a config appropriate for this version.
+ if version < &Version::new(1, 12, 0) {
+ fs::write(
+ &config_path,
+ format!(
+ r#"
+ [registry]
+ index = "{}"
+ "#,
+ registry.index_url()
+ ),
+ )
+ .unwrap();
+ } else {
+ fs::write(
+ &config_path,
+ format!(
+ "
+ [source.crates-io]
+ registry = 'https://wut' # only needed by 1.12
+ replace-with = 'dummy-registry'
+
+ [source.dummy-registry]
+ registry = '{}'
+ ",
+ registry.index_url()
+ ),
+ )
+ .unwrap();
+ }
+
+ // Fetches the version of a package in the lock file.
+ let pkg_version = |pkg| -> Option<Version> {
+ let output = tc_process("cargo", toolchain)
+ .args(&["pkgid", pkg])
+ .cwd(p.root())
+ .exec_with_output()
+ .ok()?;
+ let stdout = std::str::from_utf8(&output.stdout).unwrap();
+ let version = stdout
+ .trim()
+ .rsplitn(2, ':')
+ .next()
+ .expect("version after colon");
+ Some(Version::parse(version).expect("parseable version"))
+ };
+
+ // Runs `cargo build` and returns the versions selected in the lock.
+ let run_cargo = || -> CargoResult<ToolchainBehavior> {
+ match tc_process("cargo", toolchain)
+ .args(&["build", "--verbose"])
+ .cwd(p.root())
+ .exec_with_output()
+ {
+ Ok(_output) => {
+ eprintln!("{} ok", toolchain);
+ let bar = pkg_version("bar");
+ let baz = pkg_version("baz");
+ let new_baz_dep = pkg_version("new-baz-dep");
+ Ok(ToolchainBehavior {
+ bar,
+ baz,
+ new_baz_dep,
+ })
+ }
+ Err(e) => {
+ eprintln!("{} err {}", toolchain, e);
+ Err(e)
+ }
+ }
+ };
+
+ macro_rules! check_lock {
+ ($tc_result:ident, $pkg:expr, $which:expr, $actual:expr, None) => {
+ check_lock!(= $tc_result, $pkg, $which, $actual, None);
+ };
+ ($tc_result:ident, $pkg:expr, $which:expr, $actual:expr, $expected:expr) => {
+ check_lock!(= $tc_result, $pkg, $which, $actual, Some(Version::parse($expected).unwrap()));
+ };
+ (= $tc_result:ident, $pkg:expr, $which:expr, $actual:expr, $expected:expr) => {
+ let exp: Option<Version> = $expected;
+ if $actual != exp {
+ $tc_result.push(format!(
+ "{} for {} saw {:?} but expected {:?}",
+ $which, $pkg, $actual, exp
+ ));
+ }
+ };
+ }
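+ // `check_lock!` records mismatches in `tc_result` instead of panicking, so a
+ // single run can report every deviation across all toolchains; e.g.
+ // `check_lock!(tc_result, "bar", which, behavior.bar, "1.0.2")` notes when
+ // the locked `bar` is anything other than 1.0.2.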
+
+ let check_err_contains = |tc_result: &mut Vec<_>, err: anyhow::Error, contents| {
+ if let Some(ProcessError {
+ stderr: Some(stderr),
+ ..
+ }) = err.downcast_ref::<ProcessError>()
+ {
+ let stderr = std::str::from_utf8(stderr).unwrap();
+ if !stderr.contains(contents) {
+ tc_result.push(format!(
+ "{} expected to see error contents:\n{}\nbut saw:\n{}",
+ toolchain, contents, stderr
+ ));
+ }
+ } else {
+ panic!("{} unexpected error {}", toolchain, err);
+ }
+ };
+
+ // Unlocked behavior.
+ let which = "unlocked";
+ lock_path.rm_rf();
+ p.build_dir().rm_rf();
+ match run_cargo() {
+ Ok(behavior) => {
+ if version < &Version::new(1, 51, 0) {
+ check_lock!(tc_result, "bar", which, behavior.bar, "1.0.2");
+ check_lock!(tc_result, "baz", which, behavior.baz, "1.0.1");
+ check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None);
+ } else if version >= &Version::new(1, 51, 0) && version <= &Version::new(1, 59, 0) {
+ check_lock!(tc_result, "bar", which, behavior.bar, "1.0.0");
+ check_lock!(tc_result, "baz", which, behavior.baz, None);
+ check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None);
+ }
+ // Starting with 1.60, namespaced-features has been stabilized.
+ else {
+ check_lock!(tc_result, "bar", which, behavior.bar, "1.0.2");
+ check_lock!(tc_result, "baz", which, behavior.baz, "1.0.1");
+ check_lock!(
+ tc_result,
+ "new-baz-dep",
+ which,
+ behavior.new_baz_dep,
+ "1.0.0"
+ );
+ }
+ }
+ Err(e) => {
+ tc_result.push(format!("unlocked build failed: {}", e));
+ }
+ }
+
+ let which = "locked bar 1.0.0";
+ lock_bar_to(version, 100);
+ match run_cargo() {
+ Ok(behavior) => {
+ check_lock!(tc_result, "bar", which, behavior.bar, "1.0.0");
+ check_lock!(tc_result, "baz", which, behavior.baz, None);
+ check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None);
+ }
+ Err(e) => {
+ tc_result.push(format!("bar 1.0.0 locked build failed: {}", e));
+ }
+ }
+
+ let which = "locked bar 1.0.1";
+ lock_bar_to(version, 101);
+ match run_cargo() {
+ Ok(behavior) => {
+ check_lock!(tc_result, "bar", which, behavior.bar, "1.0.1");
+ check_lock!(tc_result, "baz", which, behavior.baz, None);
+ check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None);
+ }
+ Err(e) => {
+ // When version >= 1.51 and <= 1.59,
+ // 1.0.1 can't be used without -Znamespaced-features
+ // It gets filtered out of the index.
+ check_err_contains(
+ &mut tc_result,
+ e,
+ "candidate versions found which didn't match: 1.0.2, 1.0.0",
+ );
+ }
+ }
+
+ let which = "locked bar 1.0.2";
+ lock_bar_to(version, 102);
+ match run_cargo() {
+ Ok(behavior) => {
+ if version <= &Version::new(1, 59, 0) {
+ check_lock!(tc_result, "bar", which, behavior.bar, "1.0.2");
+ check_lock!(tc_result, "baz", which, behavior.baz, "1.0.1");
+ check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None);
+ }
+ // Starting with 1.60, namespaced-features has been stabilized.
+ else {
+ check_lock!(tc_result, "bar", which, behavior.bar, "1.0.2");
+ check_lock!(tc_result, "baz", which, behavior.baz, "1.0.1");
+ check_lock!(
+ tc_result,
+ "new-baz-dep",
+ which,
+ behavior.new_baz_dep,
+ "1.0.0"
+ );
+ }
+ }
+ Err(e) => {
+ // When version >= 1.51 and <= 1.59,
+ // baz can't lock to 1.0.1, it requires -Znamespaced-features
+ check_err_contains(
+ &mut tc_result,
+ e,
+ "candidate versions found which didn't match: 1.0.0",
+ );
+ }
+ }
+
+ unexpected_results.push(tc_result);
+ }
+
+ // Generate a report.
+ let mut has_err = false;
+ for ((tc_vers, tc_name), errs) in toolchains.iter().zip(unexpected_results) {
+ if errs.is_empty() {
+ continue;
+ }
+ eprintln!("error: toolchain {} (version {}):", tc_name, tc_vers);
+ for err in errs {
+ eprintln!(" {}", err);
+ }
+ has_err = true;
+ }
+ if has_err {
+ panic!("at least one toolchain did not run as expected");
+ }
+}
+
+#[cargo_test]
+#[ignore = "must be run manually, requires old cargo installations"]
+fn index_cache_rebuild() {
+ // Checks that the index cache gets rebuilt.
+ //
+ // 1.48 will not cache entries with features with the same name as a
+ // dependency. If the cache does not get rebuilt, then running with
+ // `-Znamespaced-features` would prevent the new cargo from seeing those
+ // entries. The index cache version was changed to prevent this from
+ // happening, and switching between versions should work correctly
+ // (although it will thrash the cache, that's better than not working
+ // correctly).
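+ // Concretely: `bar 1.0.1` below has a `baz` feature whose name matches its
+ // optional `baz` dependency, so 1.48 leaves it out of its cache; a newer
+ // cargo must notice the old cache format and regenerate it to see 1.0.1.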
+ Package::new("baz", "1.0.0").publish();
+ Package::new("bar", "1.0.0").publish();
+ Package::new("bar", "1.0.1")
+ .add_dep(Dependency::new("baz", "1.0").optional(true))
+ .feature("baz", &["dep:baz"])
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // This version of Cargo errors on index entries that have overlapping
+ // feature names, so 1.0.1 will be missing.
+ execs()
+ .with_process_builder(tc_process("cargo", "1.48.0"))
+ .arg("check")
+ .cwd(p.root())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.0 [..]
+[CHECKING] bar v1.0.0
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ fs::remove_file(p.root().join("Cargo.lock")).unwrap();
+
+ // This should rebuild the cache and use 1.0.1.
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.1 [..]
+[CHECKING] bar v1.0.1
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ fs::remove_file(p.root().join("Cargo.lock")).unwrap();
+
+ // Verify 1.48 can still resolve, and is at 1.0.0.
+ execs()
+ .with_process_builder(tc_process("cargo", "1.48.0"))
+ .arg("tree")
+ .cwd(p.root())
+ .with_stdout(
+ "\
+foo v0.1.0 [..]
+└── bar v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+#[ignore = "must be run manually, requires old cargo installations"]
+fn avoids_split_debuginfo_collision() {
+ // Test needs two different toolchains.
+ // If the default toolchain is stable, then it won't work.
+ if default_toolchain_is_stable() {
+ return;
+ }
+ // Checks for a bug where .o files were being incorrectly shared between
+ // different toolchains using incremental and split-debuginfo on macOS.
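+ // The project is built with the stable toolchain, then with the default
+ // (non-stable) toolchain, then with stable again; the final build expects
+ // only `[FINISHED]`, which holds only if its incremental artifacts were not
+ // clobbered by the intervening build from the other toolchain.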
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.dev]
+ split-debuginfo = "unpacked"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ execs()
+ .with_process_builder(tc_process("cargo", "stable"))
+ .arg("build")
+ .env("CARGO_INCREMENTAL", "1")
+ .cwd(p.root())
+ .with_stderr(
+ "\
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("build")
+ .env("CARGO_INCREMENTAL", "1")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ execs()
+ .with_process_builder(tc_process("cargo", "stable"))
+ .arg("build")
+ .env("CARGO_INCREMENTAL", "1")
+ .cwd(p.root())
+ .with_stderr(
+ "\
+[FINISHED] [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/out_dir.rs b/src/tools/cargo/tests/testsuite/out_dir.rs
new file mode 100644
index 000000000..fe647f56e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/out_dir.rs
@@ -0,0 +1,317 @@
+//! Tests for the `--out-dir` flag.
+
+use cargo_test_support::sleep_ms;
+use cargo_test_support::{basic_manifest, project};
+use std::env;
+use std::fs;
+use std::path::Path;
+
+#[cargo_test]
+fn binary_with_debug() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#)
+ .build();
+
+ p.cargo("build -Z unstable-options --out-dir out")
+ .masquerade_as_nightly_cargo(&["out-dir"])
+ .enable_mac_dsym()
+ .run();
+ check_dir_contents(
+ &p.root().join("out"),
+ &["foo"],
+ &["foo", "foo.dSYM"],
+ &["foo.exe", "foo.pdb"],
+ &["foo.exe"],
+ );
+}
+
+#[cargo_test]
+fn static_library_with_debug() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ crate-type = ["staticlib"]
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[no_mangle]
+ pub extern "C" fn foo() { println!("Hello, World!") }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -Z unstable-options --out-dir out")
+ .masquerade_as_nightly_cargo(&["out-dir"])
+ .run();
+ check_dir_contents(
+ &p.root().join("out"),
+ &["libfoo.a"],
+ &["libfoo.a"],
+ &["foo.lib"],
+ &["libfoo.a"],
+ );
+}
+
+#[cargo_test]
+fn dynamic_library_with_debug() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ crate-type = ["cdylib"]
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[no_mangle]
+ pub extern "C" fn foo() { println!("Hello, World!") }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -Z unstable-options --out-dir out")
+ .masquerade_as_nightly_cargo(&["out-dir"])
+ .enable_mac_dsym()
+ .run();
+ check_dir_contents(
+ &p.root().join("out"),
+ &["libfoo.so"],
+ &["libfoo.dylib", "libfoo.dylib.dSYM"],
+ &["foo.dll", "foo.dll.exp", "foo.dll.lib", "foo.pdb"],
+ &["foo.dll", "libfoo.dll.a"],
+ );
+}
+
+#[cargo_test]
+fn rlib_with_debug() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ crate-type = ["rlib"]
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() { println!("Hello, World!") }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -Z unstable-options --out-dir out")
+ .masquerade_as_nightly_cargo(&["out-dir"])
+ .run();
+ check_dir_contents(
+ &p.root().join("out"),
+ &["libfoo.rlib"],
+ &["libfoo.rlib"],
+ &["libfoo.rlib"],
+ &["libfoo.rlib"],
+ );
+}
+
+#[cargo_test]
+fn include_only_the_binary_from_the_current_package() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [workspace]
+
+ [dependencies]
+ utils = { path = "./utils" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate utils;")
+ .file(
+ "src/main.rs",
+ r#"
+ extern crate foo;
+ extern crate utils;
+ fn main() {
+ println!("Hello, World!")
+ }
+ "#,
+ )
+ .file("utils/Cargo.toml", &basic_manifest("utils", "0.0.1"))
+ .file("utils/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -Z unstable-options --bin foo --out-dir out")
+ .masquerade_as_nightly_cargo(&["out-dir"])
+ .enable_mac_dsym()
+ .run();
+ check_dir_contents(
+ &p.root().join("out"),
+ &["foo"],
+ &["foo", "foo.dSYM"],
+ &["foo.exe", "foo.pdb"],
+ &["foo.exe"],
+ );
+}
+
+#[cargo_test]
+fn out_dir_is_a_file() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#)
+ .file("out", "")
+ .build();
+
+ p.cargo("build -Z unstable-options --out-dir out")
+ .masquerade_as_nightly_cargo(&["out-dir"])
+ .with_status(101)
+ .with_stderr_contains("[ERROR] failed to create directory [..]")
+ .run();
+}
+
+#[cargo_test]
+fn replaces_artifacts() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("foo") }"#)
+ .build();
+
+ p.cargo("build -Z unstable-options --out-dir out")
+ .masquerade_as_nightly_cargo(&["out-dir"])
+ .run();
+ p.process(
+ &p.root()
+ .join(&format!("out/foo{}", env::consts::EXE_SUFFIX)),
+ )
+ .with_stdout("foo")
+ .run();
+
+ sleep_ms(1000);
+ p.change_file("src/main.rs", r#"fn main() { println!("bar") }"#);
+
+ p.cargo("build -Z unstable-options --out-dir out")
+ .masquerade_as_nightly_cargo(&["out-dir"])
+ .run();
+ p.process(
+ &p.root()
+ .join(&format!("out/foo{}", env::consts::EXE_SUFFIX)),
+ )
+ .with_stdout("bar")
+ .run();
+}
+
+#[cargo_test]
+fn avoid_build_scripts() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/main.rs", "fn main() {}")
+ .file("a/build.rs", r#"fn main() { println!("hello-build-a"); }"#)
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/main.rs", "fn main() {}")
+ .file("b/build.rs", r#"fn main() { println!("hello-build-b"); }"#)
+ .build();
+
+ p.cargo("build -Z unstable-options --out-dir out -vv")
+ .masquerade_as_nightly_cargo(&["out-dir"])
+ .enable_mac_dsym()
+ .with_stdout_contains("[a 0.0.1] hello-build-a")
+ .with_stdout_contains("[b 0.0.1] hello-build-b")
+ .run();
+ check_dir_contents(
+ &p.root().join("out"),
+ &["a", "b"],
+ &["a", "a.dSYM", "b", "b.dSYM"],
+ &["a.exe", "a.pdb", "b.exe", "b.pdb"],
+ &["a.exe", "b.exe"],
+ );
+}
+
+#[cargo_test]
+fn cargo_build_out_dir() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#)
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ out-dir = "out"
+ "#,
+ )
+ .build();
+
+ p.cargo("build -Z unstable-options")
+ .masquerade_as_nightly_cargo(&["out-dir"])
+ .enable_mac_dsym()
+ .run();
+ check_dir_contents(
+ &p.root().join("out"),
+ &["foo"],
+ &["foo", "foo.dSYM"],
+ &["foo.exe", "foo.pdb"],
+ &["foo.exe"],
+ );
+}
+
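+/// Asserts the contents of `out_dir`, with separate expectations for Linux
+/// (and any other non-Windows, non-macOS target), macOS, Windows MSVC, and
+/// Windows GNU targets.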
+fn check_dir_contents(
+ out_dir: &Path,
+ expected_linux: &[&str],
+ expected_mac: &[&str],
+ expected_win_msvc: &[&str],
+ expected_win_gnu: &[&str],
+) {
+ let expected = if cfg!(target_os = "windows") {
+ if cfg!(target_env = "msvc") {
+ expected_win_msvc
+ } else {
+ expected_win_gnu
+ }
+ } else if cfg!(target_os = "macos") {
+ expected_mac
+ } else {
+ expected_linux
+ };
+
+ let actual = list_dir(out_dir);
+ let mut expected = expected.iter().map(|s| s.to_string()).collect::<Vec<_>>();
+ expected.sort_unstable();
+ assert_eq!(actual, expected);
+}
+
+fn list_dir(dir: &Path) -> Vec<String> {
+ let mut res = Vec::new();
+ for entry in fs::read_dir(dir).unwrap() {
+ let entry = entry.unwrap();
+ res.push(entry.file_name().into_string().unwrap());
+ }
+ res.sort_unstable();
+ res
+}
diff --git a/src/tools/cargo/tests/testsuite/owner.rs b/src/tools/cargo/tests/testsuite/owner.rs
new file mode 100644
index 000000000..9fc960c92
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/owner.rs
@@ -0,0 +1,192 @@
+//! Tests for the `cargo owner` command.
+
+use std::fs;
+
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::project;
+use cargo_test_support::registry::{self, api_path};
+
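+// Sets up the `api/v1/crates/<name>/owners` endpoint under the fake registry's
+// API path. With `None` no response body is written, so owner requests are
+// expected to fail with the `EOF while parsing a value` errors asserted below.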
+fn setup(name: &str, content: Option<&str>) {
+ let dir = api_path().join(format!("api/v1/crates/{}", name));
+ dir.mkdir_p();
+ if let Some(body) = content {
+ fs::write(dir.join("owners"), body).unwrap();
+ }
+}
+
+#[cargo_test]
+fn simple_list() {
+ let registry = registry::init();
+ let content = r#"{
+ "users": [
+ {
+ "id": 70,
+ "login": "github:rust-lang:core",
+ "name": "Core"
+ },
+ {
+ "id": 123,
+ "login": "octocat"
+ }
+ ]
+ }"#;
+ setup("foo", Some(content));
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("owner -l")
+ .replace_crates_io(registry.index_url())
+ .with_stdout(
+ "\
+github:rust-lang:core (Core)
+octocat
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn simple_add() {
+ let registry = registry::init();
+ setup("foo", None);
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("owner -a username")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ " Updating crates.io index
+error: failed to invite owners to crate `foo` on registry at file://[..]
+
+Caused by:
+ EOF while parsing a value at line 1 column 0",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn simple_add_with_asymmetric() {
+ let registry = registry::RegistryBuilder::new()
+ .http_api()
+ .token(cargo_test_support::registry::Token::rfc_key())
+ .build();
+ setup("foo", None);
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ // The http_api server will check that the authorization is correct.
+ // If the authorization was not sent then we would get an unauthorized error.
+ p.cargo("owner -a username")
+ .arg("-Zregistry-auth")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .replace_crates_io(registry.index_url())
+ .with_status(0)
+ .run();
+}
+
+#[cargo_test]
+fn simple_remove() {
+ let registry = registry::init();
+ setup("foo", None);
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("owner -r username")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ " Updating crates.io index
+ Owner removing [\"username\"] from crate foo
+error: failed to remove owners from crate `foo` on registry at file://[..]
+
+Caused by:
+ EOF while parsing a value at line 1 column 0",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn simple_remove_with_asymmetric() {
+ let registry = registry::RegistryBuilder::new()
+ .http_api()
+ .token(cargo_test_support::registry::Token::rfc_key())
+ .build();
+ setup("foo", None);
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ // The http_api server will check that the authorization is correct.
+ // If the authorization was not sent then we would get an unauthorized error.
+ p.cargo("owner -r username")
+ .arg("-Zregistry-auth")
+ .replace_crates_io(registry.index_url())
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .with_status(0)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/package.rs b/src/tools/cargo/tests/testsuite/package.rs
new file mode 100644
index 000000000..14bac6618
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/package.rs
@@ -0,0 +1,2764 @@
+//! Tests for the `cargo package` command.
+
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::publish::validate_crate_contents;
+use cargo_test_support::registry::{self, Package};
+use cargo_test_support::{
+ basic_manifest, cargo_process, git, path2url, paths, project, symlink_supported, t,
+};
+use flate2::read::GzDecoder;
+use std::fs::{self, read_to_string, File};
+use std::path::Path;
+use tar::Archive;
+
+#[cargo_test]
+fn simple() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ exclude = ["*.txt"]
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file("src/bar.txt", "") // should be ignored when packaging
+ .build();
+
+ p.cargo("package")
+ .with_stderr(
+ "\
+[WARNING] manifest has no documentation[..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
+ assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+ p.cargo("package").with_stdout("").run();
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ &[],
+ );
+}
+
+#[cargo_test]
+fn metadata_warning() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("package")
+ .with_stderr(
+ "\
+warning: manifest has no description, license, license-file, documentation, \
+homepage or repository.
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+",
+ )
+ .run();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("package")
+ .with_stderr(
+ "\
+warning: manifest has no description, documentation, homepage or repository.
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+",
+ )
+ .run();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ repository = "bar"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("package")
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn package_verbose() {
+ let root = paths::root().join("all");
+ let repo = git::repo(&root)
+ .file("Cargo.toml", &basic_manifest("foo", "0.0.1"))
+ .file("src/main.rs", "fn main() {}")
+ .file("a/a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/a/src/lib.rs", "")
+ .build();
+ cargo_process("build").cwd(repo.root()).run();
+
+ println!("package main repo");
+ cargo_process("package -v --no-verify")
+ .cwd(repo.root())
+ .with_stderr(
+ "\
+[WARNING] manifest has no description[..]
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([..])
+[ARCHIVING] .cargo_vcs_info.json
+[ARCHIVING] Cargo.lock
+[ARCHIVING] Cargo.toml
+[ARCHIVING] Cargo.toml.orig
+[ARCHIVING] src/main.rs
+[PACKAGED] 5 files, [..] ([..] compressed)
+",
+ )
+ .run();
+
+ let f = File::open(&repo.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ let vcs_contents = format!(
+ r#"{{
+ "git": {{
+ "sha1": "{}"
+ }},
+ "path_in_vcs": ""
+}}
+"#,
+ repo.revparse_head()
+ );
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &[
+ "Cargo.lock",
+ "Cargo.toml",
+ "Cargo.toml.orig",
+ "src/main.rs",
+ ".cargo_vcs_info.json",
+ ],
+ &[(".cargo_vcs_info.json", &vcs_contents)],
+ );
+
+ println!("package sub-repo");
+ cargo_process("package -v --no-verify")
+ .cwd(repo.root().join("a/a"))
+ .with_stderr(
+ "\
+[WARNING] manifest has no description[..]
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+[PACKAGING] a v0.0.1 ([..])
+[ARCHIVING] .cargo_vcs_info.json
+[ARCHIVING] Cargo.toml
+[ARCHIVING] Cargo.toml.orig
+[ARCHIVING] src/lib.rs
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
+
+ let f = File::open(&repo.root().join("a/a/target/package/a-0.0.1.crate")).unwrap();
+ let vcs_contents = format!(
+ r#"{{
+ "git": {{
+ "sha1": "{}"
+ }},
+ "path_in_vcs": "a/a"
+}}
+"#,
+ repo.revparse_head()
+ );
+ validate_crate_contents(
+ f,
+ "a-0.0.1.crate",
+ &[
+ "Cargo.toml",
+ "Cargo.toml.orig",
+ "src/lib.rs",
+ ".cargo_vcs_info.json",
+ ],
+ &[(".cargo_vcs_info.json", &vcs_contents)],
+ );
+}
+
+#[cargo_test]
+fn package_verification() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("build").run();
+ p.cargo("package")
+ .with_stderr(
+ "\
+[WARNING] manifest has no description[..]
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn vcs_file_collision() {
+ let p = project().build();
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ description = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ exclude = ["*.no-existe"]
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {}
+ "#,
+ )
+ .file(".cargo_vcs_info.json", "foo")
+ .build();
+ p.cargo("package")
+ .arg("--no-verify")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] invalid inclusion of reserved file name .cargo_vcs_info.json \
+in package source
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn orig_file_collision() {
+ let p = project().build();
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ description = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ exclude = ["*.no-existe"]
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {}
+ "#,
+ )
+ .file("Cargo.toml.orig", "oops")
+ .build();
+ p.cargo("package")
+ .arg("--no-verify")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] invalid inclusion of reserved file name Cargo.toml.orig \
+in package source
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn path_dependency_no_version() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("package")
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] manifest has no documentation, homepage or repository.
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+[ERROR] all dependencies must have a version specified when packaging.
+dependency `bar` does not specify a version\n\
+Note: The packaged dependency will use the version from crates.io,
+the `path` specification will be removed from the dependency declaration.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn git_dependency_no_version() {
+ registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [dependencies.foo]
+ git = "git://path/to/nowhere"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("package")
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] manifest has no documentation, homepage or repository.
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+[ERROR] all dependencies must have a version specified when packaging.
+dependency `foo` does not specify a version
+Note: The packaged dependency will use the version from crates.io,
+the `git` specification will be removed from the dependency declaration.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn exclude() {
+ let root = paths::root().join("exclude");
+ let repo = git::repo(&root)
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ exclude = [
+ "*.txt",
+ # file in root
+ "file_root_1", # NO_CHANGE (ignored)
+ "/file_root_2", # CHANGING (packaged -> ignored)
+ "file_root_3/", # NO_CHANGE (packaged)
+ "file_root_4/*", # NO_CHANGE (packaged)
+ "file_root_5/**", # NO_CHANGE (packaged)
+ # file in sub-dir
+ "file_deep_1", # CHANGING (packaged -> ignored)
+ "/file_deep_2", # NO_CHANGE (packaged)
+ "file_deep_3/", # NO_CHANGE (packaged)
+ "file_deep_4/*", # NO_CHANGE (packaged)
+ "file_deep_5/**", # NO_CHANGE (packaged)
+ # dir in root
+ "dir_root_1", # CHANGING (packaged -> ignored)
+ "/dir_root_2", # CHANGING (packaged -> ignored)
+ "dir_root_3/", # CHANGING (packaged -> ignored)
+ "dir_root_4/*", # NO_CHANGE (ignored)
+ "dir_root_5/**", # NO_CHANGE (ignored)
+ # dir in sub-dir
+ "dir_deep_1", # CHANGING (packaged -> ignored)
+ "/dir_deep_2", # NO_CHANGE
+ "dir_deep_3/", # CHANGING (packaged -> ignored)
+ "dir_deep_4/*", # CHANGING (packaged -> ignored)
+ "dir_deep_5/**", # CHANGING (packaged -> ignored)
+ ]
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file("bar.txt", "")
+ .file("src/bar.txt", "")
+ // File in root.
+ .file("file_root_1", "")
+ .file("file_root_2", "")
+ .file("file_root_3", "")
+ .file("file_root_4", "")
+ .file("file_root_5", "")
+ // File in sub-dir.
+ .file("some_dir/file_deep_1", "")
+ .file("some_dir/file_deep_2", "")
+ .file("some_dir/file_deep_3", "")
+ .file("some_dir/file_deep_4", "")
+ .file("some_dir/file_deep_5", "")
+ // Dir in root.
+ .file("dir_root_1/some_dir/file", "")
+ .file("dir_root_2/some_dir/file", "")
+ .file("dir_root_3/some_dir/file", "")
+ .file("dir_root_4/some_dir/file", "")
+ .file("dir_root_5/some_dir/file", "")
+ // Dir in sub-dir.
+ .file("some_dir/dir_deep_1/some_dir/file", "")
+ .file("some_dir/dir_deep_2/some_dir/file", "")
+ .file("some_dir/dir_deep_3/some_dir/file", "")
+ .file("some_dir/dir_deep_4/some_dir/file", "")
+ .file("some_dir/dir_deep_5/some_dir/file", "")
+ .build();
+
+ cargo_process("package --no-verify -v")
+ .cwd(repo.root())
+ .with_stdout("")
+ .with_stderr(
+ "\
+[WARNING] manifest has no description[..]
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([..])
+[ARCHIVING] .cargo_vcs_info.json
+[ARCHIVING] Cargo.lock
+[ARCHIVING] Cargo.toml
+[ARCHIVING] Cargo.toml.orig
+[ARCHIVING] file_root_3
+[ARCHIVING] file_root_4
+[ARCHIVING] file_root_5
+[ARCHIVING] some_dir/dir_deep_2/some_dir/file
+[ARCHIVING] some_dir/dir_deep_4/some_dir/file
+[ARCHIVING] some_dir/dir_deep_5/some_dir/file
+[ARCHIVING] some_dir/file_deep_2
+[ARCHIVING] some_dir/file_deep_3
+[ARCHIVING] some_dir/file_deep_4
+[ARCHIVING] some_dir/file_deep_5
+[ARCHIVING] src/main.rs
+[PACKAGED] 15 files, [..] ([..] compressed)
+",
+ )
+ .run();
+
+ assert!(repo.root().join("target/package/foo-0.0.1.crate").is_file());
+
+ cargo_process("package -l")
+ .cwd(repo.root())
+ .with_stdout(
+ "\
+.cargo_vcs_info.json
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+file_root_3
+file_root_4
+file_root_5
+some_dir/dir_deep_2/some_dir/file
+some_dir/dir_deep_4/some_dir/file
+some_dir/dir_deep_5/some_dir/file
+some_dir/file_deep_2
+some_dir/file_deep_3
+some_dir/file_deep_4
+some_dir/file_deep_5
+src/main.rs
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn include() {
+ let root = paths::root().join("include");
+ let repo = git::repo(&root)
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ exclude = ["*.txt"]
+ include = ["foo.txt", "**/*.rs", "Cargo.toml", ".dotfile"]
+ "#,
+ )
+ .file("foo.txt", "")
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file(".dotfile", "")
+ // Should be ignored when packaging.
+ .file("src/bar.txt", "")
+ .build();
+
+ cargo_process("package --no-verify -v")
+ .cwd(repo.root())
+ .with_stderr(
+ "\
+[WARNING] manifest has no description[..]
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+[WARNING] both package.include and package.exclude are specified; the exclude list will be ignored
+[PACKAGING] foo v0.0.1 ([..])
+[ARCHIVING] .cargo_vcs_info.json
+[ARCHIVING] .dotfile
+[ARCHIVING] Cargo.lock
+[ARCHIVING] Cargo.toml
+[ARCHIVING] Cargo.toml.orig
+[ARCHIVING] foo.txt
+[ARCHIVING] src/main.rs
+[PACKAGED] 7 files, [..] ([..] compressed)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn package_lib_with_bin() {
+ let p = project()
+ .file("src/main.rs", "extern crate foo; fn main() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("package -v").run();
+}
+
+#[cargo_test]
+fn package_git_submodule() {
+ let project = git::new("foo", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = ["foo@example.com"]
+ license = "MIT"
+ description = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ });
+ let library = git::new("bar", |library| {
+ library.no_manifest().file("Makefile", "all:")
+ });
+
+ let repository = git2::Repository::open(&project.root()).unwrap();
+ let url = path2url(library.root()).to_string();
+ git::add_submodule(&repository, &url, Path::new("bar"));
+ git::commit(&repository);
+
+ let repository = git2::Repository::open(&project.root().join("bar")).unwrap();
+ repository
+ .reset(
+ &repository.revparse_single("HEAD").unwrap(),
+ git2::ResetType::Hard,
+ None,
+ )
+ .unwrap();
+
+ project
+ .cargo("package --no-verify -v")
+ .with_stderr_contains("[ARCHIVING] bar/Makefile")
+ .run();
+}
+
+#[cargo_test]
+/// Tests if a symlink to a git submodule is properly handled.
+///
+/// This test requires you to be able to make symlinks.
+/// For windows, this may require you to enable developer mode.
+fn package_symlink_to_submodule() {
+ #[cfg(unix)]
+ use std::os::unix::fs::symlink;
+ #[cfg(windows)]
+ use std::os::windows::fs::symlink_dir as symlink;
+
+ if !symlink_supported() {
+ return;
+ }
+
+ let project = git::new("foo", |project| {
+ project.file("src/lib.rs", "pub fn foo() {}")
+ });
+
+ let library = git::new("submodule", |library| {
+ library.no_manifest().file("Makefile", "all:")
+ });
+
+ let repository = git2::Repository::open(&project.root()).unwrap();
+ let url = path2url(library.root()).to_string();
+ git::add_submodule(&repository, &url, Path::new("submodule"));
+ t!(symlink(
+ &project.root().join("submodule"),
+ &project.root().join("submodule-link")
+ ));
+ git::add(&repository);
+ git::commit(&repository);
+
+ let repository = git2::Repository::open(&project.root().join("submodule")).unwrap();
+ repository
+ .reset(
+ &repository.revparse_single("HEAD").unwrap(),
+ git2::ResetType::Hard,
+ None,
+ )
+ .unwrap();
+
+ project
+ .cargo("package --no-verify -v")
+ .with_stderr_contains("[ARCHIVING] submodule/Makefile")
+ .run();
+}
+
+#[cargo_test]
+fn no_duplicates_from_modified_tracked_files() {
+ let p = git::new("all", |p| p.file("src/main.rs", "fn main() {}"));
+ p.change_file("src/main.rs", r#"fn main() { println!("A change!"); }"#);
+ p.cargo("build").run();
+ p.cargo("package --list --allow-dirty")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn ignore_nested() {
+ let cargo_toml = r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#;
+ let main_rs = r#"
+ fn main() { println!("hello"); }
+ "#;
+ let p = project()
+ .file("Cargo.toml", cargo_toml)
+ .file("src/main.rs", main_rs)
+ // If a project happens to contain a copy of itself, we should
+ // ignore it.
+ .file("a_dir/foo/Cargo.toml", cargo_toml)
+ .file("a_dir/foo/src/main.rs", main_rs)
+ .build();
+
+ p.cargo("package")
+ .with_stderr(
+ "\
+[WARNING] manifest has no documentation[..]
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
+ assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+ p.cargo("package").with_stdout("").run();
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ &[],
+ );
+}
+
+// Windows doesn't allow these characters in filenames.
+#[cfg(unix)]
+#[cargo_test]
+fn package_weird_characters() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file("src/:foo", "")
+ .build();
+
+ p.cargo("package")
+ .with_status(101)
+ .with_stderr(
+ "\
+warning: [..]
+See [..]
+[ERROR] cannot package a filename with a special character `:`: src/:foo
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn repackage_on_source_change() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ p.cargo("package").run();
+
+ // Add another source file
+ p.change_file("src/foo.rs", r#"fn main() { println!("foo"); }"#);
+
+ // Check that cargo rebuilds the tarball
+ p.cargo("package")
+ .with_stderr(
+ "\
+[WARNING] [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 5 files, [..] ([..] compressed)
+",
+ )
+ .run();
+
+ // Check that the tarball contains the added file
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &[
+ "Cargo.lock",
+ "Cargo.toml",
+ "Cargo.toml.orig",
+ "src/main.rs",
+ "src/foo.rs",
+ ],
+ &[],
+ );
+}
+
+#[cargo_test]
+/// Tests if a broken symlink is properly handled when packaging.
+///
+/// This test requires you to be able to make symlinks.
+/// For windows, this may require you to enable developer mode.
+fn broken_symlink() {
+ #[cfg(unix)]
+ use std::os::unix::fs::symlink;
+ #[cfg(windows)]
+ use std::os::windows::fs::symlink_dir as symlink;
+
+ if !symlink_supported() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = 'foo'
+ documentation = 'foo'
+ homepage = 'foo'
+ repository = 'foo'
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+ t!(symlink("nowhere", &p.root().join("src/foo.rs")));
+
+ p.cargo("package -v")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[ERROR] failed to prepare local package for uploading
+
+Caused by:
+ failed to open for archiving: `[..]foo.rs`
+
+Caused by:
+ [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+/// Tests if a broken but excluded symlink is ignored.
+/// See issue rust-lang/cargo#10917
+///
+/// This test requires you to be able to make symlinks.
+/// For windows, this may require you to enable developer mode.
+fn broken_but_excluded_symlink() {
+ #[cfg(unix)]
+ use std::os::unix::fs::symlink;
+ #[cfg(windows)]
+ use std::os::windows::fs::symlink_dir as symlink;
+
+ if !symlink_supported() {
+ return;
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = 'foo'
+ documentation = 'foo'
+ homepage = 'foo'
+ repository = 'foo'
+ exclude = ["src/foo.rs"]
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+ t!(symlink("nowhere", &p.root().join("src/foo.rs")));
+
+ p.cargo("package -v --list")
+ // `src/foo.rs` is excluded.
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+#[cfg(not(windows))] // https://github.com/libgit2/libgit2/issues/6250
+/// Test that `/dir` and `/dir/` match symlinks to directories.
+fn gitignore_symlink_dir() {
+ if !symlink_supported() {
+ return;
+ }
+
+ let (p, _repo) = git::new_repo("foo", |p| {
+ p.file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .symlink_dir("src", "src1")
+ .symlink_dir("src", "src2")
+ .symlink_dir("src", "src3")
+ .symlink_dir("src", "src4")
+ .file(".gitignore", "/src1\n/src2/\nsrc3\nsrc4/")
+ });
+
+ p.cargo("package -l --no-metadata")
+ .with_stderr("")
+ .with_stdout(
+ "\
+.cargo_vcs_info.json
+.gitignore
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+#[cfg(not(windows))] // https://github.com/libgit2/libgit2/issues/6250
+/// Test that `/dir` and `/dir/` match symlinks to directories in a dirty working directory.
+fn gitignore_symlink_dir_dirty() {
+ if !symlink_supported() {
+ return;
+ }
+
+ let (p, _repo) = git::new_repo("foo", |p| {
+ p.file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file(".gitignore", "/src1\n/src2/\nsrc3\nsrc4/")
+ });
+
+ p.symlink("src", "src1");
+ p.symlink("src", "src2");
+ p.symlink("src", "src3");
+ p.symlink("src", "src4");
+
+ p.cargo("package -l --no-metadata")
+ .with_stderr("")
+ .with_stdout(
+ "\
+.cargo_vcs_info.json
+.gitignore
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+
+ p.cargo("package -l --no-metadata --allow-dirty")
+ .with_stderr("")
+ .with_stdout(
+ "\
+.gitignore
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+/// Tests if a symlink to a directory is properly included.
+///
+/// This test requires you to be able to make symlinks.
+/// For windows, this may require you to enable developer mode.
+fn package_symlink_to_dir() {
+ if !symlink_supported() {
+ return;
+ }
+
+ project()
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file("bla/Makefile", "all:")
+ .symlink_dir("bla", "foo")
+ .build()
+ .cargo("package -v")
+ .with_stderr_contains("[ARCHIVING] foo/Makefile")
+ .run();
+}
+
+#[cargo_test]
+/// Tests if a symlink to an ancestor causes a filesystem loop error.
+///
+/// This test requires you to be able to make symlinks.
+/// For windows, this may require you to enable developer mode.
+fn filesystem_loop() {
+ if !symlink_supported() {
+ return;
+ }
+
+ project()
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .symlink_dir("a/b", "a/b/c/d/foo")
+ .build()
+ .cargo("package -v")
+ .with_stderr_contains(
+ "[WARNING] File system loop found: [..]/a/b/c/d/foo points to an ancestor [..]/a/b",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn do_not_package_if_repository_is_dirty() {
+ let p = project().build();
+
+ // Create a Git repository containing a minimal Rust project.
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ // Modify Cargo.toml without committing the change.
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ # change
+ "#,
+ );
+
+ p.cargo("package")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: 1 files in the working directory contain changes that were not yet \
+committed into git:
+
+Cargo.toml
+
+to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dirty_ignored() {
+ // Cargo warns about an ignored file that will be published.
+ let (p, repo) = git::new_repo("foo", |p| {
+ p.file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ description = "foo"
+ license = "foo"
+ documentation = "foo"
+ include = ["src", "build"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(".gitignore", "build")
+ });
+ // Example of adding a file that is confusingly ignored by an overzealous
+ // gitignore rule.
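+ // (The `build` pattern in .gitignore above is presumably aimed at a build
+ // output directory, but it also matches `src/build/`, even though the
+ // manifest's `include` list explicitly names `build`.)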
+ p.change_file("src/build/mod.rs", "");
+ p.cargo("package --list")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: 1 files in the working directory contain changes that were not yet committed into git:
+
+src/build/mod.rs
+
+to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag
+",
+ )
+ .run();
+ // Add the ignored file and make sure it is included.
+ let mut index = t!(repo.index());
+ t!(index.add_path(Path::new("src/build/mod.rs")));
+ t!(index.write());
+ git::commit(&repo);
+ p.cargo("package --list")
+ .with_stderr("")
+ .with_stdout(
+ "\
+.cargo_vcs_info.json
+Cargo.toml
+Cargo.toml.orig
+src/build/mod.rs
+src/lib.rs
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn generated_manifest() {
+ let registry = registry::alt_init();
+ Package::new("abc", "1.0.0").publish();
+ Package::new("def", "1.0.0").alternative(true).publish();
+ Package::new("ghi", "1.0.0").publish();
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ exclude = ["*.txt"]
+ license = "MIT"
+ description = "foo"
+
+ [package.metadata]
+ foo = 'bar'
+
+ [workspace]
+
+ [dependencies]
+ bar = { path = "bar", version = "0.1" }
+ def = { version = "1.0", registry = "alternative" }
+ ghi = "1.0"
+ abc = "1.0"
+ "#,
+ )
+ .file("src/main.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("package --no-verify").run();
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ let rewritten_toml = format!(
+ r#"{}
+[package]
+name = "foo"
+version = "0.0.1"
+authors = []
+exclude = ["*.txt"]
+description = "foo"
+license = "MIT"
+
+[package.metadata]
+foo = "bar"
+
+[dependencies.abc]
+version = "1.0"
+
+[dependencies.bar]
+version = "0.1"
+
+[dependencies.def]
+version = "1.0"
+registry-index = "{}"
+
+[dependencies.ghi]
+version = "1.0"
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE,
+ registry.index_url()
+ );
+
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ &[("Cargo.toml", &rewritten_toml)],
+ );
+}
+
+#[cargo_test]
+fn ignore_workspace_specifier() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ authors = []
+
+ [workspace]
+
+ [dependencies]
+ bar = { path = "bar", version = "0.1" }
+ "#,
+ )
+ .file("src/main.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("package --no-verify").cwd("bar").run();
+
+ let f = File::open(&p.root().join("target/package/bar-0.1.0.crate")).unwrap();
+ let rewritten_toml = format!(
+ r#"{}
+[package]
+name = "bar"
+version = "0.1.0"
+authors = []
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ );
+ validate_crate_contents(
+ f,
+ "bar-0.1.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
+ &[("Cargo.toml", &rewritten_toml)],
+ );
+}
+
+#[cargo_test]
+fn package_two_kinds_of_deps() {
+ Package::new("other", "1.0.0").publish();
+ Package::new("other1", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ other = "1.0"
+ other1 = { version = "1.0" }
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("package --no-verify").run();
+}
+
+#[cargo_test]
+fn test_edition() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["edition"]
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ edition = "2018"
+ "#,
+ )
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("check -v")
+ .with_stderr_contains(
+ "\
+[CHECKING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..]--edition=2018 [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn edition_with_metadata() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ edition = "2018"
+
+ [package.metadata.docs.rs]
+ features = ["foobar"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("package").run();
+}
+
+#[cargo_test]
+fn test_edition_malformed() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ edition = "chicken"
+ "#,
+ )
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ failed to parse the `edition` key
+
+Caused by:
+ supported edition values are `2015`, `2018`, or `2021`, but `chicken` is unknown
+"
+ .to_string(),
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_edition_from_the_future() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"[package]
+ edition = "2038"
+ name = "foo"
+ version = "99.99.99"
+ authors = []
+ "#,
+ )
+ .file("src/main.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ failed to parse the `edition` key
+
+Caused by:
+ this version of Cargo is older than the `2038` edition, and only supports `2015`, `2018`, and `2021` editions.
+"
+ .to_string(),
+ )
+ .run();
+}
+
+#[cargo_test]
+fn do_not_package_if_src_was_modified() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file("dir/foo.txt", "")
+ .file("bar.txt", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::fs;
+
+ fn main() {
+ fs::write("src/generated.txt",
+ "Hello, world of generated files."
+ ).expect("failed to create file");
+ fs::remove_file("dir/foo.txt").expect("failed to remove file");
+ fs::remove_dir("dir").expect("failed to remove dir");
+ fs::write("bar.txt", "updated content").expect("failed to update");
+ fs::create_dir("new-dir").expect("failed to create dir");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("package")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: failed to verify package tarball
+
+Caused by:
+ Source directory was modified by build.rs during cargo publish. \
+ Build scripts should not modify anything outside of OUT_DIR.
+ Changed: [CWD]/target/package/foo-0.0.1/bar.txt
+ Added: [CWD]/target/package/foo-0.0.1/new-dir
+ <tab>[CWD]/target/package/foo-0.0.1/src/generated.txt
+ Removed: [CWD]/target/package/foo-0.0.1/dir
+ <tab>[CWD]/target/package/foo-0.0.1/dir/foo.txt
+
+ To proceed despite this, pass the `--no-verify` flag.",
+ )
+ .run();
+
+ p.cargo("package --no-verify").run();
+}
+
+#[cargo_test]
+fn package_with_select_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [features]
+ required = []
+ optional = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "#[cfg(not(feature = \"required\"))]
+ compile_error!(\"This crate requires `required` feature!\");
+ fn main() {}",
+ )
+ .build();
+
+ p.cargo("package --features required").run();
+}
+
+#[cargo_test]
+fn package_with_all_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [features]
+ required = []
+ optional = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "#[cfg(not(feature = \"required\"))]
+ compile_error!(\"This crate requires `required` feature!\");
+ fn main() {}",
+ )
+ .build();
+
+ p.cargo("package --all-features").run();
+}
+
+#[cargo_test]
+fn package_no_default_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [features]
+ default = ["required"]
+ required = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "#[cfg(not(feature = \"required\"))]
+ compile_error!(\"This crate requires `required` feature!\");
+ fn main() {}",
+ )
+ .build();
+
+ p.cargo("package --no-default-features")
+ .with_stderr_contains("error: This crate requires `required` feature!")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn include_cargo_toml_implicit() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ include = ["src/lib.rs"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("package --list")
+ .with_stdout("Cargo.toml\nCargo.toml.orig\nsrc/lib.rs\n")
+ .run();
+}
+
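+/// Helper: creates a throwaway project whose manifest uses the given `include`
+/// and `exclude` arrays (passed as TOML list literals), adds the listed files,
+/// and asserts that `cargo package --list` prints exactly `expected`.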
+fn include_exclude_test(include: &str, exclude: &str, files: &[&str], expected: &str) {
+ let mut pb = project().file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ include = {}
+ exclude = {}
+ "#,
+ include, exclude
+ ),
+ );
+ for file in files {
+ pb = pb.file(file, "");
+ }
+ let p = pb.build();
+
+ p.cargo("package --list")
+ .with_stderr("")
+ .with_stdout(expected)
+ .run();
+ p.root().rm_rf();
+}
+
+#[cargo_test]
+fn package_include_ignore_only() {
+ // Test with a gitignore pattern that fails to parse with glob.
+ // This is a somewhat nonsense pattern, but is an example of something git
+ // allows and glob does not.
+ assert!(glob::Pattern::new("src/abc**").is_err());
+
+ include_exclude_test(
+ r#"["Cargo.toml", "src/abc**", "src/lib.rs"]"#,
+ "[]",
+ &["src/lib.rs", "src/abc1.rs", "src/abc2.rs", "src/abc/mod.rs"],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ src/abc/mod.rs\n\
+ src/abc1.rs\n\
+ src/abc2.rs\n\
+ src/lib.rs\n\
+ ",
+ )
+}
+
+#[cargo_test]
+fn gitignore_patterns() {
+ include_exclude_test(
+ r#"["Cargo.toml", "foo"]"#, // include
+ "[]",
+ &["src/lib.rs", "foo", "a/foo", "a/b/foo", "x/foo/y", "bar"],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ a/b/foo\n\
+ a/foo\n\
+ foo\n\
+ x/foo/y\n\
+ ",
+ );
+
+ include_exclude_test(
+ r#"["Cargo.toml", "/foo"]"#, // include
+ "[]",
+ &["src/lib.rs", "foo", "a/foo", "a/b/foo", "x/foo/y", "bar"],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ foo\n\
+ ",
+ );
+
+ include_exclude_test(
+ "[]",
+ r#"["foo/"]"#, // exclude
+ &["src/lib.rs", "foo", "a/foo", "x/foo/y", "bar"],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ a/foo\n\
+ bar\n\
+ foo\n\
+ src/lib.rs\n\
+ ",
+ );
+
+ include_exclude_test(
+ "[]",
+ r#"["*.txt", "[ab]", "[x-z]"]"#, // exclude
+ &[
+ "src/lib.rs",
+ "foo.txt",
+ "bar/foo.txt",
+ "other",
+ "a",
+ "b",
+ "c",
+ "x",
+ "y",
+ "z",
+ ],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ c\n\
+ other\n\
+ src/lib.rs\n\
+ ",
+ );
+
+ include_exclude_test(
+ r#"["Cargo.toml", "**/foo/bar"]"#, // include
+ "[]",
+ &["src/lib.rs", "a/foo/bar", "foo", "bar"],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ a/foo/bar\n\
+ ",
+ );
+
+ include_exclude_test(
+ r#"["Cargo.toml", "foo/**"]"#, // include
+ "[]",
+ &["src/lib.rs", "a/foo/bar", "foo/x/y/z"],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ foo/x/y/z\n\
+ ",
+ );
+
+ include_exclude_test(
+ r#"["Cargo.toml", "a/**/b"]"#, // include
+ "[]",
+ &["src/lib.rs", "a/b", "a/x/b", "a/x/y/b"],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ a/b\n\
+ a/x/b\n\
+ a/x/y/b\n\
+ ",
+ );
+}
+
+#[cargo_test]
+fn gitignore_negate() {
+ include_exclude_test(
+ r#"["Cargo.toml", "*.rs", "!foo.rs", "\\!important"]"#, // include
+ "[]",
+ &["src/lib.rs", "foo.rs", "!important"],
+ "!important\n\
+ Cargo.toml\n\
+ Cargo.toml.orig\n\
+ src/lib.rs\n\
+ ",
+ );
+
+ // NOTE: This is unusual compared to git. Git treats `src/` as a
+ // short-circuit which means rules like `!src/foo.rs` would never run.
+ // However, because Cargo only works by iterating over *files*, it doesn't
+ // short-circuit.
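+ // The next case shows this: `src/` pulls in both files, yet `!src/foo.rs`
+ // still takes effect and removes `src/foo.rs` from the package list.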
+ include_exclude_test(
+ r#"["Cargo.toml", "src/", "!src/foo.rs"]"#, // include
+ "[]",
+ &["src/lib.rs", "src/foo.rs"],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ src/lib.rs\n\
+ ",
+ );
+
+ include_exclude_test(
+ r#"["Cargo.toml", "src/*.rs", "!foo.rs"]"#, // include
+ "[]",
+ &["src/lib.rs", "foo.rs", "src/foo.rs", "src/bar/foo.rs"],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ src/lib.rs\n\
+ ",
+ );
+
+ include_exclude_test(
+ "[]",
+ r#"["*.rs", "!foo.rs", "\\!important"]"#, // exclude
+ &["src/lib.rs", "foo.rs", "!important"],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ foo.rs\n\
+ ",
+ );
+}
+
+#[cargo_test]
+fn exclude_dot_files_and_directories_by_default() {
+ include_exclude_test(
+ "[]",
+ "[]",
+ &["src/lib.rs", ".dotfile", ".dotdir/file"],
+ "Cargo.toml\n\
+ Cargo.toml.orig\n\
+ src/lib.rs\n\
+ ",
+ );
+
+ include_exclude_test(
+ r#"["Cargo.toml", "src/lib.rs", ".dotfile", ".dotdir/file"]"#,
+ "[]",
+ &["src/lib.rs", ".dotfile", ".dotdir/file"],
+ ".dotdir/file\n\
+ .dotfile\n\
+ Cargo.toml\n\
+ Cargo.toml.orig\n\
+ src/lib.rs\n\
+ ",
+ );
+}
+
+#[cargo_test]
+fn invalid_license_file_path() {
+ // Test warning when license-file points to a non-existent file.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ license-file = "does-not-exist"
+ description = "foo"
+ homepage = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("package --no-verify")
+ .with_stderr(
+ "\
+[WARNING] license-file `does-not-exist` does not appear to exist (relative to `[..]/foo`).
+Please update the license-file setting in the manifest at `[..]/foo/Cargo.toml`
+This may become a hard error in the future.
+[PACKAGING] foo v1.0.0 ([..]/foo)
+[PACKAGED] [..] files, [..] ([..] compressed)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn license_file_implicit_include() {
+ // license-file should be automatically included even if it is not listed in `include`.
+ let p = git::new("foo", |p| {
+ p.file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ license-file = "subdir/LICENSE"
+ description = "foo"
+ homepage = "foo"
+ include = ["src"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("subdir/LICENSE", "license text")
+ });
+
+ p.cargo("package --list")
+ .with_stdout(
+ "\
+.cargo_vcs_info.json
+Cargo.toml
+Cargo.toml.orig
+src/lib.rs
+subdir/LICENSE
+",
+ )
+ .with_stderr("")
+ .run();
+
+ p.cargo("package --no-verify -v")
+ .with_stderr(
+ "\
+[PACKAGING] foo v1.0.0 [..]
+[ARCHIVING] .cargo_vcs_info.json
+[ARCHIVING] Cargo.toml
+[ARCHIVING] Cargo.toml.orig
+[ARCHIVING] src/lib.rs
+[ARCHIVING] subdir/LICENSE
+[PACKAGED] 5 files, [..] ([..] compressed)
+",
+ )
+ .run();
+ let f = File::open(&p.root().join("target/package/foo-1.0.0.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-1.0.0.crate",
+ &[
+ ".cargo_vcs_info.json",
+ "Cargo.toml",
+ "Cargo.toml.orig",
+ "subdir/LICENSE",
+ "src/lib.rs",
+ ],
+ &[("subdir/LICENSE", "license text")],
+ );
+}
+
+#[cargo_test]
+fn relative_license_included() {
+ // A license-file path outside of the package will be copied into the package root.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ license-file = "../LICENSE"
+ description = "foo"
+ homepage = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("../LICENSE", "license text")
+ .build();
+
+ p.cargo("package --list")
+ .with_stdout(
+ "\
+Cargo.toml
+Cargo.toml.orig
+LICENSE
+src/lib.rs
+",
+ )
+ .with_stderr("")
+ .run();
+
+ p.cargo("package")
+ .with_stderr(
+ "\
+[PACKAGING] foo v1.0.0 [..]
+[VERIFYING] foo v1.0.0 [..]
+[COMPILING] foo v1.0.0 [..]
+[FINISHED] [..]
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
+ let f = File::open(&p.root().join("target/package/foo-1.0.0.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-1.0.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "LICENSE", "src/lib.rs"],
+ &[("LICENSE", "license text")],
+ );
+ let manifest =
+ std::fs::read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml")).unwrap();
+ assert!(manifest.contains("license-file = \"LICENSE\""));
+ let orig =
+ std::fs::read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml.orig")).unwrap();
+ assert!(orig.contains("license-file = \"../LICENSE\""));
+}
+
+#[cargo_test]
+fn relative_license_include_collision() {
+ // Can't copy a relative license-file if there is a file with that name already.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ license-file = "../LICENSE"
+ description = "foo"
+ homepage = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("../LICENSE", "outer license")
+ .file("LICENSE", "inner license")
+ .build();
+
+ p.cargo("package --list")
+ .with_stdout(
+ "\
+Cargo.toml
+Cargo.toml.orig
+LICENSE
+src/lib.rs
+",
+ )
+ .with_stderr("[WARNING] license-file `../LICENSE` appears to be [..]")
+ .run();
+
+ p.cargo("package")
+ .with_stderr(
+ "\
+[WARNING] license-file `../LICENSE` appears to be [..]
+[PACKAGING] foo v1.0.0 [..]
+[VERIFYING] foo v1.0.0 [..]
+[COMPILING] foo v1.0.0 [..]
+[FINISHED] [..]
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
+ let f = File::open(&p.root().join("target/package/foo-1.0.0.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-1.0.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "LICENSE", "src/lib.rs"],
+ &[("LICENSE", "inner license")],
+ );
+ let manifest = read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml")).unwrap();
+ assert!(manifest.contains("license-file = \"LICENSE\""));
+ let orig = read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml.orig")).unwrap();
+ assert!(orig.contains("license-file = \"../LICENSE\""));
+}
+
+#[cargo_test]
+#[cfg(not(windows))] // Don't want to create invalid files on Windows.
+fn package_restricted_windows() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ license = "MIT"
+ description = "foo"
+ homepage = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "pub mod con;\npub mod aux;")
+ .file("src/con.rs", "pub fn f() {}")
+ .file("src/aux/mod.rs", "pub fn f() {}")
+ .build();
+
+ p.cargo("package")
+ // Use unordered here because the order of the warnings differs between platforms.
+ .with_stderr_unordered(
+ "\
+[WARNING] file src/aux/mod.rs is a reserved Windows filename, it will not work on Windows platforms
+[WARNING] file src/con.rs is a reserved Windows filename, it will not work on Windows platforms
+[PACKAGING] foo [..]
+[VERIFYING] foo [..]
+[COMPILING] foo [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn finds_git_in_parent() {
+ // Test where `Cargo.toml` is not in the root of the git repo.
+ let repo_path = paths::root().join("repo");
+ fs::create_dir(&repo_path).unwrap();
+ let p = project()
+ .at("repo/foo")
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+ let repo = git::init(&repo_path);
+ git::add(&repo);
+ git::commit(&repo);
+ p.change_file("ignoreme", "");
+ p.change_file("ignoreme2", "");
+ p.cargo("package --list --allow-dirty")
+ .with_stdout(
+ "\
+Cargo.toml
+Cargo.toml.orig
+ignoreme
+ignoreme2
+src/lib.rs
+",
+ )
+ .run();
+
+ p.change_file(".gitignore", "ignoreme");
+ p.cargo("package --list --allow-dirty")
+ .with_stdout(
+ "\
+.gitignore
+Cargo.toml
+Cargo.toml.orig
+ignoreme2
+src/lib.rs
+",
+ )
+ .run();
+
+ fs::write(repo_path.join(".gitignore"), "ignoreme2").unwrap();
+ p.cargo("package --list --allow-dirty")
+ .with_stdout(
+ "\
+.gitignore
+Cargo.toml
+Cargo.toml.orig
+src/lib.rs
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+#[cfg(windows)]
+fn reserved_windows_name() {
+ // If we are running on a version of Windows that allows these reserved filenames,
+ // skip this test.
+ if paths::windows_reserved_names_are_allowed() {
+ return;
+ }
+
+ Package::new("bar", "1.0.0")
+ .file("src/lib.rs", "pub mod aux;")
+ .file("src/aux.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [dependencies]
+ bar = "1.0.0"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar;\nfn main() { }")
+ .build();
+ p.cargo("package")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: failed to verify package tarball
+
+Caused by:
+ failed to download replaced source registry `[..]`
+
+Caused by:
+ failed to unpack package `[..] `[..]`)`
+
+Caused by:
+ failed to unpack entry at `[..]aux.rs`
+
+Caused by:
+ `[..]aux.rs` appears to contain a reserved Windows path, it cannot be extracted on Windows
+
+Caused by:
+ failed to unpack `[..]aux.rs`
+
+Caused by:
+ failed to unpack `[..]aux.rs` into `[..]aux.rs`",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn list_with_path_and_lock() {
+ // Allow --list even for something that isn't packageable.
+
+ // Init an empty registry because a versionless path dep will search for
+ // the package on crates.io.
+ registry::init();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ license = "MIT"
+ description = "foo"
+ homepage = "foo"
+
+ [dependencies]
+ bar = {path="bar"}
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("package --list")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+
+ p.cargo("package")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] all dependencies must have a version specified when packaging.
+dependency `bar` does not specify a version
+Note: The packaged dependency will use the version from crates.io,
+the `path` specification will be removed from the dependency declaration.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn long_file_names() {
+ // Filenames over 100 characters require a GNU extension tarfile.
+ // See #8453.
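+ // (The basic ustar header's name field holds only 100 bytes, so longer names
+ // are stored in a separate GNU long-name entry.)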
+
+ registry::init();
+ let long_name = concat!(
+ "012345678901234567890123456789012345678901234567890123456789",
+ "012345678901234567890123456789012345678901234567890123456789",
+ "012345678901234567890123456789012345678901234567890123456789"
+ );
+ if cfg!(windows) {
+ // Long paths on Windows require a special registry entry that is
+ // disabled by default (even on Windows 10).
+ // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file
+ // If the directory where Cargo runs happens to be more than 80 characters
+ // long, then it will bump into this limit.
+ //
+ // First create a directory to account for various paths Cargo will
+ // be using in the target directory (such as "target/package/foo-0.1.0").
+ let test_path = paths::root().join("test-dir-probe-long-path-support");
+ test_path.mkdir_p();
+ let test_path = test_path.join(long_name);
+ if let Err(e) = File::create(&test_path) {
+ // write to stderr directly to avoid output from being captured
+ // and always display text, even without --nocapture
+ use std::io::Write;
+ writeln!(
+ std::io::stderr(),
+ "\nSkipping long_file_names test, this OS or filesystem does not \
+ appear to support long file paths: {:?}\n{:?}",
+ e,
+ test_path
+ )
+ .unwrap();
+ return;
+ }
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ license = "MIT"
+ description = "foo"
+ homepage = "foo"
+
+ [dependencies]
+ "#,
+ )
+ .file(long_name, "something")
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("package").run();
+ p.cargo("package --list")
+ .with_stdout(&format!(
+ "\
+{}
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ long_name
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn reproducible_output() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ exclude = ["*.txt"]
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ p.cargo("package").run();
+ assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ let decoder = GzDecoder::new(f);
+ let mut archive = Archive::new(decoder);
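+ // Every entry should carry deterministic metadata: mode 0644, a fixed non-zero
+ // mtime, and empty user/group names.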
+ for ent in archive.entries().unwrap() {
+ let ent = ent.unwrap();
+ println!("checking {:?}", ent.path());
+ let header = ent.header();
+ assert_eq!(header.mode().unwrap(), 0o644);
+ assert!(header.mtime().unwrap() != 0);
+ assert_eq!(header.username().unwrap().unwrap(), "");
+ assert_eq!(header.groupname().unwrap().unwrap(), "");
+ }
+}
+
+#[cargo_test]
+fn package_with_resolver_and_metadata() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ resolver = '2'
+
+ [package.metadata.docs.rs]
+ all-features = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("package").run();
+}
+
+#[cargo_test]
+fn deleted_git_working_tree() {
+ // When a file is deleted but the deletion is not staged, cargo should ignore the file.
+ let (p, repo) = git::new_repo("foo", |p| {
+ p.file("src/lib.rs", "").file("src/main.rs", "fn main() {}")
+ });
+ p.root().join("src/lib.rs").rm_rf();
+ p.cargo("package --allow-dirty --list")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+ p.cargo("package --allow-dirty").run();
+ let mut index = t!(repo.index());
+ t!(index.remove(Path::new("src/lib.rs"), 0));
+ t!(index.write());
+ p.cargo("package --allow-dirty --list")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+ p.cargo("package --allow-dirty").run();
+}
+
+#[cargo_test]
+fn in_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "bar"
+ workspace = ".."
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("package --workspace")
+ .with_stderr(
+ "\
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] bar v0.0.1 ([CWD]/bar)
+[VERIFYING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] bar v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+",
+ )
+ .run();
+
+ assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+ assert!(p.root().join("target/package/bar-0.0.1.crate").is_file());
+}
+
+#[cargo_test]
+fn workspace_overrides_resolver() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ edition = "2021"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ edition = "2015"
+ "#,
+ )
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ p.cargo("package --no-verify -p bar -p baz").run();
+
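+ // The virtual workspace has no `resolver` key, so it defaults to resolver "1".
+ // `bar` uses edition 2021, which would imply resolver "2" in a standalone package,
+ // so the rewritten manifest pins `resolver = "1"` to preserve the workspace behavior.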
+ let f = File::open(&p.root().join("target/package/bar-0.1.0.crate")).unwrap();
+ let rewritten_toml = format!(
+ r#"{}
+[package]
+edition = "2021"
+name = "bar"
+version = "0.1.0"
+resolver = "1"
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ );
+ validate_crate_contents(
+ f,
+ "bar-0.1.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
+ &[("Cargo.toml", &rewritten_toml)],
+ );
+
+ // When the crate's implicit resolver is the same as the workspace's, it is not overridden.
+ let f = File::open(&p.root().join("target/package/baz-0.1.0.crate")).unwrap();
+ let rewritten_toml = format!(
+ r#"{}
+[package]
+edition = "2015"
+name = "baz"
+version = "0.1.0"
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ );
+ validate_crate_contents(
+ f,
+ "baz-0.1.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
+ &[("Cargo.toml", &rewritten_toml)],
+ );
+}
+
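+// Helper: asserts that stderr contains exactly one `Packaged` status line and that it
+// reports the given file count and human-readable uncompressed/compressed sizes.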
+fn verify_packaged_status_line(
+ output: std::process::Output,
+ num_files: usize,
+ uncompressed_size: u64,
+ compressed_size: u64,
+) {
+ use cargo::util::human_readable_bytes;
+
+ let stderr = String::from_utf8(output.stderr).unwrap();
+ let mut packaged_lines = stderr
+ .lines()
+ .filter(|line| line.trim().starts_with("Packaged"));
+ let packaged_line = packaged_lines
+ .next()
+ .expect("`Packaged` status line should appear in stderr");
+ assert!(
+ packaged_lines.next().is_none(),
+ "Only one `Packaged` status line should appear in stderr"
+ );
+ let size_info = packaged_line.trim().trim_start_matches("Packaged").trim();
+ let uncompressed = human_readable_bytes(uncompressed_size);
+ let compressed = human_readable_bytes(compressed_size);
+ let expected = format!(
+ "{} files, {:.1}{} ({:.1}{} compressed)",
+ num_files, uncompressed.0, uncompressed.1, compressed.0, compressed.1
+ );
+ assert_eq!(size_info, expected);
+}
+
+#[cargo_test]
+fn basic_filesizes() {
+ let cargo_toml_orig_contents = r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ exclude = ["*.txt"]
+ license = "MIT"
+ description = "foo"
+ "#;
+ let main_rs_contents = r#"fn main() { println!("🦀"); }"#;
+ let cargo_toml_contents = format!(
+ r#"{}
+[package]
+name = "foo"
+version = "0.0.1"
+authors = []
+exclude = ["*.txt"]
+description = "foo"
+license = "MIT"
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ );
+ let cargo_lock_contents = r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+"#;
+ let p = project()
+ .file("Cargo.toml", cargo_toml_orig_contents)
+ .file("src/main.rs", main_rs_contents)
+ .file("src/bar.txt", "Ignored text file contents") // should be ignored when packaging
+ .build();
+
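+ // The uncompressed size reported on the `Packaged` line is the sum of the byte
+ // lengths of every file written into the archive.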
+ let uncompressed_size = (cargo_toml_orig_contents.len()
+ + main_rs_contents.len()
+ + cargo_toml_contents.len()
+ + cargo_lock_contents.len()) as u64;
+ let output = p.cargo("package").exec_with_output().unwrap();
+
+ assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+ p.cargo("package").with_stdout("").run();
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ let compressed_size = f.metadata().unwrap().len();
+ verify_packaged_status_line(output, 4, uncompressed_size, compressed_size);
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ &[
+ ("Cargo.lock", cargo_lock_contents),
+ ("Cargo.toml", &cargo_toml_contents),
+ ("Cargo.toml.orig", cargo_toml_orig_contents),
+ ("src/main.rs", main_rs_contents),
+ ],
+ );
+}
+
+#[cargo_test]
+fn larger_filesizes() {
+ let cargo_toml_orig_contents = r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#;
+ let lots_of_crabs = std::iter::repeat("🦀").take(1337).collect::<String>();
+ let main_rs_contents = format!(r#"fn main() {{ println!("{}"); }}"#, lots_of_crabs);
+ let bar_txt_contents = "This file is relatively uncompressible, to increase the compressed
+ package size beyond 1KiB.
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
+ ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
+ ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
+ reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur
+ sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est
+ laborum.";
+ let cargo_toml_contents = format!(
+ r#"{}
+[package]
+name = "foo"
+version = "0.0.1"
+authors = []
+description = "foo"
+license = "MIT"
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ );
+ let cargo_lock_contents = r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+"#;
+ let p = project()
+ .file("Cargo.toml", cargo_toml_orig_contents)
+ .file("src/main.rs", &main_rs_contents)
+ .file("src/bar.txt", bar_txt_contents)
+ .build();
+
+ let uncompressed_size = (cargo_toml_orig_contents.len()
+ + main_rs_contents.len()
+ + cargo_toml_contents.len()
+ + cargo_lock_contents.len()
+ + bar_txt_contents.len()) as u64;
+
+ let output = p.cargo("package").exec_with_output().unwrap();
+ assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/bar.txt
+src/main.rs
+",
+ )
+ .run();
+ p.cargo("package").with_stdout("").run();
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ let compressed_size = f.metadata().unwrap().len();
+ verify_packaged_status_line(output, 5, uncompressed_size, compressed_size);
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &[
+ "Cargo.lock",
+ "Cargo.toml",
+ "Cargo.toml.orig",
+ "src/bar.txt",
+ "src/main.rs",
+ ],
+ &[
+ ("Cargo.lock", cargo_lock_contents),
+ ("Cargo.toml", &cargo_toml_contents),
+ ("Cargo.toml.orig", cargo_toml_orig_contents),
+ ("src/bar.txt", bar_txt_contents),
+ ("src/main.rs", &main_rs_contents),
+ ],
+ );
+}
+
+#[cargo_test]
+fn symlink_filesizes() {
+ if !symlink_supported() {
+ return;
+ }
+
+ let cargo_toml_orig_contents = r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#;
+ let lots_of_crabs = std::iter::repeat("🦀").take(1337).collect::<String>();
+ let main_rs_contents = format!(r#"fn main() {{ println!("{}"); }}"#, lots_of_crabs);
+ let bar_txt_contents = "This file is relatively uncompressible, to increase the compressed
+ package size beyond 1KiB.
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
+ ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
+ ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
+ reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur
+ sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est
+ laborum.";
+ let cargo_toml_contents = format!(
+ r#"{}
+[package]
+name = "foo"
+version = "0.0.1"
+authors = []
+description = "foo"
+license = "MIT"
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ );
+ let cargo_lock_contents = r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+"#;
+
+ let p = project()
+ .file("Cargo.toml", cargo_toml_orig_contents)
+ .file("src/main.rs", &main_rs_contents)
+ .file("bla/bar.txt", bar_txt_contents)
+ .symlink("src/main.rs", "src/main.rs.bak")
+ .symlink_dir("bla", "foo")
+ .build();
+
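+ // Symlinked paths are archived as regular copies, so they contribute their full
+ // contents to the uncompressed size (hence the `* 2`).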
+ let uncompressed_size = (cargo_toml_orig_contents.len()
+ + main_rs_contents.len() * 2
+ + cargo_toml_contents.len()
+ + cargo_lock_contents.len()
+ + bar_txt_contents.len() * 2) as u64;
+
+ let output = p.cargo("package").exec_with_output().unwrap();
+ assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+bla/bar.txt
+foo/bar.txt
+src/main.rs
+src/main.rs.bak
+",
+ )
+ .run();
+ p.cargo("package").with_stdout("").run();
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ let compressed_size = f.metadata().unwrap().len();
+ verify_packaged_status_line(output, 7, uncompressed_size, compressed_size);
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &[
+ "Cargo.lock",
+ "Cargo.toml",
+ "Cargo.toml.orig",
+ "bla/bar.txt",
+ "foo/bar.txt",
+ "src/main.rs",
+ "src/main.rs.bak",
+ ],
+ &[
+ ("Cargo.lock", cargo_lock_contents),
+ ("Cargo.toml", &cargo_toml_contents),
+ ("Cargo.toml.orig", cargo_toml_orig_contents),
+ ("bla/bar.txt", bar_txt_contents),
+ ("foo/bar.txt", bar_txt_contents),
+ ("src/main.rs", &main_rs_contents),
+ ("src/main.rs.bak", &main_rs_contents),
+ ],
+ );
+}
diff --git a/src/tools/cargo/tests/testsuite/package_features.rs b/src/tools/cargo/tests/testsuite/package_features.rs
new file mode 100644
index 000000000..15f726be5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/package_features.rs
@@ -0,0 +1,704 @@
+//! Tests for feature selection on the command-line.
+
+use super::features2::switch_to_resolver_2;
+use cargo_test_support::registry::{Dependency, Package};
+use cargo_test_support::{basic_manifest, project};
+use std::fmt::Write;
+
+#[cargo_test]
+fn virtual_no_default_features() {
+ // --no-default-features in root of virtual workspace.
+ Package::new("dep1", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ dep1 = {version = "1.0", optional = true}
+
+ [features]
+ default = ["dep1"]
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [features]
+ default = ["f1"]
+ f1 = []
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
+ #[cfg(feature = "f1")]
+ compile_error!{"expected f1 off"}
+ "#,
+ )
+ .build();
+
+ p.cargo("check --no-default-features")
+ .with_stderr_unordered(
+ "\
+[UPDATING] [..]
+[CHECKING] a v0.1.0 [..]
+[CHECKING] b v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("check --features foo")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] none of the selected packages contains these features: foo, did you mean: f1?",
+ )
+ .run();
+
+ p.cargo("check --features a/dep1,b/f1,b/f2,f2")
+ .with_status(101)
+ .with_stderr("[ERROR] none of the selected packages contains these features: b/f2, f2, did you mean: f1?")
+ .run();
+
+ p.cargo("check --features a/dep,b/f1,b/f2,f2")
+ .with_status(101)
+ .with_stderr("[ERROR] none of the selected packages contains these features: a/dep, b/f2, f2, did you mean: a/dep1, f1?")
+ .run();
+
+ p.cargo("check --features a/dep,a/dep1")
+ .with_status(101)
+ .with_stderr("[ERROR] none of the selected packages contains these features: a/dep, did you mean: b/f1?")
+ .run();
+}
+
+#[cargo_test]
+fn virtual_typo_member_feature() {
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ resolver = "2"
+
+ [features]
+ deny-warnings = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build()
+ .cargo("check --features a/deny-warning")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] none of the selected packages contains these features: a/deny-warning, did you mean: a/deny-warnings?",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn virtual_features() {
+ // --features in root of virtual workspace.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [features]
+ f1 = []
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
+ #[cfg(not(feature = "f1"))]
+ compile_error!{"f1 is missing"}
+ "#,
+ )
+ .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("check --features f1")
+ .with_stderr_unordered(
+ "\
+[CHECKING] a [..]
+[CHECKING] b [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn virtual_with_specific() {
+ // -p flags with --features in root of virtual.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [features]
+ f1 = []
+ f2 = []
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
+ #[cfg(not_feature = "f1")]
+ compile_error!{"f1 is missing"}
+ #[cfg(not_feature = "f2")]
+ compile_error!{"f2 is missing"}
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [features]
+ f2 = []
+ f3 = []
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
+ #[cfg(not_feature = "f2")]
+ compile_error!{"f2 is missing"}
+ #[cfg(not_feature = "f3")]
+ compile_error!{"f3 is missing"}
+ "#,
+ )
+ .build();
+
+ p.cargo("check -p a -p b --features f1,f2,f3")
+ .with_stderr_unordered(
+ "\
+[CHECKING] a [..]
+[CHECKING] b [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn other_member_from_current() {
+ // -p for another member while in the current directory.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path="bar", features=["f3"] }
+
+ [features]
+ f1 = ["bar/f4"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [features]
+ f1 = []
+ f2 = []
+ f3 = []
+ f4 = []
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file(
+ "bar/src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(feature = "f1") {
+ print!("f1");
+ }
+ if cfg!(feature = "f2") {
+ print!("f2");
+ }
+ if cfg!(feature = "f3") {
+ print!("f3");
+ }
+ if cfg!(feature = "f4") {
+ print!("f4");
+ }
+ println!();
+ }
+ "#,
+ )
+ .build();
+
+ // Old behavior.
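+ // Before switching to resolver "2", --features refers to the package in the current
+ // directory (`foo`), so `f1` is foo's feature, which in turn enables `bar/f4`; bar also
+ // gets `f3` from foo's dependency declaration.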
+ p.cargo("run -p bar --features f1")
+ .with_stdout("f3f4")
+ .run();
+
+ p.cargo("run -p bar --features f1,f2")
+ .with_status(101)
+ .with_stderr("[ERROR] Package `foo[..]` does not have the feature `f2`")
+ .run();
+
+ p.cargo("run -p bar --features bar/f1")
+ .with_stdout("f1f3")
+ .run();
+
+ // New behavior.
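+ // With resolver "2", --features applies to the packages selected with -p, so `f1` is
+ // bar's own feature; foo isn't built, so its `features = ["f3"]` declaration doesn't apply.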
+ switch_to_resolver_2(&p);
+ p.cargo("run -p bar --features f1").with_stdout("f1").run();
+
+ p.cargo("run -p bar --features f1,f2")
+ .with_stdout("f1f2")
+ .run();
+
+ p.cargo("run -p bar --features bar/f1")
+ .with_stdout("f1")
+ .run();
+}
+
+#[cargo_test]
+fn feature_default_resolver() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [features]
+ test = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(feature = "test") {
+ println!("feature set");
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check --features testt")
+ .with_status(101)
+ .with_stderr("[ERROR] Package `a[..]` does not have the feature `testt`")
+ .run();
+
+ p.cargo("run --features test")
+ .with_status(0)
+ .with_stdout("feature set")
+ .run();
+
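+ // `a/test` is parsed as `<dependency>/<feature>`, and package `a` has no dependency named `a`.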
+ p.cargo("run --features a/test")
+ .with_status(101)
+ .with_stderr("[ERROR] package `a[..]` does not have a dependency named `a`")
+ .run();
+}
+
+#[cargo_test]
+fn virtual_member_slash() {
+ // member slash feature syntax
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ b = {path="../b", optional=true}
+
+ [features]
+ default = ["f1"]
+ f1 = []
+ f2 = []
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
+ #[cfg(feature = "f1")]
+ compile_error!{"f1 is set"}
+
+ #[cfg(feature = "f2")]
+ compile_error!{"f2 is set"}
+
+ #[cfg(feature = "b")]
+ compile_error!{"b is set"}
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [features]
+ bfeat = []
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
+ #[cfg(feature = "bfeat")]
+ compile_error!{"bfeat is set"}
+ "#,
+ )
+ .build();
+
+ p.cargo("check -p a")
+ .with_status(101)
+ .with_stderr_contains("[..]f1 is set[..]")
+ .with_stderr_does_not_contain("[..]f2 is set[..]")
+ .with_stderr_does_not_contain("[..]b is set[..]")
+ .run();
+
+ p.cargo("check -p a --features a/f1")
+ .with_status(101)
+ .with_stderr_contains("[..]f1 is set[..]")
+ .with_stderr_does_not_contain("[..]f2 is set[..]")
+ .with_stderr_does_not_contain("[..]b is set[..]")
+ .run();
+
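+ // `a/f2` enables f2 in addition to a's default features, so both `f1` (from `default`)
+ // and `f2` fire.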
+ p.cargo("check -p a --features a/f2")
+ .with_status(101)
+ .with_stderr_contains("[..]f1 is set[..]")
+ .with_stderr_contains("[..]f2 is set[..]")
+ .with_stderr_does_not_contain("[..]b is set[..]")
+ .run();
+
+ p.cargo("check -p a --features b/bfeat")
+ .with_status(101)
+ .with_stderr_contains("[..]bfeat is set[..]")
+ .run();
+
+ p.cargo("check -p a --no-default-features").run();
+
+ p.cargo("check -p a --no-default-features --features b")
+ .with_status(101)
+ .with_stderr_contains("[..]b is set[..]")
+ .run();
+}
+
+#[cargo_test]
+fn non_member() {
+ // -p for a non-member
+ Package::new("dep", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "2"
+
+ [dependencies]
+ dep = "1.0"
+
+ [features]
+ f1 = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check -p dep --features f1")
+ .with_status(101)
+ .with_stderr("[ERROR] cannot specify features for packages outside of workspace")
+ .run();
+
+ p.cargo("check -p dep --all-features")
+ .with_status(101)
+ .with_stderr("[ERROR] cannot specify features for packages outside of workspace")
+ .run();
+
+ p.cargo("check -p dep --no-default-features")
+ .with_status(101)
+ .with_stderr("[ERROR] cannot specify features for packages outside of workspace")
+ .run();
+
+ p.cargo("check -p dep")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] [..]
+[DOWNLOADED] [..]
+[CHECKING] dep [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn resolver1_member_features() {
+ // --features member-name/feature-name with resolver="1"
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["member1", "member2"]
+ "#,
+ )
+ .file(
+ "member1/Cargo.toml",
+ r#"
+ [package]
+ name = "member1"
+ version = "0.1.0"
+
+ [features]
+ m1-feature = []
+ "#,
+ )
+ .file(
+ "member1/src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(feature = "m1-feature") {
+ println!("m1-feature set");
+ }
+ }
+ "#,
+ )
+ .file("member2/Cargo.toml", &basic_manifest("member2", "0.1.0"))
+ .file("member2/src/lib.rs", "")
+ .build();
+
+ p.cargo("run -p member1 --features member1/m1-feature")
+ .cwd("member2")
+ .with_stdout("m1-feature set")
+ .run();
+
+ p.cargo("check -p member1 --features member1/m2-feature")
+ .cwd("member2")
+ .with_status(101)
+ .with_stderr("[ERROR] Package `member1[..]` does not have the feature `m2-feature`")
+ .run();
+}
+
+#[cargo_test]
+fn non_member_feature() {
+ // --features for a non-member
+ Package::new("jazz", "1.0.0").publish();
+ Package::new("bar", "1.0.0")
+ .add_dep(Dependency::new("jazz", "1.0").optional(true))
+ .publish();
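+ // Builds a manifest with the given resolver version and either an optional or a
+ // required `bar` dependency.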
+ let make_toml = |resolver, optional| {
+ let mut s = String::new();
+ write!(
+ s,
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "{}"
+
+ [dependencies]
+ "#,
+ resolver
+ )
+ .unwrap();
+ if optional {
+ s.push_str(r#"bar = { version = "1.0", optional = true } "#);
+ } else {
+ s.push_str(r#"bar = "1.0""#)
+ }
+ s.push('\n');
+ s
+ };
+ let p = project()
+ .file("Cargo.toml", &make_toml("1", false))
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch").run();
+ ///////////////////////// V1 non-optional
+ eprintln!("V1 non-optional");
+ p.cargo("check -p bar")
+ .with_stderr(
+ "\
+[CHECKING] bar v1.0.0
+[FINISHED] [..]
+",
+ )
+ .run();
+ // TODO: This should not be allowed (future warning?)
+ p.cargo("check --features bar/jazz")
+ .with_stderr(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] jazz v1.0.0 [..]
+[CHECKING] jazz v1.0.0
+[CHECKING] bar v1.0.0
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ // TODO: This should not be allowed (future warning?)
+ p.cargo("check -p bar --features bar/jazz -v")
+ .with_stderr(
+ "\
+[FRESH] jazz v1.0.0
+[FRESH] bar v1.0.0
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ ///////////////////////// V1 optional
+ eprintln!("V1 optional");
+ p.change_file("Cargo.toml", &make_toml("1", true));
+
+ // This error isn't great, but is probably unlikely to be common in
+ // practice, so I'm not going to put much effort into improving it.
+ p.cargo("check -p bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: package ID specification `bar` did not match any packages
+
+<tab>Did you mean `foo`?
+",
+ )
+ .run();
+
+ p.cargo("check -p bar --features bar -v")
+ .with_stderr(
+ "\
+[FRESH] bar v1.0.0
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // TODO: This should not be allowed (future warning?)
+ p.cargo("check -p bar --features bar/jazz -v")
+ .with_stderr(
+ "\
+[FRESH] jazz v1.0.0
+[FRESH] bar v1.0.0
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ ///////////////////////// V2 non-optional
+ eprintln!("V2 non-optional");
+ p.change_file("Cargo.toml", &make_toml("2", false));
+ // TODO: This should not be allowed (future warning?)
+ p.cargo("check --features bar/jazz -v")
+ .with_stderr(
+ "\
+[FRESH] jazz v1.0.0
+[FRESH] bar v1.0.0
+[FRESH] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("check -p bar -v")
+ .with_stderr(
+ "\
+[FRESH] bar v1.0.0
+[FINISHED] [..]
+",
+ )
+ .run();
+ p.cargo("check -p bar --features bar/jazz")
+ .with_status(101)
+ .with_stderr("error: cannot specify features for packages outside of workspace")
+ .run();
+
+ ///////////////////////// V2 optional
+ eprintln!("V2 optional");
+ p.change_file("Cargo.toml", &make_toml("2", true));
+ p.cargo("check -p bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: package ID specification `bar` did not match any packages
+
+<tab>Did you mean `foo`?
+",
+ )
+ .run();
+ // New --features behavior does not look at cwd.
+ p.cargo("check -p bar --features bar")
+ .with_status(101)
+ .with_stderr("error: cannot specify features for packages outside of workspace")
+ .run();
+ p.cargo("check -p bar --features bar/jazz")
+ .with_status(101)
+ .with_stderr("error: cannot specify features for packages outside of workspace")
+ .run();
+ p.cargo("check -p bar --features foo/bar")
+ .with_status(101)
+ .with_stderr("error: cannot specify features for packages outside of workspace")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/patch.rs b/src/tools/cargo/tests/testsuite/patch.rs
new file mode 100644
index 000000000..681c02416
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/patch.rs
@@ -0,0 +1,2645 @@
+//! Tests for `[patch]` table source replacement.
+
+use cargo_test_support::git;
+use cargo_test_support::paths;
+use cargo_test_support::registry::{self, Package};
+use cargo_test_support::{basic_manifest, project};
+use std::fs;
+
+#[cargo_test]
+fn replace() {
+ Package::new("bar", "0.1.0").publish();
+ Package::new("baz", "0.1.0")
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn baz() { bar::bar(); }",
+ )
+ .dep("bar", "0.1.0")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ baz = "0.1.0"
+
+ [patch.crates-io]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate bar;
+ extern crate baz;
+ pub fn bar() {
+ bar::bar();
+ baz::baz();
+ }
+ ",
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.1.0 ([..])
+[CHECKING] bar v0.1.0 ([CWD]/bar)
+[CHECKING] baz v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn from_config() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [patch.crates-io]
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.1 ([..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn from_config_relative() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file(
+ "../.cargo/config.toml",
+ r#"
+ [patch.crates-io]
+ bar = { path = 'foo/bar' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.1 ([..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn from_config_precedence() {
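+ // When both the manifest and the config patch the same package, the config entry
+ // takes precedence here; the manifest's `no-such-path` would otherwise fail to load.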
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [patch.crates-io]
+ bar = { path = 'no-such-path' }
+ "#,
+ )
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [patch.crates-io]
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.1 ([..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn nonexistent() {
+ Package::new("baz", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [patch.crates-io]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.0 ([CWD]/bar)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn patch_git() {
+ let bar = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = {{ git = '{}' }}
+
+ [patch.'{0}']
+ bar = {{ path = "bar" }}
+ "#,
+ bar.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `file://[..]`
+[CHECKING] bar v0.1.0 ([CWD]/bar)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn patch_to_git() {
+ let bar = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+
+ [patch.crates-io]
+ bar = {{ git = '{}' }}
+ "#,
+ bar.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `file://[..]`
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.0 (file://[..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn unused() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [patch.crates-io]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0"))
+ .file("bar/src/lib.rs", "not rust code")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph.
+Check that [..]
+with the [..]
+what is [..]
+version. [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 [..]
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph.
+Check that [..]
+with the [..]
+what is [..]
+version. [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // unused patch should be in the lock file
+ let lock = p.read_lockfile();
+ let toml: toml::Table = toml::from_str(&lock).unwrap();
+ assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1);
+ assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("bar"));
+ assert_eq!(
+ toml["patch"]["unused"][0]["version"].as_str(),
+ Some("0.2.0")
+ );
+}
+
+#[cargo_test]
+fn unused_with_mismatch_source_being_patched() {
+ registry::alt_init();
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [patch.alternative]
+ bar = { path = "bar" }
+
+ [patch.crates-io]
+ bar = { path = "baz" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0"))
+ .file("bar/src/lib.rs", "not rust code")
+ .file("baz/Cargo.toml", &basic_manifest("bar", "0.3.0"))
+ .file("baz/src/lib.rs", "not rust code")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph.
+Perhaps you misspelled the source URL being patched.
+Possible URLs for `[patch.<URL>]`:
+ crates-io
+[WARNING] Patch `bar v0.3.0 ([CWD]/baz)` was not used in the crate graph.
+Check that [..]
+with the [..]
+what is [..]
+version. [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 [..]
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn prefer_patch_version() {
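+ // The registry has a newer 0.1.2, but the [patch] path (0.1.1) is still preferred.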
+ Package::new("bar", "0.1.2").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [patch.crates-io]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.1 ([CWD]/bar)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // There should be no `patch.unused` entries in the lock file.
+ let lock = p.read_lockfile();
+ let toml: toml::Table = toml::from_str(&lock).unwrap();
+ assert!(toml.get("patch").is_none());
+}
+
+#[cargo_test]
+fn unused_from_config() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [patch.crates-io]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0"))
+ .file("bar/src/lib.rs", "not rust code")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph.
+Check that [..]
+with the [..]
+what is [..]
+version. [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 [..]
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph.
+Check that [..]
+with the [..]
+what is [..]
+version. [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // unused patch should be in the lock file
+ let lock = p.read_lockfile();
+ let toml: toml::Table = toml::from_str(&lock).unwrap();
+ assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1);
+ assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("bar"));
+ assert_eq!(
+ toml["patch"]["unused"][0]["version"].as_str(),
+ Some("0.2.0")
+ );
+}
+
+#[cargo_test]
+fn unused_git() {
+ Package::new("bar", "0.1.0").publish();
+
+ let foo = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.2.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+
+ [patch.crates-io]
+ bar = {{ git = '{}' }}
+ "#,
+ foo.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `file://[..]`
+[UPDATING] `dummy-registry` index
+[WARNING] Patch `bar v0.2.0 ([..])` was not used in the crate graph.
+Check that [..]
+with the [..]
+what is [..]
+version. [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 [..]
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] Patch `bar v0.2.0 ([..])` was not used in the crate graph.
+Check that [..]
+with the [..]
+what is [..]
+version. [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn add_patch() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 [..]
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [patch.crates-io]
+ bar = { path = 'bar' }
+ "#,
+ );
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0 ([CWD]/bar)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn add_patch_from_config() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 [..]
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+
+ p.change_file(
+ ".cargo/config.toml",
+ r#"
+ [patch.crates-io]
+ bar = { path = 'bar' }
+ "#,
+ );
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0 ([CWD]/bar)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn add_ignored_patch() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 [..]
+[CHECKING] bar v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [patch.crates-io]
+ bar = { path = 'bar' }
+ "#,
+ );
+
+ p.cargo("check")
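+ // The existing lock file still pins bar 0.1.0 from the registry, so the 0.1.1 patch
+ // is reported as unused until `cargo update` refreshes the lock.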
+ .with_stderr(
+ "\
+[WARNING] Patch `bar v0.1.1 ([CWD]/bar)` was not used in the crate graph.
+Check that [..]
+with the [..]
+what is [..]
+version. [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] Patch `bar v0.1.1 ([CWD]/bar)` was not used in the crate graph.
+Check that [..]
+with the [..]
+what is [..]
+version. [..]
+[FINISHED] [..]",
+ )
+ .run();
+
+ p.cargo("update").run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.1 ([CWD]/bar)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn add_patch_with_features() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [patch.crates-io]
+ bar = { path = 'bar', features = ["some_feature"] }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] patch for `bar` uses the features mechanism. \
+default-features and features will not take effect because the patch dependency does not support this mechanism
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.0 ([CWD]/bar)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] patch for `bar` uses the features mechanism. \
+default-features and features will not take effect because the patch dependency does not support this mechanism
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn add_patch_with_setting_default_features() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [patch.crates-io]
+ bar = { path = 'bar', default-features = false, features = ["none_default_feature"] }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] patch for `bar` uses the features mechanism. \
+default-features and features will not take effect because the patch dependency does not support this mechanism
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.0 ([CWD]/bar)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] patch for `bar` uses the features mechanism. \
+default-features and features will not take effect because the patch dependency does not support this mechanism
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_warn_ws_patch() {
+ Package::new("c", "0.1.0").publish();
+
+ // Don't issue an unused-patch warning when the patch goes unused only
+ // because part of the workspace is being built.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b", "c"]
+
+ [patch.crates-io]
+ c = { path = "c" }
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+ [dependencies]
+ c = "0.1.0"
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file("c/Cargo.toml", &basic_manifest("c", "0.1.0"))
+ .file("c/src/lib.rs", "")
+ .build();
+
+ p.cargo("check -p a")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[CHECKING] a [..]
+[FINISHED] [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn new_minor() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [patch.crates-io]
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.1 [..]
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn transitive_new_minor() {
+ Package::new("baz", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { path = 'bar' }
+
+ [patch.crates-io]
+ baz = { path = 'baz' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ baz = '0.1.0'
+ "#,
+ )
+ .file("bar/src/lib.rs", r#""#)
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.1"))
+ .file("baz/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] baz v0.1.1 [..]
+[CHECKING] bar v0.1.0 [..]
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn new_major() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.2.0"
+
+ [patch.crates-io]
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.2.0 [..]
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ Package::new("bar", "0.2.0").publish();
+ p.cargo("update").run();
+ p.cargo("check")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.2.0"
+ "#,
+ );
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.2.0 [..]
+[CHECKING] bar v0.2.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn transitive_new_major() {
+ Package::new("baz", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { path = 'bar' }
+
+ [patch.crates-io]
+ baz = { path = 'baz' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ baz = '0.2.0'
+ "#,
+ )
+ .file("bar/src/lib.rs", r#""#)
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.2.0"))
+ .file("baz/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] baz v0.2.0 [..]
+[CHECKING] bar v0.1.0 [..]
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn shared_by_transitive() {
+ Package::new("baz", "0.1.1").publish();
+
+ let baz = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("baz", "0.1.2"))
+ .file("src/lib.rs", "")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+                version = "0.1.0"
+
+ [dependencies]
+ bar = {{ path = "bar" }}
+ baz = "0.1"
+
+ [patch.crates-io]
+ baz = {{ git = "{}", version = "0.1" }}
+ "#,
+ baz.url(),
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ baz = "0.1.1"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] git repository `file://[..]`
+[UPDATING] `dummy-registry` index
+[CHECKING] baz v0.1.2 [..]
+[CHECKING] bar v0.1.0 [..]
+[CHECKING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn remove_patch() {
+ Package::new("foo", "0.1.0").publish();
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+
+ [patch.crates-io]
+ foo = { path = 'foo' }
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", r#""#)
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", r#""#)
+ .build();
+
+ // Generate a lock file where `foo` is unused
+ p.cargo("check").run();
+ let lock_file1 = p.read_lockfile();
+
+    // Remove `foo` and generate a new lock file from the old one
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+
+ [patch.crates-io]
+ bar = { path = 'bar' }
+ "#,
+ );
+ p.cargo("check").run();
+ let lock_file2 = p.read_lockfile();
+
+ // Remove the lock file and build from scratch
+ fs::remove_file(p.root().join("Cargo.lock")).unwrap();
+ p.cargo("check").run();
+ let lock_file3 = p.read_lockfile();
+
+ assert!(lock_file1.contains("foo"));
+ assert_eq!(lock_file2, lock_file3);
+ assert_ne!(lock_file1, lock_file2);
+}
+
+#[cargo_test]
+fn non_crates_io() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [patch.some-other-source]
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ [patch] entry `some-other-source` should be a URL or registry name
+
+Caused by:
+ invalid url `some-other-source`: relative URL without a base
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn replace_with_crates_io() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [patch.crates-io]
+ bar = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+error: failed to resolve patches for `[..]`
+
+Caused by:
+ patch for `bar` in `[..]` points to the same source, but patches must point \
+ to different sources
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn patch_in_virtual() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo"]
+
+ [patch.crates-io]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", r#""#)
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file("foo/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn patch_depends_on_another_patch() {
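+    // The registry copies of `bar` and `baz` contain invalid Rust, so the
+    // build below can only succeed if both path patches are actually used.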
+ Package::new("bar", "0.1.0")
+ .file("src/lib.rs", "broken code")
+ .publish();
+
+ Package::new("baz", "0.1.0")
+ .dep("bar", "0.1")
+ .file("src/lib.rs", "broken code")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+ baz = "0.1"
+
+ [patch.crates-io]
+ bar = { path = "bar" }
+ baz = { path = "baz" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", r#""#)
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file("baz/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check").run();
+
+ // Nothing should be rebuilt, no registry should be updated.
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn replace_prerelease() {
+ Package::new("baz", "1.1.0-pre.1").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+
+ [patch.crates-io]
+ baz = { path = "./baz" }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ baz = "1.1.0-pre.1"
+ "#,
+ )
+ .file(
+ "bar/src/main.rs",
+ "extern crate baz; fn main() { baz::baz() }",
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "1.1.0-pre.1"
+ authors = []
+ [workspace]
+ "#,
+ )
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn patch_older() {
+ Package::new("baz", "1.0.2").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = 'bar' }
+ baz = "=1.0.1"
+
+ [patch.crates-io]
+ baz = { path = "./baz" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ baz = "1.0.0"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "1.0.1"
+ authors = []
+ "#,
+ )
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[CHECKING] baz v1.0.1 [..]
+[CHECKING] bar v0.5.0 [..]
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cycle() {
+ Package::new("a", "1.0.0").publish();
+ Package::new("b", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+
+ [patch.crates-io]
+ a = {path="a"}
+ b = {path="b"}
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "1.0.0"
+
+ [dependencies]
+ b = "1.0"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "1.0.0"
+
+ [dependencies]
+ a = "1.0"
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] cyclic package dependency: [..]
+package `[..]`
+ ... which satisfies dependency `[..]` of package `[..]`
+ ... which satisfies dependency `[..]` of package `[..]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn multipatch() {
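+    // Patch two major versions of `a` at once. The `[patch]` table keys
+    // (`b1`, `b2`) are arbitrary; `package = "a"` selects the real package
+    // being patched.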
+ Package::new("a", "1.0.0").publish();
+ Package::new("a", "2.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ a1 = { version = "1", package = "a" }
+ a2 = { version = "2", package = "a" }
+
+ [patch.crates-io]
+ b1 = { path = "a1", package = "a" }
+ b2 = { path = "a2", package = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() { a1::f1(); a2::f2(); }")
+ .file(
+ "a1/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "1.0.0"
+ "#,
+ )
+ .file("a1/src/lib.rs", "pub fn f1() {}")
+ .file(
+ "a2/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "2.0.0"
+ "#,
+ )
+ .file("a2/src/lib.rs", "pub fn f2() {}")
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn patch_same_version() {
+ let bar = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ cargo_test_support::registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ [dependencies]
+ bar = "0.1"
+ [patch.crates-io]
+ bar = {{ path = "bar" }}
+ bar2 = {{ git = '{}', package = 'bar' }}
+ "#,
+ bar.url(),
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+error: cannot have two `[patch]` entries which both resolve to `bar v0.1.0`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn two_semver_compatible() {
+ let bar = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("src/lib.rs", "")
+ .build();
+
+ cargo_test_support::registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ [dependencies]
+ bar = "0.1"
+ [patch.crates-io]
+ bar = {{ path = "bar" }}
+ bar2 = {{ git = '{}', package = 'bar' }}
+ "#,
+ bar.url(),
+ ),
+ )
+ .file("src/lib.rs", "pub fn foo() { bar::foo() }")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.2"
+ "#,
+ )
+ .file("bar/src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ // assert the build succeeds and doesn't panic anywhere, and then afterwards
+ // assert that the build succeeds again without updating anything or
+ // building anything else.
+ p.cargo("check").run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+warning: Patch `bar v0.1.1 [..]` was not used in the crate graph.
+Perhaps you misspelled the source URL being patched.
+Possible URLs for `[patch.<URL>]`:
+ [CWD]/bar
+[FINISHED] [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn multipatch_select_big() {
+ let bar = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ cargo_test_support::registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ [dependencies]
+ bar = "*"
+ [patch.crates-io]
+ bar = {{ path = "bar" }}
+ bar2 = {{ git = '{}', package = 'bar' }}
+ "#,
+ bar.url(),
+ ),
+ )
+ .file("src/lib.rs", "pub fn foo() { bar::foo() }")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.2.0"
+ "#,
+ )
+ .file("bar/src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ // assert the build succeeds, which is only possible if 0.2.0 is selected
+ // since 0.1.0 is missing the function we need. Afterwards assert that the
+ // build succeeds again without updating anything or building anything else.
+ p.cargo("check").run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+warning: Patch `bar v0.1.0 [..]` was not used in the crate graph.
+Perhaps you misspelled the source URL being patched.
+Possible URLs for `[patch.<URL>]`:
+ [CWD]/bar
+[FINISHED] [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn canonicalize_a_bunch() {
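+    // Git URLs that differ only by a trailing slash must canonicalize to the
+    // same source, so the single `[patch]` entry applies to both the direct
+    // and the transitive `base` dependency.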
+ let base = git::repo(&paths::root().join("base"))
+ .file("Cargo.toml", &basic_manifest("base", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ let intermediate = git::repo(&paths::root().join("intermediate"))
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "intermediate"
+ version = "0.1.0"
+
+ [dependencies]
+ # Note the lack of trailing slash
+ base = {{ git = '{}' }}
+ "#,
+ base.url(),
+ ),
+ )
+ .file("src/lib.rs", "pub fn f() { base::f() }")
+ .build();
+
+ let newbase = git::repo(&paths::root().join("newbase"))
+ .file("Cargo.toml", &basic_manifest("base", "0.1.0"))
+ .file("src/lib.rs", "pub fn f() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ # Note the trailing slashes
+ base = {{ git = '{base}/' }}
+ intermediate = {{ git = '{intermediate}/' }}
+
+ [patch.'{base}'] # Note the lack of trailing slash
+ base = {{ git = '{newbase}' }}
+ "#,
+ base = base.url(),
+ intermediate = intermediate.url(),
+ newbase = newbase.url(),
+ ),
+ )
+ .file("src/lib.rs", "pub fn a() { base::f(); intermediate::f() }")
+ .build();
+
+ // Once to make sure it actually works
+ p.cargo("check").run();
+
+ // Then a few more times for good measure to ensure no weird warnings about
+ // `[patch]` are printed.
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn update_unused_new_version() {
+ // If there is an unused patch entry, and then you update the patch,
+ // make sure `cargo update` will be able to fix the lock file.
+ Package::new("bar", "0.1.5").publish();
+
+ // Start with a lock file to 0.1.5, and an "unused" patch because the
+ // version is too old.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = "0.1.5"
+
+ [patch.crates-io]
+ bar = { path = "../bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Patch is too old.
+ let bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.4"))
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr_contains("[WARNING] Patch `bar v0.1.4 [..] was not used in the crate graph.")
+ .run();
+ // unused patch should be in the lock file
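+    // Cargo.lock records it roughly as:
+    //
+    //     [[patch.unused]]
+    //     name = "bar"
+    //     version = "0.1.4"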
+ let lock = p.read_lockfile();
+ let toml: toml::Table = toml::from_str(&lock).unwrap();
+ assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1);
+ assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("bar"));
+ assert_eq!(
+ toml["patch"]["unused"][0]["version"].as_str(),
+ Some("0.1.4")
+ );
+
+ // Oh, OK, let's update to the latest version.
+ bar.change_file("Cargo.toml", &basic_manifest("bar", "0.1.6"));
+
+ // Create a backup so we can test it with different options.
+ fs::copy(p.root().join("Cargo.lock"), p.root().join("Cargo.lock.bak")).unwrap();
+
+ // Try to build again, this should automatically update Cargo.lock.
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.6 ([..]/bar)
+[CHECKING] foo v0.0.1 ([..]/foo)
+[FINISHED] [..]
+",
+ )
+ .run();
+ // This should not update any registry.
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+ assert!(!p.read_lockfile().contains("unused"));
+
+ // Restore the lock file, and see if `update` will work, too.
+ fs::copy(p.root().join("Cargo.lock.bak"), p.root().join("Cargo.lock")).unwrap();
+
+ // Try `update -p`.
+ p.cargo("update -p bar")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[ADDING] bar v0.1.6 ([..]/bar)
+[REMOVING] bar v0.1.5
+",
+ )
+ .run();
+
+ // Try with bare `cargo update`.
+ fs::copy(p.root().join("Cargo.lock.bak"), p.root().join("Cargo.lock")).unwrap();
+ p.cargo("update")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[ADDING] bar v0.1.6 ([..]/bar)
+[REMOVING] bar v0.1.5
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn too_many_matches() {
+    // The patch location has multiple versions that match.
+ registry::alt_init();
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.1.0").alternative(true).publish();
+ Package::new("bar", "0.1.1").alternative(true).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+
+ [patch.crates-io]
+ bar = { version = "0.1", registry = "alternative" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+    // Both 0.1.0 and 0.1.1 match, so resolving the patch fails with an error
+    // telling the user to narrow the requirement to a single version.
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[ERROR] failed to resolve patches for `https://github.com/rust-lang/crates.io-index`
+
+Caused by:
+ patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve
+
+Caused by:
+ patch for `bar` in `registry `alternative`` resolved to more than one candidate
+ Found versions: 0.1.0, 0.1.1
+ Update the patch definition to select only one package.
+ For example, add an `=` version requirement to the patch definition, such as `version = \"=0.1.1\"`.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_matches() {
+ // A patch to a location that does not contain the named package.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+
+ [patch.crates-io]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("abc", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to resolve patches for `https://github.com/rust-lang/crates.io-index`
+
+Caused by:
+ patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve
+
+Caused by:
+ The patch location `[..]/foo/bar` does not appear to contain any packages matching the name `bar`.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn mismatched_version() {
+ // A patch to a location that has an old version.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1.1"
+
+ [patch.crates-io]
+ bar = { path = "bar", version = "0.1.1" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to resolve patches for `https://github.com/rust-lang/crates.io-index`
+
+Caused by:
+ patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve
+
+Caused by:
+ The patch location `[..]/foo/bar` contains a `bar` package with version `0.1.0`, \
+ but the patch definition requires `^0.1.1`.
+ Check that the version in the patch location is what you expect, \
+ and update the patch definition to match.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn patch_walks_backwards() {
+ // Starting with a locked patch, change the patch so it points to an older version.
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+
+ [patch.crates-io]
+ bar = {path="bar"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.1 ([..]/foo/bar)
+[CHECKING] foo v0.1.0 ([..]/foo)
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // Somehow the user changes the version backwards.
+ p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"));
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.0 ([..]/foo/bar)
+[CHECKING] foo v0.1.0 ([..]/foo)
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn patch_walks_backwards_restricted() {
+ // This is the same as `patch_walks_backwards`, but the patch contains a
+    // `version` qualifier. This is unusual; it just checks a strange edge case.
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+
+ [patch.crates-io]
+ bar = {path="bar", version="0.1.1"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.1 ([..]/foo/bar)
+[CHECKING] foo v0.1.0 ([..]/foo)
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // Somehow the user changes the version backwards.
+ p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"));
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to resolve patches for `https://github.com/rust-lang/crates.io-index`
+
+Caused by:
+ patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve
+
+Caused by:
+ The patch location `[..]/foo/bar` contains a `bar` package with version `0.1.0`, but the patch definition requires `^0.1.1`.
+ Check that the version in the patch location is what you expect, and update the patch definition to match.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn patched_dep_new_version() {
+ // What happens when a patch is locked, and then one of the patched
+ // dependencies needs to be updated. In this case, the baz requirement
+ // gets updated from 0.1.0 to 0.1.1.
+ Package::new("bar", "0.1.0").dep("baz", "0.1.0").publish();
+ Package::new("baz", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+
+ [patch.crates-io]
+ bar = {path="bar"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ baz = "0.1"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ // Lock everything.
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.1.0 [..]
+[CHECKING] baz v0.1.0
+[CHECKING] bar v0.1.0 ([..]/foo/bar)
+[CHECKING] foo v0.1.0 ([..]/foo)
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ Package::new("baz", "0.1.1").publish();
+
+ // Just the presence of the new version should not have changed anything.
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+
+ // Modify the patch so it requires the new version.
+ p.change_file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ baz = "0.1.1"
+ "#,
+ );
+
+ // Should unlock and update cleanly.
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.1.1 (registry `dummy-registry`)
+[CHECKING] baz v0.1.1
+[CHECKING] bar v0.1.0 ([..]/foo/bar)
+[CHECKING] foo v0.1.0 ([..]/foo)
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn patch_update_doesnt_update_other_sources() {
+    // A very extreme edge case: make sure a patch update doesn't update other
+    // sources.
+ registry::alt_init();
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.1.0").alternative(true).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+ bar_alt = { version = "0.1", registry = "alternative", package = "bar" }
+
+ [patch.crates-io]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr_unordered(
+ "\
+[UPDATING] `dummy-registry` index
+[UPDATING] `alternative` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 (registry `alternative`)
+[CHECKING] bar v0.1.0 (registry `alternative`)
+[CHECKING] bar v0.1.0 ([..]/foo/bar)
+[CHECKING] foo v0.1.0 ([..]/foo)
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // Publish new versions in both sources.
+ Package::new("bar", "0.1.1").publish();
+ Package::new("bar", "0.1.1").alternative(true).publish();
+
+ // Since it is locked, nothing should change.
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+
+ // Require new version on crates.io.
+ p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"));
+
+ // This should not update bar_alt.
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.1 ([..]/foo/bar)
+[CHECKING] foo v0.1.0 ([..]/foo)
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn can_update_with_alt_reg() {
+ // A patch to an alt reg can update.
+ registry::alt_init();
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.1.0").alternative(true).publish();
+ Package::new("bar", "0.1.1").alternative(true).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+
+ [patch.crates-io]
+ bar = { version = "=0.1.1", registry = "alternative" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.1 (registry `alternative`)
+[CHECKING] bar v0.1.1 (registry `alternative`)
+[CHECKING] foo v0.1.0 ([..]/foo)
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ Package::new("bar", "0.1.2").alternative(true).publish();
+
+ // Should remain locked.
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+
+ // This does nothing, due to `=` requirement.
+ p.cargo("update -p bar")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[UPDATING] `dummy-registry` index
+",
+ )
+ .run();
+
+ // Bump to 0.1.2.
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+
+ [patch.crates-io]
+ bar = { version = "=0.1.2", registry = "alternative" }
+ "#,
+ );
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.2 (registry `alternative`)
+[CHECKING] bar v0.1.2 (registry `alternative`)
+[CHECKING] foo v0.1.0 ([..]/foo)
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn old_git_patch() {
+    // An old lockfile used together with an explicit branch = "master" patch in Cargo.toml.
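+    // The locked git source below omits the `?branch=master` query, the way
+    // older versions of Cargo could write it; the patch should still match it
+    // and keep the locked revision instead of re-resolving.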
+ Package::new("bar", "1.0.0").publish();
+ let (bar, bar_repo) = git::new_repo("bar", |p| {
+ p.file("Cargo.toml", &basic_manifest("bar", "1.0.0"))
+ .file("src/lib.rs", "")
+ });
+
+ let bar_oid = bar_repo.head().unwrap().target().unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+
+ [patch.crates-io]
+ bar = {{ git = "{}", branch = "master" }}
+ "#,
+ bar.url()
+ ),
+ )
+ .file(
+ "Cargo.lock",
+ &format!(
+ r#"
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "bar"
+version = "1.0.0"
+source = "git+{}#{}"
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+dependencies = [
+ "bar",
+]
+ "#,
+ bar.url(),
+ bar_oid
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ bar.change_file("Cargo.toml", &basic_manifest("bar", "2.0.0"));
+ git::add(&bar_repo);
+ git::commit(&bar_repo);
+
+ // This *should* keep the old lock.
+ p.cargo("tree")
+ // .env("CARGO_LOG", "trace")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+",
+ )
+ // .with_status(1)
+ .with_stdout(format!(
+ "\
+foo v0.1.0 [..]
+└── bar v1.0.0 (file:///[..]branch=master#{})
+",
+ &bar_oid.to_string()[..8]
+ ))
+ .run();
+}
+
+// From https://github.com/rust-lang/cargo/issues/7463
+#[cargo_test]
+fn patch_eq_conflict_panic() {
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.1.1").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "=0.1.0"
+
+ [dev-dependencies]
+ bar = "=0.1.1"
+
+ [patch.crates-io]
+ bar = {path="bar"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile")
+ .with_status(101)
+ .with_stderr(
+ r#"[UPDATING] `dummy-registry` index
+[ERROR] failed to select a version for `bar`.
+ ... required by package `foo v0.1.0 ([..])`
+versions that meet the requirements `=0.1.1` are: 0.1.1
+
+all possible versions conflict with previously selected packages.
+
+ previously selected package `bar v0.1.0`
+ ... which satisfies dependency `bar = "=0.1.0"` of package `foo v0.1.0 ([..])`
+
+failed to select a version for `bar` which could resolve this conflict
+"#,
+ )
+ .run();
+}
+
+// From https://github.com/rust-lang/cargo/issues/11336
+#[cargo_test]
+fn mismatched_version2() {
+ Package::new("qux", "0.1.0-beta.1").publish();
+ Package::new("qux", "0.1.0-beta.2").publish();
+ Package::new("bar", "0.1.0")
+ .dep("qux", "=0.1.0-beta.1")
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1.0"
+ qux = "0.1.0-beta.2"
+
+ [patch.crates-io]
+ qux = { path = "qux" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "qux/Cargo.toml",
+ r#"
+ [package]
+ name = "qux"
+ version = "0.1.0-beta.1"
+ "#,
+ )
+ .file("qux/src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile")
+ .with_status(101)
+ .with_stderr(
+ r#"[UPDATING] `dummy-registry` index
+[ERROR] failed to select a version for `qux`.
+ ... required by package `bar v0.1.0`
+ ... which satisfies dependency `bar = "^0.1.0"` of package `foo v0.1.0 ([..])`
+versions that meet the requirements `=0.1.0-beta.1` are: 0.1.0-beta.1
+
+all possible versions conflict with previously selected packages.
+
+ previously selected package `qux v0.1.0-beta.2`
+ ... which satisfies dependency `qux = "^0.1.0-beta.2"` of package `foo v0.1.0 ([..])`
+
+failed to select a version for `qux` which could resolve this conflict"#,
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/path.rs b/src/tools/cargo/tests/testsuite/path.rs
new file mode 100644
index 000000000..ebbb72f9a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/path.rs
@@ -0,0 +1,1139 @@
+//! Tests for `path` dependencies.
+
+use cargo_test_support::paths::{self, CargoPathExt};
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_lib_manifest, basic_manifest, main_file, project};
+use cargo_test_support::{sleep_ms, t};
+use std::fs;
+
+#[cargo_test]
+// I have no idea why this is failing spuriously on Windows;
+// for more info, see #3466.
+#[cfg(not(windows))]
+fn cargo_compile_with_nested_deps_shorthand() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+
+ version = "0.5.0"
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.baz]
+
+ version = "0.5.0"
+ path = "baz"
+
+ [lib]
+
+ name = "bar"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
+ extern crate baz;
+
+ pub fn gimme() -> String {
+ baz::gimme()
+ }
+ "#,
+ )
+ .file("bar/baz/Cargo.toml", &basic_lib_manifest("baz"))
+ .file(
+ "bar/baz/src/baz.rs",
+ r#"
+ pub fn gimme() -> String {
+ "test passed".to_string()
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "[COMPILING] baz v0.5.0 ([CWD]/bar/baz)\n\
+ [COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+ [COMPILING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+
+ println!("cleaning");
+ p.cargo("clean -v").with_stdout("").run();
+ println!("building baz");
+ p.cargo("build -p baz")
+ .with_stderr(
+ "[COMPILING] baz v0.5.0 ([CWD]/bar/baz)\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+ println!("building foo");
+ p.cargo("build -p foo")
+ .with_stderr(
+ "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+ [COMPILING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_root_dev_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dev-dependencies.bar]
+
+ version = "0.5.0"
+ path = "../bar"
+
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .build();
+ let _p2 = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn gimme() -> &'static str {
+ "zoidberg"
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]can't find crate for `bar`")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_root_dev_deps_with_testing() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dev-dependencies.bar]
+
+ version = "0.5.0"
+ path = "../bar"
+
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .build();
+ let _p2 = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn gimme() -> &'static str {
+ "zoidberg"
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] [..] v0.5.0 ([..])
+[COMPILING] [..] v0.5.0 ([..])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("running 0 tests")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_compile_with_transitive_dev_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+
+ version = "0.5.0"
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dev-dependencies.baz]
+
+ git = "git://example.com/path/to/nowhere"
+
+ [lib]
+
+ name = "bar"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
+ pub fn gimme() -> &'static str {
+ "zoidberg"
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_stderr(
+ "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+ [COMPILING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+ [..]\n",
+ )
+ .run();
+
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("zoidberg\n").run();
+}
+
+#[cargo_test]
+fn no_rebuild_dependency() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }")
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/bar.rs", "pub fn bar() {}")
+ .build();
+    // First time around we should check both foo and bar
+ p.cargo("check")
+ .with_stderr(
+ "[CHECKING] bar v0.5.0 ([CWD]/bar)\n\
+ [CHECKING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+
+ sleep_ms(1000);
+ p.change_file(
+ "src/main.rs",
+ r#"
+ extern crate bar;
+ fn main() { bar::bar(); }
+ "#,
+ );
+    // Don't re-check bar, but do re-check foo.
+ p.cargo("check")
+ .with_stderr(
+ "[CHECKING] foo v0.5.0 ([..])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn deep_dependencies_trigger_rebuild() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [lib]
+ name = "bar"
+ [dependencies.baz]
+ path = "../baz"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ "extern crate baz; pub fn bar() { baz::baz() }",
+ )
+ .file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+ .file("baz/src/baz.rs", "pub fn baz() {}")
+ .build();
+ p.cargo("check")
+ .with_stderr(
+ "[CHECKING] baz v0.5.0 ([CWD]/baz)\n\
+ [CHECKING] bar v0.5.0 ([CWD]/bar)\n\
+ [CHECKING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+ p.cargo("check").with_stdout("").run();
+
+ // Make sure an update to baz triggers a rebuild of bar
+ //
+ // We base recompilation off mtime, so sleep for at least a second to ensure
+ // that this write will change the mtime.
+ sleep_ms(1000);
+ p.change_file("baz/src/baz.rs", r#"pub fn baz() { println!("hello!"); }"#);
+ sleep_ms(1000);
+ p.cargo("check")
+ .with_stderr(
+ "[CHECKING] baz v0.5.0 ([CWD]/baz)\n\
+ [CHECKING] bar v0.5.0 ([CWD]/bar)\n\
+ [CHECKING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+
+ // Make sure an update to bar doesn't trigger baz
+ sleep_ms(1000);
+ p.change_file(
+ "bar/src/bar.rs",
+ r#"
+ extern crate baz;
+ pub fn bar() { println!("hello!"); baz::baz(); }
+ "#,
+ );
+ sleep_ms(1000);
+ p.cargo("check")
+ .with_stderr(
+ "[CHECKING] bar v0.5.0 ([CWD]/bar)\n\
+ [CHECKING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_rebuild_two_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ path = "bar"
+ [dependencies.baz]
+ path = "baz"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [lib]
+ name = "bar"
+ [dependencies.baz]
+ path = "../baz"
+ "#,
+ )
+ .file("bar/src/bar.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+ .file("baz/src/baz.rs", "pub fn baz() {}")
+ .build();
+ p.cargo("build")
+ .with_stderr(
+ "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\
+ [COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+ [COMPILING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+ assert!(p.bin("foo").is_file());
+ p.cargo("build").with_stdout("").run();
+ assert!(p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn nested_deps_recompile() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+
+ version = "0.5.0"
+ path = "src/bar"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file("src/bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("src/bar/src/bar.rs", "pub fn gimme() -> i32 { 92 }")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "[CHECKING] bar v0.5.0 ([CWD]/src/bar)\n\
+ [CHECKING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+ sleep_ms(1000);
+
+ p.change_file("src/main.rs", r#"fn main() {}"#);
+
+ // This shouldn't recompile `bar`
+ p.cargo("check")
+ .with_stderr(
+ "[CHECKING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn error_message_for_missing_manifest() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+
+ path = "src/bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/bar/not-a-manifest", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to get `bar` as a dependency of package `foo v0.5.0 [..]`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update [CWD]/src/bar
+
+Caused by:
+ failed to read `[..]bar/Cargo.toml`
+
+Caused by:
+ [..] (os error [..])
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn override_relative() {
+ let bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ fs::create_dir(&paths::root().join(".cargo")).unwrap();
+ fs::write(&paths::root().join(".cargo/config"), r#"paths = ["bar"]"#).unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ path = '{}'
+ "#,
+ bar.root().display()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check -v").run();
+}
+
+#[cargo_test]
+fn override_self() {
+ let bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ let p = project();
+ let root = p.root();
+ let p = p
+ .file(".cargo/config", &format!("paths = ['{}']", root.display()))
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+ path = '{}'
+
+ "#,
+ bar.root().display()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn override_path_dep() {
+ let bar = project()
+ .at("bar")
+ .file(
+ "p1/Cargo.toml",
+ r#"
+ [package]
+ name = "p1"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies.p2]
+ path = "../p2"
+ "#,
+ )
+ .file("p1/src/lib.rs", "")
+ .file("p2/Cargo.toml", &basic_manifest("p2", "0.5.0"))
+ .file("p2/src/lib.rs", "")
+ .build();
+
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ "paths = ['{}', '{}']",
+ bar.root().join("p1").display(),
+ bar.root().join("p2").display()
+ ),
+ )
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.p2]
+ path = '{}'
+
+ "#,
+ bar.root().join("p2").display()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check -v").run();
+}
+
+#[cargo_test]
+fn path_dep_build_cmd() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+
+ version = "0.5.0"
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ build = "build.rs"
+
+ [lib]
+ name = "bar"
+ path = "src/bar.rs"
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"
+ use std::fs;
+ fn main() {
+ fs::copy("src/bar.rs.in", "src/bar.rs").unwrap();
+ }
+ "#,
+ )
+ .file("bar/src/bar.rs.in", "pub fn gimme() -> i32 { 0 }")
+ .build();
+ p.root().join("bar").move_into_the_past();
+
+ p.cargo("build")
+ .with_stderr(
+ "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+ [COMPILING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+ [..]\n",
+ )
+ .run();
+
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("0\n").run();
+
+ // Touching bar.rs.in should cause the `build` command to run again.
+ p.change_file("bar/src/bar.rs.in", "pub fn gimme() -> i32 { 1 }");
+
+ p.cargo("build")
+ .with_stderr(
+ "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+ [COMPILING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+ [..]\n",
+ )
+ .run();
+
+ p.process(&p.bin("foo")).with_stdout("1\n").run();
+}
+
+#[cargo_test]
+fn dev_deps_no_rebuild_lib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dev-dependencies.bar]
+ path = "bar"
+
+ [lib]
+ name = "foo"
+ doctest = false
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(test)] #[allow(unused_extern_crates)] extern crate bar;
+ #[cfg(not(test))] pub fn foo() { env!("FOO"); }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+ p.cargo("build")
+ .env("FOO", "bar")
+ .with_stderr(
+ "[COMPILING] foo v0.5.0 ([CWD])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ )
+ .run();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] [..] v0.5.0 ([CWD][..])
+[COMPILING] [..] v0.5.0 ([CWD][..])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("running 0 tests")
+ .run();
+}
+
+#[cargo_test]
+fn custom_target_no_rebuild() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ [dependencies]
+ a = { path = "a" }
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.5.0"))
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+ [dependencies]
+ a = { path = "../a" }
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] a v0.5.0 ([..])
+[CHECKING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ t!(fs::rename(
+ p.root().join("target"),
+ p.root().join("target_moved")
+ ));
+ p.cargo("check --manifest-path=b/Cargo.toml")
+ .env("CARGO_TARGET_DIR", "target_moved")
+ .with_stderr(
+ "\
+[CHECKING] b v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn override_and_depend() {
+ let p = project()
+ .no_manifest()
+ .file(
+ "a/a1/Cargo.toml",
+ r#"
+ [package]
+ name = "a1"
+ version = "0.5.0"
+ authors = []
+ [dependencies]
+ a2 = { path = "../a2" }
+ "#,
+ )
+ .file("a/a1/src/lib.rs", "")
+ .file("a/a2/Cargo.toml", &basic_manifest("a2", "0.5.0"))
+ .file("a/a2/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.5.0"
+ authors = []
+ [dependencies]
+ a1 = { path = "../a/a1" }
+ a2 = { path = "../a/a2" }
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .file("b/.cargo/config", r#"paths = ["../a"]"#)
+ .build();
+ p.cargo("check")
+ .cwd("b")
+ .with_stderr(
+ "\
+[WARNING] skipping duplicate package `a2` found at `[..]`
+[CHECKING] a2 v0.5.0 ([..])
+[CHECKING] a1 v0.5.0 ([..])
+[CHECKING] b v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn missing_path_dependency() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("a", "0.5.0"))
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"paths = ["../whoa-this-does-not-exist"]"#,
+ )
+ .build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to update path override `[..]../whoa-this-does-not-exist` \
+(defined in `[..]`)
+
+Caused by:
+ failed to read directory `[..]`
+
+Caused by:
+ [..] (os error [..])
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_path_dep_in_workspace_with_lockfile() {
+ Package::new("bar", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "top"
+ version = "0.5.0"
+ authors = []
+
+ [workspace]
+
+ [dependencies]
+ foo = { path = "foo" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ // Generate a lock file
+ p.cargo("check").run();
+
+ // Change the dependency on `bar` to an invalid path
+ p.change_file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "" }
+ "#,
+ );
+
+ // Make sure we get a nice error. In the past this actually stack
+ // overflowed!
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: no matching package found
+searched package name: `bar`
+perhaps you meant: foo
+location searched: [..]
+required by package `foo v0.5.0 ([..])`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn workspace_produces_rlib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "top"
+ version = "0.5.0"
+ authors = []
+
+ [workspace]
+
+ [dependencies]
+ foo = { path = "foo" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.5.0"))
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(p.root().join("target/debug/libtop.rlib").is_file());
+ assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+}
+
+#[cargo_test]
+fn deep_path_error() {
+ // Test for an error loading a path deep in the dependency graph.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [dependencies]
+ a = {path="a"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ [dependencies]
+ b = {path="../b"}
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+ [dependencies]
+ c = {path="../c"}
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to get `c` as a dependency of package `b v0.1.0 [..]`
+ ... which satisfies path dependency `b` of package `a v0.1.0 [..]`
+ ... which satisfies path dependency `a` of package `foo v0.1.0 [..]`
+
+Caused by:
+ failed to load source for dependency `c`
+
+Caused by:
+ Unable to update [..]/foo/c
+
+Caused by:
+ failed to read `[..]/foo/c/Cargo.toml`
+
+Caused by:
+ [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn catch_tricky_cycle() {
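+    // The dev-dependency pulls in a chain that loops back to this package:
+    // message -> test -> snapshot -> ledger -> tangle -> message. Cargo must
+    // report this as a cyclic package dependency rather than accept it.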
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "message"
+ version = "0.1.0"
+
+ [dev-dependencies]
+ test = { path = "test" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "tangle/Cargo.toml",
+ r#"
+ [package]
+ name = "tangle"
+ version = "0.1.0"
+
+ [dependencies]
+ message = { path = ".." }
+ snapshot = { path = "../snapshot" }
+ "#,
+ )
+ .file("tangle/src/lib.rs", "")
+ .file(
+ "snapshot/Cargo.toml",
+ r#"
+ [package]
+ name = "snapshot"
+ version = "0.1.0"
+
+ [dependencies]
+ ledger = { path = "../ledger" }
+ "#,
+ )
+ .file("snapshot/src/lib.rs", "")
+ .file(
+ "ledger/Cargo.toml",
+ r#"
+ [package]
+ name = "ledger"
+ version = "0.1.0"
+
+ [dependencies]
+ tangle = { path = "../tangle" }
+ "#,
+ )
+ .file("ledger/src/lib.rs", "")
+ .file(
+ "test/Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.1.0"
+
+ [dependencies]
+ snapshot = { path = "../snapshot" }
+ "#,
+ )
+ .file("test/src/lib.rs", "")
+ .build();
+
+ p.cargo("test")
+ .with_stderr_contains("[..]cyclic package dependency[..]")
+ .with_status(101)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/paths.rs b/src/tools/cargo/tests/testsuite/paths.rs
new file mode 100644
index 000000000..31e00ae11
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/paths.rs
@@ -0,0 +1,226 @@
+//! Tests for `paths` overrides.
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_manifest, project};
+
+#[cargo_test]
+fn broken_path_override_warns() {
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.2.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a1" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a1/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file("a1/src/lib.rs", "")
+ .file(
+ "a2/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.2"
+ "#,
+ )
+ .file("a2/src/lib.rs", "")
+ .file(".cargo/config", r#"paths = ["a2"]"#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+warning: path override for crate `a` has altered the original list of
+dependencies; the dependency on `bar` was either added or
+modified to not match the previously resolved version
+
+This is currently allowed but is known to produce buggy behavior with spurious
+recompiles and changes to the crate graph. Path overrides unfortunately were
+never intended to support this feature, so for now this message is just a
+warning. In the future, however, this message will become a hard error.
+
+To change the dependency graph via an override it's recommended to use the
+`[patch]` feature of Cargo instead of the path override feature. This is
+documented online at the url below for more information.
+
+https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html
+
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[CHECKING] [..]
+[CHECKING] [..]
+[CHECKING] [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn override_to_path_dep() {
+ Package::new("bar", "0.1.0").dep("baz", "0.1").publish();
+ Package::new("baz", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ baz = { path = "baz" }
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+ .file("bar/baz/src/lib.rs", "")
+ .file(".cargo/config", r#"paths = ["bar"]"#)
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn paths_ok_with_optional() {
+ Package::new("baz", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ baz = { version = "0.1", optional = true }
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file(
+ "bar2/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ baz = { version = "0.1", optional = true }
+ "#,
+ )
+ .file("bar2/src/lib.rs", "")
+ .file(".cargo/config", r#"paths = ["bar2"]"#)
+ .build();
+
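+    // `bar2` declares the same dependency list as `bar` and `baz` stays disabled,
+    // so the override does not alter the resolved graph and no warning is emitted.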
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.1.0 ([..]bar2)
+[CHECKING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn paths_add_optional_bad() {
+ Package::new("baz", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .file(
+ "bar2/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ baz = { version = "0.1", optional = true }
+ "#,
+ )
+ .file("bar2/src/lib.rs", "")
+ .file(".cargo/config", r#"paths = ["bar2"]"#)
+ .build();
+
+ p.cargo("check")
+ .with_stderr_contains(
+ "\
+warning: path override for crate `bar` has altered the original list of
+dependencies; the dependency on `baz` was either added or\
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/pkgid.rs b/src/tools/cargo/tests/testsuite/pkgid.rs
new file mode 100644
index 000000000..3e3e4692a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/pkgid.rs
@@ -0,0 +1,128 @@
+//! Tests for the `cargo pkgid` command.
+
+use cargo_test_support::project;
+use cargo_test_support::registry::Package;
+
+#[cargo_test]
+fn simple() {
+ Package::new("bar", "0.1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+
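+    // Path packages print a `file://` URL; registry packages print the index URL
+    // followed by `name@version`.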
+ p.cargo("pkgid foo")
+ .with_stdout(format!("file://[..]{}#0.1.0", p.root().to_str().unwrap()))
+ .run();
+
+ p.cargo("pkgid bar")
+ .with_stdout("https://github.com/rust-lang/crates.io-index#bar@0.1.0")
+ .run();
+}
+
+#[cargo_test]
+fn suggestion_bad_pkgid() {
+ Package::new("crates-io", "0.1.0").publish();
+ Package::new("two-ver", "0.1.0").publish();
+ Package::new("two-ver", "0.2.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ crates-io = "0.1.0"
+ two-ver = "0.1.0"
+ two-ver2 = { package = "two-ver", version = "0.2.0" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("cratesio", "")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+
+ // Bad URL.
+ p.cargo("pkgid https://example.com/crates-io")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: package ID specification `https://example.com/crates-io` did not match any packages
+Did you mean one of these?
+
+ crates-io@0.1.0
+",
+ )
+ .run();
+
+ // Bad name.
+ p.cargo("pkgid crates_io")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: package ID specification `crates_io` did not match any packages
+
+<tab>Did you mean `crates-io`?
+",
+ )
+ .run();
+
+ // Bad version.
+ p.cargo("pkgid two-ver:0.3.0")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: package ID specification `two-ver@0.3.0` did not match any packages
+Did you mean one of these?
+
+ two-ver@0.1.0
+ two-ver@0.2.0
+",
+ )
+ .run();
+
+ // Bad file URL.
+ p.cargo("pkgid ./Cargo.toml")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: invalid package ID specification: `./Cargo.toml`
+
+Caused by:
+ package ID specification `./Cargo.toml` looks like a file path, maybe try file://[..]/Cargo.toml
+",
+ )
+ .run();
+
+ // Bad file URL with similar name.
+ p.cargo("pkgid './cratesio'")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: invalid package ID specification: `./cratesio`
+
+<tab>Did you mean `crates-io`?
+
+Caused by:
+ package ID specification `./cratesio` looks like a file path, maybe try file://[..]/cratesio
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/plugins.rs b/src/tools/cargo/tests/testsuite/plugins.rs
new file mode 100644
index 000000000..331ba32e0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/plugins.rs
@@ -0,0 +1,421 @@
+//! Tests for rustc plugins.
+
+use cargo_test_support::rustc_host;
+use cargo_test_support::{basic_manifest, project};
+
+#[cargo_test(nightly, reason = "plugins are unstable")]
+fn plugin_to_the_max() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "foo_lib"
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(plugin)]
+ #![plugin(bar)]
+ extern crate foo_lib;
+
+ fn main() { foo_lib::foo(); }
+ "#,
+ )
+ .file(
+ "src/foo_lib.rs",
+ r#"
+ #![feature(plugin)]
+ #![plugin(bar)]
+
+ pub fn foo() {}
+ "#,
+ )
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "bar"
+ plugin = true
+
+ [dependencies.baz]
+ path = "../baz"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(rustc_private)]
+
+ extern crate baz;
+ extern crate rustc_driver;
+
+ use rustc_driver::plugin::Registry;
+
+ #[no_mangle]
+ pub fn __rustc_plugin_registrar(_reg: &mut Registry) {
+ println!("{}", baz::baz());
+ }
+ "#,
+ )
+ .build();
+ let _baz = project()
+ .at("baz")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "baz"
+ crate_type = ["dylib"]
+ "#,
+ )
+ .file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
+ .build();
+
+ foo.cargo("build").run();
+ foo.cargo("doc").run();
+}
+
+#[cargo_test(nightly, reason = "plugins are unstable")]
+fn plugin_with_dynamic_native_dependency() {
+ let build = project()
+ .at("builder")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "builder"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "builder"
+ crate-type = ["dylib"]
+ "#,
+ )
+ .file("src/lib.rs", "#[no_mangle] pub extern fn foo() {}")
+ .build();
+
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(plugin)]
+ #![plugin(bar)]
+
+ fn main() {}
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ build = 'build.rs'
+
+ [lib]
+ name = "bar"
+ plugin = true
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"
+ use std::env;
+ use std::fs;
+ use std::path::PathBuf;
+
+ fn main() {
+ let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
+ let root = PathBuf::from(env::var("BUILDER_ROOT").unwrap());
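+                    // Copy the prebuilt `builder` dynamic library from BUILDER_ROOT into
+                    // OUT_DIR so the plugin can link against it.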
+ let file = format!("{}builder{}",
+ env::consts::DLL_PREFIX,
+ env::consts::DLL_SUFFIX);
+ let src = root.join(&file);
+ let dst = out_dir.join(&file);
+ fs::copy(src, dst).unwrap();
+ if cfg!(target_env = "msvc") {
+ fs::copy(root.join("builder.dll.lib"),
+ out_dir.join("builder.dll.lib")).unwrap();
+ }
+ println!("cargo:rustc-flags=-L {}", out_dir.display());
+ }
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ #![feature(rustc_private)]
+
+ extern crate rustc_driver;
+ use rustc_driver::plugin::Registry;
+
+ #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))]
+ #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))]
+ extern { fn foo(); }
+
+ #[no_mangle]
+ pub fn __rustc_plugin_registrar(_reg: &mut Registry) {
+ unsafe { foo() }
+ }
+ "#,
+ )
+ .build();
+
+ build.cargo("build").run();
+
+ let root = build.root().join("target").join("debug");
+ foo.cargo("build -v").env("BUILDER_ROOT", root).run();
+}
+
+#[cargo_test]
+fn plugin_integration() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+
+ [lib]
+ name = "foo"
+ plugin = true
+ doctest = false
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .file("tests/it_works.rs", "")
+ .build();
+
+ p.cargo("test -v").run();
+}
+
+#[cargo_test]
+fn doctest_a_plugin() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "#[macro_use] extern crate bar;")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "bar"
+ plugin = true
+ "#,
+ )
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("test -v").run();
+}
+
+// See #1515
+#[cargo_test]
+fn native_plugin_dependency_with_custom_linker() {
+ let target = rustc_host();
+
+ let _foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ plugin = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let bar = project()
+ .at("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.foo]
+ path = "../foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}]
+ linker = "nonexistent-linker"
+ "#,
+ target
+ ),
+ )
+ .build();
+
+ bar.cargo("build --verbose")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] -C linker=nonexistent-linker [..]`
+[ERROR] [..]linker[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "requires rustc_private")]
+fn panic_abort_plugins() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.dev]
+ panic = 'abort'
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ plugin = true
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ #![feature(rustc_private)]
+ extern crate rustc_ast;
+ extern crate rustc_driver;
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+}
+
+#[cargo_test(nightly, reason = "requires rustc_private")]
+fn shared_panic_abort_plugins() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.dev]
+ panic = 'abort'
+
+ [dependencies]
+ bar = { path = "bar" }
+ baz = { path = "baz" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate baz;")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ plugin = true
+
+ [dependencies]
+ baz = { path = "../baz" }
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ #![feature(rustc_private)]
+ extern crate rustc_ast;
+ extern crate rustc_driver;
+ extern crate baz;
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v").run();
+}
diff --git a/src/tools/cargo/tests/testsuite/proc_macro.rs b/src/tools/cargo/tests/testsuite/proc_macro.rs
new file mode 100644
index 000000000..7d6f6ba86
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/proc_macro.rs
@@ -0,0 +1,560 @@
+//! Tests for proc-macros.
+
+use cargo_test_support::project;
+
+#[cargo_test]
+fn probe_cfg_before_crate_type_discovery() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [target.'cfg(not(stage300))'.dependencies.noop]
+ path = "../noop"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[macro_use]
+ extern crate noop;
+
+ #[derive(Noop)]
+ struct X;
+
+ fn main() {}
+ "#,
+ )
+ .build();
+ let _noop = project()
+ .at("noop")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "noop"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[proc_macro_derive(Noop)]
+ pub fn noop(_input: TokenStream) -> TokenStream {
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn noop() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.noop]
+ path = "../noop"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[macro_use]
+ extern crate noop;
+
+ #[derive(Noop)]
+ struct X;
+
+ fn main() {}
+ "#,
+ )
+ .build();
+ let _noop = project()
+ .at("noop")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "noop"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[proc_macro_derive(Noop)]
+ pub fn noop(_input: TokenStream) -> TokenStream {
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check").run();
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn impl_and_derive() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.transmogrify]
+ path = "../transmogrify"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[macro_use]
+ extern crate transmogrify;
+
+ trait ImplByTransmogrify {
+ fn impl_by_transmogrify(&self) -> bool;
+ }
+
+ #[derive(Transmogrify, Debug)]
+ struct X { success: bool }
+
+ fn main() {
+ let x = X::new();
+ assert!(x.impl_by_transmogrify());
+ println!("{:?}", x);
+ }
+ "#,
+ )
+ .build();
+ let _transmogrify = project()
+ .at("transmogrify")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "transmogrify"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[proc_macro_derive(Transmogrify)]
+ #[doc(hidden)]
+ pub fn transmogrify(input: TokenStream) -> TokenStream {
+ "
+ impl X {
+ fn new() -> Self {
+ X { success: true }
+ }
+ }
+
+ impl ImplByTransmogrify for X {
+ fn impl_by_transmogrify(&self) -> bool {
+ true
+ }
+ }
+ ".parse().unwrap()
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("run").with_stdout("X { success: true }").run();
+}
+
+#[cargo_test(nightly, reason = "plugins are unstable")]
+fn plugin_and_proc_macro() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ plugin = true
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(rustc_private)]
+ #![feature(proc_macro, proc_macro_lib)]
+
+ extern crate rustc_driver;
+ use rustc_driver::plugin::Registry;
+
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[no_mangle]
+ pub fn __rustc_plugin_registrar(reg: &mut Registry) {}
+
+ #[proc_macro_derive(Questionable)]
+ pub fn questionable(input: TokenStream) -> TokenStream {
+ input
+ }
+ "#,
+ )
+ .build();
+
+ let msg = " `lib.plugin` and `lib.proc-macro` cannot both be `true`";
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains(msg)
+ .run();
+}
+
+#[cargo_test]
+fn proc_macro_doctest() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #![crate_type = "proc-macro"]
+
+ extern crate proc_macro;
+
+ use proc_macro::TokenStream;
+
+ /// ```
+ /// assert!(true);
+ /// ```
+ #[proc_macro_derive(Bar)]
+ pub fn derive(_input: TokenStream) -> TokenStream {
+ "".parse().unwrap()
+ }
+
+ #[test]
+ fn a() {
+ assert!(true);
+ }
+ "#,
+ )
+ .build();
+
+ foo.cargo("test")
+ .with_stdout_contains("test a ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 2)
+ .run();
+}
+
+#[cargo_test]
+fn proc_macro_crate_type() {
+ // Verify that `crate-type = ["proc-macro"]` is the same as `proc-macro = true`
+ // and that everything, including rustdoc, works correctly.
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [dependencies]
+ pm = { path = "pm" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ //! ```
+ //! use foo::THING;
+ //! assert_eq!(THING, 123);
+ //! ```
+ #[macro_use]
+ extern crate pm;
+ #[derive(MkItem)]
+ pub struct S;
+ #[cfg(test)]
+ mod tests {
+ use super::THING;
+ #[test]
+ fn it_works() {
+ assert_eq!(THING, 123);
+ }
+ }
+ "#,
+ )
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+ [lib]
+ crate-type = ["proc-macro"]
+ "#,
+ )
+ .file(
+ "pm/src/lib.rs",
+ r#"
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[proc_macro_derive(MkItem)]
+ pub fn mk_item(_input: TokenStream) -> TokenStream {
+ "pub const THING: i32 = 123;".parse().unwrap()
+ }
+ "#,
+ )
+ .build();
+
+ foo.cargo("test")
+ .with_stdout_contains("test tests::it_works ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 2)
+ .run();
+}
+
+#[cargo_test]
+fn proc_macro_crate_type_warning() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [lib]
+ crate-type = ["proc-macro"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ foo.cargo("check")
+ .with_stderr_contains(
+ "[WARNING] library `foo` should only specify `proc-macro = true` instead of setting `crate-type`")
+ .run();
+}
+
+#[cargo_test]
+fn proc_macro_conflicting_warning() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [lib]
+ proc-macro = false
+ proc_macro = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ foo.cargo("check")
+ .with_stderr_contains(
+"[WARNING] conflicting between `proc-macro` and `proc_macro` in the `foo` library target.\n
+ `proc_macro` is ignored and not recommended for use in the future",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn proc_macro_crate_type_warning_plugin() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [lib]
+ crate-type = ["proc-macro"]
+ plugin = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ foo.cargo("check")
+ .with_stderr_contains(
+ "[WARNING] proc-macro library `foo` should not specify `plugin = true`")
+ .with_stderr_contains(
+ "[WARNING] library `foo` should only specify `proc-macro = true` instead of setting `crate-type`")
+ .run();
+}
+
+#[cargo_test]
+fn proc_macro_crate_type_multiple() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [lib]
+ crate-type = ["proc-macro", "rlib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ foo.cargo("check")
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+ cannot mix `proc-macro` crate type with others
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn proc_macro_extern_prelude() {
+ // Check that proc_macro is in the extern prelude.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ use proc_macro::TokenStream;
+ #[proc_macro]
+ pub fn foo(input: TokenStream) -> TokenStream {
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ .build();
+ p.cargo("test").run();
+ p.cargo("doc").run();
+}
+
+#[cargo_test]
+fn proc_macro_built_once() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ['a', 'b']
+ resolver = "2"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [build-dependencies]
+ the-macro = { path = '../the-macro' }
+ "#,
+ )
+ .file("a/build.rs", "fn main() {}")
+ .file("a/src/main.rs", "fn main() {}")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [dependencies]
+ the-macro = { path = '../the-macro', features = ['a'] }
+ "#,
+ )
+ .file("b/src/main.rs", "fn main() {}")
+ .file(
+ "the-macro/Cargo.toml",
+ r#"
+ [package]
+ name = "the-macro"
+ version = "0.1.0"
+
+ [lib]
+ proc_macro = true
+
+ [features]
+ a = []
+ "#,
+ )
+ .file("the-macro/src/lib.rs", "")
+ .build();
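+    // `the-macro` is used both as a build-dependency of `a` and as a normal dependency
+    // of `b` (with the `a` feature enabled), yet the output below shows it is compiled
+    // only once.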
+ p.cargo("build --verbose")
+ .with_stderr_unordered(
+ "\
+[COMPILING] the-macro [..]
+[RUNNING] `rustc --crate-name the_macro [..]`
+[COMPILING] b [..]
+[RUNNING] `rustc --crate-name b [..]`
+[COMPILING] a [..]
+[RUNNING] `rustc --crate-name build_script_build [..]`
+[RUNNING] `[..]build[..]script[..]build[..]`
+[RUNNING] `rustc --crate-name a [..]`
+[FINISHED] [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/profile_config.rs b/src/tools/cargo/tests/testsuite/profile_config.rs
new file mode 100644
index 000000000..c59ed7a97
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/profile_config.rs
@@ -0,0 +1,519 @@
+//! Tests for profiles defined in config files.
+
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_lib_manifest, paths, project};
+
+// TODO: this should be removed once -Zprofile-rustflags is stabilized
+#[cargo_test]
+fn rustflags_works_with_zflag() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [profile.dev]
+ rustflags = ["-C", "link-dead-code=yes"]
+ "#,
+ )
+ .build();
+
+ p.cargo("check -v")
+ .masquerade_as_nightly_cargo(&["profile-rustflags"])
+ .with_status(101)
+ .with_stderr_contains("[..]feature `profile-rustflags` is required[..]")
+ .run();
+
+ p.cargo("check -v -Zprofile-rustflags")
+ .masquerade_as_nightly_cargo(&["profile-rustflags"])
+ .with_stderr(
+ "\
+[CHECKING] foo [..]
+[RUNNING] `rustc --crate-name foo [..] -C link-dead-code=yes [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.change_file(
+ ".cargo/config.toml",
+ r#"
+ [unstable]
+ profile-rustflags = true
+
+ [profile.dev]
+ rustflags = ["-C", "link-dead-code=yes"]
+ "#,
+ );
+
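+    // With `profile-rustflags` enabled via `[unstable]` in the config, the `-Z` flag is
+    // no longer needed on the command line.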
+ p.cargo("check -v")
+ .masquerade_as_nightly_cargo(&["profile-rustflags"])
+ .with_stderr(
+ "\
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_config_validate_warnings() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [profile.test]
+ opt-level = 3
+
+ [profile.asdf]
+ opt-level = 3
+
+ [profile.dev]
+ bad-key = true
+
+ [profile.dev.build-override]
+ bad-key-bo = true
+
+ [profile.dev.package.bar]
+ bad-key-bar = true
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_stderr_unordered(
+ "\
+[WARNING] unused config key `profile.dev.bad-key` in `[..].cargo/config`
+[WARNING] unused config key `profile.dev.package.bar.bad-key-bar` in `[..].cargo/config`
+[WARNING] unused config key `profile.dev.build-override.bad-key-bo` in `[..].cargo/config`
+[COMPILING] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_config_error_paths() {
+ // Errors in config show where the error is located.
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [profile.dev]
+ opt-level = 3
+ "#,
+ )
+ .file(
+ paths::home().join(".cargo/config"),
+ r#"
+ [profile.dev]
+ rpath = "foo"
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] error in [..]/foo/.cargo/config: could not load config key `profile.dev`
+
+Caused by:
+ error in [..]/home/.cargo/config: `profile.dev.rpath` expected true/false, but found a string
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_config_validate_errors() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [profile.dev.package.foo]
+ panic = "abort"
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] config profile `dev` is not valid (defined in `[..]/foo/.cargo/config`)
+
+Caused by:
+ `panic` may not be specified in a `package` profile
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_config_syntax_errors() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [profile.dev]
+ codegen-units = "foo"
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] error in [..]/.cargo/config: could not load config key `profile.dev`
+
+Caused by:
+ error in [..]/foo/.cargo/config: `profile.dev.codegen-units` expected an integer, but found a string
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_config_override_spec_multiple() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [profile.dev.package.bar]
+ opt-level = 3
+
+ [profile.dev.package."bar:0.5.0"]
+ opt-level = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+    // Unfortunately this doesn't tell you which file; hopefully it's not too
+    // much of a problem.
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] multiple package overrides in profile `dev` match package `bar v0.5.0 ([..])`
+found package specs: bar, bar@0.5.0",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_config_all_options() {
+ // Ensure all profile options are supported.
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [profile.release]
+ opt-level = 1
+ debug = true
+ debug-assertions = true
+ overflow-checks = false
+ rpath = true
+ lto = true
+ codegen-units = 2
+ panic = "abort"
+ incremental = true
+ "#,
+ )
+ .build();
+
+ p.cargo("build --release -v")
+ .env_remove("CARGO_INCREMENTAL")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..] \
+ -C opt-level=1 \
+ -C panic=abort \
+ -C lto[..]\
+ -C codegen-units=2 \
+ -C debuginfo=2 \
+ -C debug-assertions=on \
+ -C overflow-checks=off [..]\
+ -C rpath [..]\
+ -C incremental=[..]
+[FINISHED] release [optimized + debuginfo] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_config_override_precedence() {
+ // Config values take precedence over manifest values.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = {path = "bar"}
+
+ [profile.dev]
+ codegen-units = 2
+
+ [profile.dev.package.bar]
+ opt-level = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [profile.dev.package.bar]
+ opt-level = 2
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar [..] -C opt-level=2[..]-C codegen-units=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..]-C codegen-units=2 [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_config_no_warn_unknown_override() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [profile.dev.package.bar]
+ codegen-units = 4
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_stderr_does_not_contain("[..]warning[..]")
+ .run();
+}
+
+#[cargo_test]
+fn profile_config_mixed_types() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [profile.dev]
+ opt-level = 3
+ "#,
+ )
+ .file(
+ paths::home().join(".cargo/config"),
+ r#"
+ [profile.dev]
+ opt-level = 's'
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr_contains("[..]-C opt-level=3 [..]")
+ .run();
+}
+
+#[cargo_test]
+fn named_config_profile() {
+    // Exercises named profiles defined in config.
+ // foo -> middle -> bar -> dev
+ // middle exists in Cargo.toml, the others in .cargo/config
+ use super::config::ConfigBuilder;
+ use cargo::core::compiler::CompileKind;
+ use cargo::core::profiles::{Profiles, UnitFor};
+ use cargo::core::{PackageId, Workspace};
+ use cargo::util::interning::InternedString;
+ use std::fs;
+ paths::root().join(".cargo").mkdir_p();
+ fs::write(
+ paths::root().join(".cargo/config"),
+ r#"
+ [profile.foo]
+ inherits = "middle"
+ codegen-units = 2
+ [profile.foo.build-override]
+ codegen-units = 6
+ [profile.foo.package.dep]
+ codegen-units = 7
+
+ [profile.middle]
+ inherits = "bar"
+ codegen-units = 3
+
+ [profile.bar]
+ inherits = "dev"
+ codegen-units = 4
+ debug = 1
+ "#,
+ )
+ .unwrap();
+ fs::write(
+ paths::root().join("Cargo.toml"),
+ r#"
+ [workspace]
+
+ [profile.middle]
+ inherits = "bar"
+ codegen-units = 1
+ opt-level = 1
+ [profile.middle.package.dep]
+ overflow-checks = false
+
+ [profile.foo.build-override]
+ codegen-units = 5
+ debug-assertions = false
+ [profile.foo.package.dep]
+ codegen-units = 8
+ "#,
+ )
+ .unwrap();
+ let config = ConfigBuilder::new().build();
+ let profile_name = InternedString::new("foo");
+ let ws = Workspace::new(&paths::root().join("Cargo.toml"), &config).unwrap();
+ let profiles = Profiles::new(&ws, profile_name).unwrap();
+
+ let crates_io = cargo::core::source::SourceId::crates_io(&config).unwrap();
+ let a_pkg = PackageId::new("a", "0.1.0", crates_io).unwrap();
+ let dep_pkg = PackageId::new("dep", "0.1.0", crates_io).unwrap();
+
+ // normal package
+ let kind = CompileKind::Host;
+ let p = profiles.get_profile(a_pkg, true, true, UnitFor::new_normal(kind), kind);
+ assert_eq!(p.name, "foo");
+ assert_eq!(p.codegen_units, Some(2)); // "foo" from config
+ assert_eq!(p.opt_level, "1"); // "middle" from manifest
+ assert_eq!(p.debuginfo.to_option(), Some(1)); // "bar" from config
+ assert_eq!(p.debug_assertions, true); // "dev" built-in (ignore build-override)
+ assert_eq!(p.overflow_checks, true); // "dev" built-in (ignore package override)
+
+ // build-override
+ let bo = profiles.get_profile(a_pkg, true, true, UnitFor::new_host(false, kind), kind);
+ assert_eq!(bo.name, "foo");
+ assert_eq!(bo.codegen_units, Some(6)); // "foo" build override from config
+ assert_eq!(bo.opt_level, "0"); // default to zero
+ assert_eq!(bo.debuginfo.to_option(), Some(1)); // SAME as normal
+ assert_eq!(bo.debug_assertions, false); // "foo" build override from manifest
+ assert_eq!(bo.overflow_checks, true); // SAME as normal
+
+ // package overrides
+ let po = profiles.get_profile(dep_pkg, false, true, UnitFor::new_normal(kind), kind);
+ assert_eq!(po.name, "foo");
+ assert_eq!(po.codegen_units, Some(7)); // "foo" package override from config
+ assert_eq!(po.opt_level, "1"); // SAME as normal
+ assert_eq!(po.debuginfo.to_option(), Some(1)); // SAME as normal
+ assert_eq!(po.debug_assertions, true); // SAME as normal
+ assert_eq!(po.overflow_checks, false); // "middle" package override from manifest
+}
+
+#[cargo_test]
+fn named_env_profile() {
+ // Environment variables used to define a named profile.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v --profile=other")
+ .env("CARGO_PROFILE_OTHER_CODEGEN_UNITS", "1")
+ .env("CARGO_PROFILE_OTHER_INHERITS", "dev")
+ .with_stderr_contains("[..]-C codegen-units=1 [..]")
+ .run();
+}
+
+#[cargo_test]
+fn test_with_dev_profile() {
+ // The `test` profile inherits from `dev` for both local crates and
+ // dependencies.
+ Package::new("somedep", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ somedep = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("test --lib --no-run -v")
+ .env("CARGO_PROFILE_DEV_DEBUG", "0")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] [..]
+[DOWNLOADED] [..]
+[COMPILING] somedep v1.0.0
+[RUNNING] `rustc --crate-name somedep [..]-C debuginfo=0[..]
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] `rustc --crate-name foo [..]-C debuginfo=0[..]
+[FINISHED] [..]
+[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/profile_custom.rs b/src/tools/cargo/tests/testsuite/profile_custom.rs
new file mode 100644
index 000000000..ea6b54c95
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/profile_custom.rs
@@ -0,0 +1,731 @@
+//! Tests for named profiles.
+
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::{basic_lib_manifest, project};
+
+#[cargo_test]
+fn inherits_on_release() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.release]
+ inherits = "dev"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] `inherits` must not be specified in root profile `release`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn missing_inherits() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.release-lto]
+ codegen-units = 7
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] profile `release-lto` is missing an `inherits` directive \
+ (`inherits` is required for all profiles except `dev` or `release`)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_profile_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.'.release-lto']
+ inherits = "release"
+ codegen-units = 7
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at [..]
+
+Caused by:
+ invalid character `.` in profile name `.release-lto`
+ Allowed characters are letters, numbers, underscore, and hyphen.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+// We are currently uncertain if dir-name will ever be exposed to the user.
+// The code for it still roughly exists, but only for the internal profiles.
+// This test was kept in case we ever want to enable support for it again.
+#[ignore = "dir-name is disabled"]
+fn invalid_dir_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.'release-lto']
+ inherits = "release"
+ dir-name = ".subdir"
+ codegen-units = 7
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at [..]
+
+Caused by:
+ Invalid character `.` in dir-name: `.subdir`",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dir_name_disabled() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.release-lto]
+ inherits = "release"
+ dir-name = "lto"
+ lto = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
+
+Caused by:
+ dir-name=\"lto\" in profile `release-lto` is not currently allowed, \
+ directory names are tied to the profile name for custom profiles
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_inherits() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.'release-lto']
+ inherits = ".release"
+ codegen-units = 7
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "error: profile `release-lto` inherits from `.release`, \
+ but that profile is not defined",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn non_existent_inherits() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.release-lto]
+ codegen-units = 7
+ inherits = "non-existent"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] profile `release-lto` inherits from `non-existent`, but that profile is not defined
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn self_inherits() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.release-lto]
+ codegen-units = 7
+ inherits = "release-lto"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] profile inheritance loop detected with profile `release-lto` inheriting `release-lto`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn inherits_loop() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.release-lto]
+ codegen-units = 7
+ inherits = "release-lto2"
+
+ [profile.release-lto2]
+ codegen-units = 7
+ inherits = "release-lto"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] profile inheritance loop detected with profile `release-lto2` inheriting `release-lto`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn overrides_with_custom() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ xxx = {path = "xxx"}
+ yyy = {path = "yyy"}
+
+ [profile.dev]
+ codegen-units = 7
+
+ [profile.dev.package.xxx]
+ codegen-units = 5
+ [profile.dev.package.yyy]
+ codegen-units = 3
+
+ [profile.other]
+ inherits = "dev"
+ codegen-units = 2
+
+ [profile.other.package.yyy]
+ codegen-units = 6
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("xxx/Cargo.toml", &basic_lib_manifest("xxx"))
+ .file("xxx/src/lib.rs", "")
+ .file("yyy/Cargo.toml", &basic_lib_manifest("yyy"))
+ .file("yyy/src/lib.rs", "")
+ .build();
+
+    // Profile overrides are inherited between profiles via `inherits` and take
+    // priority over the profile options provided by custom profiles.
+ p.cargo("build -v")
+ .with_stderr_unordered(
+ "\
+[COMPILING] xxx [..]
+[COMPILING] yyy [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name xxx [..] -C codegen-units=5 [..]`
+[RUNNING] `rustc --crate-name yyy [..] -C codegen-units=3 [..]`
+[RUNNING] `rustc --crate-name foo [..] -C codegen-units=7 [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+    // This also verifies that the custom profile name appears in the finished line.
+ p.cargo("build --profile=other -v")
+ .with_stderr_unordered(
+ "\
+[COMPILING] xxx [..]
+[COMPILING] yyy [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name xxx [..] -C codegen-units=5 [..]`
+[RUNNING] `rustc --crate-name yyy [..] -C codegen-units=6 [..]`
+[RUNNING] `rustc --crate-name foo [..] -C codegen-units=2 [..]`
+[FINISHED] other [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn conflicting_usage() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --profile=dev --release")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: conflicting usage of --profile=dev and --release
+The `--release` flag is the same as `--profile=release`.
+Remove one flag or the other to continue.
+",
+ )
+ .run();
+
+ p.cargo("install --profile=release --debug")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: conflicting usage of --profile=release and --debug
+The `--debug` flag is the same as `--profile=dev`.
+Remove one flag or the other to continue.
+",
+ )
+ .run();
+
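+    // `cargo rustc` predates the conflict check, so it only warns and ignores `--release`.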
+ p.cargo("rustc --profile=dev --release")
+ .with_stderr(
+ "\
+warning: the `--release` flag should not be specified with the `--profile` flag
+The `--release` flag will be ignored.
+This was historically accepted, but will become an error in a future release.
+[COMPILING] foo [..]
+[FINISHED] dev [..]
+",
+ )
+ .run();
+
+ p.cargo("check --profile=dev --release")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: conflicting usage of --profile=dev and --release
+The `--release` flag is the same as `--profile=release`.
+Remove one flag or the other to continue.
+",
+ )
+ .run();
+
+ p.cargo("check --profile=test --release")
+ .with_stderr(
+ "\
+warning: the `--release` flag should not be specified with the `--profile` flag
+The `--release` flag will be ignored.
+This was historically accepted, but will become an error in a future release.
+[CHECKING] foo [..]
+[FINISHED] test [..]
+",
+ )
+ .run();
+
+ // This is OK since the two are the same.
+ p.cargo("rustc --profile=release --release")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[FINISHED] release [..]
+",
+ )
+ .run();
+
+ p.cargo("build --profile=release --release")
+ .with_stderr(
+ "\
+[FINISHED] release [..]
+",
+ )
+ .run();
+
+ p.cargo("install --path . --profile=dev --debug")
+ .with_stderr(
+ "\
+[INSTALLING] foo [..]
+[FINISHED] dev [..]
+[INSTALLING] [..]
+[INSTALLED] [..]
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+
+ p.cargo("install --path . --profile=release --debug")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: conflicting usage of --profile=release and --debug
+The `--debug` flag is the same as `--profile=dev`.
+Remove one flag or the other to continue.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn clean_custom_dirname() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.other]
+ inherits = "release"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --release")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("clean -p foo").run();
+
+ p.cargo("build --release")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[FINISHED] release [optimized] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("clean -p foo --release").run();
+
+ p.cargo("build --release")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build --profile=other")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] other [optimized] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("clean").arg("--release").run();
+
+ // Make sure that 'other' was not cleaned
+ assert!(p.build_dir().is_dir());
+ assert!(p.build_dir().join("debug").is_dir());
+ assert!(p.build_dir().join("other").is_dir());
+ assert!(!p.build_dir().join("release").is_dir());
+
+ // This should clean 'other'
+ p.cargo("clean --profile=other").with_stderr("").run();
+ assert!(p.build_dir().join("debug").is_dir());
+ assert!(!p.build_dir().join("other").is_dir());
+}
+
+#[cargo_test]
+fn unknown_profile() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build --profile alpha")
+ .with_stderr("[ERROR] profile `alpha` is not defined")
+ .with_status(101)
+ .run();
+    // Clean has a separate code path, so we need to check it too.
+ p.cargo("clean --profile alpha")
+ .with_stderr("[ERROR] profile `alpha` is not defined")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn reserved_profile_names() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.doc]
+ opt-level = 1
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build --profile=doc")
+ .with_status(101)
+ .with_stderr("error: profile `doc` is reserved and not allowed to be explicitly specified")
+ .run();
+ // Not an exhaustive list, just a sample.
+ for name in ["build", "cargo", "check", "rustc", "CaRgO_startswith"] {
+ p.cargo(&format!("build --profile={}", name))
+ .with_status(101)
+ .with_stderr(&format!(
+ "\
+error: profile name `{}` is reserved
+Please choose a different name.
+See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configuring profiles.
+",
+ name
+ ))
+ .run();
+ }
+ for name in ["build", "check", "cargo", "rustc", "CaRgO_startswith"] {
+ p.change_file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.{}]
+ opt-level = 1
+ "#,
+ name
+ ),
+ );
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(&format!(
+ "\
+error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
+
+Caused by:
+ profile name `{}` is reserved
+ Please choose a different name.
+ See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configuring profiles.
+",
+ name
+ ))
+ .run();
+ }
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [profile.debug]
+ debug = 1
+ inherits = "dev"
+ "#,
+ );
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
+
+Caused by:
+ profile name `debug` is reserved
+ To configure the default development profile, use the name `dev` as in [profile.dev]
+ See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configuring profiles.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn legacy_commands_support_custom() {
+    // These commands already supported `--profile` before custom named profiles were added.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.super-dev]
+ codegen-units = 3
+ inherits = "dev"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ for command in ["rustc", "fix", "check"] {
+ let mut pb = p.cargo(command);
+ if command == "fix" {
+ pb.arg("--allow-no-vcs");
+ }
+ pb.arg("--profile=super-dev")
+ .arg("-v")
+ .with_stderr_contains("[RUNNING] [..]codegen-units=3[..]")
+ .run();
+ p.build_dir().rm_rf();
+ }
+}
+
+#[cargo_test]
+fn legacy_rustc() {
+    // `cargo rustc` has historically supported the dev/test/bench/check profiles;
+    // other profiles are covered in `check::rustc_check`.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.dev]
+ codegen-units = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("rustc --profile dev -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] `rustc --crate-name foo [..]-C codegen-units=3[..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/profile_overrides.rs b/src/tools/cargo/tests/testsuite/profile_overrides.rs
new file mode 100644
index 000000000..dc9bafba1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/profile_overrides.rs
@@ -0,0 +1,515 @@
+//! Tests for profile overrides (build-override and per-package overrides).
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_lib_manifest, basic_manifest, project};
+
+#[cargo_test]
+fn profile_override_basic() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = {path = "bar"}
+
+ [profile.dev]
+ opt-level = 1
+
+ [profile.dev.package.bar]
+ opt-level = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check -v")
+ .with_stderr(
+ "[CHECKING] bar [..]
+[RUNNING] `rustc --crate-name bar [..] -C opt-level=3 [..]`
+[CHECKING] foo [..]
+[RUNNING] `rustc --crate-name foo [..] -C opt-level=1 [..]`
+[FINISHED] dev [optimized + debuginfo] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_override_warnings() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = {path = "bar"}
+
+ [profile.dev.package.bart]
+ opt-level = 3
+
+ [profile.dev.package.no-suggestion]
+ opt-level = 3
+
+ [profile.dev.package."bar:1.2.3"]
+ opt-level = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_stderr_contains(
+ "\
+[WARNING] profile package spec `bar@1.2.3` in profile `dev` \
+ has a version or URL that does not match any of the packages: \
+ bar v0.5.0 ([..]/foo/bar)
+[WARNING] profile package spec `bart` in profile `dev` did not match any packages
+
+<tab>Did you mean `bar`?
+[WARNING] profile package spec `no-suggestion` in profile `dev` did not match any packages
+[COMPILING] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_override_bad_settings() {
+ let bad_values = [
+ (
+ "panic = \"abort\"",
+ "`panic` may not be specified in a `package` profile",
+ ),
+ (
+ "lto = true",
+ "`lto` may not be specified in a `package` profile",
+ ),
+ (
+ "rpath = true",
+ "`rpath` may not be specified in a `package` profile",
+ ),
+ ("package = {}", "package-specific profiles cannot be nested"),
+ ];
+ for &(snippet, expected) in bad_values.iter() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = {{path = "bar"}}
+
+ [profile.dev.package.bar]
+ {}
+ "#,
+ snippet
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains(format!("Caused by:\n {}", expected))
+ .run();
+ }
+}
+
+#[cargo_test]
+fn profile_override_hierarchy() {
+ // Test that the precedence rules are correct for different types.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["m1", "m2", "m3"]
+
+ [profile.dev]
+ codegen-units = 1
+
+ [profile.dev.package.m2]
+ codegen-units = 2
+
+ [profile.dev.package."*"]
+ codegen-units = 3
+
+ [profile.dev.build-override]
+ codegen-units = 4
+ "#,
+ )
+ // m1
+ .file(
+ "m1/Cargo.toml",
+ r#"
+ [package]
+ name = "m1"
+ version = "0.0.1"
+
+ [dependencies]
+ m2 = { path = "../m2" }
+ dep = { path = "../../dep" }
+ "#,
+ )
+ .file("m1/src/lib.rs", "extern crate m2; extern crate dep;")
+ .file("m1/build.rs", "fn main() {}")
+ // m2
+ .file(
+ "m2/Cargo.toml",
+ r#"
+ [package]
+ name = "m2"
+ version = "0.0.1"
+
+ [dependencies]
+ m3 = { path = "../m3" }
+
+ [build-dependencies]
+ m3 = { path = "../m3" }
+ dep = { path = "../../dep" }
+ "#,
+ )
+ .file("m2/src/lib.rs", "extern crate m3;")
+ .file(
+ "m2/build.rs",
+ "extern crate m3; extern crate dep; fn main() {}",
+ )
+ // m3
+ .file("m3/Cargo.toml", &basic_lib_manifest("m3"))
+ .file("m3/src/lib.rs", "")
+ .build();
+
+ // dep (outside of workspace)
+ let _dep = project()
+ .at("dep")
+ .file("Cargo.toml", &basic_lib_manifest("dep"))
+ .file("src/lib.rs", "")
+ .build();
+
+ // Profiles should be:
+ // m3: 4 (as build.rs dependency)
+ // m3: 1 (as [profile.dev] as workspace member)
+ // dep: 3 (as [profile.dev.package."*"] as non-workspace member)
+ // m1 build.rs: 4 (as [profile.dev.build-override])
+ // m2 build.rs: 2 (as [profile.dev.package.m2])
+ // m2: 2 (as [profile.dev.package.m2])
+ // m1: 1 (as [profile.dev])
+
+ p.cargo("build -v").with_stderr_unordered("\
+[COMPILING] m3 [..]
+[COMPILING] dep [..]
+[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=4 [..]
+[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=3 [..]
+[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=1 [..]
+[RUNNING] `rustc --crate-name build_script_build m1/build.rs [..] --crate-type bin --emit=[..]link[..]-C codegen-units=4 [..]
+[COMPILING] m2 [..]
+[RUNNING] `rustc --crate-name build_script_build m2/build.rs [..] --crate-type bin --emit=[..]link[..]-C codegen-units=2 [..]
+[RUNNING] `[..]/m1-[..]/build-script-build`
+[RUNNING] `[..]/m2-[..]/build-script-build`
+[RUNNING] `rustc --crate-name m2 m2/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=2 [..]
+[COMPILING] m1 [..]
+[RUNNING] `rustc --crate-name m1 m1/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=1 [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_override_spec_multiple() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [profile.dev.package.bar]
+ opt-level = 3
+
+ [profile.dev.package."bar:0.5.0"]
+ opt-level = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[ERROR] multiple package overrides in profile `dev` match package `bar v0.5.0 ([..])`
+found package specs: bar, bar@0.5.0",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_override_spec() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["m1", "m2"]
+
+ [profile.dev.package."dep:1.0.0"]
+ codegen-units = 1
+
+ [profile.dev.package."dep:2.0.0"]
+ codegen-units = 2
+ "#,
+ )
+ // m1
+ .file(
+ "m1/Cargo.toml",
+ r#"
+ [package]
+ name = "m1"
+ version = "0.0.1"
+
+ [dependencies]
+ dep = { path = "../../dep1" }
+ "#,
+ )
+ .file("m1/src/lib.rs", "extern crate dep;")
+ // m2
+ .file(
+ "m2/Cargo.toml",
+ r#"
+ [package]
+ name = "m2"
+ version = "0.0.1"
+
+ [dependencies]
+ dep = {path = "../../dep2" }
+ "#,
+ )
+ .file("m2/src/lib.rs", "extern crate dep;")
+ .build();
+
+ project()
+ .at("dep1")
+ .file("Cargo.toml", &basic_manifest("dep", "1.0.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ project()
+ .at("dep2")
+ .file("Cargo.toml", &basic_manifest("dep", "2.0.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check -v")
+ .with_stderr_contains("[RUNNING] `rustc [..]dep1/src/lib.rs [..] -C codegen-units=1 [..]")
+ .with_stderr_contains("[RUNNING] `rustc [..]dep2/src/lib.rs [..] -C codegen-units=2 [..]")
+ .run();
+}
+
+#[cargo_test]
+fn override_proc_macro() {
+ Package::new("shared", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ shared = "1.0"
+ pm = {path = "pm"}
+
+ [profile.dev.build-override]
+ codegen-units = 4
+ "#,
+ )
+ .file("src/lib.rs", r#"pm::eat!{}"#)
+ .file(
+ "pm/Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ shared = "1.0"
+ "#,
+ )
+ .file(
+ "pm/src/lib.rs",
+ r#"
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[proc_macro]
+ pub fn eat(_item: TokenStream) -> TokenStream {
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check -v")
+ // Shared built for the proc-macro.
+ .with_stderr_contains("[RUNNING] `rustc [..]--crate-name shared [..]-C codegen-units=4[..]")
+ // Shared built for the library.
+ .with_stderr_line_without(
+ &["[RUNNING] `rustc --crate-name shared"],
+ &["-C codegen-units"],
+ )
+ .with_stderr_contains("[RUNNING] `rustc [..]--crate-name pm [..]-C codegen-units=4[..]")
+ .with_stderr_line_without(
+ &["[RUNNING] `rustc [..]--crate-name foo"],
+ &["-C codegen-units"],
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_warning_ws() {
+ // https://github.com/rust-lang/cargo/issues/7378, avoid warnings in a workspace.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+
+ [profile.dev.package.a]
+ codegen-units = 3
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("a/src/lib.rs", "")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("check -p b")
+ .with_stderr(
+ "\
+[CHECKING] b [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_override_shared() {
+ // A dependency with a build script that is shared with a build
+ // dependency, using different profile settings. That is:
+ //
+ // foo DEBUG=2
+ // ├── common DEBUG=2
+ // │ └── common Run build.rs DEBUG=2
+ // │ └── common build.rs DEBUG=0 (build_override)
+ // └── foo Run build.rs DEBUG=2
+ // └── foo build.rs DEBUG=0 (build_override)
+ // └── common DEBUG=0 (build_override)
+ // └── common Run build.rs DEBUG=0 (build_override)
+ // └── common build.rs DEBUG=0 (build_override)
+ //
+ // The key part here is that `common` RunCustomBuild is run twice, once
+ // with DEBUG=2 (as a dependency of foo) and once with DEBUG=0 (as a
+ // build-dependency of foo's build script).
+ Package::new("common", "1.0.0")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ if std::env::var("DEBUG").unwrap() != "false" {
+ println!("cargo:rustc-cfg=foo_debug");
+ } else {
+ println!("cargo:rustc-cfg=foo_release");
+ }
+ }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() -> u32 {
+ if cfg!(foo_debug) {
+ assert!(cfg!(debug_assertions));
+ 1
+ } else if cfg!(foo_release) {
+ assert!(!cfg!(debug_assertions));
+ 2
+ } else {
+ panic!("not set");
+ }
+ }
+ "#,
+ )
+ .publish();
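+ // The build script above sets `foo_debug` or `foo_release` depending on the
+ // DEBUG env var, and `common::foo()` returns 1 or 2 accordingly, so build.rs
+ // and main.rs below can assert which profile `common` was compiled with.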
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [build-dependencies]
+ common = "1.0"
+
+ [dependencies]
+ common = "1.0"
+
+ [profile.dev.build-override]
+ debug = 0
+ debug-assertions = false
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ assert_eq!(common::foo(), 2);
+ }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ assert_eq!(common::foo(), 1);
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run").run();
+}
diff --git a/src/tools/cargo/tests/testsuite/profile_targets.rs b/src/tools/cargo/tests/testsuite/profile_targets.rs
new file mode 100644
index 000000000..b3235972c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/profile_targets.rs
@@ -0,0 +1,674 @@
+//! Tests for checking exactly how profiles correspond with each unit. For
+//! example, the `test` profile applying to test targets, but not other
+//! targets, etc.
+
+use cargo_test_support::{basic_manifest, project, Project};
+
+fn all_target_project() -> Project {
+ // This abuses the `codegen-units` setting so that we can verify exactly
+ // which profile is used for each compiler invocation.
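+ // Profile -> codegen-units mapping used throughout these tests:
+ // dev=1, release=2, test=3, bench=4,
+ // dev build-override=5, release build-override=6.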
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [build-dependencies]
+ bdep = { path = "bdep" }
+
+ [profile.dev]
+ codegen-units = 1
+ panic = "abort"
+ [profile.release]
+ codegen-units = 2
+ panic = "abort"
+ [profile.test]
+ codegen-units = 3
+ [profile.bench]
+ codegen-units = 4
+ [profile.dev.build-override]
+ codegen-units = 5
+ [profile.release.build-override]
+ codegen-units = 6
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;")
+ .file("src/main.rs", "extern crate foo; fn main() {}")
+ .file("examples/ex1.rs", "extern crate foo; fn main() {}")
+ .file("tests/test1.rs", "extern crate foo;")
+ .file("benches/bench1.rs", "extern crate foo;")
+ .file(
+ "build.rs",
+ r#"
+ extern crate bdep;
+ fn main() {
+ eprintln!("foo custom build PROFILE={} DEBUG={} OPT_LEVEL={}",
+ std::env::var("PROFILE").unwrap(),
+ std::env::var("DEBUG").unwrap(),
+ std::env::var("OPT_LEVEL").unwrap(),
+ );
+ }
+ "#,
+ )
+ // `bar` package.
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ // `bdep` package.
+ .file(
+ "bdep/Cargo.toml",
+ r#"
+ [package]
+ name = "bdep"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = { path = "../bar" }
+ "#,
+ )
+ .file("bdep/src/lib.rs", "extern crate bar;")
+ .build()
+}
+
+#[cargo_test]
+fn profile_selection_build() {
+ let p = all_target_project();
+
+ // `build`
+ // NOTES:
+ // - bdep `panic` is not set because it thinks `build.rs` is a plugin.
+ // - build_script_build is built without panic because it thinks `build.rs` is a plugin.
+ // - We make sure that the build dependencies bar, bdep, and build.rs
+ // are built without debuginfo.
+ p.cargo("build -vv")
+ .with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[COMPILING] bdep [..]
+[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[COMPILING] foo [..]
+[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 [..]
+[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+"
+ )
+ .with_stderr_line_without(&["[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]-C codegen-units=5 [..]"], &["-C debuginfo"])
+ .with_stderr_line_without(&["[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]-C codegen-units=5 [..]"], &["-C debuginfo"])
+ .with_stderr_line_without(&["[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]-C codegen-units=5 [..]"], &["-C debuginfo"])
+ .run();
+ p.cargo("build -vv")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_selection_build_release() {
+ let p = all_target_project();
+
+ // `build --release`
+ p.cargo("build --release -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..]
+[COMPILING] bdep [..]
+[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..]
+[COMPILING] foo [..]
+[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..]
+[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]
+[FINISHED] release [optimized] [..]
+").run();
+ p.cargo("build --release -vv")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] release [optimized] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_selection_build_all_targets() {
+ let p = all_target_project();
+ // `build`
+ // NOTES:
+ // - bdep `panic` is not set because it thinks `build.rs` is a plugin.
+ // - build_script_build is built without panic because it thinks
+ // `build.rs` is a plugin.
+ // - Benchmark dependencies are compiled in `dev` mode, which may be
+ // surprising. See issue rust-lang/cargo#4929.
+ // - We make sure that the build dependencies bar, bdep, and build.rs
+ // are built without debuginfo.
+ //
+ // - Dependency profiles:
+ // Pkg Target Profile Reason
+ // --- ------ ------- ------
+ // bar lib dev For foo-bin
+ // bar lib dev-panic For tests/benches and bdep
+ // bdep lib dev-panic For foo build.rs
+ // foo custom dev-panic
+ //
+ // - `foo` target list is:
+ // Target Profile Mode
+ // ------ ------- ----
+ // lib dev+panic build (a normal lib target)
+ // lib dev-panic build (used by tests/benches)
+ // lib dev dev
+ // test dev dev
+ // bench dev dev
+ // bin dev dev
+ // bin dev build
+ // example dev build
+ p.cargo("build --all-targets -vv")
+ .with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[COMPILING] bdep [..]
+[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[COMPILING] foo [..]
+[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 [..]
+[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]`
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]`
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 [..]`
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]`
+[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]`
+[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]`
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]`
+[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]`
+[FINISHED] dev [unoptimized + debuginfo] [..]
+"
+ )
+ .with_stderr_line_without(&["[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]-C codegen-units=5 [..]"], &["-C debuginfo"])
+ .with_stderr_line_without(&["[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]-C codegen-units=5 [..]"], &["-C debuginfo"])
+ .with_stderr_line_without(&["[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]-C codegen-units=5 [..]"], &["-C debuginfo"])
+ .run();
+ p.cargo("build -vv")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_selection_build_all_targets_release() {
+ let p = all_target_project();
+ // `build --all-targets --release`
+ // NOTES:
+ // - bdep `panic` is not set because it thinks `build.rs` is a plugin.
+ // - bar is compiled twice. It tries with and without panic, but the "is a
+ // plugin" logic forces it to be cleared.
+ // - build_script_build is built without panic because it thinks
+ // `build.rs` is a plugin.
+ // - build_script_build is run twice: once for the `dev` and `test`
+ // targets, and once for the `bench` targets.
+ // TODO: "PROFILE" says debug both times, though!
+ //
+ // - Dependency profiles:
+ // Pkg Target Profile Reason
+ // --- ------ ------- ------
+ // bar lib release For foo-bin
+ // bar lib release-panic For tests/benches and bdep
+ // bdep lib release-panic For foo build.rs
+ // foo custom release-panic
+ //
+ // - `foo` target list is:
+ // Target Profile Mode
+ // ------ ------- ----
+ // lib release+panic build (a normal lib target)
+ // lib release-panic build (used by tests/benches)
+ // lib release test (bench/test de-duped)
+ // test release test
+ // bench release test
+ // bin release test (bench/test de-duped)
+ // bin release build
+ // example release build
+ p.cargo("build --all-targets --release -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..]
+[COMPILING] bdep [..]
+[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..]
+[COMPILING] foo [..]
+[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..]
+[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]`
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]`
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..]`
+[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]`
+[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]`
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]`
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]`
+[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]`
+[FINISHED] release [optimized] [..]
+").run();
+ p.cargo("build --all-targets --release -vv")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] release [optimized] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_selection_test() {
+ let p = all_target_project();
+ // `test`
+ // NOTES:
+ // - Dependency profiles:
+ // Pkg Target Profile Reason
+ // --- ------ ------- ------
+ // bar lib test For foo-bin
+ // bar lib test-panic For tests/benches and bdep
+ // bdep lib test-panic For foo build.rs
+ // foo custom test-panic
+ //
+ // - `foo` target list is:
+ // Target Profile Mode
+ // ------ ------- ----
+ // lib test-panic build (for tests)
+ // lib test build (for bins)
+ // lib test test
+ // test test test
+ // example test-panic build
+ // bin test test
+ // bin test build
+ //
+ p.cargo("test -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=3 -C debuginfo=2 [..]
+[COMPILING] bdep [..]
+[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[COMPILING] foo [..]
+[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 [..]
+[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=3 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 --test [..]
+[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 --test [..]
+[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 --test [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=3 -C debuginfo=2 [..]
+[FINISHED] test [unoptimized + debuginfo] [..]
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/test1-[..]`
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]--test [..]
+").run();
+ p.cargo("test -vv")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] test [unoptimized + debuginfo] [..]
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/test1-[..]`
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]--test [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_selection_test_release() {
+ let p = all_target_project();
+
+ // `test --release`
+ // NOTES:
+ // - Dependency profiles:
+ // Pkg Target Profile Reason
+ // --- ------ ------- ------
+ // bar lib release For foo-bin
+ // bar lib release-panic For tests/benches and bdep
+ // bdep lib release-panic For foo build.rs
+ // foo custom release-panic
+ //
+ // - `foo` target list is:
+ // Target Profile Mode
+ // ------ ------- ----
+ // lib release-panic build (for tests)
+ // lib release build (for bins)
+ // lib release test
+ // test release test
+ // example release-panic build
+ // bin release test
+ // bin release build
+ //
+ p.cargo("test --release -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C opt-level=3[..]-C codegen-units=2[..]
+[COMPILING] bdep [..]
+[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..]
+[COMPILING] foo [..]
+[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..]
+[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]
+[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]
+[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]
+[FINISHED] release [optimized] [..]
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/test1-[..]`
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]--test [..]`
+").run();
+ p.cargo("test --release -vv")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] release [optimized] [..]
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/test1-[..]`
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]--test [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_selection_bench() {
+ let p = all_target_project();
+
+ // `bench`
+ // NOTES:
+ // - Dependency profiles:
+ // Pkg Target Profile Reason
+ // --- ------ ------- ------
+ // bar lib bench For foo-bin
+ // bar lib bench-panic For tests/benches and bdep
+ // bdep lib bench-panic For foo build.rs
+ // foo custom bench-panic
+ //
+ // - `foo` target list is:
+ // Target Profile Mode
+ // ------ ------- ----
+ // lib bench-panic build (for benches)
+ // lib bench build (for bins)
+ // lib bench test(bench)
+ // bench bench test(bench)
+ // bin bench test(bench)
+ // bin bench build
+ //
+ p.cargo("bench -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=4 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=4 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..]
+[COMPILING] bdep [..]
+[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..]
+[COMPILING] foo [..]
+[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..]
+[RUNNING] `[..]target/release/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=4 [..]
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=4 [..]
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=4 --test [..]
+[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=4 --test [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=4 --test [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=4 [..]
+[FINISHED] bench [optimized] [..]
+[RUNNING] `[..]/deps/foo-[..] --bench`
+[RUNNING] `[..]/deps/foo-[..] --bench`
+[RUNNING] `[..]/deps/bench1-[..] --bench`
+").run();
+ p.cargo("bench -vv")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] bench [optimized] [..]
+[RUNNING] `[..]/deps/foo-[..] --bench`
+[RUNNING] `[..]/deps/foo-[..] --bench`
+[RUNNING] `[..]/deps/bench1-[..] --bench`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_selection_check_all_targets() {
+ let p = all_target_project();
+ // `check`
+ // NOTES:
+ // - Dependency profiles:
+ // Pkg Target Profile Action Reason
+ // --- ------ ------- ------ ------
+ // bar lib dev* link For bdep
+ // bar lib dev-panic metadata For tests/benches
+ // bar lib dev metadata For lib/bins
+ // bdep lib dev* link For foo build.rs
+ // foo custom dev* link For build.rs
+ //
+ // `*` = wants panic, but it is cleared when args are built.
+ //
+ // - foo target list is:
+ // Target Profile Mode
+ // ------ ------- ----
+ // lib dev check
+ // lib dev-panic check (for tests/benches)
+ // lib dev-panic check-test (checking lib as a unittest)
+ // example dev check
+ // test dev-panic check-test
+ // bench dev-panic check-test
+ // bin dev check
+ // bin dev-panic check-test (checking bin as a unittest)
+ //
+ p.cargo("check --all-targets -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] bdep[..]
+[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[COMPILING] foo [..]
+[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 [..]
+[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+").run();
+ // Starting with Rust 1.27, rustc emits `rmeta` files for bins, so
+ // everything should be completely fresh. Previously, bins were being
+ // rechecked.
+ // See PR rust-lang/rust#49289 and issue rust-lang/cargo#3624.
+ p.cargo("check --all-targets -vv")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_selection_check_all_targets_release() {
+ let p = all_target_project();
+ // `check --release`
+ // See issue rust-lang/cargo#5218.
+ // This is a pretty straightforward variant of
+ // `profile_selection_check_all_targets` that uses `release` instead of
+ // `dev` for all targets.
+ p.cargo("check --all-targets --release -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]
+[COMPILING] bdep[..]
+[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link [..]-C codegen-units=6 [..]
+[COMPILING] foo [..]
+[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..]
+[RUNNING] `[..]target/release/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..]
+[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..]
+[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..]
+[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]
+[FINISHED] release [optimized] [..]
+").run();
+
+ p.cargo("check --all-targets --release -vv")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] release [optimized] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_selection_check_all_targets_test() {
+ let p = all_target_project();
+
+ // `check --profile=test`
+ // - Dependency profiles:
+ // Pkg Target Profile Action Reason
+ // --- ------ ------- ------ ------
+ // bar lib test* link For bdep
+ // bar lib test-panic metadata For tests/benches
+ // bdep lib test* link For foo build.rs
+ // foo custom test* link For build.rs
+ //
+ // `*` = wants panic, but it is cleared when args are built.
+ //
+ // - foo target list is:
+ // Target Profile Mode
+ // ------ ------- ----
+ // lib test-panic check-test (for tests/benches)
+ // lib test-panic check-test (checking lib as a unittest)
+ // example test-panic check-test
+ // test test-panic check-test
+ // bench test-panic check-test
+ // bin test-panic check-test
+ //
+ p.cargo("check --all-targets --profile=test -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 [..]
+[COMPILING] bdep[..]
+[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[COMPILING] foo [..]
+[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 [..]
+[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 [..]
+[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..]
+[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..]
+[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..]
+[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..]
+[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..]
+[FINISHED] test [unoptimized + debuginfo] [..]
+").run();
+
+ p.cargo("check --all-targets --profile=test -vv")
+ .with_stderr_unordered(
+ "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] test [unoptimized + debuginfo] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_selection_doc() {
+ let p = all_target_project();
+ // `doc`
+ // NOTES:
+ // - Dependency profiles:
+ // Pkg Target Profile Action Reason
+ // --- ------ ------- ------ ------
+ // bar lib dev* link For bdep
+ // bar lib dev metadata For rustdoc
+ // bdep lib dev* link For foo build.rs
+ // foo custom dev* link For build.rs
+ //
+ // `*` = wants panic, but it is cleared when args are built.
+ p.cargo("doc -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[DOCUMENTING] bar [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[RUNNING] `rustdoc [..]--crate-name bar bar/src/lib.rs [..]
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] bdep [..]
+[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 [..]
+[COMPILING] foo [..]
+[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 [..]
+[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[DOCUMENTING] foo [..]
+[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+").run();
+}
diff --git a/src/tools/cargo/tests/testsuite/profiles.rs b/src/tools/cargo/tests/testsuite/profiles.rs
new file mode 100644
index 000000000..2d2646fe3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/profiles.rs
@@ -0,0 +1,744 @@
+//! Tests for profiles.
+
+use cargo_test_support::project;
+use cargo_test_support::registry::Package;
+use std::env;
+
+#[cargo_test]
+fn profile_overrides() {
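+ // With `opt-level = 1`, `debug = false`, and `rpath = true`, the rustc
+ // invocation below should carry `-C opt-level=1` and `-C rpath` but no
+ // `-C debuginfo` flag.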
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "test"
+ version = "0.0.0"
+ authors = []
+
+ [profile.dev]
+ opt-level = 1
+ debug = false
+ rpath = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]\
+ -C opt-level=1[..]\
+ -C debug-assertions=on \
+ -C metadata=[..] \
+ -C rpath \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [optimized] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn opt_level_override_0() {
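+ // `opt-level = 0` is rustc's default, so no `-C opt-level` flag is expected
+ // in the rustc invocation below.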
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "test"
+ version = "0.0.0"
+ authors = []
+
+ [profile.dev]
+ opt-level = 0
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]\
+ -C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps`
+[FINISHED] [..] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn debug_override_1() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.0.0"
+ authors = []
+
+ [profile.dev]
+ debug = 1
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]\
+ -C debuginfo=1 \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps`
+[FINISHED] [..] target(s) in [..]
+",
+ )
+ .run();
+}
+
+fn check_opt_level_override(profile_level: &str, rustc_level: &str) {
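+ // Helper: builds a project with `opt-level = {profile_level}` in
+ // `[profile.dev]` and checks that rustc is invoked with
+ // `-C opt-level={rustc_level}`.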
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+
+ name = "test"
+ version = "0.0.0"
+ authors = []
+
+ [profile.dev]
+ opt-level = {level}
+ "#,
+ level = profile_level
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("build -v")
+ .with_stderr(&format!(
+ "\
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \
+ --emit=[..]link \
+ -C opt-level={level}[..]\
+ -C debuginfo=2 \
+ -C debug-assertions=on \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps`
+[FINISHED] [..] target(s) in [..]
+",
+ level = rustc_level
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn opt_level_overrides() {
+ for &(profile_level, rustc_level) in &[
+ ("1", "1"),
+ ("2", "2"),
+ ("3", "3"),
+ ("\"s\"", "s"),
+ ("\"z\"", "z"),
+ ] {
+ check_opt_level_override(profile_level, rustc_level)
+ }
+}
+
+#[cargo_test]
+fn top_level_overrides_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+
+ name = "test"
+ version = "0.0.0"
+ authors = []
+
+ [profile.release]
+ opt-level = 1
+ debug = true
+
+ [dependencies.foo]
+ path = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+
+ name = "foo"
+ version = "0.0.0"
+ authors = []
+
+ [profile.release]
+ opt-level = 0
+ debug = false
+
+ [lib]
+ name = "foo"
+ crate_type = ["dylib", "rlib"]
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+ p.cargo("build -v --release")
+ .with_stderr(&format!(
+ "\
+[COMPILING] foo v0.0.0 ([CWD]/foo)
+[RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\
+ --crate-type dylib --crate-type rlib \
+ --emit=[..]link \
+ -C prefer-dynamic \
+ -C opt-level=1[..]\
+ -C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [CWD]/target/release/deps \
+ -L dependency=[CWD]/target/release/deps`
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \
+ --emit=[..]link \
+ -C opt-level=1[..]\
+ -C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/release/deps \
+ --extern foo=[CWD]/target/release/deps/\
+ {prefix}foo[..]{suffix} \
+ --extern foo=[CWD]/target/release/deps/libfoo.rlib`
+[FINISHED] release [optimized + debuginfo] target(s) in [..]
+",
+ prefix = env::consts::DLL_PREFIX,
+ suffix = env::consts::DLL_SUFFIX
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn profile_in_non_root_manifest_triggers_a_warning() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+
+ [profile.dev]
+ debug = false
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+
+ [profile.dev]
+ opt-level = 1
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .cwd("bar")
+ .with_stderr(
+ "\
+[WARNING] profiles for the non root package will be ignored, specify profiles at the workspace root:
+package: [..]
+workspace: [..]
+[COMPILING] bar v0.1.0 ([..])
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_in_virtual_manifest_works() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+
+ [profile.dev]
+ opt-level = 1
+ debug = false
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .cwd("bar")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.1.0 ([..])
+[RUNNING] `rustc [..]`
+[FINISHED] dev [optimized] target(s) in [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_lto_string_bool_dev() {
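+ // `lto` accepts a boolean or one of the strings "thin"/"fat"/"off"; the
+ // string "true" is rejected with the manifest error below.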
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [profile.dev]
+ lto = "true"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
+
+Caused by:
+ `lto` setting of string `\"true\"` for `dev` profile is not a valid setting, \
+must be a boolean (`true`/`false`) or a string (`\"thin\"`/`\"fat\"`/`\"off\"`) or omitted.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_panic_test_bench() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [profile.test]
+ panic = "abort"
+
+ [profile.bench]
+ panic = "abort"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_stderr_contains(
+ "\
+[WARNING] `panic` setting is ignored for `bench` profile
+[WARNING] `panic` setting is ignored for `test` profile
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn profile_doc_deprecated() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [profile.doc]
+ opt-level = 0
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build")
+ .with_stderr_contains("[WARNING] profile `doc` is deprecated and has no effect")
+ .run();
+}
+
+#[cargo_test]
+fn panic_unwind_does_not_build_twice() {
+ // Check for a bug where `lib` was built twice, once with panic set and
+ // once without. Since "unwind" is the default, they are the same and
+ // should only be built once.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.dev]
+ panic = "unwind"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .file("tests/t1.rs", "")
+ .build();
+
+ p.cargo("test -v --tests --no-run")
+ .with_stderr_unordered(
+ "\
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..] --test [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..] --test [..]
+[RUNNING] `rustc --crate-name t1 tests/t1.rs [..]
+[FINISHED] [..]
+[EXECUTABLE] `[..]/target/debug/deps/t1-[..][EXE]`
+[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
+[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn debug_0_report() {
+ // Check that the `[FINISHED]` line handles `debug = 0` correctly
+ // (reported as plain "unoptimized", with no debuginfo).
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.dev]
+ debug = 0
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C debuginfo=0 [..]
+[FINISHED] dev [unoptimized] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn thin_lto_works() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "top"
+ version = "0.5.0"
+ authors = []
+
+ [profile.release]
+ lto = 'thin'
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --release -v")
+ .with_stderr(
+ "\
+[COMPILING] top [..]
+[RUNNING] `rustc [..] -C lto=thin [..]`
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn strip_works() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.release]
+ strip = 'symbols'
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --release -v")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[RUNNING] `rustc [..] -C strip=symbols [..]`
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn strip_passes_unknown_option_to_rustc() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.release]
+ strip = 'unknown'
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --release -v")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo [..]
+[RUNNING] `rustc [..] -C strip=unknown [..]`
+error: incorrect value `unknown` for [..] `strip` [..] was expected
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn strip_accepts_true_to_strip_symbols() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.release]
+ strip = true
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --release -v")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[RUNNING] `rustc [..] -C strip=symbols [..]`
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn strip_accepts_false_to_disable_strip() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [profile.release]
+ strip = false
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --release -v")
+ .with_stderr_does_not_contain("-C strip")
+ .run();
+}
+
+#[cargo_test]
+fn rustflags_works() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["profile-rustflags"]
+
+ [profile.dev]
+ rustflags = ["-C", "link-dead-code=yes"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .masquerade_as_nightly_cargo(&["profile-rustflags"])
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..] -C link-dead-code=yes [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustflags_works_with_env() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["profile-rustflags"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .env("CARGO_PROFILE_DEV_RUSTFLAGS", "-C link-dead-code=yes")
+ .masquerade_as_nightly_cargo(&["profile-rustflags"])
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..] -C link-dead-code=yes [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustflags_requires_cargo_feature() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [profile.dev]
+ rustflags = ["-C", "link-dead-code=yes"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v")
+ .masquerade_as_nightly_cargo(&["profile-rustflags"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ feature `profile-rustflags` is required
+
+ The package requires the Cargo feature called `profile-rustflags`, but that feature is \
+ not stabilized in this version of Cargo (1.[..]).
+ Consider adding `cargo-features = [\"profile-rustflags\"]` to the top of Cargo.toml \
+ (above the [package] table) to tell Cargo you are opting in to use this unstable feature.
+ See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-rustflags-option \
+ for more information about the status of this feature.
+",
+ )
+ .run();
+
+ Package::new("bar", "1.0.0").publish();
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = "1.0"
+
+ [profile.dev.package.bar]
+ rustflags = ["-C", "link-dead-code=yes"]
+ "#,
+ );
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["profile-rustflags"])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
+
+Caused by:
+ feature `profile-rustflags` is required
+
+ The package requires the Cargo feature called `profile-rustflags`, but that feature is \
+ not stabilized in this version of Cargo (1.[..]).
+ Consider adding `cargo-features = [\"profile-rustflags\"]` to the top of Cargo.toml \
+ (above the [package] table) to tell Cargo you are opting in to use this unstable feature.
+ See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-rustflags-option \
+ for more information about the status of this feature.
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/progress.rs b/src/tools/cargo/tests/testsuite/progress.rs
new file mode 100644
index 000000000..20870a394
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/progress.rs
@@ -0,0 +1,159 @@
+//! Tests for progress bar.
+
+use cargo_test_support::project;
+use cargo_test_support::registry::Package;
+
+#[cargo_test]
+fn bad_progress_config_unknown_when() {
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [term]
+ progress = { when = 'unknown' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] error in [..].cargo/config: \
+could not load config key `term.progress.when`
+
+Caused by:
+ unknown variant `unknown`, expected one of `auto`, `never`, `always`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_progress_config_missing_width() {
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [term]
+ progress = { when = 'always' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] \"always\" progress requires a `width` key
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_progress_config_missing_when() {
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [term]
+ progress = { width = 1000 }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: missing field `when`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn always_shows_progress() {
+ const N: usize = 3;
+ let mut deps = String::new();
+ for i in 1..=N {
+ Package::new(&format!("dep{}", i), "1.0.0").publish();
+ deps.push_str(&format!("dep{} = \"1.0\"\n", i));
+ }
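+ // Three registry deps plus `foo` itself account for the `[..]/4` unit count
+ // asserted below.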
+
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [term]
+ progress = { when = 'always', width = 100 }
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ {}
+ "#,
+ deps
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr_contains("[DOWNLOADING] [..] crates [..]")
+ .with_stderr_contains("[..][DOWNLOADED] 3 crates ([..]) in [..]")
+ .with_stderr_contains("[BUILDING] [..] [..]/4: [..]")
+ .run();
+}
+
+#[cargo_test]
+fn never_progress() {
+ const N: usize = 3;
+ let mut deps = String::new();
+ for i in 1..=N {
+ Package::new(&format!("dep{}", i), "1.0.0").publish();
+ deps.push_str(&format!("dep{} = \"1.0\"\n", i));
+ }
+
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [term]
+ progress = { when = 'never' }
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ {}
+ "#,
+ deps
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr_does_not_contain("[DOWNLOADING] [..] crates [..]")
+ .with_stderr_does_not_contain("[..][DOWNLOADED] 3 crates ([..]) in [..]")
+ .with_stderr_does_not_contain("[BUILDING] [..] [..]/4: [..]")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/pub_priv.rs b/src/tools/cargo/tests/testsuite/pub_priv.rs
new file mode 100644
index 000000000..83c6a49f8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/pub_priv.rs
@@ -0,0 +1,199 @@
+//! Tests for public/private dependencies.
+
+use cargo_test_support::project;
+use cargo_test_support::registry::Package;
+
+#[cargo_test(nightly, reason = "exported_private_dependencies lint is unstable")]
+fn exported_priv_warning() {
+ Package::new("priv_dep", "0.1.0")
+ .file("src/lib.rs", "pub struct FromPriv;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["public-dependency"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ priv_dep = "0.1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate priv_dep;
+ pub fn use_priv(_: priv_dep::FromPriv) {}
+ ",
+ )
+ .build();
+
+ p.cargo("check --message-format=short")
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .with_stderr_contains(
+ "\
+src/lib.rs:3:13: warning: type `[..]FromPriv` from private dependency 'priv_dep' in public interface
+",
+ )
+ .run()
+}
+
+#[cargo_test(nightly, reason = "exported_private_dependencies lint is unstable")]
+fn exported_pub_dep() {
+ Package::new("pub_dep", "0.1.0")
+ .file("src/lib.rs", "pub struct FromPub;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["public-dependency"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ pub_dep = {version = "0.1.0", public = true}
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate pub_dep;
+ pub fn use_pub(_: pub_dep::FromPub) {}
+ ",
+ )
+ .build();
+
+ p.cargo("check --message-format=short")
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] pub_dep v0.1.0 ([..])
+[CHECKING] pub_dep v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run()
+}
+
+#[cargo_test]
+pub fn requires_nightly_cargo() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["public-dependency"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check --message-format=short")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ the cargo feature `public-dependency` requires a nightly version of Cargo, but this is the `stable` channel
+ See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information about Rust release channels.
+ See https://doc.rust-lang.org/[..]cargo/reference/unstable.html#public-dependency for more information about using this feature.
+"
+ )
+ .run()
+}
+
+#[cargo_test]
+fn requires_feature() {
+ Package::new("pub_dep", "0.1.0")
+ .file("src/lib.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ pub_dep = { version = "0.1.0", public = true }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check --message-format=short")
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ feature `public-dependency` is required
+
+ The package requires the Cargo feature called `public-dependency`, \
+ but that feature is not stabilized in this version of Cargo (1.[..]).
+ Consider adding `cargo-features = [\"public-dependency\"]` to the top of Cargo.toml \
+ (above the [package] table) to tell Cargo you are opting in to use this unstable feature.
+ See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#public-dependency \
+ for more information about the status of this feature.
+",
+ )
+ .run()
+}
+
+#[cargo_test]
+fn pub_dev_dependency() {
+ Package::new("pub_dep", "0.1.0")
+ .file("src/lib.rs", "pub struct FromPub;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["public-dependency"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dev-dependencies]
+ pub_dep = {version = "0.1.0", public = true}
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate pub_dep;
+ pub fn use_pub(_: pub_dep::FromPub) {}
+ ",
+ )
+ .build();
+
+ p.cargo("check --message-format=short")
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ 'public' specifier can only be used on regular dependencies, not Development dependencies
+",
+ )
+ .run()
+}
diff --git a/src/tools/cargo/tests/testsuite/publish.rs b/src/tools/cargo/tests/testsuite/publish.rs
new file mode 100644
index 000000000..00a79fe73
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/publish.rs
@@ -0,0 +1,2951 @@
+//! Tests for the `cargo publish` command.
+
+use cargo_test_support::git::{self, repo};
+use cargo_test_support::paths;
+use cargo_test_support::registry::{self, Package, RegistryBuilder, Response};
+use cargo_test_support::{basic_manifest, no_such_file_err_msg, project, publish};
+use std::fs;
+use std::sync::{Arc, Mutex};
+
+const CLEAN_FOO_JSON: &str = r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [],
+ "description": "foo",
+ "documentation": "foo",
+ "features": {},
+ "homepage": "foo",
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": "foo",
+ "vers": "0.0.1"
+ }
+"#;
+
+fn validate_upload_foo() {
+ publish::validate_upload(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [],
+ "description": "foo",
+ "documentation": null,
+ "features": {},
+ "homepage": null,
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "vers": "0.0.1"
+ }
+ "#,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ );
+}
+
+fn validate_upload_li() {
+ publish::validate_upload(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [],
+ "description": "li",
+ "documentation": null,
+ "features": {},
+ "homepage": null,
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "li",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "vers": "0.0.1"
+ }
+ "#,
+ "li-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ );
+}
+
+#[cargo_test]
+fn simple() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
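+ // A local test registry with an HTTP API and HTTP index, so the publish
+ // round-trip can be exercised without touching the real crates.io.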
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting for `foo v0.0.1` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] foo v0.0.1 at registry `crates-io`
+",
+ )
+ .run();
+
+ validate_upload_foo();
+}
+
+// Check that the `token` key works at the root instead of under a
+// `[registry]` table.
+#[cargo_test]
+fn simple_publish_with_http() {
+ let _reg = registry::RegistryBuilder::new()
+ .http_api()
+ .token(registry::Token::Plaintext("sekrit".to_string()))
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --no-verify --token sekrit --registry dummy-registry")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `dummy-registry`
+note: Waiting for `foo v0.0.1` to be available at registry `dummy-registry`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] foo v0.0.1 at registry `dummy-registry`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn simple_publish_with_asymmetric() {
+ let _reg = registry::RegistryBuilder::new()
+ .http_api()
+ .http_index()
+ .alternative_named("dummy-registry")
+ .token(registry::Token::rfc_key())
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --no-verify -Zregistry-auth --registry dummy-registry")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `dummy-registry`
+note: Waiting for `foo v0.0.1` to be available at registry `dummy-registry`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] foo v0.0.1 at registry `dummy-registry`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn old_token_location() {
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ let credentials = paths::home().join(".cargo/credentials.toml");
+ fs::remove_file(&credentials).unwrap();
+
+ // Verify can't publish without a token.
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr_contains(
+ "[ERROR] no token found, \
+ please run `cargo login`",
+ )
+ .run();
+
+ fs::write(&credentials, format!(r#"token = "{}""#, registry.token())).unwrap();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+
+    // Skip `validate_upload_foo`; we just care that we got far enough to verify the token behavior.
+ // Other tests will verify the endpoint gets the right payload.
+}
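
For reference, a rough sketch of the two credentials layouts the test above exercises (the values and the `my-registry` name are illustrative only, not part of this patch). The old layout puts a bare `token` key at the root of `~/.cargo/credentials.toml`:

    token = "sekrit"

while the current layout keeps tokens under tables, with `[registries.<name>]` for alternative registries:

    [registry]
    token = "sekrit"

    [registries.my-registry]
    token = "another-sekrit"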
+
+#[cargo_test]
+fn simple_with_index() {
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .arg("--token")
+ .arg(registry.token())
+ .arg("--index")
+ .arg(registry.index_url().as_str())
+ .with_stderr(
+ "\
+[..]
+[..]
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `[ROOT]/registry`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+
+    // Skip `validate_upload_foo`; we just care that we got far enough to verify the `--index` handling.
+ // Other tests will verify the endpoint gets the right payload.
+}
+
+#[cargo_test]
+fn git_deps() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [dependencies.foo]
+ git = "git://path/to/nowhere"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish -v --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[ERROR] all dependencies must have a version specified when publishing.
+dependency `foo` does not specify a version
+Note: The published dependency will use the version from crates.io,
+the `git` specification will be removed from the dependency declaration.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn path_dependency_no_version() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[ERROR] all dependencies must have a version specified when publishing.
+dependency `bar` does not specify a version
+Note: The published dependency will use the version from crates.io,
+the `path` specification will be removed from the dependency declaration.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn unpublishable_crate() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ publish = false
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --index")
+ .arg(registry.index_url().as_str())
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] `foo` cannot be published.
+`package.publish` is set to `false` or an empty list in Cargo.toml and prevents publishing.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dont_publish_dirty() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project().file("bar", "").build();
+
+ let _ = git::repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+error: 1 files in the working directory contain changes that were not yet \
+committed into git:
+
+bar
+
+to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_clean() {
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project().build();
+
+ let _ = repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[..]
+[..]
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+
+    // Skip `validate_upload_foo_clean`; we just care that we got far enough to verify the VCS behavior.
+ // Other tests will verify the endpoint gets the right payload.
+}
+
+#[cargo_test]
+fn publish_in_sub_repo() {
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project().no_manifest().file("baz", "").build();
+
+ let _ = repo(&paths::root().join("foo"))
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .cwd("bar")
+ .with_stderr(
+ "\
+[..]
+[..]
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+
+    // Skip `validate_upload_foo_clean`; we just care that we got far enough to verify the VCS behavior.
+ // Other tests will verify the endpoint gets the right payload.
+}
+
+#[cargo_test]
+fn publish_when_ignored() {
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project().file("baz", "").build();
+
+ let _ = repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(".gitignore", "baz")
+ .build();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[..]
+[..]
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+
+    // Skip `validate_upload`; we just care that we got far enough to verify the VCS behavior.
+ // Other tests will verify the endpoint gets the right payload.
+}
+
+#[cargo_test]
+fn ignore_when_crate_ignored() {
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project().no_manifest().file("bar/baz", "").build();
+
+ let _ = repo(&paths::root().join("foo"))
+ .file(".gitignore", "bar")
+ .nocommit_file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .nocommit_file("bar/src/main.rs", "fn main() {}");
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .cwd("bar")
+ .with_stderr(
+ "\
+[..]
+[..]
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+
+    // Skip `validate_upload`; we just care that we got far enough to verify the VCS behavior.
+ // Other tests will verify the endpoint gets the right payload.
+}
+
+#[cargo_test]
+fn new_crate_rejected() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project().file("baz", "").build();
+
+ let _ = repo(&paths::root().join("foo"))
+ .nocommit_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .nocommit_file("src/main.rs", "fn main() {}");
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr_contains(
+ "[ERROR] 3 files in the working directory contain \
+ changes that were not yet committed into git:",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dry_run() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --dry-run --index")
+ .arg(registry.index_url().as_str())
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.0.1 ([CWD])
+[WARNING] aborting upload due to dry run
+",
+ )
+ .run();
+
+ // Ensure the API request wasn't actually made
+ assert!(registry::api_path().join("api/v1/crates").exists());
+ assert!(!registry::api_path().join("api/v1/crates/new").exists());
+}
+
+#[cargo_test]
+fn registry_not_in_publish_list() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ publish = [
+ "test"
+ ]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish")
+ .arg("--registry")
+ .arg("alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] `foo` cannot be published.
+The registry `alternative` is not listed in the `package.publish` value in Cargo.toml.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_empty_list() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ publish = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] `foo` cannot be published.
+`package.publish` is set to `false` or an empty list in Cargo.toml and prevents publishing.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_allowed_registry() {
+ let _registry = RegistryBuilder::new()
+ .http_api()
+ .http_index()
+ .alternative()
+ .build();
+
+ let p = project().build();
+
+ let _ = repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ publish = ["alternative"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --registry alternative")
+ .with_stderr(
+ "\
+[..]
+[..]
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `alternative`
+note: Waiting for `foo v0.0.1` to be available at registry `alternative`.
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 at registry `alternative`
+",
+ )
+ .run();
+
+ publish::validate_alt_upload(
+ CLEAN_FOO_JSON,
+ "foo-0.0.1.crate",
+ &[
+ "Cargo.lock",
+ "Cargo.toml",
+ "Cargo.toml.orig",
+ "src/main.rs",
+ ".cargo_vcs_info.json",
+ ],
+ );
+}
+
+#[cargo_test]
+fn publish_implicitly_to_only_allowed_registry() {
+ let _registry = RegistryBuilder::new()
+ .http_api()
+ .http_index()
+ .alternative()
+ .build();
+
+ let p = project().build();
+
+ let _ = repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ publish = ["alternative"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish")
+ .with_stderr(
+ "\
+[NOTE] Found `alternative` as only allowed registry. Publishing to it automatically.
+[UPDATING] `alternative` index
+[..]
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `alternative`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+
+ publish::validate_alt_upload(
+ CLEAN_FOO_JSON,
+ "foo-0.0.1.crate",
+ &[
+ "Cargo.lock",
+ "Cargo.toml",
+ "Cargo.toml.orig",
+ "src/main.rs",
+ ".cargo_vcs_info.json",
+ ],
+ );
+}
+
+#[cargo_test]
+fn publish_fail_with_no_registry_specified() {
+ let p = project().build();
+
+ let _ = repo(&paths::root().join("foo"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ publish = ["alternative", "test"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] `foo` cannot be published.
+The registry `crates-io` is not listed in the `package.publish` value in Cargo.toml.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn block_publish_no_registry() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ publish = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] `foo` cannot be published.
+`package.publish` is set to `false` or an empty list in Cargo.toml and prevents publishing.
+",
+ )
+ .run();
+}
+
+// Explicitly setting `crates-io` in the publish list.
+#[cargo_test]
+fn publish_with_crates_io_explicit() {
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ publish = ["crates-io"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] `foo` cannot be published.
+The registry `alternative` is not listed in the `package.publish` value in Cargo.toml.
+",
+ )
+ .run();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[WARNING] [..]
+[..]
+[PACKAGING] [..]
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_with_select_features() {
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [features]
+ required = []
+ optional = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "#[cfg(not(feature = \"required\"))]
+ compile_error!(\"This crate requires `required` feature!\");
+ fn main() {}",
+ )
+ .build();
+
+ p.cargo("publish --features required")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[..]
+[..]
+[..]
+[..]
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_with_all_features() {
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [features]
+ required = []
+ optional = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "#[cfg(not(feature = \"required\"))]
+ compile_error!(\"This crate requires `required` feature!\");
+ fn main() {}",
+ )
+ .build();
+
+ p.cargo("publish --all-features")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[..]
+[..]
+[..]
+[..]
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_with_no_default_features() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [features]
+ default = ["required"]
+ required = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "#[cfg(not(feature = \"required\"))]
+ compile_error!(\"This crate requires `required` feature!\");
+ fn main() {}",
+ )
+ .build();
+
+ p.cargo("publish --no-default-features")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr_contains("error: This crate requires `required` feature!")
+ .run();
+}
+
+#[cargo_test]
+fn publish_with_patch() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+ Package::new("bar", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ [dependencies]
+ bar = "1.0"
+ [patch.crates-io]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate bar;
+ fn main() {
+ bar::newfunc();
+ }",
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0"))
+ .file("bar/src/lib.rs", "pub fn newfunc() {}")
+ .build();
+
+ // Check that it works with the patched crate.
+ p.cargo("build").run();
+
+    // Check that verify fails with the patched crate, which has new functionality.
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr_contains("[..]newfunc[..]")
+ .run();
+
+ // Remove the usage of new functionality and try again.
+ p.change_file("src/main.rs", "extern crate bar; pub fn main() {}");
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[..]
+[..]
+[..]
+[..]
+[UPDATING] crates.io index
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+
+ publish::validate_upload(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "normal",
+ "name": "bar",
+ "optional": false,
+ "target": null,
+ "version_req": "^1.0"
+ }
+ ],
+ "description": "foo",
+ "documentation": null,
+ "features": {},
+ "homepage": null,
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "vers": "0.0.1"
+ }
+ "#,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ );
+}
+
+#[cargo_test]
+fn publish_checks_for_token_before_verify() {
+ let registry = registry::RegistryBuilder::new()
+ .no_configure_token()
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ // Assert upload token error before the package is verified
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr_contains("[ERROR] no token found, please run `cargo login`")
+ .with_stderr_does_not_contain("[VERIFYING] foo v0.0.1 ([CWD])")
+ .run();
+
+ // Assert package verified successfully on dry run
+ p.cargo("publish --dry-run")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[..]
+[..]
+[..]
+[..]
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 [..]
+[WARNING] aborting upload due to dry run
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_with_bad_source() {
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [source.crates-io]
+ replace-with = 'local-registry'
+
+ [source.local-registry]
+ local-registry = 'registry'
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] crates-io is replaced with non-remote-registry source registry `[..]/foo/registry`;
+include `--registry crates-io` to use crates.io
+",
+ )
+ .run();
+
+ p.change_file(
+ ".cargo/config",
+ r#"
+ [source.crates-io]
+ replace-with = "vendored-sources"
+
+ [source.vendored-sources]
+ directory = "vendor"
+ "#,
+ );
+
+ p.cargo("publish")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] crates-io is replaced with non-remote-registry source dir [..]/foo/vendor;
+include `--registry crates-io` to use crates.io
+",
+ )
+ .run();
+}
+
+// A dependency with both `git` and `version`.
+#[cargo_test]
+fn publish_git_with_version() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+
+ Package::new("dep1", "1.0.1")
+ .file("src/lib.rs", "pub fn f() -> i32 {1}")
+ .publish();
+
+ let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep1", "1.0.0"))
+ .file("src/lib.rs", "pub fn f() -> i32 {2}")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ edition = "2018"
+ license = "MIT"
+ description = "foo"
+
+ [dependencies]
+ dep1 = {{version = "1.0", git="{}"}}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ pub fn main() {
+ println!("{}", dep1::f());
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run").with_stdout("2").run();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[..]
+[..]
+[..]
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.1.0 ([CWD])
+[UPLOADED] foo v0.1.0 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.1.0 [..]
+",
+ )
+ .run();
+
+ publish::validate_upload_with_contents(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "normal",
+ "name": "dep1",
+ "optional": false,
+ "target": null,
+ "version_req": "^1.0"
+ }
+ ],
+ "description": "foo",
+ "documentation": null,
+ "features": {},
+ "homepage": null,
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "vers": "0.1.0"
+ }
+ "#,
+ "foo-0.1.0.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ &[
+ (
+ "Cargo.toml",
+ // Check that only `version` is included in Cargo.toml.
+ &format!(
+ "{}\n\
+ [package]\n\
+ edition = \"2018\"\n\
+ name = \"foo\"\n\
+ version = \"0.1.0\"\n\
+ authors = []\n\
+ description = \"foo\"\n\
+ license = \"MIT\"\n\
+ \n\
+ [dependencies.dep1]\n\
+ version = \"1.0\"\n\
+ ",
+ cargo::core::package::MANIFEST_PREAMBLE
+ ),
+ ),
+ (
+ "Cargo.lock",
+ // The important check here is that it is 1.0.1 in the registry.
+ "# This file is automatically @generated by Cargo.\n\
+ # It is not intended for manual editing.\n\
+ version = 3\n\
+ \n\
+ [[package]]\n\
+ name = \"dep1\"\n\
+ version = \"1.0.1\"\n\
+ source = \"registry+https://github.com/rust-lang/crates.io-index\"\n\
+ checksum = \"[..]\"\n\
+ \n\
+ [[package]]\n\
+ name = \"foo\"\n\
+ version = \"0.1.0\"\n\
+ dependencies = [\n\
+ \x20\"dep1\",\n\
+ ]\n\
+ ",
+ ),
+ ],
+ );
+}
+
+#[cargo_test]
+fn publish_dev_dep_no_version() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+
+ [dev-dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.1.0 [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.1.0 [..]
+[UPLOADED] foo v0.1.0 [..]
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.1.0 [..]
+",
+ )
+ .run();
+
+ publish::validate_upload_with_contents(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [],
+ "description": "foo",
+ "documentation": "foo",
+ "features": {},
+ "homepage": "foo",
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": "foo",
+ "vers": "0.1.0"
+ }
+ "#,
+ "foo-0.1.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
+ &[(
+ "Cargo.toml",
+ &format!(
+ r#"{}
+[package]
+name = "foo"
+version = "0.1.0"
+authors = []
+description = "foo"
+homepage = "foo"
+documentation = "foo"
+license = "MIT"
+repository = "foo"
+
+[dev-dependencies]
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ ),
+ )],
+ );
+}
+
+#[cargo_test]
+fn credentials_ambiguous_filename() {
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ // Make token in `credentials.toml` incorrect to ensure it is not read.
+ let credentials_toml = paths::home().join(".cargo/credentials.toml");
+ fs::write(credentials_toml, r#"token = "wrong-token""#).unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr_contains("[..]Unauthorized message from server[..]")
+ .run();
+
+    // Favor `credentials` if it exists.
+ let credentials = paths::home().join(".cargo/credentials");
+ fs::write(credentials, r#"token = "sekrit""#).unwrap();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[..]
+[WARNING] Both `[..]/credentials` and `[..]/credentials.toml` exist. Using `[..]/credentials`
+[..]
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 [..]
+[UPLOADED] foo v0.0.1 [..]
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+}
+
+// --index will not load registry.token, to avoid possibly leaking the
+// crates.io token to another server.
+#[cargo_test]
+fn index_requires_token() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let credentials = paths::home().join(".cargo/credentials.toml");
+ fs::remove_file(&credentials).unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify --index")
+ .arg(registry.index_url().as_str())
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] command-line argument --index requires --token to be specified
+",
+ )
+ .run();
+}
+
+// publish with source replacement without --registry
+#[cargo_test]
+fn cratesio_source_replacement() {
+ registry::init();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] crates-io is replaced with remote registry dummy-registry;
+include `--registry dummy-registry` or `--registry crates-io`
+",
+ )
+ .run();
+}
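
For context, `registry::init()` points crates.io at the test's `dummy-registry` via source replacement; a replacement config of roughly this shape (the URL here is illustrative) is what triggers the error above when no `--registry` is given:

    [source.crates-io]
    replace-with = "dummy-registry"

    [source.dummy-registry]
    registry = "https://example.com/index"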
+
+#[cargo_test]
+fn publish_with_missing_readme() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ homepage = "https://example.com/"
+ readme = "foo.md"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(&format!(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.1.0 [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.1.0 [..]
+[ERROR] failed to read `readme` file for package `foo v0.1.0 ([ROOT]/foo)`
+
+Caused by:
+ failed to read `[ROOT]/foo/foo.md`
+
+Caused by:
+ {}
+",
+ no_such_file_err_msg()
+ ))
+ .run();
+}
+
+// Registry returns an API error.
+#[cargo_test]
+fn api_error_json() {
+ let _registry = registry::RegistryBuilder::new()
+ .alternative()
+ .http_api()
+ .add_responder("/api/v1/crates/new", |_, _| Response {
+ body: br#"{"errors": [{"detail": "you must be logged in"}]}"#.to_vec(),
+ code: 403,
+ headers: vec![],
+ })
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.0.1 [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.0.1 [..]
+[ERROR] failed to publish to registry at http://127.0.0.1:[..]/
+
+Caused by:
+ the remote server responded with an error (status 403 Forbidden): you must be logged in
+",
+ )
+ .run();
+}
+
+// Registry returns an API error with a 200 status code.
+#[cargo_test]
+fn api_error_200() {
+ let _registry = registry::RegistryBuilder::new()
+ .alternative()
+ .http_api()
+ .add_responder("/api/v1/crates/new", |_, _| Response {
+ body: br#"{"errors": [{"detail": "max upload size is 123"}]}"#.to_vec(),
+ code: 200,
+ headers: vec![],
+ })
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.0.1 [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.0.1 [..]
+[ERROR] failed to publish to registry at http://127.0.0.1:[..]/
+
+Caused by:
+ the remote server responded with an error: max upload size is 123
+",
+ )
+ .run();
+}
+
+// Registry returns an error code without a JSON message.
+#[cargo_test]
+fn api_error_code() {
+ let _registry = registry::RegistryBuilder::new()
+ .alternative()
+ .http_api()
+ .add_responder("/api/v1/crates/new", |_, _| Response {
+ body: br#"go away"#.to_vec(),
+ code: 400,
+ headers: vec![],
+ })
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.0.1 [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.0.1 [..]
+[ERROR] failed to publish to registry at http://127.0.0.1:[..]/
+
+Caused by:
+ failed to get a 200 OK response, got 400
+ headers:
+ <tab>HTTP/1.1 400
+ <tab>Content-Length: 7
+ <tab>
+ body:
+ go away
+",
+ )
+ .run();
+}
+
+// Registry has a network error.
+#[cargo_test]
+fn api_curl_error() {
+ let _registry = registry::RegistryBuilder::new()
+ .alternative()
+ .http_api()
+ .add_responder("/api/v1/crates/new", |_, _| {
+ panic!("broke");
+ })
+ .build();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+    // This doesn't check for the exact text of the error, on the remote
+    // possibility that cargo is linked with an unusual version of libcurl, or
+    // that curl changes the text of the message. Currently message 52
+    // (CURLE_GOT_NOTHING) is:
+    // Server returned nothing (no headers, no data) (Empty reply from server)
+ p.cargo("publish --no-verify --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.0.1 [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.0.1 [..]
+[ERROR] failed to publish to registry at http://127.0.0.1:[..]/
+
+Caused by:
+ [52] [..]
+",
+ )
+ .run();
+}
+
+// Registry returns an invalid response.
+#[cargo_test]
+fn api_other_error() {
+ let _registry = registry::RegistryBuilder::new()
+ .alternative()
+ .http_api()
+ .add_responder("/api/v1/crates/new", |_, _| Response {
+ body: b"\xff".to_vec(),
+ code: 200,
+ headers: vec![],
+ })
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.0.1 [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.0.1 [..]
+[ERROR] failed to publish to registry at http://127.0.0.1:[..]/
+
+Caused by:
+ invalid response from server
+
+Caused by:
+ response body was not valid utf-8
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn in_package_workspace() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2021"
+ [workspace]
+ members = ["li"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "li/Cargo.toml",
+ r#"
+ [package]
+ name = "li"
+ version = "0.0.1"
+ description = "li"
+ license = "MIT"
+ "#,
+ )
+ .file("li/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish -p li --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[WARNING] manifest has no documentation, homepage or repository.
+See [..]
+[PACKAGING] li v0.0.1 ([CWD]/li)
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] li v0.0.1 ([CWD]/li)
+[UPLOADED] li v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] li v0.0.1 [..]
+",
+ )
+ .run();
+
+ validate_upload_li();
+}
+
+#[cargo_test]
+fn with_duplicate_spec_in_members() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [workspace]
+ resolver = "2"
+ members = ["li","bar"]
+ default-members = ["li","bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "li/Cargo.toml",
+ r#"
+ [package]
+ name = "li"
+ version = "0.0.1"
+ description = "li"
+ license = "MIT"
+ "#,
+ )
+ .file("li/src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ description = "bar"
+ license = "MIT"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ "error: the `-p` argument must be specified to select a single package to publish",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn in_package_workspace_with_members_with_features_old() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [workspace]
+ members = ["li"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "li/Cargo.toml",
+ r#"
+ [package]
+ name = "li"
+ version = "0.0.1"
+ description = "li"
+ license = "MIT"
+ "#,
+ )
+ .file("li/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish -p li --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[WARNING] manifest has no documentation, homepage or repository.
+See [..]
+[PACKAGING] li v0.0.1 ([CWD]/li)
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] li v0.0.1 ([CWD]/li)
+[UPLOADED] li v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] li v0.0.1 [..]
+",
+ )
+ .run();
+
+ validate_upload_li();
+}
+
+#[cargo_test]
+fn in_virtual_workspace() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("foo/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ "error: the `-p` argument must be specified in the root of a virtual workspace",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn in_virtual_workspace_with_p() {
+ // `publish` generally requires a remote registry
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo","li"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("foo/src/main.rs", "fn main() {}")
+ .file(
+ "li/Cargo.toml",
+ r#"
+ [package]
+ name = "li"
+ version = "0.0.1"
+ description = "li"
+ license = "MIT"
+ "#,
+ )
+ .file("li/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish -p li --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[WARNING] manifest has no documentation, homepage or repository.
+See [..]
+[PACKAGING] li v0.0.1 ([CWD]/li)
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] li v0.0.1 ([CWD]/li)
+[UPLOADED] li v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] li v0.0.1 [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn in_package_workspace_not_found() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2021"
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "li/Cargo.toml",
+ r#"
+ [package]
+ name = "li"
+ version = "0.0.1"
+ edition = "2021"
+ authors = []
+ license = "MIT"
+ description = "li"
+ "#,
+ )
+ .file("li/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish -p li --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ "\
+error: package ID specification `li` did not match any packages
+
+<tab>Did you mean `foo`?
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn in_package_workspace_found_multiple() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2021"
+ [workspace]
+ members = ["li","lii"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "li/Cargo.toml",
+ r#"
+ [package]
+ name = "li"
+ version = "0.0.1"
+ edition = "2021"
+ authors = []
+ license = "MIT"
+ description = "li"
+ "#,
+ )
+ .file("li/src/main.rs", "fn main() {}")
+ .file(
+ "lii/Cargo.toml",
+ r#"
+ [package]
+ name = "lii"
+ version = "0.0.1"
+ edition = "2021"
+ authors = []
+ license = "MIT"
+ description = "lii"
+ "#,
+ )
+ .file("lii/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish -p li* --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ "\
+error: the `-p` argument must be specified to select a single package to publish
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+// https://github.com/rust-lang/cargo/issues/10536
+fn publish_path_dependency_without_workspace() {
+ // Use local registry for faster test times since no publish will occur
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2021"
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ edition = "2021"
+ authors = []
+ license = "MIT"
+ description = "bar"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish -p bar --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ "\
+error: package ID specification `bar` did not match any packages
+
+<tab>Did you mean `foo`?
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn http_api_not_noop() {
+ let registry = registry::RegistryBuilder::new().http_api().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[..]
+[..]
+[..]
+[..]
+[VERIFYING] foo v0.0.1 ([CWD])
+[..]
+[..]
+[..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.0.1 [..]
+",
+ )
+ .run();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [dependencies]
+ foo = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn wait_for_first_publish() {
+ // Counter for number of tries before the package is "published"
+ let arc: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
+ let arc2 = arc.clone();
+
+    // The index responder returns a 404 for the first request, then serves the index.
+ let registry = registry::RegistryBuilder::new()
+ .http_index()
+ .http_api()
+ .add_responder("/index/de/la/delay", move |req, server| {
+ let mut lock = arc.lock().unwrap();
+ *lock += 1;
+ if *lock <= 1 {
+ server.not_found(req)
+ } else {
+ server.index(req)
+ }
+ })
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "delay"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(0)
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] delay v0.0.1 ([CWD])
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] delay v0.0.1 ([CWD])
+[UPLOADED] delay v0.0.1 to registry `crates-io`
+note: Waiting for `delay v0.0.1` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] delay v0.0.1 at registry `crates-io`
+",
+ )
+ .run();
+
+ // Verify the responder has been pinged
+ let lock = arc2.lock().unwrap();
+ assert_eq!(*lock, 2);
+ drop(lock);
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ [dependencies]
+ delay = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").with_status(0).run();
+}
+
+/// A separate test is needed for package names with `-` or `_`, as they hit
+/// the responder twice per cargo invocation. If that ever gets changed,
+/// this test will need to be changed accordingly.
+#[cargo_test]
+fn wait_for_first_publish_underscore() {
+ // Counter for number of tries before the package is "published"
+ let arc: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
+ let arc2 = arc.clone();
+ let misses = Arc::new(Mutex::new(Vec::new()));
+ let misses2 = misses.clone();
+
+    // The index responder returns a 404 for the first request, then serves the index.
+ let registry = registry::RegistryBuilder::new()
+ .http_index()
+ .http_api()
+ .add_responder("/index/de/la/delay_with_underscore", move |req, server| {
+ let mut lock = arc.lock().unwrap();
+ *lock += 1;
+ if *lock <= 1 {
+ server.not_found(req)
+ } else {
+ server.index(req)
+ }
+ })
+ .not_found_handler(move |req, _| {
+ misses.lock().unwrap().push(req.url.to_string());
+ Response {
+ body: b"not found".to_vec(),
+ code: 404,
+ headers: vec![],
+ }
+ })
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "delay_with_underscore"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(0)
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] delay_with_underscore v0.0.1 ([CWD])
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] delay_with_underscore v0.0.1 ([CWD])
+[UPLOADED] delay_with_underscore v0.0.1 to registry `crates-io`
+note: Waiting for `delay_with_underscore v0.0.1` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] delay_with_underscore v0.0.1 at registry `crates-io`
+",
+ )
+ .run();
+
+    // Verify the responder has been pinged
+ let lock = arc2.lock().unwrap();
+ assert_eq!(*lock, 2);
+ drop(lock);
+ {
+ let misses = misses2.lock().unwrap();
+ assert!(
+ misses.len() == 1,
+ "should only have 1 not found URL; instead found {misses:?}"
+ );
+ }
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ [dependencies]
+ delay_with_underscore = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").with_status(0).run();
+}
+
+#[cargo_test]
+fn wait_for_subsequent_publish() {
+ // Counter for number of tries before the package is "published"
+ let arc: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
+ let arc2 = arc.clone();
+ let publish_req = Arc::new(Mutex::new(None));
+ let publish_req2 = publish_req.clone();
+
+ let registry = registry::RegistryBuilder::new()
+ .http_index()
+ .http_api()
+ .add_responder("/api/v1/crates/new", move |req, server| {
+ // Capture the publish request, but defer publishing
+ *publish_req.lock().unwrap() = Some(req.clone());
+ server.ok(req)
+ })
+ .add_responder("/index/de/la/delay", move |req, server| {
+ let mut lock = arc.lock().unwrap();
+ *lock += 1;
+ if *lock == 3 {
+ // Run the publish on the 3rd attempt
+ let rep = server
+ .check_authorized_publish(&publish_req2.lock().unwrap().as_ref().unwrap());
+ assert_eq!(rep.code, 200);
+ }
+ server.index(req)
+ })
+ .build();
+
+ // Publish an earlier version
+ Package::new("delay", "0.0.1")
+ .file("src/lib.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "delay"
+ version = "0.0.2"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(0)
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] delay v0.0.2 ([CWD])
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] delay v0.0.2 ([CWD])
+[UPLOADED] delay v0.0.2 to registry `crates-io`
+note: Waiting for `delay v0.0.2` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] delay v0.0.2 at registry `crates-io`
+",
+ )
+ .run();
+
+ // Verify the responder has been pinged
+ let lock = arc2.lock().unwrap();
+ assert_eq!(*lock, 3);
+ drop(lock);
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ [dependencies]
+ delay = "0.0.2"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").with_status(0).run();
+}
+
+#[cargo_test]
+fn skip_wait_for_publish() {
+ // Intentionally using local registry so the crate never makes it to the index
+ let registry = registry::init();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ "
+ [publish]
+ timeout = 0
+ ",
+ )
+ .build();
+
+ p.cargo("publish --no-verify -Zpublish-timeout")
+ .replace_crates_io(registry.index_url())
+ .masquerade_as_nightly_cargo(&["publish-timeout"])
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] foo v0.0.1 ([CWD])
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn timeout_waiting_for_publish() {
+ // Publish doesn't happen within the timeout window.
+ let registry = registry::RegistryBuilder::new()
+ .http_api()
+ .delayed_index_update(20)
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "delay"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [publish]
+ timeout = 2
+ "#,
+ )
+ .build();
+
+ p.cargo("publish --no-verify -Zpublish-timeout")
+ .replace_crates_io(registry.index_url())
+ .masquerade_as_nightly_cargo(&["publish-timeout"])
+ .with_status(0)
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] delay v0.0.1 ([CWD])
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] delay v0.0.1 ([CWD])
+[UPLOADED] delay v0.0.1 to registry `crates-io`
+note: Waiting for `delay v0.0.1` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+warning: timed out waiting for `delay v0.0.1` to be available in registry `crates-io`
+note: The registry may have a backlog that is delaying making the crate available. The crate should be available soon.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn wait_for_git_publish() {
+    // Slow publish to a registry that uses a git index.
+ let registry = registry::RegistryBuilder::new()
+ .http_api()
+ .delayed_index_update(5)
+ .build();
+
+ // Publish an earlier version
+ Package::new("delay", "0.0.1")
+ .file("src/lib.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "delay"
+ version = "0.0.2"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .with_status(0)
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] delay v0.0.2 ([CWD])
+[PACKAGED] [..] files, [..] ([..] compressed)
+[UPLOADING] delay v0.0.2 ([CWD])
+[UPLOADED] delay v0.0.2 to registry `crates-io`
+note: Waiting for `delay v0.0.2` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] delay v0.0.2 at registry `crates-io`
+",
+ )
+ .run();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ [dependencies]
+ delay = "0.0.2"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").with_status(0).run();
+}
+
+#[cargo_test]
+fn invalid_token() {
+ // Checks publish behavior with an invalid token.
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
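+    // `\x16` (the SYN control character) is not a printable ISO-8859-1 character,
+    // so it cannot be sent as an HTTP header value; see the
+    // "token contains invalid characters" error expected below.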
+ p.cargo("publish --no-verify")
+ .replace_crates_io(registry.index_url())
+ .env("CARGO_REGISTRY_TOKEN", "\x16")
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[PACKAGING] foo v0.0.1 ([ROOT]/foo)
+[PACKAGED] 4 files, [..]
+[UPLOADING] foo v0.0.1 ([ROOT]/foo)
+error: failed to publish to registry at http://127.0.0.1:[..]/
+
+Caused by:
+ token contains invalid characters.
+ Only printable ISO-8859-1 characters are allowed as it is sent in a HTTPS header.
+",
+ )
+ .with_status(101)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/publish_lockfile.rs b/src/tools/cargo/tests/testsuite/publish_lockfile.rs
new file mode 100644
index 000000000..35da5131f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/publish_lockfile.rs
@@ -0,0 +1,592 @@
+//! Tests for including `Cargo.lock` when publishing/packaging.
+
+use std::fs::File;
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{
+ basic_manifest, cargo_process, git, paths, project, publish::validate_crate_contents,
+};
+
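+/// Builds a manifest with enough metadata to publish cleanly, appending `extra`
+/// verbatim (for example a `[dependencies]` table).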
+fn pl_manifest(name: &str, version: &str, extra: &str) -> String {
+ format!(
+ r#"
+ [package]
+ name = "{}"
+ version = "{}"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+
+ {}
+ "#,
+ name, version, extra
+ )
+}
+
+#[cargo_test]
+fn removed() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["publish-lockfile"]
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ publish-lockfile = true
+ license = "MIT"
+ description = "foo"
+ documentation = "foo"
+ homepage = "foo"
+ repository = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("package")
+ .masquerade_as_nightly_cargo(&["publish-lockfile"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at [..]
+
+Caused by:
+ the cargo feature `publish-lockfile` has been removed in the 1.37 release
+
+ Remove the feature from Cargo.toml to remove this error.
+ See https://doc.rust-lang.org/[..]cargo/reference/unstable.html#publish-lockfile [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn package_lockfile() {
+ let p = project()
+ .file("Cargo.toml", &pl_manifest("foo", "0.0.1", ""))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("package")
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] [..] files, [..] ([..] compressed)
+",
+ )
+ .run();
+ assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+ p.cargo("package").with_stdout("").run();
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "Cargo.lock", "src/main.rs"],
+ &[],
+ );
+}
+
+#[cargo_test]
+fn package_lockfile_git_repo() {
+ // Create a Git repository containing a minimal Rust project.
+ let g = git::repo(&paths::root().join("foo"))
+ .file("Cargo.toml", &pl_manifest("foo", "0.0.1", ""))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ cargo_process("package -l")
+ .cwd(g.root())
+ .with_stdout(
+ "\
+.cargo_vcs_info.json
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+ cargo_process("package -v")
+ .cwd(g.root())
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.1 ([..])
+[ARCHIVING] .cargo_vcs_info.json
+[ARCHIVING] Cargo.lock
+[ARCHIVING] Cargo.toml
+[ARCHIVING] Cargo.toml.orig
+[ARCHIVING] src/main.rs
+[VERIFYING] foo v0.0.1 ([..])
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc --crate-name foo src/main.rs [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 5 files, [..] ([..] compressed)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_lock_file_with_library() {
+ let p = project()
+ .file("Cargo.toml", &pl_manifest("foo", "0.0.1", ""))
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("package").run();
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
+ &[],
+ );
+}
+
+#[cargo_test]
+fn lock_file_and_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo"]
+ "#,
+ )
+ .file("foo/Cargo.toml", &pl_manifest("foo", "0.0.1", ""))
+ .file("foo/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("package").cwd("foo").run();
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "src/main.rs", "Cargo.lock"],
+ &[],
+ );
+}
+
+#[cargo_test]
+fn note_resolve_changes() {
+ // `multi` has multiple sources (path and registry).
+ Package::new("multi", "0.1.0").publish();
+    // `updated` always comes from the registry, but should not change.
+ Package::new("updated", "1.0.0").publish();
+ // `patched` is [patch]ed.
+ Package::new("patched", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &pl_manifest(
+ "foo",
+ "0.0.1",
+ r#"
+ [dependencies]
+ multi = { path = "multi", version = "0.1" }
+ updated = "1.0"
+ patched = "1.0"
+
+ [patch.crates-io]
+ patched = { path = "patched" }
+ "#,
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("multi/Cargo.toml", &basic_manifest("multi", "0.1.0"))
+ .file("multi/src/lib.rs", "")
+ .file("patched/Cargo.toml", &basic_manifest("patched", "1.0.0"))
+ .file("patched/src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+
+ // Make sure this does not change or warn.
+ Package::new("updated", "1.0.1").publish();
+
+ p.cargo("package --no-verify -v --allow-dirty")
+ .with_stderr_unordered(
+ "\
+[PACKAGING] foo v0.0.1 ([..])
+[ARCHIVING] Cargo.lock
+[ARCHIVING] Cargo.toml
+[ARCHIVING] Cargo.toml.orig
+[ARCHIVING] src/main.rs
+[UPDATING] `[..]` index
+[NOTE] package `multi v0.1.0` added to the packaged Cargo.lock file, was originally sourced from `[..]/foo/multi`
+[NOTE] package `patched v1.0.0` added to the packaged Cargo.lock file, was originally sourced from `[..]/foo/patched`
+[PACKAGED] [..] files, [..] ([..] compressed)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn outdated_lock_version_change_does_not_warn() {
+ // If the version of the package being packaged changes, but Cargo.lock is
+ // not updated, don't bother warning about it.
+ let p = project()
+ .file("Cargo.toml", &pl_manifest("foo", "0.1.0", ""))
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+
+ p.change_file("Cargo.toml", &pl_manifest("foo", "0.2.0", ""));
+
+ p.cargo("package --no-verify")
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.2.0 ([..])
+[PACKAGED] [..] files, [..] ([..] compressed)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_warn_workspace_extras() {
+    // Other entries in the workspace lock file should be ignored.
+ Package::new("dep1", "1.0.0").publish();
+ Package::new("dep2", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ &pl_manifest(
+ "a",
+ "0.1.0",
+ r#"
+ [dependencies]
+ dep1 = "1.0"
+ "#,
+ ),
+ )
+ .file("a/src/main.rs", "fn main() {}")
+ .file(
+ "b/Cargo.toml",
+ &pl_manifest(
+ "b",
+ "0.1.0",
+ r#"
+ [dependencies]
+ dep2 = "1.0"
+ "#,
+ ),
+ )
+ .file("b/src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("generate-lockfile").run();
+ p.cargo("package --no-verify")
+ .cwd("a")
+ .with_stderr(
+ "\
+[PACKAGING] a v0.1.0 ([..])
+[UPDATING] `[..]` index
+[PACKAGED] [..] files, [..] ([..] compressed)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warn_package_with_yanked() {
+ Package::new("bar", "0.1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &pl_manifest(
+ "foo",
+ "0.0.1",
+ r#"
+ [dependencies]
+ bar = "0.1"
+ "#,
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("generate-lockfile").run();
+ Package::new("bar", "0.1.0").yanked(true).publish();
+ // Make sure it sticks with the locked (yanked) version.
+ Package::new("bar", "0.1.1").publish();
+ p.cargo("package --no-verify")
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.1 ([..])
+[UPDATING] `[..]` index
+[WARNING] package `bar v0.1.0` in Cargo.lock is yanked in registry \
+ `crates-io`, consider updating to a version that is not yanked
+[PACKAGED] [..] files, [..] ([..] compressed)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warn_install_with_yanked() {
+ Package::new("bar", "0.1.0").yanked(true).publish();
+ Package::new("bar", "0.1.1").publish();
+ Package::new("foo", "0.1.0")
+ .dep("bar", "0.1")
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "Cargo.lock",
+ r#"
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+ "#,
+ )
+ .publish();
+
+ cargo_process("install --locked foo")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.1.0 (registry `[..]`)
+[INSTALLING] foo v0.1.0
+[WARNING] package `bar v0.1.0` in Cargo.lock is yanked in registry \
+ `crates-io`, consider running without --locked
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 (registry `[..]`)
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.1.0
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v0.1.0` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+
+    // Try again without --locked; make sure it uses 0.1.1 and does not warn.
+ cargo_process("install --force foo")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[INSTALLING] foo v0.1.0
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.1 (registry `[..]`)
+[COMPILING] bar v0.1.1
+[COMPILING] foo v0.1.0
+[FINISHED] release [optimized] target(s) in [..]
+[REPLACING] [..]/.cargo/bin/foo[EXE]
+[REPLACED] package `foo v0.1.0` with `foo v0.1.0` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn ignore_lockfile() {
+ // With an explicit `include` list, but Cargo.lock in .gitignore, don't
+ // complain about `Cargo.lock` being ignored. Note that it is still
+    // included in the packaged crate regardless.
+ let p = git::new("foo", |p| {
+ p.file(
+ "Cargo.toml",
+ &pl_manifest(
+ "foo",
+ "0.0.1",
+ r#"
+ include = [
+ "src/main.rs"
+ ]
+ "#,
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(".gitignore", "Cargo.lock")
+ });
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+.cargo_vcs_info.json
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+ p.cargo("generate-lockfile").run();
+ p.cargo("package -v")
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.1 ([..])
+[ARCHIVING] .cargo_vcs_info.json
+[ARCHIVING] Cargo.lock
+[ARCHIVING] Cargo.toml
+[ARCHIVING] Cargo.toml.orig
+[ARCHIVING] src/main.rs
+[VERIFYING] foo v0.0.1 ([..])
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc --crate-name foo src/main.rs [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 5 files, [..] ([..] compressed)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn ignore_lockfile_inner() {
+    // Don't warn about `Cargo.lock` being listed in .gitignore when the package is in a git subdirectory.
+ let p = git::new("foo", |p| {
+ p.no_manifest()
+ .file("bar/Cargo.toml", &pl_manifest("bar", "0.0.1", ""))
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/.gitignore", "Cargo.lock")
+ });
+ p.cargo("generate-lockfile").cwd("bar").run();
+ p.cargo("package -v --no-verify")
+ .cwd("bar")
+ .with_stderr(
+ "\
+[PACKAGING] bar v0.0.1 ([..])
+[ARCHIVING] .cargo_vcs_info.json
+[ARCHIVING] .gitignore
+[ARCHIVING] Cargo.lock
+[ARCHIVING] Cargo.toml
+[ARCHIVING] Cargo.toml.orig
+[ARCHIVING] src/main.rs
+[PACKAGED] 6 files, [..] ([..] compressed)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn use_workspace_root_lockfile() {
+ // Issue #11148
+    // Workspace members should use the `Cargo.lock` at the workspace root.
+
+ Package::new("serde", "0.2.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [dependencies]
+ serde = "0.2"
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "bar"
+ workspace = ".."
+
+ [dependencies]
+ serde = "0.2"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ // Create `Cargo.lock` in the workspace root.
+ p.cargo("generate-lockfile").run();
+
+ // Now, add a newer version of `serde`.
+ Package::new("serde", "0.2.1").publish();
+
+ // Expect: package `bar` uses `serde v0.2.0` as required by workspace `Cargo.lock`.
+ p.cargo("package --workspace")
+ .with_stderr(
+ "\
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] bar v0.0.1 ([CWD]/bar)
+[UPDATING] `dummy-registry` index
+[VERIFYING] bar v0.0.1 ([CWD]/bar)
+[DOWNLOADING] crates ...
+[DOWNLOADED] serde v0.2.0 ([..])
+[COMPILING] serde v0.2.0
+[COMPILING] bar v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 4 files, [..]
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] serde v0.2.0
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 4 files, [..]
+",
+ )
+ .run();
+
+ let package_path = p.root().join("target/package/foo-0.0.1.crate");
+ assert!(package_path.is_file());
+ let f = File::open(&package_path).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ &[],
+ );
+
+ let package_path = p.root().join("target/package/bar-0.0.1.crate");
+ assert!(package_path.is_file());
+ let f = File::open(&package_path).unwrap();
+ validate_crate_contents(
+ f,
+ "bar-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ &[],
+ );
+}
diff --git a/src/tools/cargo/tests/testsuite/read_manifest.rs b/src/tools/cargo/tests/testsuite/read_manifest.rs
new file mode 100644
index 000000000..b5e9f05a3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/read_manifest.rs
@@ -0,0 +1,206 @@
+//! Tests for the `cargo read-manifest` command.
+
+use cargo_test_support::{basic_bin_manifest, main_file, project};
+
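+/// Expected JSON output of `cargo read-manifest`, with the `readme` field
+/// substituted from `readme_value` (a quoted filename or `null`).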
+fn manifest_output(readme_value: &str) -> String {
+ format!(
+ r#"
+{{
+ "authors": [
+ "wycats@example.com"
+ ],
+ "categories": [],
+ "default_run": null,
+ "name":"foo",
+ "readme": {},
+ "homepage": null,
+ "documentation": null,
+ "repository": null,
+ "rust_version": null,
+ "version":"0.5.0",
+ "id":"foo[..]0.5.0[..](path+file://[..]/foo)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "description": null,
+ "edition": "2015",
+ "source":null,
+ "dependencies":[],
+ "targets":[{{
+ "kind":["bin"],
+ "crate_types":["bin"],
+ "doc": true,
+ "doctest": false,
+ "test": true,
+ "edition": "2015",
+ "name":"foo",
+ "src_path":"[..]/foo/src/foo.rs"
+ }}],
+ "features":{{}},
+ "manifest_path":"[..]Cargo.toml",
+ "metadata": null,
+ "publish": null
+}}"#,
+ readme_value
+ )
+}
+
+fn manifest_output_no_readme() -> String {
+ manifest_output("null")
+}
+
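+/// Like `basic_bin_manifest`, but with a caller-supplied `readme` value
+/// (a quoted filename, `true`, or `false`).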
+pub fn basic_bin_manifest_with_readme(name: &str, readme_filename: &str) -> String {
+ format!(
+ r#"
+ [package]
+
+ name = "{}"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ readme = {}
+
+ [[bin]]
+
+ name = "{}"
+ "#,
+ name, readme_filename, name
+ )
+}
+
+#[cargo_test]
+fn cargo_read_manifest_path_to_cargo_toml_relative() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("read-manifest --manifest-path foo/Cargo.toml")
+ .cwd(p.root().parent().unwrap())
+ .with_json(&manifest_output_no_readme())
+ .run();
+}
+
+#[cargo_test]
+fn cargo_read_manifest_path_to_cargo_toml_absolute() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("read-manifest --manifest-path")
+ .arg(p.root().join("Cargo.toml"))
+ .cwd(p.root().parent().unwrap())
+ .with_json(&manifest_output_no_readme())
+ .run();
+}
+
+#[cargo_test]
+fn cargo_read_manifest_path_to_cargo_toml_parent_relative() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("read-manifest --manifest-path foo")
+ .cwd(p.root().parent().unwrap())
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] the manifest-path must be \
+ a path to a Cargo.toml file",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_read_manifest_path_to_cargo_toml_parent_absolute() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("read-manifest --manifest-path")
+ .arg(p.root())
+ .cwd(p.root().parent().unwrap())
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] the manifest-path must be \
+ a path to a Cargo.toml file",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cargo_read_manifest_cwd() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("read-manifest")
+ .with_json(&manifest_output_no_readme())
+ .run();
+}
+
+#[cargo_test]
+fn cargo_read_manifest_with_specified_readme() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &basic_bin_manifest_with_readme("foo", r#""SomeReadme.txt""#),
+ )
+ .file("SomeReadme.txt", "Sample Project")
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("read-manifest")
+ .with_json(&manifest_output(&format!(r#""{}""#, "SomeReadme.txt")))
+ .run();
+}
+
+#[cargo_test]
+fn cargo_read_manifest_default_readme() {
+ let readme_filenames = ["README.md", "README.txt", "README"];
+
+ for readme in readme_filenames.iter() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(readme, "Sample project")
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("read-manifest")
+ .with_json(&manifest_output(&format!(r#""{}""#, readme)))
+ .run();
+ }
+}
+
+#[cargo_test]
+fn cargo_read_manifest_suppress_default_readme() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &basic_bin_manifest_with_readme("foo", "false"),
+ )
+ .file("README.txt", "Sample project")
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("read-manifest")
+ .with_json(&manifest_output_no_readme())
+ .run();
+}
+
+// If a file named README.md exists and `readme = true`, the value should default to `README.md`.
+#[cargo_test]
+fn cargo_read_manifest_defaults_readme_if_true() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest_with_readme("foo", "true"))
+ .file("README.md", "Sample project")
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("read-manifest")
+ .with_json(&manifest_output(r#""README.md""#))
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/registry.rs b/src/tools/cargo/tests/testsuite/registry.rs
new file mode 100644
index 000000000..05ec9b158
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/registry.rs
@@ -0,0 +1,3406 @@
+//! Tests for normal registry dependencies.
+
+use cargo::core::SourceId;
+use cargo_test_support::cargo_process;
+use cargo_test_support::paths::{self, CargoPathExt};
+use cargo_test_support::registry::{
+ self, registry_path, Dependency, Package, RegistryBuilder, Response, TestRegistry,
+};
+use cargo_test_support::{basic_manifest, project};
+use cargo_test_support::{git, install::cargo_home, t};
+use cargo_util::paths::remove_dir_all;
+use std::fmt::Write;
+use std::fs::{self, File};
+use std::path::Path;
+use std::sync::Arc;
+use std::sync::Mutex;
+
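+/// Spins up an HTTP-index registry; each `*_http` test runs the shared test body
+/// against it, while the `*_git` variant relies on the default (git-index) test registry.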
+fn setup_http() -> TestRegistry {
+ RegistryBuilder::new().http_index().build()
+}
+
+#[cargo_test]
+fn test_server_stops() {
+ let server = setup_http();
+ server.join(); // ensure the server fully shuts down
+}
+
+#[cargo_test]
+fn simple_http() {
+ let _server = setup_http();
+ simple();
+}
+
+#[cargo_test]
+fn simple_git() {
+ simple();
+}
+
+fn simple() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = ">= 0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`)
+[CHECKING] bar v0.0.1
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+
+ p.cargo("clean").run();
+
+ assert!(paths::home().join(".cargo/registry/CACHEDIR.TAG").is_file());
+
+ // Don't download a second time
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.0.1
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn deps_http() {
+ let _server = setup_http();
+ deps();
+}
+
+#[cargo_test]
+fn deps_git() {
+ deps();
+}
+
+fn deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = ">= 0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("baz", "0.0.1").publish();
+ Package::new("bar", "0.0.1").dep("baz", "*").publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`)
+[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`)
+[CHECKING] baz v0.0.1
+[CHECKING] bar v0.0.1
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+
+ assert!(paths::home().join(".cargo/registry/CACHEDIR.TAG").is_file());
+}
+
+#[cargo_test]
+fn nonexistent_http() {
+ let _server = setup_http();
+ nonexistent();
+}
+
+#[cargo_test]
+fn nonexistent_git() {
+ nonexistent();
+}
+
+fn nonexistent() {
+ Package::new("init", "0.0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ nonexistent = ">= 0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+error: no matching package named `nonexistent` found
+location searched: registry [..]
+required by package `foo v0.0.1 ([..])`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn wrong_case_http() {
+ let _server = setup_http();
+ wrong_case();
+}
+
+#[cargo_test]
+fn wrong_case_git() {
+ wrong_case();
+}
+
+fn wrong_case() {
+ Package::new("init", "0.0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ Init = ">= 0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ // #5678 to make this work
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+error: no matching package found
+searched package name: `Init`
+perhaps you meant: init
+location searched: registry [..]
+required by package `foo v0.0.1 ([..])`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn mis_hyphenated_http() {
+ let _server = setup_http();
+ mis_hyphenated();
+}
+
+#[cargo_test]
+fn mis_hyphenated_git() {
+ mis_hyphenated();
+}
+
+fn mis_hyphenated() {
+ Package::new("mis-hyphenated", "0.0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ mis_hyphenated = ">= 0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ // #2775 to make this work
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+error: no matching package found
+searched package name: `mis_hyphenated`
+perhaps you meant: mis-hyphenated
+location searched: registry [..]
+required by package `foo v0.0.1 ([..])`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn wrong_version_http() {
+ let _server = setup_http();
+ wrong_version();
+}
+
+#[cargo_test]
+fn wrong_version_git() {
+ wrong_version();
+}
+
+fn wrong_version() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ foo = ">= 1.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("foo", "0.0.1").publish();
+ Package::new("foo", "0.0.2").publish();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: failed to select a version for the requirement `foo = \">=1.0.0\"`
+candidate versions found which didn't match: 0.0.2, 0.0.1
+location searched: `[..]` index (which is replacing registry `[..]`)
+required by package `foo v0.0.1 ([..])`
+",
+ )
+ .run();
+
+ Package::new("foo", "0.0.3").publish();
+ Package::new("foo", "0.0.4").publish();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: failed to select a version for the requirement `foo = \">=1.0.0\"`
+candidate versions found which didn't match: 0.0.4, 0.0.3, 0.0.2, ...
+location searched: `[..]` index (which is replacing registry `[..]`)
+required by package `foo v0.0.1 ([..])`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_cksum_http() {
+ let _server = setup_http();
+ bad_cksum();
+}
+
+#[cargo_test]
+fn bad_cksum_git() {
+ bad_cksum();
+}
+
+fn bad_cksum() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bad-cksum = ">= 0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ let pkg = Package::new("bad-cksum", "0.0.1");
+ pkg.publish();
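+    // Truncate the just-published `.crate` archive with `File::create`, so the
+    // download below fails checksum verification against the index.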
+ t!(File::create(&pkg.archive_dst()));
+
+ p.cargo("check -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bad-cksum [..]
+[ERROR] failed to download replaced source registry `crates-io`
+
+Caused by:
+ failed to verify the checksum of `bad-cksum v0.0.1 (registry `dummy-registry`)`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_registry_http() {
+ let _server = setup_http();
+ update_registry();
+}
+
+#[cargo_test]
+fn update_registry_git() {
+ update_registry();
+}
+
+fn update_registry() {
+ Package::new("init", "0.0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ notyet = ">= 0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: no matching package named `notyet` found
+location searched: registry `[..]`
+required by package `foo v0.0.1 ([..])`
+",
+ )
+ .run();
+
+ Package::new("notyet", "0.0.1").publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] notyet v0.0.1 (registry `dummy-registry`)
+[CHECKING] notyet v0.0.1
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn package_with_path_deps_http() {
+ let _server = setup_http();
+ package_with_path_deps();
+}
+
+#[cargo_test]
+fn package_with_path_deps_git() {
+ package_with_path_deps();
+}
+
+fn package_with_path_deps() {
+ Package::new("init", "0.0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ repository = "bar"
+
+ [dependencies.notyet]
+ version = "0.0.1"
+ path = "notyet"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("notyet/Cargo.toml", &basic_manifest("notyet", "0.0.1"))
+ .file("notyet/src/lib.rs", "")
+ .build();
+
+ p.cargo("package")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[PACKAGING] foo [..]
+[UPDATING] [..]
+[ERROR] failed to prepare local package for uploading
+
+Caused by:
+ no matching package named `notyet` found
+ location searched: registry `crates-io`
+ required by package `foo v0.0.1 [..]`
+",
+ )
+ .run();
+
+ Package::new("notyet", "0.0.1").publish();
+
+ p.cargo("package")
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.1 ([CWD])
+[UPDATING] `[..]` index
+[VERIFYING] foo v0.0.1 ([CWD])
+[DOWNLOADING] crates ...
+[DOWNLOADED] notyet v0.0.1 (registry `dummy-registry`)
+[COMPILING] notyet v0.0.1
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+[PACKAGED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn lockfile_locks_http() {
+ let _server = setup_http();
+ lockfile_locks();
+}
+
+#[cargo_test]
+fn lockfile_locks_git() {
+ lockfile_locks();
+}
+
+fn lockfile_locks() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`)
+[CHECKING] bar v0.0.1
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+
+ p.root().move_into_the_past();
+ Package::new("bar", "0.0.2").publish();
+
+ p.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn lockfile_locks_transitively_http() {
+ let _server = setup_http();
+ lockfile_locks_transitively();
+}
+
+#[cargo_test]
+fn lockfile_locks_transitively_git() {
+ lockfile_locks_transitively();
+}
+
+fn lockfile_locks_transitively() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("baz", "0.0.1").publish();
+ Package::new("bar", "0.0.1").dep("baz", "*").publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`)
+[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`)
+[CHECKING] baz v0.0.1
+[CHECKING] bar v0.0.1
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+
+ p.root().move_into_the_past();
+ Package::new("baz", "0.0.2").publish();
+ Package::new("bar", "0.0.2").dep("baz", "*").publish();
+
+ p.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn yanks_are_not_used_http() {
+ let _server = setup_http();
+ yanks_are_not_used();
+}
+
+#[cargo_test]
+fn yanks_are_not_used_git() {
+ yanks_are_not_used();
+}
+
+fn yanks_are_not_used() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("baz", "0.0.1").publish();
+ Package::new("baz", "0.0.2").yanked(true).publish();
+ Package::new("bar", "0.0.1").dep("baz", "*").publish();
+ Package::new("bar", "0.0.2")
+ .dep("baz", "*")
+ .yanked(true)
+ .publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`)
+[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`)
+[CHECKING] baz v0.0.1
+[CHECKING] bar v0.0.1
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn relying_on_a_yank_is_bad_http() {
+ let _server = setup_http();
+ relying_on_a_yank_is_bad();
+}
+
+#[cargo_test]
+fn relying_on_a_yank_is_bad_git() {
+ relying_on_a_yank_is_bad();
+}
+
+fn relying_on_a_yank_is_bad() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("baz", "0.0.1").publish();
+ Package::new("baz", "0.0.2").yanked(true).publish();
+ Package::new("bar", "0.0.1").dep("baz", "=0.0.2").publish();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: failed to select a version for the requirement `baz = \"=0.0.2\"`
+candidate versions found which didn't match: 0.0.1
+location searched: `[..]` index (which is replacing registry `[..]`)
+required by package `bar v0.0.1`
+ ... which satisfies dependency `bar = \"*\"` of package `foo [..]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn yanks_in_lockfiles_are_ok_http() {
+ let _server = setup_http();
+ yanks_in_lockfiles_are_ok();
+}
+
+#[cargo_test]
+fn yanks_in_lockfiles_are_ok_git() {
+ yanks_in_lockfiles_are_ok();
+}
+
+fn yanks_in_lockfiles_are_ok() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").publish();
+
+ p.cargo("check").run();
+
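+    // Three-letter crate names live under `3/` in the index layout, so this deletes
+    // the index entry for `bar` before it is re-published as yanked.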
+ registry_path().join("3").rm_rf();
+
+ Package::new("bar", "0.0.1").yanked(true).publish();
+
+ p.cargo("check").with_stdout("").run();
+
+ p.cargo("update")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: no matching package named `bar` found
+location searched: registry [..]
+required by package `foo v0.0.1 ([..])`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn yanks_in_lockfiles_are_ok_for_other_update_http() {
+ let _server = setup_http();
+ yanks_in_lockfiles_are_ok_for_other_update();
+}
+
+#[cargo_test]
+fn yanks_in_lockfiles_are_ok_for_other_update_git() {
+ yanks_in_lockfiles_are_ok_for_other_update();
+}
+
+fn yanks_in_lockfiles_are_ok_for_other_update() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ baz = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").publish();
+ Package::new("baz", "0.0.1").publish();
+
+ p.cargo("check").run();
+
+ registry_path().join("3").rm_rf();
+
+ Package::new("bar", "0.0.1").yanked(true).publish();
+ Package::new("baz", "0.0.1").publish();
+
+ p.cargo("check").with_stdout("").run();
+
+ Package::new("baz", "0.0.2").publish();
+
+ p.cargo("update")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: no matching package named `bar` found
+location searched: registry [..]
+required by package `foo v0.0.1 ([..])`
+",
+ )
+ .run();
+
+ p.cargo("update -p baz")
+ .with_stderr_contains(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] baz v0.0.1 -> v0.0.2
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn yanks_in_lockfiles_are_ok_with_new_dep_http() {
+ let _server = setup_http();
+ yanks_in_lockfiles_are_ok_with_new_dep();
+}
+
+#[cargo_test]
+fn yanks_in_lockfiles_are_ok_with_new_dep_git() {
+ yanks_in_lockfiles_are_ok_with_new_dep();
+}
+
+fn yanks_in_lockfiles_are_ok_with_new_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").publish();
+
+ p.cargo("check").run();
+
+ registry_path().join("3").rm_rf();
+
+ Package::new("bar", "0.0.1").yanked(true).publish();
+ Package::new("baz", "0.0.1").publish();
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ baz = "*"
+ "#,
+ );
+
+ p.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn update_with_lockfile_if_packages_missing_http() {
+ let _server = setup_http();
+ update_with_lockfile_if_packages_missing();
+}
+
+#[cargo_test]
+fn update_with_lockfile_if_packages_missing_git() {
+ update_with_lockfile_if_packages_missing();
+}
+
+fn update_with_lockfile_if_packages_missing() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").publish();
+ p.cargo("check").run();
+ p.root().move_into_the_past();
+
+ paths::home().join(".cargo/registry").rm_rf();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_lockfile_http() {
+ let _server = setup_http();
+ update_lockfile();
+}
+
+#[cargo_test]
+fn update_lockfile_git() {
+ update_lockfile();
+}
+
+fn update_lockfile() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ println!("0.0.1");
+ Package::new("bar", "0.0.1").publish();
+ p.cargo("check").run();
+
+ Package::new("bar", "0.0.2").publish();
+ Package::new("bar", "0.0.3").publish();
+ paths::home().join(".cargo/registry").rm_rf();
+ println!("0.0.2 update");
+ p.cargo("update -p bar --precise 0.0.2")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] bar v0.0.1 -> v0.0.2
+",
+ )
+ .run();
+
+ println!("0.0.2 build");
+ p.cargo("check")
+ .with_stderr(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.2 (registry `dummy-registry`)
+[CHECKING] bar v0.0.2
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+
+ println!("0.0.3 update");
+ p.cargo("update -p bar")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] bar v0.0.2 -> v0.0.3
+",
+ )
+ .run();
+
+ println!("0.0.3 build");
+ p.cargo("check")
+ .with_stderr(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.3 (registry `dummy-registry`)
+[CHECKING] bar v0.0.3
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+
+ println!("new dependencies update");
+ Package::new("bar", "0.0.4").dep("spam", "0.2.5").publish();
+ Package::new("spam", "0.2.5").publish();
+ p.cargo("update -p bar")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] bar v0.0.3 -> v0.0.4
+[ADDING] spam v0.2.5
+",
+ )
+ .run();
+
+ println!("new dependencies update");
+ Package::new("bar", "0.0.5").publish();
+ p.cargo("update -p bar")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] bar v0.0.4 -> v0.0.5
+[REMOVING] spam v0.2.5
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dev_dependency_not_used_http() {
+ let _server = setup_http();
+ dev_dependency_not_used();
+}
+
+#[cargo_test]
+fn dev_dependency_not_used_git() {
+ dev_dependency_not_used();
+}
+
+fn dev_dependency_not_used() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("baz", "0.0.1").publish();
+ Package::new("bar", "0.0.1").dev_dep("baz", "*").publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`)
+[CHECKING] bar v0.0.1
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_license_file_http() {
+ let registry = setup_http();
+ bad_license_file(&registry);
+}
+
+#[cargo_test]
+fn bad_license_file_git() {
+ let registry = registry::init();
+ bad_license_file(&registry);
+}
+
+fn bad_license_file(registry: &TestRegistry) {
+ Package::new("foo", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license-file = "foo"
+ description = "bar"
+ repository = "baz"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ p.cargo("publish -v")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr_contains("[ERROR] the license file `foo` does not exist")
+ .run();
+}
+
+#[cargo_test]
+fn updating_a_dep_http() {
+ let _server = setup_http();
+ updating_a_dep();
+}
+
+#[cargo_test]
+fn updating_a_dep_git() {
+ updating_a_dep();
+}
+
+fn updating_a_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ Package::new("bar", "0.0.1").publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`)
+[CHECKING] bar v0.0.1
+[CHECKING] a v0.0.1 ([CWD]/a)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+ assert!(paths::home().join(".cargo/registry/CACHEDIR.TAG").is_file());
+
+ // Now delete the CACHEDIR.TAG file: this is the situation we'll be in after
+ // upgrading from a version of Cargo that doesn't mark this directory, to one that
+ // does. It should be recreated.
+ fs::remove_file(paths::home().join(".cargo/registry/CACHEDIR.TAG"))
+ .expect("remove CACHEDIR.TAG");
+
+ p.change_file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ );
+ Package::new("bar", "0.1.0").publish();
+
+ println!("second");
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 (registry `dummy-registry`)
+[CHECKING] bar v0.1.0
+[CHECKING] a v0.0.1 ([CWD]/a)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+
+ assert!(
+ paths::home().join(".cargo/registry/CACHEDIR.TAG").is_file(),
+ "CACHEDIR.TAG recreated in existing registry"
+ );
+}
+
+#[cargo_test]
+fn git_and_registry_dep_http() {
+ let _server = setup_http();
+ git_and_registry_dep();
+}
+
+#[cargo_test]
+fn git_and_registry_dep_git() {
+ git_and_registry_dep();
+}
+
+fn git_and_registry_dep() {
+ let b = git::repo(&paths::root().join("b"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = "0.0.1"
+
+ [dependencies.b]
+ git = '{}'
+ "#,
+ b.url()
+ ),
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("a", "0.0.1").publish();
+
+ p.root().move_into_the_past();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] a v0.0.1 (registry `dummy-registry`)
+[CHECKING] a v0.0.1
+[CHECKING] b v0.0.1 ([..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+ p.root().move_into_the_past();
+
+ println!("second");
+ p.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn update_publish_then_update_http() {
+ let _server = setup_http();
+ update_publish_then_update();
+}
+
+#[cargo_test]
+fn update_publish_then_update_git() {
+ update_publish_then_update();
+}
+
+fn update_publish_then_update() {
+ // First generate a Cargo.lock and a clone of the registry index at the
+ // "head" of the current registry.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ a = "0.1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ Package::new("a", "0.1.0").publish();
+ p.cargo("build").run();
+
+ // Next, publish a new package and back up the copy of the registry we just
+ // created.
+ Package::new("a", "0.1.1").publish();
+ let registry = paths::home().join(".cargo/registry");
+ let backup = paths::root().join("registry-backup");
+ t!(fs::rename(&registry, &backup));
+
+ // Generate a Cargo.lock with the newer version, and then move the old copy
+ // of the registry back into place.
+ let p2 = project()
+ .at("foo2")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ a = "0.1.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ p2.cargo("build").run();
+ registry.rm_rf();
+ t!(fs::rename(&backup, &registry));
+ t!(fs::rename(
+ p2.root().join("Cargo.lock"),
+ p.root().join("Cargo.lock")
+ ));
+
+ // Finally, build the first project again (with our newer Cargo.lock) which
+ // should force an update of the old registry, download the new crate, and
+ // then build everything again.
+ p.cargo("build")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] a v0.1.1 (registry `dummy-registry`)
+[COMPILING] a v0.1.1
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fetch_downloads_http() {
+ let _server = setup_http();
+ fetch_downloads();
+}
+
+#[cargo_test]
+fn fetch_downloads_git() {
+ fetch_downloads();
+}
+
+fn fetch_downloads() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ a = "0.1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("a", "0.1.0").publish();
+
+ p.cargo("fetch")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] a v0.1.0 (registry [..])
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_transitive_dependency_http() {
+ let _server = setup_http();
+ update_transitive_dependency();
+}
+
+#[cargo_test]
+fn update_transitive_dependency_git() {
+ update_transitive_dependency();
+}
+
+fn update_transitive_dependency() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ a = "0.1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("a", "0.1.0").dep("b", "*").publish();
+ Package::new("b", "0.1.0").publish();
+
+ p.cargo("fetch").run();
+
+ Package::new("b", "0.1.1").publish();
+
+ p.cargo("update -pb")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] b v0.1.0 -> v0.1.1
+",
+ )
+ .run();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] b v0.1.1 (registry `dummy-registry`)
+[CHECKING] b v0.1.1
+[CHECKING] a v0.1.0
+[CHECKING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_backtracking_ok_http() {
+ let _server = setup_http();
+ update_backtracking_ok();
+}
+
+#[cargo_test]
+fn update_backtracking_ok_git() {
+ update_backtracking_ok();
+}
+
+fn update_backtracking_ok() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ webdriver = "0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("webdriver", "0.1.0")
+ .dep("hyper", "0.6")
+ .publish();
+ Package::new("hyper", "0.6.5")
+ .dep("openssl", "0.1")
+ .dep("cookie", "0.1")
+ .publish();
+ Package::new("cookie", "0.1.0")
+ .dep("openssl", "0.1")
+ .publish();
+ Package::new("openssl", "0.1.0").publish();
+
+ p.cargo("generate-lockfile").run();
+
+ Package::new("openssl", "0.1.1").publish();
+ Package::new("hyper", "0.6.6")
+ .dep("openssl", "0.1.1")
+ .dep("cookie", "0.1.0")
+ .publish();
+
+ p.cargo("update -p hyper")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] hyper v0.6.5 -> v0.6.6
+[UPDATING] openssl v0.1.0 -> v0.1.1
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_multiple_packages_http() {
+ let _server = setup_http();
+ update_multiple_packages();
+}
+
+#[cargo_test]
+fn update_multiple_packages_git() {
+ update_multiple_packages();
+}
+
+fn update_multiple_packages() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ a = "*"
+ b = "*"
+ c = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("a", "0.1.0").publish();
+ Package::new("b", "0.1.0").publish();
+ Package::new("c", "0.1.0").publish();
+
+ p.cargo("fetch").run();
+
+ Package::new("a", "0.1.1").publish();
+ Package::new("b", "0.1.1").publish();
+ Package::new("c", "0.1.1").publish();
+
+ p.cargo("update -pa -pb")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] a v0.1.0 -> v0.1.1
+[UPDATING] b v0.1.0 -> v0.1.1
+",
+ )
+ .run();
+
+ p.cargo("update -pb -pc")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] c v0.1.0 -> v0.1.1
+",
+ )
+ .run();
+
+ p.cargo("check")
+ .with_stderr_contains("[DOWNLOADED] a v0.1.1 (registry `dummy-registry`)")
+ .with_stderr_contains("[DOWNLOADED] b v0.1.1 (registry `dummy-registry`)")
+ .with_stderr_contains("[DOWNLOADED] c v0.1.1 (registry `dummy-registry`)")
+ .with_stderr_contains("[CHECKING] a v0.1.1")
+ .with_stderr_contains("[CHECKING] b v0.1.1")
+ .with_stderr_contains("[CHECKING] c v0.1.1")
+ .with_stderr_contains("[CHECKING] foo v0.5.0 ([..])")
+ .run();
+}
+
+#[cargo_test]
+fn bundled_crate_in_registry_http() {
+ let _server = setup_http();
+ bundled_crate_in_registry();
+}
+
+#[cargo_test]
+fn bundled_crate_in_registry_git() {
+ bundled_crate_in_registry();
+}
+
+fn bundled_crate_in_registry() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+ baz = "0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.1.0").publish();
+ Package::new("baz", "0.1.0")
+ .dep("bar", "0.1.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar", version = "0.1.0" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "")
+ .publish();
+
+ p.cargo("run").run();
+}
+
+#[cargo_test]
+fn update_same_prefix_oh_my_how_was_this_a_bug_http() {
+ let _server = setup_http();
+ update_same_prefix_oh_my_how_was_this_a_bug();
+}
+
+#[cargo_test]
+fn update_same_prefix_oh_my_how_was_this_a_bug_git() {
+ update_same_prefix_oh_my_how_was_this_a_bug();
+}
+
+fn update_same_prefix_oh_my_how_was_this_a_bug() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "ugh"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = "0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("foobar", "0.2.0").publish();
+ Package::new("foo", "0.1.0")
+ .dep("foobar", "0.2.0")
+ .publish();
+
+ p.cargo("generate-lockfile").run();
+ p.cargo("update -pfoobar --precise=0.2.0").run();
+}
+
+#[cargo_test]
+fn use_semver_http() {
+ let _server = setup_http();
+ use_semver();
+}
+
+#[cargo_test]
+fn use_semver_git() {
+ use_semver();
+}
+
+fn use_semver() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = "1.2.3-alpha.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("foo", "1.2.3-alpha.0").publish();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn use_semver_package_incorrectly_http() {
+ let _server = setup_http();
+ use_semver_package_incorrectly();
+}
+
+#[cargo_test]
+fn use_semver_package_incorrectly_git() {
+ use_semver_package_incorrectly();
+}
+
+fn use_semver_package_incorrectly() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.1-alpha.0"
+ authors = []
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ a = { version = "^0.1", path = "../a" }
+ "#,
+ )
+ .file("a/src/main.rs", "fn main() {}")
+ .file("b/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: no matching package found
+searched package name: `a`
+prerelease package needs to be specified explicitly
+a = { version = \"0.1.1-alpha.0\" }
+location searched: [..]
+required by package `b v0.1.0 ([..])`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn only_download_relevant_http() {
+ let _server = setup_http();
+ only_download_relevant();
+}
+
+#[cargo_test]
+fn only_download_relevant_git() {
+ only_download_relevant();
+}
+
+fn only_download_relevant() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [target.foo.dependencies]
+ foo = "*"
+ [dev-dependencies]
+ bar = "*"
+ [dependencies]
+ baz = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("foo", "0.1.0").publish();
+ Package::new("bar", "0.1.0").publish();
+ Package::new("baz", "0.1.0").publish();
+
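+    // Only the regular dependency `baz` should be downloaded below; the
+    // `[target.foo]` dependency and the dev-dependency are not needed for this `check`.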
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.1.0 ([..])
+[CHECKING] baz v0.1.0
+[CHECKING] bar v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn resolve_and_backtracking_http() {
+ let _server = setup_http();
+ resolve_and_backtracking();
+}
+
+#[cargo_test]
+fn resolve_and_backtracking_git() {
+ resolve_and_backtracking();
+}
+
+fn resolve_and_backtracking() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("foo", "0.1.1")
+ .feature_dep("bar", "0.1", &["a", "b"])
+ .publish();
+ Package::new("foo", "0.1.0").publish();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn upstream_warnings_on_extra_verbose_http() {
+ let _server = setup_http();
+ upstream_warnings_on_extra_verbose();
+}
+
+#[cargo_test]
+fn upstream_warnings_on_extra_verbose_git() {
+ upstream_warnings_on_extra_verbose();
+}
+
+fn upstream_warnings_on_extra_verbose() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("foo", "0.1.0")
+ .file("src/lib.rs", "fn unused() {}")
+ .publish();
+
+ p.cargo("check -vv")
+ .with_stderr_contains("[WARNING] [..]unused[..]")
+ .run();
+}
+
+#[cargo_test]
+fn disallow_network_http() {
+ let _server = setup_http();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check --frozen")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] failed to get `foo` as a dependency of package `bar v0.5.0 ([..])`
+
+Caused by:
+ failed to query replaced source registry `crates-io`
+
+Caused by:
+ attempting to make an HTTP request, but --frozen was specified
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn disallow_network_git() {
+ let _server = RegistryBuilder::new().build();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = "*"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check --frozen")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to get `foo` as a dependency of package `bar v0.5.0 [..]`
+
+Caused by:
+ failed to load source for dependency `foo`
+
+Caused by:
+ Unable to update registry `crates-io`
+
+Caused by:
+ failed to update replaced source registry `crates-io`
+
+Caused by:
+ attempting to make an HTTP request, but --frozen was specified
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn add_dep_dont_update_registry_http() {
+ let _server = setup_http();
+ add_dep_dont_update_registry();
+}
+
+#[cargo_test]
+fn add_dep_dont_update_registry_git() {
+ add_dep_dont_update_registry();
+}
+
+fn add_dep_dont_update_registry() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ baz = { path = "baz" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ remote = "0.3"
+ "#,
+ )
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ Package::new("remote", "0.3.4").publish();
+
+ p.cargo("check").run();
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ baz = { path = "baz" }
+ remote = "0.3"
+ "#,
+ );
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.5.0 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bump_version_dont_update_registry_http() {
+ let _server = setup_http();
+ bump_version_dont_update_registry();
+}
+
+#[cargo_test]
+fn bump_version_dont_update_registry_git() {
+ bump_version_dont_update_registry();
+}
+
+fn bump_version_dont_update_registry() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ baz = { path = "baz" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ remote = "0.3"
+ "#,
+ )
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ Package::new("remote", "0.3.4").publish();
+
+ p.cargo("check").run();
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.6.0"
+ authors = []
+
+ [dependencies]
+ baz = { path = "baz" }
+ "#,
+ );
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.6.0 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn toml_lies_but_index_is_truth_http() {
+ let _server = setup_http();
+ toml_lies_but_index_is_truth();
+}
+
+#[cargo_test]
+fn toml_lies_but_index_is_truth_git() {
+ toml_lies_but_index_is_truth();
+}
+
+fn toml_lies_but_index_is_truth() {
+ Package::new("foo", "0.2.0").publish();
+ Package::new("bar", "0.3.0")
+ .dep("foo", "0.2.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.3.0"
+ authors = []
+
+ [dependencies]
+ foo = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "extern crate foo;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.3"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -v").run();
+}
+
+#[cargo_test]
+fn vv_prints_warnings_http() {
+ let _server = setup_http();
+ vv_prints_warnings();
+}
+
+#[cargo_test]
+fn vv_prints_warnings_git() {
+ vv_prints_warnings();
+}
+
+fn vv_prints_warnings() {
+ Package::new("foo", "0.2.0")
+ .file(
+ "src/lib.rs",
+ "#![deny(warnings)] fn foo() {} // unused function",
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "fo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = "0.2"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -vv").run();
+}
+
+#[cargo_test]
+fn bad_and_or_malicious_packages_rejected_http() {
+ let _server = setup_http();
+ bad_and_or_malicious_packages_rejected();
+}
+
+#[cargo_test]
+fn bad_and_or_malicious_packages_rejected_git() {
+ bad_and_or_malicious_packages_rejected();
+}
+
+fn bad_and_or_malicious_packages_rejected() {
+ Package::new("foo", "0.2.0")
+ .extra_file("foo-0.1.0/src/lib.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "fo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = "0.2"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -vv")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+error: failed to download [..]
+
+Caused by:
+ failed to unpack [..]
+
+Caused by:
+ [..] contains a file at \"foo-0.1.0/src/lib.rs\" which isn't under \"foo-0.2.0\"
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn git_init_templatedir_missing_http() {
+ let _server = setup_http();
+ git_init_templatedir_missing();
+}
+
+#[cargo_test]
+fn git_init_templatedir_missing_git() {
+ git_init_templatedir_missing();
+}
+
+fn git_init_templatedir_missing() {
+ Package::new("foo", "0.2.0").dep("bar", "*").publish();
+ Package::new("bar", "0.2.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "fo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = "0.2"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").run();
+
+ remove_dir_all(paths::home().join(".cargo/registry")).unwrap();
+ fs::write(
+ paths::home().join(".gitconfig"),
+ r#"
+ [init]
+ templatedir = nowhere
+ "#,
+ )
+ .unwrap();
+
+ p.cargo("check").run();
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn rename_deps_and_features_http() {
+ let _server = setup_http();
+ rename_deps_and_features();
+}
+
+#[cargo_test]
+fn rename_deps_and_features_git() {
+ rename_deps_and_features();
+}
+
+fn rename_deps_and_features() {
+ Package::new("foo", "0.1.0")
+ .file("src/lib.rs", "pub fn f1() {}")
+ .publish();
+ Package::new("foo", "0.2.0")
+ .file("src/lib.rs", "pub fn f2() {}")
+ .publish();
+ Package::new("bar", "0.2.0")
+ .add_dep(
+ Dependency::new("foo01", "0.1.0")
+ .package("foo")
+ .optional(true),
+ )
+ .add_dep(Dependency::new("foo02", "0.2.0").package("foo"))
+ .feature("another", &["foo01"])
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate foo02;
+ #[cfg(feature = "foo01")]
+ extern crate foo01;
+
+ pub fn foo() {
+ foo02::f2();
+ #[cfg(feature = "foo01")]
+ foo01::f1();
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = "0.2"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
+ extern crate bar;
+ fn main() { bar::foo(); }
+ ",
+ )
+ .build();
+
+ p.cargo("check").run();
+ p.cargo("check --features bar/foo01").run();
+ p.cargo("check --features bar/another").run();
+}
+
+#[cargo_test]
+fn ignore_invalid_json_lines_http() {
+ let _server = setup_http();
+ ignore_invalid_json_lines();
+}
+
+#[cargo_test]
+fn ignore_invalid_json_lines_git() {
+ ignore_invalid_json_lines();
+}
+
+fn ignore_invalid_json_lines() {
+ Package::new("foo", "0.1.0").publish();
+ Package::new("foo", "0.1.1").invalid_json(true).publish();
+ Package::new("foo", "0.2.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = '0.1.0'
+ foo02 = { version = '0.2.0', package = 'foo' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn readonly_registry_still_works_http() {
+ let _server = setup_http();
+ readonly_registry_still_works();
+}
+
+#[cargo_test]
+fn readonly_registry_still_works_git() {
+ readonly_registry_still_works();
+}
+
+fn readonly_registry_still_works() {
+ Package::new("foo", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ foo = '0.1.0'
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+ p.cargo("fetch --locked").run();
+ chmod_readonly(&paths::home(), true);
+ p.cargo("check").run();
+ // make sure we un-readonly the files afterwards so "cargo clean" can remove them (#6934)
+ chmod_readonly(&paths::home(), false);
+
+ fn chmod_readonly(path: &Path, readonly: bool) {
+ for entry in t!(path.read_dir()) {
+ let entry = t!(entry);
+ let path = entry.path();
+ if t!(entry.file_type()).is_dir() {
+ chmod_readonly(&path, readonly);
+ } else {
+ set_readonly(&path, readonly);
+ }
+ }
+ set_readonly(path, readonly);
+ }
+
+ fn set_readonly(path: &Path, readonly: bool) {
+ let mut perms = t!(path.metadata()).permissions();
+ perms.set_readonly(readonly);
+ t!(fs::set_permissions(path, perms));
+ }
+}
+
+#[cargo_test]
+fn registry_index_rejected_http() {
+ let _server = setup_http();
+ registry_index_rejected();
+}
+
+#[cargo_test]
+fn registry_index_rejected_git() {
+ registry_index_rejected();
+}
+
+fn registry_index_rejected() {
+ Package::new("dep", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [registry]
+ index = "https://example.com/"
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+ the `registry.index` config value is no longer supported
+ Use `[source]` replacement to alter the default index for crates.io.
+",
+ )
+ .run();
+
+ p.cargo("login")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] the `registry.index` config value is no longer supported
+Use `[source]` replacement to alter the default index for crates.io.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn package_lock_inside_package_is_overwritten() {
+ let registry = registry::init();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = ">= 0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1")
+ .file("src/lib.rs", "")
+ .file(".cargo-ok", "")
+ .publish();
+
+ p.cargo("check").run();
+
+ let id = SourceId::for_registry(registry.index_url()).unwrap();
+ let hash = cargo::util::hex::short_hash(&id);
+ let ok = cargo_home()
+ .join("registry")
+ .join("src")
+ .join(format!("-{}", hash))
+ .join("bar-0.0.1")
+ .join(".cargo-ok");
+
+ assert_eq!(ok.metadata().unwrap().len(), 2);
+}
+
+#[cargo_test]
+fn package_lock_as_a_symlink_inside_package_is_overwritten() {
+ let registry = registry::init();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = ">= 0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1")
+ .file("src/lib.rs", "pub fn f() {}")
+ .symlink(".cargo-ok", "src/lib.rs")
+ .publish();
+
+ p.cargo("check").run();
+
+ let id = SourceId::for_registry(registry.index_url()).unwrap();
+ let hash = cargo::util::hex::short_hash(&id);
+ let pkg_root = cargo_home()
+ .join("registry")
+ .join("src")
+ .join(format!("-{}", hash))
+ .join("bar-0.0.1");
+ let ok = pkg_root.join(".cargo-ok");
+ let librs = pkg_root.join("src/lib.rs");
+
+ // Is correctly overwritten and doesn't affect the file linked to
+ assert_eq!(ok.metadata().unwrap().len(), 2);
+ assert_eq!(fs::read_to_string(librs).unwrap(), "pub fn f() {}");
+}
+
+#[cargo_test]
+fn ignores_unknown_index_version_http() {
+ let _server = setup_http();
+ ignores_unknown_index_version();
+}
+
+#[cargo_test]
+fn ignores_unknown_index_version_git() {
+ ignores_unknown_index_version();
+}
+
+fn ignores_unknown_index_version() {
+ // If the version field is not understood, it is ignored.
+ Package::new("bar", "1.0.0").publish();
+ Package::new("bar", "1.0.1").schema_version(9999).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "foo v0.1.0 [..]\n\
+ └── bar v1.0.0\n\
+ ",
+ )
+ .run();
+}
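
The gate exercised above is simple: each index entry carries a schema-version field, and entries newer than what the running cargo understands are skipped, so older cargos degrade gracefully instead of erroring out. A minimal sketch of that check, not cargo's code; the constant is a placeholder:

    // Sketch only: skip index entries whose schema version is too new.
    const MAX_UNDERSTOOD_SCHEMA: u32 = 2; // placeholder value, for illustration

    fn entry_is_usable(schema_version: Option<u32>) -> bool {
        // A missing field is treated as the oldest schema version.
        schema_version.unwrap_or(1) <= MAX_UNDERSTOOD_SCHEMA
    }

With a gate like this, the `bar v1.0.1` entry published with `schema_version(9999)` is skipped and resolution falls back to `1.0.0`, which is what the `cargo tree` output asserts.
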
+
+#[cargo_test]
+fn protocol() {
+ cargo_process("install bar")
+ .with_status(101)
+ .env("CARGO_REGISTRIES_CRATES_IO_PROTOCOL", "invalid")
+ .with_stderr("[ERROR] unsupported registry protocol `invalid` (defined in environment variable `CARGO_REGISTRIES_CRATES_IO_PROTOCOL`)")
+ .run()
+}
+
+#[cargo_test]
+fn http_requires_trailing_slash() {
+ cargo_process("install bar --index sparse+https://invalid.crates.io/test")
+ .with_status(101)
+ .with_stderr("[ERROR] sparse registry url must end in a slash `/`: sparse+https://invalid.crates.io/test")
+ .run()
+}
+
+// Limit the test to debug builds so that `__CARGO_TEST_MAX_UNPACK_SIZE` will take effect.
+#[cfg(debug_assertions)]
+#[cargo_test]
+fn reach_max_unpack_size() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = ">= 0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ // Size of bar.crate is around 180 bytes.
+ Package::new("bar", "0.0.1").publish();
+
+ p.cargo("check")
+ .env("__CARGO_TEST_MAX_UNPACK_SIZE", "8") // hit 8 bytes limit and boom!
+ .env("__CARGO_TEST_MAX_UNPACK_RATIO", "0")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`)
+[ERROR] failed to download replaced source registry `crates-io`
+
+Caused by:
+ failed to unpack package `bar v0.0.1 (registry `dummy-registry`)`
+
+Caused by:
+ failed to iterate over archive
+
+Caused by:
+ maximum limit reached when reading
+",
+ )
+ .run();
+
+ // Restore to the default ratio and it should compile.
+ p.cargo("check")
+ .env("__CARGO_TEST_MAX_UNPACK_SIZE", "8")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.0.1
+[CHECKING] foo v0.0.1 ([..])
+[FINISHED] dev [..]
+",
+ )
+ .run();
+}
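
The limit tripped above is a guard against decompression bombs: unpacking fails once the archive has produced more bytes than the configured maximum. A rough sketch of the idea as a capped `Read` wrapper; this is illustrative only, and cargo's real guard also enforces the compression ratio that the test disables via `__CARGO_TEST_MAX_UNPACK_RATIO`:

    use std::io::{self, Read};

    // Illustrative size-capped reader; not cargo's implementation.
    struct LimitedReader<R> {
        inner: R,
        remaining: u64,
    }

    impl<R: Read> Read for LimitedReader<R> {
        fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
            if self.remaining == 0 {
                return Err(io::Error::new(
                    io::ErrorKind::Other,
                    "maximum limit reached when reading",
                ));
            }
            // Never hand out more than the remaining budget.
            let cap = buf.len().min(self.remaining as usize);
            let n = self.inner.read(&mut buf[..cap])?;
            self.remaining -= n as u64;
            Ok(n)
        }
    }
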
+
+#[cargo_test]
+fn sparse_retry_single() {
+ let fail_count = Mutex::new(0);
+ let _registry = RegistryBuilder::new()
+ .http_index()
+ .add_responder("/index/3/b/bar", move |req, server| {
+ let mut fail_count = fail_count.lock().unwrap();
+ if *fail_count < 2 {
+ *fail_count += 1;
+ server.internal_server_error(req)
+ } else {
+ server.index(req)
+ }
+ })
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = ">= 0.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("bar", "0.0.1").publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+warning: spurious network error (3 tries remaining): failed to get successful HTTP response from `[..]` (127.0.0.1), got 500
+body:
+internal server error
+warning: spurious network error (2 tries remaining): failed to get successful HTTP response from `[..]` (127.0.0.1), got 500
+body:
+internal server error
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`)
+[CHECKING] bar v0.0.1
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
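
The expected warnings show the bounded retry policy these tests exercise: a 500 response is treated as a spurious network error and retried, counting down from three tries remaining before the failure becomes fatal. A generic sketch of that pattern, not cargo's actual network code:

    // Sketch of a bounded-retry loop in the spirit of what the test exercises.
    fn with_retries<T, E: std::fmt::Display>(
        mut attempt: impl FnMut() -> Result<T, E>,
    ) -> Result<T, E> {
        let mut remaining = 3; // the retry budget shown in the messages above
        loop {
            match attempt() {
                Ok(value) => return Ok(value),
                Err(err) if remaining > 0 => {
                    eprintln!(
                        "warning: spurious network error ({remaining} tries remaining): {err}"
                    );
                    remaining -= 1;
                }
                Err(err) => return Err(err),
            }
        }
    }

Two injected failures therefore produce the "3 tries remaining" and "2 tries remaining" warnings and then succeed, which is exactly what the stderr assertion checks.
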
+
+#[cargo_test]
+fn sparse_retry_multiple() {
+ // Tests retry behavior of downloading lots of packages with various
+ // failure rates accessing the sparse index.
+
+    // The index into RETRIES is how many retries a package needs; the value is
+    // how many packages need that many retries. Thus 50 packages succeed on the
+    // first try, 25 need one retry, and so on.
+ const RETRIES: &[u32] = &[50, 25, 12, 6];
+
+ let pkgs: Vec<_> = RETRIES
+ .iter()
+ .enumerate()
+ .flat_map(|(retries, num)| {
+ (0..*num)
+ .into_iter()
+ .map(move |n| (retries as u32, format!("{}-{n}-{retries}", rand_prefix())))
+ })
+ .collect();
+
+ let mut builder = RegistryBuilder::new().http_index();
+ let fail_counts: Arc<Mutex<Vec<u32>>> = Arc::new(Mutex::new(vec![0; pkgs.len()]));
+ let mut cargo_toml = r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ "#
+ .to_string();
+ // The expected stderr output.
+ let mut expected = "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+"
+ .to_string();
+ for (n, (retries, name)) in pkgs.iter().enumerate() {
+ let count_clone = fail_counts.clone();
+ let retries = *retries;
+ let ab = &name[..2];
+ let cd = &name[2..4];
+ builder = builder.add_responder(format!("/index/{ab}/{cd}/{name}"), move |req, server| {
+ let mut fail_counts = count_clone.lock().unwrap();
+ if fail_counts[n] < retries {
+ fail_counts[n] += 1;
+ server.internal_server_error(req)
+ } else {
+ server.index(req)
+ }
+ });
+ write!(&mut cargo_toml, "{name} = \"1.0.0\"\n").unwrap();
+ for retry in 0..retries {
+ let remain = 3 - retry;
+ write!(
+ &mut expected,
+ "warning: spurious network error ({remain} tries remaining): \
+ failed to get successful HTTP response from \
+ `http://127.0.0.1:[..]/{ab}/{cd}/{name}` (127.0.0.1), got 500\n\
+ body:\n\
+ internal server error\n"
+ )
+ .unwrap();
+ }
+ write!(
+ &mut expected,
+ "[DOWNLOADED] {name} v1.0.0 (registry `dummy-registry`)\n"
+ )
+ .unwrap();
+ }
+ let _server = builder.build();
+ for (_, name) in &pkgs {
+ Package::new(name, "1.0.0").publish();
+ }
+ let p = project()
+ .file("Cargo.toml", &cargo_toml)
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch").with_stderr_unordered(expected).run();
+}
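
The responder paths (`/index/3/b/bar` in the single-package test and `/index/{ab}/{cd}/{name}` here) follow the crates.io index layout: one- and two-character names live under `1/` and `2/`, three-character names under `3/` plus the first letter, and longer names are bucketed by their first two and next two characters. A minimal sketch of that mapping, assuming lowercase ASCII names:

    // Sketch of the crates.io-style index path for a crate name.
    fn index_path(name: &str) -> String {
        match name.len() {
            0 => panic!("crate names cannot be empty"),
            1 => format!("1/{name}"),
            2 => format!("2/{name}"),
            3 => format!("3/{}/{name}", &name[..1]),
            _ => format!("{}/{}/{name}", &name[..2], &name[2..4]),
        }
    }

So `index_path("bar")` is `3/b/bar`, and the five-character prefixes from `rand_prefix()` guarantee every generated package lands in the general `{ab}/{cd}/{name}` case.
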
+
+#[cargo_test]
+fn dl_retry_single() {
+    // Tests retry behavior when downloading a single package, which exercises
+    // the code path where cargo blocks waiting on the download.
+ let fail_count = Mutex::new(0);
+ let _server = RegistryBuilder::new()
+ .http_index()
+ .add_responder("/dl/bar/1.0.0/download", move |req, server| {
+ let mut fail_count = fail_count.lock().unwrap();
+ if *fail_count < 2 {
+ *fail_count += 1;
+ server.internal_server_error(req)
+ } else {
+ server.dl(req)
+ }
+ })
+ .build();
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch")
+ .with_stderr("\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+warning: spurious network error (3 tries remaining): \
+ failed to get successful HTTP response from `http://127.0.0.1:[..]/dl/bar/1.0.0/download` (127.0.0.1), got 500
+body:
+internal server error
+warning: spurious network error (2 tries remaining): \
+ failed to get successful HTTP response from `http://127.0.0.1:[..]/dl/bar/1.0.0/download` (127.0.0.1), got 500
+body:
+internal server error
+[DOWNLOADED] bar v1.0.0 (registry `dummy-registry`)
+").run();
+}
+
+/// Creates a random prefix for the package names so that the different
+/// failure counts are spread roughly evenly across the index.
+fn rand_prefix() -> String {
+ use rand::Rng;
+ const CHARS: &[u8] = b"abcdefghijklmnopqrstuvwxyz";
+ let mut rng = rand::thread_rng();
+ (0..5)
+ .map(|_| CHARS[rng.gen_range(0..CHARS.len())] as char)
+ .collect()
+}
+
+#[cargo_test]
+fn dl_retry_multiple() {
+ // Tests retry behavior of downloading lots of packages with various
+ // failure rates.
+
+    // The index into RETRIES is how many retries a package needs; the value is
+    // how many packages need that many retries. Thus 50 packages succeed on the
+    // first try, 25 need one retry, and so on.
+ const RETRIES: &[u32] = &[50, 25, 12, 6];
+
+ let pkgs: Vec<_> = RETRIES
+ .iter()
+ .enumerate()
+ .flat_map(|(retries, num)| {
+ (0..*num)
+ .into_iter()
+ .map(move |n| (retries as u32, format!("{}-{n}-{retries}", rand_prefix())))
+ })
+ .collect();
+
+ let mut builder = RegistryBuilder::new().http_index();
+ let fail_counts: Arc<Mutex<Vec<u32>>> = Arc::new(Mutex::new(vec![0; pkgs.len()]));
+ let mut cargo_toml = r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ "#
+ .to_string();
+ // The expected stderr output.
+ let mut expected = "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+"
+ .to_string();
+ for (n, (retries, name)) in pkgs.iter().enumerate() {
+ let count_clone = fail_counts.clone();
+ let retries = *retries;
+ builder =
+ builder.add_responder(format!("/dl/{name}/1.0.0/download"), move |req, server| {
+ let mut fail_counts = count_clone.lock().unwrap();
+ if fail_counts[n] < retries {
+ fail_counts[n] += 1;
+ server.internal_server_error(req)
+ } else {
+ server.dl(req)
+ }
+ });
+ write!(&mut cargo_toml, "{name} = \"1.0.0\"\n").unwrap();
+ for retry in 0..retries {
+ let remain = 3 - retry;
+ write!(
+ &mut expected,
+ "warning: spurious network error ({remain} tries remaining): \
+ failed to get successful HTTP response from \
+ `http://127.0.0.1:[..]/dl/{name}/1.0.0/download` (127.0.0.1), got 500\n\
+ body:\n\
+ internal server error\n"
+ )
+ .unwrap();
+ }
+ write!(
+ &mut expected,
+ "[DOWNLOADED] {name} v1.0.0 (registry `dummy-registry`)\n"
+ )
+ .unwrap();
+ }
+ let _server = builder.build();
+ for (_, name) in &pkgs {
+ Package::new(name, "1.0.0").publish();
+ }
+ let p = project()
+ .file("Cargo.toml", &cargo_toml)
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch").with_stderr_unordered(expected).run();
+}
+
+#[cargo_test]
+fn deleted_entry() {
+ // Checks the behavior when a package is removed from the index.
+ // This is done occasionally on crates.io to handle things like
+ // copyright takedowns.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // First, test removing a single version, but leaving an older version.
+ Package::new("bar", "0.1.0").publish();
+ let bar_path = Path::new("3/b/bar");
+ let bar_reg_path = registry_path().join(&bar_path);
+ let old_index = fs::read_to_string(&bar_reg_path).unwrap();
+ Package::new("bar", "0.1.1").publish();
+ p.cargo("tree")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.1 (registry `dummy-registry`)
+",
+ )
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo)
+└── bar v0.1.1
+",
+ )
+ .run();
+
+ // Remove 0.1.1
+ fs::remove_file(paths::root().join("dl/bar/0.1.1/download")).unwrap();
+ let repo = git2::Repository::open(registry_path()).unwrap();
+ let mut index = repo.index().unwrap();
+ fs::write(&bar_reg_path, &old_index).unwrap();
+ index.add_path(&bar_path).unwrap();
+ index.write().unwrap();
+ git::commit(&repo);
+
+ // With `Cargo.lock` unchanged, it shouldn't have an impact.
+ p.cargo("tree")
+ .with_stderr("")
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo)
+└── bar v0.1.1
+",
+ )
+ .run();
+
+    // Regenerating Cargo.lock should switch to the old version.
+ fs::remove_file(p.root().join("Cargo.lock")).unwrap();
+ p.cargo("tree")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 (registry `dummy-registry`)
+",
+ )
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo)
+└── bar v0.1.0
+",
+ )
+ .run();
+
+ // Remove the package entirely.
+ fs::remove_file(paths::root().join("dl/bar/0.1.0/download")).unwrap();
+ let mut index = repo.index().unwrap();
+ index.remove(&bar_path, 0).unwrap();
+ index.write().unwrap();
+ git::commit(&repo);
+ fs::remove_file(&bar_reg_path).unwrap();
+
+ // With `Cargo.lock` unchanged, it shouldn't have an impact.
+ p.cargo("tree")
+ .with_stderr("")
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo)
+└── bar v0.1.0
+",
+ )
+ .run();
+
+ // Regenerating Cargo.lock should fail.
+ fs::remove_file(p.root().join("Cargo.lock")).unwrap();
+ p.cargo("tree")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+error: no matching package named `bar` found
+location searched: registry `crates-io`
+required by package `foo v0.1.0 ([ROOT]/foo)`
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn corrupted_ok_overwritten() {
+ // Checks what happens if .cargo-ok gets truncated, such as if the file is
+ // created, but the flush/close is interrupted.
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.0 (registry `dummy-registry`)
+",
+ )
+ .run();
+ let ok = glob::glob(
+ paths::home()
+ .join(".cargo/registry/src/*/bar-1.0.0/.cargo-ok")
+ .to_str()
+ .unwrap(),
+ )
+ .unwrap()
+ .next()
+ .unwrap()
+ .unwrap();
+ // Simulate cargo being interrupted, or filesystem corruption.
+ fs::write(&ok, "").unwrap();
+ assert_eq!(fs::read_to_string(&ok).unwrap(), "");
+ p.cargo("fetch").with_stderr("").run();
+ assert_eq!(fs::read_to_string(&ok).unwrap(), "ok");
+}
+
+#[cargo_test]
+fn not_found_permutations() {
+ // Test for querying permutations for a missing dependency.
+ let misses = Arc::new(Mutex::new(Vec::new()));
+ let misses2 = misses.clone();
+ let _registry = RegistryBuilder::new()
+ .http_index()
+ .not_found_handler(move |req, _server| {
+ let mut misses = misses2.lock().unwrap();
+ misses.push(req.url.path().to_string());
+ Response {
+ code: 404,
+ headers: vec![],
+ body: b"not found".to_vec(),
+ }
+ })
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a-b-c = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+error: no matching package named `a-b-c` found
+location searched: registry `crates-io`
+required by package `foo v0.0.1 ([ROOT]/foo)`
+",
+ )
+ .run();
+ let mut misses = misses.lock().unwrap();
+ misses.sort();
+ assert_eq!(
+ &*misses,
+ &[
+ "/index/a-/b-/a-b-c",
+ "/index/a-/b_/a-b_c",
+ "/index/a_/b-/a_b-c",
+ "/index/a_/b_/a_b_c"
+ ]
+ );
+}
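
Because the registry treats `-` and `_` as interchangeable when it comes to name collisions, cargo probes every dash/underscore spelling of a missing dependency before giving up; the sorted list asserted above is exactly those four probes for `a-b-c`. A small illustrative generator of the spellings, not cargo's own helper:

    // Illustration: enumerate the dash/underscore spellings of a crate name.
    fn separator_permutations(name: &str) -> Vec<String> {
        let parts: Vec<&str> = name.split(|c| c == '-' || c == '_').collect();
        let mut out = vec![parts[0].to_string()];
        for part in &parts[1..] {
            let mut next = Vec::new();
            for prefix in &out {
                next.push(format!("{prefix}-{part}"));
                next.push(format!("{prefix}_{part}"));
            }
            out = next;
        }
        out
    }

For `a-b-c` this yields `a-b-c`, `a-b_c`, `a_b-c`, and `a_b_c`, which map to the four index paths recorded by the 404 handler.
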
+
+#[cargo_test]
+fn default_auth_error() {
+    // Check the error message for an authentication error when a default registry is set.
+ let crates_io = RegistryBuilder::new().http_api().build();
+ let _alternative = RegistryBuilder::new().http_api().alternative().build();
+
+ paths::home().join(".cargo/credentials.toml").rm_rf();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Test output before setting the default.
+ p.cargo("publish --no-verify")
+ .replace_crates_io(crates_io.index_url())
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+error: no token found, please run `cargo login`
+or use environment variable CARGO_REGISTRY_TOKEN
+",
+ )
+ .with_status(101)
+ .run();
+
+ p.cargo("publish --no-verify --registry alternative")
+ .replace_crates_io(crates_io.index_url())
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+error: no token found for `alternative`, please run `cargo login --registry alternative`
+or use environment variable CARGO_REGISTRIES_ALTERNATIVE_TOKEN
+",
+ )
+ .with_status(101)
+ .run();
+
+ // Test the output with the default.
+ cargo_util::paths::append(
+ &cargo_home().join("config"),
+ br#"
+ [registry]
+ default = "alternative"
+ "#,
+ )
+ .unwrap();
+
+ p.cargo("publish --no-verify")
+ .replace_crates_io(crates_io.index_url())
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+error: no token found for `alternative`, please run `cargo login --registry alternative`
+or use environment variable CARGO_REGISTRIES_ALTERNATIVE_TOKEN
+",
+ )
+ .with_status(101)
+ .run();
+
+ p.cargo("publish --no-verify --registry crates-io")
+ .replace_crates_io(crates_io.index_url())
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+error: no token found, please run `cargo login --registry crates-io`
+or use environment variable CARGO_REGISTRY_TOKEN
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+const SAMPLE_HEADERS: &[&str] = &[
+ "x-amz-cf-pop: SFO53-P2",
+ "x-amz-cf-id: vEc3osJrCAXVaciNnF4Vev-hZFgnYwmNZtxMKRJ5bF6h9FTOtbTMnA==",
+ "x-cache: Hit from cloudfront",
+ "server: AmazonS3",
+ "x-amz-version-id: pvsJYY_JGsWiSETZvLJKb7DeEW5wWq1W",
+ "x-amz-server-side-encryption: AES256",
+ "content-type: text/plain",
+ "via: 1.1 bcbc5b46216015493e082cfbcf77ef10.cloudfront.net (CloudFront)",
+];
+
+#[cargo_test]
+fn debug_header_message_index() {
+ // The error message should include some headers for debugging purposes.
+ let _server = RegistryBuilder::new()
+ .http_index()
+ .add_responder("/index/3/b/bar", |_, _| Response {
+ code: 503,
+ headers: SAMPLE_HEADERS.iter().map(|s| s.to_string()).collect(),
+ body: b"Please slow down".to_vec(),
+ })
+ .build();
+ Package::new("bar", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch").with_status(101).with_stderr("\
+[UPDATING] `dummy-registry` index
+warning: spurious network error (3 tries remaining): \
+ failed to get successful HTTP response from `http://127.0.0.1:[..]/index/3/b/bar` (127.0.0.1), got 503
+body:
+Please slow down
+warning: spurious network error (2 tries remaining): \
+ failed to get successful HTTP response from `http://127.0.0.1:[..]/index/3/b/bar` (127.0.0.1), got 503
+body:
+Please slow down
+warning: spurious network error (1 tries remaining): \
+ failed to get successful HTTP response from `http://127.0.0.1:[..]/index/3/b/bar` (127.0.0.1), got 503
+body:
+Please slow down
+error: failed to get `bar` as a dependency of package `foo v0.1.0 ([ROOT]/foo)`
+
+Caused by:
+ failed to query replaced source registry `crates-io`
+
+Caused by:
+ download of 3/b/bar failed
+
+Caused by:
+ failed to get successful HTTP response from `http://127.0.0.1:[..]/index/3/b/bar` (127.0.0.1), got 503
+ debug headers:
+ x-amz-cf-pop: SFO53-P2
+ x-amz-cf-id: vEc3osJrCAXVaciNnF4Vev-hZFgnYwmNZtxMKRJ5bF6h9FTOtbTMnA==
+ x-cache: Hit from cloudfront
+ body:
+ Please slow down
+").run();
+}
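
Only three of the eight `SAMPLE_HEADERS` appear in the expected error, which suggests cargo keeps a short allow-list of CDN headers it considers useful for debugging failed requests. A hedged sketch of such a filter; the real list lives in cargo's HTTP registry code and may differ:

    // Illustration only: keep just the response headers worth echoing in errors.
    fn debug_headers(headers: &[String]) -> Vec<&str> {
        const KEEP: &[&str] = &["x-amz-cf-pop", "x-amz-cf-id", "x-cache"];
        headers
            .iter()
            .map(|h| h.as_str())
            .filter(|h| {
                let name = h.split(':').next().unwrap_or("").trim().to_ascii_lowercase();
                KEEP.contains(&name.as_str())
            })
            .collect()
    }
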
+
+#[cargo_test]
+fn debug_header_message_dl() {
+ // Same as debug_header_message_index, but for the dl endpoint which goes
+ // through a completely different code path.
+ let _server = RegistryBuilder::new()
+ .http_index()
+ .add_responder("/dl/bar/1.0.0/download", |_, _| Response {
+ code: 503,
+ headers: SAMPLE_HEADERS.iter().map(|s| s.to_string()).collect(),
+ body: b"Please slow down".to_vec(),
+ })
+ .build();
+ Package::new("bar", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("fetch").with_status(101).with_stderr("\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+warning: spurious network error (3 tries remaining): \
+ failed to get successful HTTP response from `http://127.0.0.1:[..]/dl/bar/1.0.0/download` (127.0.0.1), got 503
+body:
+Please slow down
+warning: spurious network error (2 tries remaining): \
+ failed to get successful HTTP response from `http://127.0.0.1:[..]/dl/bar/1.0.0/download` (127.0.0.1), got 503
+body:
+Please slow down
+warning: spurious network error (1 tries remaining): \
+ failed to get successful HTTP response from `http://127.0.0.1:[..]/dl/bar/1.0.0/download` (127.0.0.1), got 503
+body:
+Please slow down
+error: failed to download from `http://127.0.0.1:[..]/dl/bar/1.0.0/download`
+
+Caused by:
+ failed to get successful HTTP response from `http://127.0.0.1:[..]/dl/bar/1.0.0/download` (127.0.0.1), got 503
+ debug headers:
+ x-amz-cf-pop: SFO53-P2
+ x-amz-cf-id: vEc3osJrCAXVaciNnF4Vev-hZFgnYwmNZtxMKRJ5bF6h9FTOtbTMnA==
+ x-cache: Hit from cloudfront
+ body:
+ Please slow down
+").run();
+}
diff --git a/src/tools/cargo/tests/testsuite/registry_auth.rs b/src/tools/cargo/tests/testsuite/registry_auth.rs
new file mode 100644
index 000000000..7779e285a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/registry_auth.rs
@@ -0,0 +1,519 @@
+//! Tests for registry authentication.
+
+use cargo_test_support::registry::{Package, RegistryBuilder};
+use cargo_test_support::{project, Execs, Project};
+
+fn cargo(p: &Project, s: &str) -> Execs {
+ let mut e = p.cargo(s);
+ e.masquerade_as_nightly_cargo(&["registry-auth"])
+ .arg("-Zregistry-auth");
+ e
+}
+
+fn make_project() -> Project {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "alternative"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ Package::new("bar", "0.0.1").alternative(true).publish();
+ p
+}
+
+static SUCCESS_OUTPUT: &'static str = "\
+[UPDATING] `alternative` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `alternative`)
+[COMPILING] bar v0.0.1 (registry `alternative`)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+";
+
+#[cargo_test]
+fn requires_nightly() {
+ let _registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .http_api()
+ .build();
+
+ let p = make_project();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ r#"[UPDATING] `alternative` index
+[DOWNLOADING] crates ...
+error: failed to download from `[..]/dl/bar/0.0.1/download`
+
+Caused by:
+ failed to get successful HTTP response from `[..]` (127.0.0.1), got 401
+ body:
+ Unauthorized message from server.
+"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn simple() {
+ let _registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .http_index()
+ .build();
+
+ let p = make_project();
+ cargo(&p, "build").with_stderr(SUCCESS_OUTPUT).run();
+}
+
+#[cargo_test]
+fn simple_with_asymmetric() {
+ let _registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .http_index()
+ .token(cargo_test_support::registry::Token::rfc_key())
+ .build();
+
+ let p = make_project();
+ cargo(&p, "build").with_stderr(SUCCESS_OUTPUT).run();
+}
+
+#[cargo_test]
+fn environment_config() {
+ let registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .no_configure_registry()
+ .no_configure_token()
+ .http_index()
+ .build();
+ let p = make_project();
+ cargo(&p, "build")
+ .env(
+ "CARGO_REGISTRIES_ALTERNATIVE_INDEX",
+ registry.index_url().as_str(),
+ )
+ .env("CARGO_REGISTRIES_ALTERNATIVE_TOKEN", registry.token())
+ .with_stderr(SUCCESS_OUTPUT)
+ .run();
+}
+
+#[cargo_test]
+fn environment_token() {
+ let registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .no_configure_token()
+ .http_index()
+ .build();
+
+ let p = make_project();
+ cargo(&p, "build")
+ .env("CARGO_REGISTRIES_ALTERNATIVE_TOKEN", registry.token())
+ .with_stderr(SUCCESS_OUTPUT)
+ .run();
+}
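
The environment variables used in these tests follow cargo's config-to-environment naming convention: a config key such as `registries.alternative.token` becomes `CARGO_REGISTRIES_ALTERNATIVE_TOKEN`, uppercased with dots and dashes turned into underscores. A small sketch of that rule, for illustration only:

    // Illustration of cargo's config-key -> environment-variable naming rule.
    fn config_env_var(key: &str) -> String {
        let mut var = String::from("CARGO");
        for part in key.split('.') {
            var.push('_');
            var.push_str(&part.replace('-', "_").to_ascii_uppercase());
        }
        var
    }

The same rule explains `CARGO_REGISTRIES_ALTERNATIVE_SECRET_KEY` and `CARGO_REGISTRIES_ALTERNATIVE_SECRET_KEY_SUBJECT` in the asymmetric-token tests that follow.
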
+
+#[cargo_test]
+fn environment_token_with_asymmetric() {
+ let registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .no_configure_token()
+ .http_index()
+ .token(cargo_test_support::registry::Token::Keys(
+ "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36"
+ .to_string(),
+ None,
+ ))
+ .build();
+
+ let p = make_project();
+ cargo(&p, "build")
+ .env("CARGO_REGISTRIES_ALTERNATIVE_SECRET_KEY", registry.key())
+ .with_stderr(SUCCESS_OUTPUT)
+ .run();
+}
+
+#[cargo_test]
+fn warn_both_asymmetric_and_token() {
+ let _server = RegistryBuilder::new()
+ .alternative()
+ .no_configure_token()
+ .build();
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [registries.alternative]
+ token = "sekrit"
+ secret-key = "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36"
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ description = "foo"
+ authors = []
+ license = "MIT"
+ homepage = "https://example.com/"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify --registry alternative")
+ .masquerade_as_nightly_cargo(&["credential-process", "registry-auth"])
+ .arg("-Zregistry-auth")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] both `token` and `secret-key` were specified in the config for registry `alternative`.
+Only one of these values may be set, remove one or the other to proceed.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn warn_both_asymmetric_and_credential_process() {
+ let _server = RegistryBuilder::new()
+ .alternative()
+ .no_configure_token()
+ .build();
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [registries.alternative]
+ credential-process = "false"
+ secret-key = "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36"
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ description = "foo"
+ authors = []
+ license = "MIT"
+ homepage = "https://example.com/"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify --registry alternative")
+ .masquerade_as_nightly_cargo(&["credential-process", "registry-auth"])
+ .arg("-Zcredential-process")
+ .arg("-Zregistry-auth")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] both `credential-process` and `secret-key` were specified in the config for registry `alternative`.
+Only one of these values may be set, remove one or the other to proceed.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bad_environment_token_with_asymmetric_subject() {
+ let registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .no_configure_token()
+ .http_index()
+ .token(cargo_test_support::registry::Token::Keys(
+ "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36"
+ .to_string(),
+ None,
+ ))
+ .build();
+
+ let p = make_project();
+ cargo(&p, "build")
+ .env("CARGO_REGISTRIES_ALTERNATIVE_SECRET_KEY", registry.key())
+ .env(
+ "CARGO_REGISTRIES_ALTERNATIVE_SECRET_KEY_SUBJECT",
+ "incorrect",
+ )
+ .with_stderr_contains(
+ " token rejected for `alternative`, please run `cargo login --registry alternative`",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn bad_environment_token_with_asymmetric_incorrect_subject() {
+ let registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .no_configure_token()
+ .http_index()
+ .token(cargo_test_support::registry::Token::rfc_key())
+ .build();
+
+ let p = make_project();
+ cargo(&p, "build")
+ .env("CARGO_REGISTRIES_ALTERNATIVE_SECRET_KEY", registry.key())
+ .env(
+ "CARGO_REGISTRIES_ALTERNATIVE_SECRET_KEY_SUBJECT",
+ "incorrect",
+ )
+ .with_stderr_contains(
+ " token rejected for `alternative`, please run `cargo login --registry alternative`",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn bad_environment_token_with_incorrect_asymmetric() {
+ let _registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .no_configure_token()
+ .http_index()
+ .token(cargo_test_support::registry::Token::Keys(
+ "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36"
+ .to_string(),
+ None,
+ ))
+ .build();
+
+ let p = make_project();
+ cargo(&p, "build")
+ .env(
+ "CARGO_REGISTRIES_ALTERNATIVE_SECRET_KEY",
+ "k3.secret.9Vxr5hVlI_g_orBZN54vPz20bmB4O76wB_MVqUSuJJJqHFLwP8kdn_RY5g6J6pQG",
+ )
+ .with_stderr_contains(
+ " token rejected for `alternative`, please run `cargo login --registry alternative`",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn missing_token() {
+ let _registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .no_configure_token()
+ .http_index()
+ .build();
+
+ let p = make_project();
+ cargo(&p, "build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 ([..])`
+
+Caused by:
+ no token found for `alternative`, please run `cargo login --registry alternative`
+ or use environment variable CARGO_REGISTRIES_ALTERNATIVE_TOKEN",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn missing_token_git() {
+ let _registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .no_configure_token()
+ .build();
+
+ let p = make_project();
+ cargo(&p, "build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[ERROR] failed to download `bar v0.0.1 (registry `alternative`)`
+
+Caused by:
+ unable to get packages from source
+
+Caused by:
+ no token found for `alternative`, please run `cargo login --registry alternative`
+ or use environment variable CARGO_REGISTRIES_ALTERNATIVE_TOKEN",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn incorrect_token() {
+ let _registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .no_configure_token()
+ .http_index()
+ .build();
+
+ let p = make_project();
+ cargo(&p, "build")
+ .env("CARGO_REGISTRIES_ALTERNATIVE_TOKEN", "incorrect")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 ([..])`
+
+Caused by:
+ token rejected for `alternative`, please run `cargo login --registry alternative`
+ or use environment variable CARGO_REGISTRIES_ALTERNATIVE_TOKEN
+
+Caused by:
+ failed to get successful HTTP response from `http://[..]/index/config.json`, got 401
+ body:
+ Unauthorized message from server.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn incorrect_token_git() {
+ let _registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .no_configure_token()
+ .http_api()
+ .build();
+
+ let p = make_project();
+ cargo(&p, "build")
+ .env("CARGO_REGISTRIES_ALTERNATIVE_TOKEN", "incorrect")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[DOWNLOADING] crates ...
+[ERROR] failed to download from `http://[..]/dl/bar/0.0.1/download`
+
+Caused by:
+ failed to get successful HTTP response from `http://[..]/dl/bar/0.0.1/download` (127.0.0.1), got 401
+ body:
+ Unauthorized message from server.",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn anonymous_alt_registry() {
+ // An alternative registry that requires auth, but is not in the config.
+ let registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .no_configure_token()
+ .no_configure_registry()
+ .http_index()
+ .build();
+
+ let p = make_project();
+ cargo(&p, &format!("install --index {} bar", registry.index_url()))
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[ERROR] no token found for `[..]`
+consider setting up an alternate registry in Cargo's configuration
+as described by https://doc.rust-lang.org/cargo/reference/registries.html
+
+[registries]
+my-registry = { index = \"[..]\" }
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn login() {
+ let _registry = RegistryBuilder::new()
+ .alternative()
+ .no_configure_token()
+ .auth_required()
+ .http_index()
+ .build();
+
+ let p = make_project();
+ cargo(&p, "login --registry alternative")
+ .with_stdout("please paste the token found on https://test-registry-login/me below")
+ .with_stdin("sekrit")
+ .run();
+}
+
+#[cargo_test]
+fn login_existing_token() {
+ let _registry = RegistryBuilder::new()
+ .alternative()
+ .auth_required()
+ .http_index()
+ .build();
+
+ let p = make_project();
+ cargo(&p, "login --registry alternative")
+ .with_stdout("please paste the token found on file://[..]/me below")
+ .with_stdin("sekrit")
+ .run();
+}
+
+#[cargo_test]
+fn duplicate_index() {
+ let server = RegistryBuilder::new()
+ .alternative()
+ .no_configure_token()
+ .auth_required()
+ .build();
+ let p = make_project();
+
+ // Two alternative registries with the same index.
+ cargo(&p, "build")
+ .env(
+ "CARGO_REGISTRIES_ALTERNATIVE1_INDEX",
+ server.index_url().as_str(),
+ )
+ .env(
+ "CARGO_REGISTRIES_ALTERNATIVE2_INDEX",
+ server.index_url().as_str(),
+ )
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[ERROR] failed to download `bar v0.0.1 (registry `alternative`)`
+
+Caused by:
+ unable to get packages from source
+
+Caused by:
+ multiple registries are configured with the same index url \
+ 'registry+file://[..]/alternative-registry': alternative1, alternative2
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/rename_deps.rs b/src/tools/cargo/tests/testsuite/rename_deps.rs
new file mode 100644
index 000000000..f2744049b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/rename_deps.rs
@@ -0,0 +1,391 @@
+//! Tests for renaming dependencies.
+
+use cargo_test_support::git;
+use cargo_test_support::paths;
+use cargo_test_support::registry::{self, Package};
+use cargo_test_support::{basic_manifest, project};
+
+#[cargo_test]
+fn rename_dependency() {
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.2.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { version = "0.1.0" }
+ baz = { version = "0.2.0", package = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar; extern crate baz;")
+ .build();
+
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn rename_with_different_names() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ baz = { path = "bar", package = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate baz;")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "random_name"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+}
+
+#[cargo_test]
+fn lots_of_names() {
+ registry::alt_init();
+ Package::new("foo", "0.1.0")
+ .file("src/lib.rs", "pub fn foo1() {}")
+ .publish();
+ Package::new("foo", "0.2.0")
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
+ Package::new("foo", "0.1.0")
+ .file("src/lib.rs", "pub fn foo2() {}")
+ .alternative(true)
+ .publish();
+
+ let g = git::repo(&paths::root().join("another"))
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/lib.rs", "pub fn foo3() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "test"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = "0.2"
+ foo1 = {{ version = "0.1", package = "foo" }}
+ foo2 = {{ version = "0.1", registry = "alternative", package = "foo" }}
+ foo3 = {{ git = '{}', package = "foo" }}
+ foo4 = {{ path = "foo", package = "foo" }}
+ "#,
+ g.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate foo;
+ extern crate foo1;
+ extern crate foo2;
+ extern crate foo3;
+ extern crate foo4;
+
+ pub fn foo() {
+ foo::foo();
+ foo1::foo1();
+ foo2::foo2();
+ foo3::foo3();
+ foo4::foo4();
+ }
+ ",
+ )
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "pub fn foo4() {}")
+ .build();
+
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn rename_and_patch() {
+ Package::new("foo", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { version = "0.1", package = "foo" }
+
+ [patch.crates-io]
+ foo = { path = "foo" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::foo(); }",
+ )
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn rename_twice() {
+ Package::new("foo", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { version = "0.1", package = "foo" }
+ [build-dependencies]
+ foo = { version = "0.1" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.1.0 (registry [..])
+error: the crate `test v0.1.0 ([CWD])` depends on crate `foo v0.1.0` multiple times with different names
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rename_affects_fingerprint() {
+ Package::new("foo", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = { version = "0.1", package = "foo" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate foo;")
+ .build();
+
+ p.cargo("build -v").run();
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { version = "0.1", package = "foo" }
+ "#,
+ );
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr_contains("[..]can't find crate for `foo`")
+ .run();
+}
+
+#[cargo_test]
+fn can_run_doc_tests() {
+ Package::new("bar", "0.1.0").publish();
+ Package::new("bar", "0.2.0").publish();
+
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = { version = "0.1.0" }
+ baz = { version = "0.2.0", package = "bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate bar;
+ extern crate baz;
+ ",
+ )
+ .build();
+
+ foo.cargo("test -v")
+ .with_stderr_contains(
+ "\
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]--test [..]src/lib.rs \
+ [..] \
+ --extern bar=[CWD]/target/debug/deps/libbar-[..].rlib \
+ --extern baz=[CWD]/target/debug/deps/libbar-[..].rlib \
+ [..]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn features_still_work() {
+ Package::new("foo", "0.1.0").publish();
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ p1 = { path = 'a', features = ['b'] }
+ p2 = { path = 'b' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "p1"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ b = { version = "0.1", package = "foo", optional = true }
+ "#,
+ )
+ .file("a/src/lib.rs", "extern crate b;")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "p2"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ b = { version = "0.1", package = "bar", optional = true }
+
+ [features]
+ default = ['b']
+ "#,
+ )
+ .file("b/src/lib.rs", "extern crate b;")
+ .build();
+
+ p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn features_not_working() {
+ Package::new("foo", "0.1.0").publish();
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ a = { path = 'a', package = 'p1', optional = true }
+
+ [features]
+ default = ['p1']
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("p1", "0.1.0"))
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ feature `default` includes `p1` which is neither a dependency nor another feature
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rename_with_dash() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "qwerty"
+ version = "0.1.0"
+
+ [dependencies]
+ foo-bar = { path = 'a', package = 'a' }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate foo_bar;")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+}
diff --git a/src/tools/cargo/tests/testsuite/replace.rs b/src/tools/cargo/tests/testsuite/replace.rs
new file mode 100644
index 000000000..c11c49330
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/replace.rs
@@ -0,0 +1,1300 @@
+//! Tests for `[replace]` table source replacement.
+
+use cargo_test_support::git;
+use cargo_test_support::paths;
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_manifest, project};
+
+#[cargo_test]
+fn override_simple() {
+ Package::new("bar", "0.1.0").publish();
+
+ let bar = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{}' }}
+ "#,
+ bar.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[UPDATING] git repository `[..]`
+[CHECKING] bar v0.1.0 (file://[..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn override_with_features() {
+ Package::new("bar", "0.1.0").publish();
+
+ let bar = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{}', features = ["some_feature"] }}
+ "#,
+ bar.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[UPDATING] git repository `[..]`
+[WARNING] replacement for `bar` uses the features mechanism. default-features and features \
+will not take effect because the replacement dependency does not support this mechanism
+[CHECKING] bar v0.1.0 (file://[..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn override_with_setting_default_features() {
+ Package::new("bar", "0.1.0").publish();
+
+ let bar = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{}', default-features = false, features = ["none_default_feature"] }}
+ "#,
+ bar.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[UPDATING] git repository `[..]`
+[WARNING] replacement for `bar` uses the features mechanism. default-features and features \
+will not take effect because the replacement dependency does not support this mechanism
+[CHECKING] bar v0.1.0 (file://[..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn missing_version() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ bar = { git = 'https://example.com' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ replacements must specify a version to replace, but `[..]bar` does not
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_semver_version() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+
+ [replace]
+ "bar:*" = { git = 'https://example.com' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ replacements must specify a valid semver version to replace, but `bar:*` does not
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn different_version() {
+ Package::new("bar", "0.2.0").publish();
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ "bar:0.1.0" = "0.2.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ replacements cannot specify a version requirement, but found one for [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn transitive() {
+ Package::new("bar", "0.1.0").publish();
+ Package::new("baz", "0.2.0")
+ .dep("bar", "0.1.0")
+ .file("src/lib.rs", "extern crate bar; fn baz() { bar::bar(); }")
+ .publish();
+
+ let foo = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ baz = "0.2.0"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{}' }}
+ "#,
+ foo.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[UPDATING] git repository `[..]`
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.2.0 (registry [..])
+[CHECKING] bar v0.1.0 (file://[..])
+[CHECKING] baz v0.2.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn persists_across_rebuilds() {
+ Package::new("bar", "0.1.0").publish();
+
+ let foo = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{}' }}
+ "#,
+ foo.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[UPDATING] git repository `file://[..]`
+[CHECKING] bar v0.1.0 (file://[..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn replace_registry_with_path() {
+ Package::new("bar", "0.1.0").publish();
+
+ let _ = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ "bar:0.1.0" = { path = "../bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "extern crate bar; pub fn foo() { bar::bar(); }",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[CHECKING] bar v0.1.0 ([ROOT][..]/bar)
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn use_a_spec_to_select() {
+ Package::new("baz", "0.1.1")
+ .file("src/lib.rs", "pub fn baz1() {}")
+ .publish();
+ Package::new("baz", "0.2.0").publish();
+ Package::new("bar", "0.1.1")
+ .dep("baz", "0.2")
+ .file(
+ "src/lib.rs",
+ "extern crate baz; pub fn bar() { baz::baz3(); }",
+ )
+ .publish();
+
+ let foo = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("baz", "0.2.0"))
+ .file("src/lib.rs", "pub fn baz3() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+ baz = "0.1"
+
+ [replace]
+ "baz:0.2.0" = {{ git = '{}' }}
+ "#,
+ foo.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate bar;
+ extern crate baz;
+
+ pub fn local() {
+ baz::baz1();
+ bar::bar();
+ }
+ ",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[UPDATING] git repository `[..]`
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[DOWNLOADED] [..]
+[CHECKING] [..]
+[CHECKING] [..]
+[CHECKING] [..]
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn override_adds_some_deps() {
+ Package::new("baz", "0.1.1").publish();
+ Package::new("bar", "0.1.0").publish();
+
+ let foo = git::repo(&paths::root().join("override"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ baz = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{}' }}
+ "#,
+ foo.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[UPDATING] git repository `[..]`
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.1.1 (registry [..])
+[CHECKING] baz v0.1.1
+[CHECKING] bar v0.1.0 ([..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("check").with_stdout("").run();
+
+ Package::new("baz", "0.1.2").publish();
+ p.cargo("update -p")
+ .arg(&format!("{}#bar", foo.url()))
+ .with_stderr(
+ "\
+[UPDATING] git repository `file://[..]`
+[UPDATING] `dummy-registry` index
+",
+ )
+ .run();
+ p.cargo("update -p https://github.com/rust-lang/crates.io-index#bar")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+",
+ )
+ .run();
+
+ p.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn locked_means_locked_yes_no_seriously_i_mean_locked() {
+ // This, in theory, exercises #2041.
+ Package::new("baz", "0.1.0").publish();
+ Package::new("baz", "0.2.0").publish();
+ Package::new("bar", "0.1.0").publish();
+
+ let foo = git::repo(&paths::root().join("override"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ baz = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+ baz = "0.1"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{}' }}
+ "#,
+ foo.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stdout("").run();
+}
+
+#[cargo_test]
+fn override_wrong_name() {
+ Package::new("baz", "0.1.0").publish();
+
+ let foo = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ baz = "0.1"
+
+ [replace]
+ "baz:0.1.0" = {{ git = '{}' }}
+ "#,
+ foo.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[UPDATING] git repository [..]
+[ERROR] failed to get `baz` as a dependency of package `foo v0.0.1 ([..])`
+
+Caused by:
+ no matching package for override `[..]baz@0.1.0` found
+ location searched: file://[..]
+ version required: =0.1.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn override_with_nothing() {
+ Package::new("bar", "0.1.0").publish();
+
+ let foo = git::repo(&paths::root().join("override"))
+ .file("src/lib.rs", "")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{}' }}
+ "#,
+ foo.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[UPDATING] git repository [..]
+[ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 ([..])`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update file://[..]
+
+Caused by:
+ Could not find Cargo.toml in `[..]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn override_wrong_version() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [replace]
+ "bar:0.1.0" = { git = 'https://example.com', version = '0.2.0' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ replacements cannot specify a version requirement, but found one for `[..]bar@0.1.0`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn multiple_specs() {
+ Package::new("bar", "0.1.0").publish();
+
+ let bar = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{0}' }}
+
+ [replace."https://github.com/rust-lang/crates.io-index#bar:0.1.0"]
+ git = '{0}'
+ "#,
+ bar.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[UPDATING] git repository [..]
+[ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 ([..])`
+
+Caused by:
+ overlapping replacement specifications found:
+
+ * [..]
+ * [..]
+
+ both specifications match: bar v0.1.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_override_dep() {
+ Package::new("bar", "0.1.0").publish();
+
+ let bar = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{0}' }}
+ "#,
+ bar.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("test -p bar")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+error: There are multiple `bar` packages in your project, and the [..]
+Please re-run this command with [..]
+ [..]#bar@0.1.0
+ [..]#bar@0.1.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update() {
+ Package::new("bar", "0.1.0").publish();
+
+ let bar = git::repo(&paths::root().join("override"))
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{0}' }}
+ "#,
+ bar.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+ p.cargo("update")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] git repository `[..]`
+",
+ )
+ .run();
+}
+
+// foo -> near -> far
+// near is overridden with itself
+#[cargo_test]
+fn no_override_self() {
+ let deps = git::repo(&paths::root().join("override"))
+ .file("far/Cargo.toml", &basic_manifest("far", "0.1.0"))
+ .file("far/src/lib.rs", "")
+ .file(
+ "near/Cargo.toml",
+ r#"
+ [package]
+ name = "near"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ far = { path = "../far" }
+ "#,
+ )
+ .file("near/src/lib.rs", "#![no_std] pub extern crate far;")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ near = {{ git = '{0}' }}
+
+ [replace]
+ "near:0.1.0" = {{ git = '{0}' }}
+ "#,
+ deps.url()
+ ),
+ )
+ .file("src/lib.rs", "#![no_std] pub extern crate near;")
+ .build();
+
+ p.cargo("check --verbose").run();
+}
+
+#[cargo_test]
+fn override_an_override() {
+ Package::new("chrono", "0.2.0")
+ .dep("serde", "< 0.9")
+ .publish();
+ Package::new("serde", "0.7.0")
+ .file("src/lib.rs", "pub fn serde07() {}")
+ .publish();
+ Package::new("serde", "0.8.0")
+ .file("src/lib.rs", "pub fn serde08() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ chrono = "0.2"
+ serde = "0.8"
+
+ [replace]
+ "chrono:0.2.0" = { path = "chrono" }
+ "serde:0.8.0" = { path = "serde" }
+ "#,
+ )
+ .file(
+ "Cargo.lock",
+ r#"
+ [[package]]
+ name = "foo"
+ version = "0.0.1"
+ dependencies = [
+ "chrono 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ ]
+
+ [[package]]
+ name = "chrono"
+ version = "0.2.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ replace = "chrono 0.2.0"
+
+ [[package]]
+ name = "chrono"
+ version = "0.2.0"
+ dependencies = [
+ "serde 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ ]
+
+ [[package]]
+ name = "serde"
+ version = "0.7.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+
+ [[package]]
+ name = "serde"
+ version = "0.8.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ replace = "serde 0.8.0"
+
+ [[package]]
+ name = "serde"
+ version = "0.8.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate chrono;
+ extern crate serde;
+
+ pub fn foo() {
+ chrono::chrono();
+ serde::serde08_override();
+ }
+ ",
+ )
+ .file(
+ "chrono/Cargo.toml",
+ r#"
+ [package]
+ name = "chrono"
+ version = "0.2.0"
+ authors = []
+
+ [dependencies]
+ serde = "< 0.9"
+ "#,
+ )
+ .file(
+ "chrono/src/lib.rs",
+ "
+ extern crate serde;
+ pub fn chrono() {
+ serde::serde07();
+ }
+ ",
+ )
+ .file("serde/Cargo.toml", &basic_manifest("serde", "0.8.0"))
+ .file("serde/src/lib.rs", "pub fn serde08_override() {}")
+ .build();
+
+ p.cargo("check -v").run();
+}
+
+#[cargo_test]
+fn overriding_nonexistent_no_spurious() {
+ Package::new("bar", "0.1.0").dep("baz", "0.1").publish();
+ Package::new("baz", "0.1.0").publish();
+
+ let bar = git::repo(&paths::root().join("override"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ baz = { path = "baz" }
+ "#,
+ )
+ .file("src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ "bar:0.1.0" = {{ git = '{url}' }}
+ "baz:0.1.0" = {{ git = '{url}' }}
+ "#,
+ url = bar.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] package replacement is not used: [..]baz@0.1.0
+[FINISHED] [..]
+",
+ )
+ .with_stdout("")
+ .run();
+}
+
+#[cargo_test]
+fn no_warnings_when_replace_is_used_in_another_workspace_member() {
+ Package::new("bar", "0.1.0").publish();
+ Package::new("baz", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = [ "first_crate", "second_crate"]
+
+ [replace]
+ "bar:0.1.0" = { path = "local_bar" }
+ "#,
+ )
+ .file(
+ "first_crate/Cargo.toml",
+ r#"
+ [package]
+ name = "first_crate"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("first_crate/src/lib.rs", "")
+ .file(
+ "second_crate/Cargo.toml",
+ &basic_manifest("second_crate", "0.1.0"),
+ )
+ .file("second_crate/src/lib.rs", "")
+ .file("local_bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("local_bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .cwd("first_crate")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[CHECKING] bar v0.1.0 ([..])
+[CHECKING] first_crate v0.1.0 ([..])
+[FINISHED] [..]",
+ )
+ .run();
+
+ p.cargo("check")
+ .cwd("second_crate")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[CHECKING] second_crate v0.1.0 ([..])
+[FINISHED] [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn replace_to_path_dep() {
+ Package::new("bar", "0.1.0").dep("baz", "0.1").publish();
+ Package::new("baz", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [replace]
+ "bar:0.1.0" = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar;")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ baz = { path = "baz" }
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ "extern crate baz; pub fn bar() { baz::baz(); }",
+ )
+ .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("bar/baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn override_with_default_feature() {
+ Package::new("another", "0.1.0").publish();
+ Package::new("another", "0.1.1").dep("bar", "0.1").publish();
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar", default-features = false }
+ another = "0.1"
+ another2 = { path = "another2" }
+
+ [replace]
+ 'bar:0.1.0' = { path = "bar" }
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() { bar::bar(); }")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [features]
+ default = []
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ #[cfg(feature = "default")]
+ pub fn bar() {}
+ "#,
+ )
+ .file(
+ "another2/Cargo.toml",
+ r#"
+ [package]
+ name = "another2"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { version = "0.1", default-features = false }
+ "#,
+ )
+ .file("another2/src/lib.rs", "")
+ .build();
+
+ p.cargo("run").run();
+}
+
+#[cargo_test]
+fn override_plus_dep() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1"
+
+ [replace]
+ 'bar:0.1.0' = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = { path = ".." }
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("error: cyclic package dependency: [..]")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/required_features.rs b/src/tools/cargo/tests/testsuite/required_features.rs
new file mode 100644
index 000000000..ac6c9d233
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/required_features.rs
@@ -0,0 +1,1452 @@
+//! Tests for targets with `required-features`.
+
+use cargo_test_support::install::{
+ assert_has_installed_exe, assert_has_not_installed_exe, cargo_home,
+};
+use cargo_test_support::is_nightly;
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::project;
+
+#[cargo_test]
+fn build_bin_default_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["a"]
+ a = []
+
+ [[bin]]
+ name = "foo"
+ required-features = ["a"]
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ extern crate foo;
+
+ #[cfg(feature = "a")]
+ fn test() {
+ foo::foo();
+ }
+
+ fn main() {}
+ "#,
+ )
+ .file("src/lib.rs", r#"#[cfg(feature = "a")] pub fn foo() {}"#)
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+
+ p.cargo("build --no-default-features").run();
+
+ p.cargo("build --bin=foo").run();
+ assert!(p.bin("foo").is_file());
+
+ p.cargo("build --bin=foo --no-default-features")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: target `foo` in package `foo` requires the features: `a`
+Consider enabling them by passing, e.g., `--features=\"a\"`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_bin_arg_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+
+ [[bin]]
+ name = "foo"
+ required-features = ["a"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --features a").run();
+ assert!(p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn build_bin_multiple_required_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["a", "b"]
+ a = []
+ b = ["a"]
+ c = []
+
+ [[bin]]
+ name = "foo_1"
+ path = "src/foo_1.rs"
+ required-features = ["b", "c"]
+
+ [[bin]]
+ name = "foo_2"
+ path = "src/foo_2.rs"
+ required-features = ["a"]
+ "#,
+ )
+ .file("src/foo_1.rs", "fn main() {}")
+ .file("src/foo_2.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build").run();
+
+ assert!(!p.bin("foo_1").is_file());
+ assert!(p.bin("foo_2").is_file());
+
+ p.cargo("build --features c").run();
+
+ assert!(p.bin("foo_1").is_file());
+ assert!(p.bin("foo_2").is_file());
+
+ p.cargo("build --no-default-features").run();
+}
+
+#[cargo_test]
+fn build_example_default_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["a"]
+ a = []
+
+ [[example]]
+ name = "foo"
+ required-features = ["a"]
+ "#,
+ )
+ .file("examples/foo.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --example=foo").run();
+ assert!(p.bin("examples/foo").is_file());
+
+ p.cargo("build --example=foo --no-default-features")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: target `foo` in package `foo` requires the features: `a`
+Consider enabling them by passing, e.g., `--features=\"a\"`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_example_arg_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+
+ [[example]]
+ name = "foo"
+ required-features = ["a"]
+ "#,
+ )
+ .file("examples/foo.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --example=foo --features a").run();
+ assert!(p.bin("examples/foo").is_file());
+}
+
+#[cargo_test]
+fn build_example_multiple_required_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["a", "b"]
+ a = []
+ b = ["a"]
+ c = []
+
+ [[example]]
+ name = "foo_1"
+ required-features = ["b", "c"]
+
+ [[example]]
+ name = "foo_2"
+ required-features = ["a"]
+ "#,
+ )
+ .file("examples/foo_1.rs", "fn main() {}")
+ .file("examples/foo_2.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --example=foo_1")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: target `foo_1` in package `foo` requires the features: `b`, `c`
+Consider enabling them by passing, e.g., `--features=\"b c\"`
+",
+ )
+ .run();
+ p.cargo("build --example=foo_2").run();
+
+ assert!(!p.bin("examples/foo_1").is_file());
+ assert!(p.bin("examples/foo_2").is_file());
+
+ p.cargo("build --example=foo_1 --features c").run();
+ p.cargo("build --example=foo_2 --features c").run();
+
+ assert!(p.bin("examples/foo_1").is_file());
+ assert!(p.bin("examples/foo_2").is_file());
+
+ p.cargo("build --example=foo_1 --no-default-features")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: target `foo_1` in package `foo` requires the features: `b`, `c`
+Consider enabling them by passing, e.g., `--features=\"b c\"`
+",
+ )
+ .run();
+ p.cargo("build --example=foo_2 --no-default-features")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: target `foo_2` in package `foo` requires the features: `a`
+Consider enabling them by passing, e.g., `--features=\"a\"`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_default_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["a"]
+ a = []
+
+ [[test]]
+ name = "foo"
+ required-features = ["a"]
+ "#,
+ )
+ .file("tests/foo.rs", "#[test]\nfn test() {}")
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test test ... ok")
+ .run();
+
+ p.cargo("test --no-default-features")
+ .with_stderr("[FINISHED] test [unoptimized + debuginfo] target(s) in [..]")
+ .with_stdout("")
+ .run();
+
+ p.cargo("test --test=foo")
+ .with_stderr(
+ "\
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test test ... ok")
+ .run();
+
+ p.cargo("test --test=foo --no-default-features")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: target `foo` in package `foo` requires the features: `a`
+Consider enabling them by passing, e.g., `--features=\"a\"`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_arg_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+
+ [[test]]
+ name = "foo"
+ required-features = ["a"]
+ "#,
+ )
+ .file("tests/foo.rs", "#[test]\nfn test() {}")
+ .build();
+
+ p.cargo("test --features a")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test test ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_multiple_required_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["a", "b"]
+ a = []
+ b = ["a"]
+ c = []
+
+ [[test]]
+ name = "foo_1"
+ required-features = ["b", "c"]
+
+ [[test]]
+ name = "foo_2"
+ required-features = ["a"]
+ "#,
+ )
+ .file("tests/foo_1.rs", "#[test]\nfn test() {}")
+ .file("tests/foo_2.rs", "#[test]\nfn test() {}")
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo_2-[..][EXE])",
+ )
+ .with_stdout_contains("test test ... ok")
+ .run();
+
+ p.cargo("test --features c")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo_1-[..][EXE])
+[RUNNING] [..] (target/debug/deps/foo_2-[..][EXE])",
+ )
+ .with_stdout_contains_n("test test ... ok", 2)
+ .run();
+
+ p.cargo("test --no-default-features")
+ .with_stderr("[FINISHED] test [unoptimized + debuginfo] target(s) in [..]")
+ .with_stdout("")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_default_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["a"]
+ a = []
+
+ [[bench]]
+ name = "foo"
+ required-features = ["a"]
+ "#,
+ )
+ .file(
+ "benches/foo.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ #[bench]
+ fn bench(_: &mut test::Bencher) {
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test bench ... bench: [..]")
+ .run();
+
+ p.cargo("bench --no-default-features")
+ .with_stderr("[FINISHED] bench [optimized] target(s) in [..]".to_string())
+ .with_stdout("")
+ .run();
+
+ p.cargo("bench --bench=foo")
+ .with_stderr(
+ "\
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test bench ... bench: [..]")
+ .run();
+
+ p.cargo("bench --bench=foo --no-default-features")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: target `foo` in package `foo` requires the features: `a`
+Consider enabling them by passing, e.g., `--features=\"a\"`
+",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_arg_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+
+ [[bench]]
+ name = "foo"
+ required-features = ["a"]
+ "#,
+ )
+ .file(
+ "benches/foo.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ #[bench]
+ fn bench(_: &mut test::Bencher) {
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench --features a")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test bench ... bench: [..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn bench_multiple_required_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["a", "b"]
+ a = []
+ b = ["a"]
+ c = []
+
+ [[bench]]
+ name = "foo_1"
+ required-features = ["b", "c"]
+
+ [[bench]]
+ name = "foo_2"
+ required-features = ["a"]
+ "#,
+ )
+ .file(
+ "benches/foo_1.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ #[bench]
+ fn bench(_: &mut test::Bencher) {
+ }
+ "#,
+ )
+ .file(
+ "benches/foo_2.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ #[bench]
+ fn bench(_: &mut test::Bencher) {
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo_2-[..][EXE])",
+ )
+ .with_stdout_contains("test bench ... bench: [..]")
+ .run();
+
+ p.cargo("bench --features c")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo_1-[..][EXE])
+[RUNNING] [..] (target/release/deps/foo_2-[..][EXE])",
+ )
+ .with_stdout_contains_n("test bench ... bench: [..]", 2)
+ .run();
+
+ p.cargo("bench --no-default-features")
+ .with_stderr("[FINISHED] bench [optimized] target(s) in [..]")
+ .with_stdout("")
+ .run();
+}
+
+#[cargo_test]
+fn install_default_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["a"]
+ a = []
+
+ [[bin]]
+ name = "foo"
+ required-features = ["a"]
+
+ [[example]]
+ name = "foo"
+ required-features = ["a"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("examples/foo.rs", "fn main() {}")
+ .build();
+
+ p.cargo("install --path .").run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ p.cargo("uninstall foo").run();
+
+ p.cargo("install --path . --no-default-features")
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.0.1 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[WARNING] none of the package's binaries are available for install using the selected features
+ bin \"foo\" requires the features: `a`
+ example \"foo\" requires the features: `a`
+Consider enabling some of the needed features by passing, e.g., `--features=\"a\"`
+",
+ )
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "foo");
+
+ p.cargo("install --path . --bin=foo").run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ p.cargo("uninstall foo").run();
+
+ p.cargo("install --path . --bin=foo --no-default-features")
+ .with_status(101)
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.0.1 ([..])
+[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \
+ `[..]target`
+
+Caused by:
+ target `foo` in package `foo` requires the features: `a`
+ Consider enabling them by passing, e.g., `--features=\"a\"`
+",
+ )
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "foo");
+
+ p.cargo("install --path . --example=foo").run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ p.cargo("uninstall foo").run();
+
+ p.cargo("install --path . --example=foo --no-default-features")
+ .with_status(101)
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.0.1 ([..])
+[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \
+ `[..]target`
+
+Caused by:
+ target `foo` in package `foo` requires the features: `a`
+ Consider enabling them by passing, e.g., `--features=\"a\"`
+",
+ )
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn install_arg_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+
+ [[bin]]
+ name = "foo"
+ required-features = ["a"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("install --features a").run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ p.cargo("uninstall foo").run();
+}
+
+#[cargo_test]
+fn install_multiple_required_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["a", "b"]
+ a = []
+ b = ["a"]
+ c = []
+
+ [[bin]]
+ name = "foo_1"
+ path = "src/foo_1.rs"
+ required-features = ["b", "c"]
+
+ [[bin]]
+ name = "foo_2"
+ path = "src/foo_2.rs"
+ required-features = ["a"]
+
+ [[example]]
+ name = "foo_3"
+ path = "src/foo_3.rs"
+ required-features = ["b", "c"]
+
+ [[example]]
+ name = "foo_4"
+ path = "src/foo_4.rs"
+ required-features = ["a"]
+ "#,
+ )
+ .file("src/foo_1.rs", "fn main() {}")
+ .file("src/foo_2.rs", "fn main() {}")
+ .file("src/foo_3.rs", "fn main() {}")
+ .file("src/foo_4.rs", "fn main() {}")
+ .build();
+
+ p.cargo("install --path .").run();
+ assert_has_not_installed_exe(cargo_home(), "foo_1");
+ assert_has_installed_exe(cargo_home(), "foo_2");
+ assert_has_not_installed_exe(cargo_home(), "foo_3");
+ assert_has_not_installed_exe(cargo_home(), "foo_4");
+ p.cargo("uninstall foo").run();
+
+ p.cargo("install --path . --bins --examples").run();
+ assert_has_not_installed_exe(cargo_home(), "foo_1");
+ assert_has_installed_exe(cargo_home(), "foo_2");
+ assert_has_not_installed_exe(cargo_home(), "foo_3");
+ assert_has_installed_exe(cargo_home(), "foo_4");
+ p.cargo("uninstall foo").run();
+
+ p.cargo("install --path . --features c").run();
+ assert_has_installed_exe(cargo_home(), "foo_1");
+ assert_has_installed_exe(cargo_home(), "foo_2");
+ assert_has_not_installed_exe(cargo_home(), "foo_3");
+ assert_has_not_installed_exe(cargo_home(), "foo_4");
+ p.cargo("uninstall foo").run();
+
+ p.cargo("install --path . --features c --bins --examples")
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo_1");
+ assert_has_installed_exe(cargo_home(), "foo_2");
+ assert_has_installed_exe(cargo_home(), "foo_3");
+ assert_has_installed_exe(cargo_home(), "foo_4");
+ p.cargo("uninstall foo").run();
+
+ p.cargo("install --path . --no-default-features")
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.0.1 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[WARNING] none of the package's binaries are available for install using the selected features
+ bin \"foo_1\" requires the features: `b`, `c`
+ bin \"foo_2\" requires the features: `a`
+ example \"foo_3\" requires the features: `b`, `c`
+ example \"foo_4\" requires the features: `a`
+Consider enabling some of the needed features by passing, e.g., `--features=\"b c\"`
+",
+ )
+ .run();
+ p.cargo("install --path . --no-default-features --bins")
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.0.1 ([..])
+[WARNING] Target filter `bins` specified, but no targets matched. This is a no-op
+[FINISHED] release [optimized] target(s) in [..]
+[WARNING] none of the package's binaries are available for install using the selected features
+ bin \"foo_1\" requires the features: `b`, `c`
+ bin \"foo_2\" requires the features: `a`
+ example \"foo_3\" requires the features: `b`, `c`
+ example \"foo_4\" requires the features: `a`
+Consider enabling some of the needed features by passing, e.g., `--features=\"b c\"`
+",
+ )
+ .run();
+ p.cargo("install --path . --no-default-features --examples")
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.0.1 ([..])
+[WARNING] Target filter `examples` specified, but no targets matched. This is a no-op
+[FINISHED] release [optimized] target(s) in [..]
+[WARNING] none of the package's binaries are available for install using the selected features
+ bin \"foo_1\" requires the features: `b`, `c`
+ bin \"foo_2\" requires the features: `a`
+ example \"foo_3\" requires the features: `b`, `c`
+ example \"foo_4\" requires the features: `a`
+Consider enabling some of the needed features by passing, e.g., `--features=\"b c\"`
+",
+ )
+ .run();
+ p.cargo("install --path . --no-default-features --bins --examples")
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.0.1 ([..])
+[WARNING] Target filters `bins`, `examples` specified, but no targets matched. This is a no-op
+[FINISHED] release [optimized] target(s) in [..]
+[WARNING] none of the package's binaries are available for install using the selected features
+ bin \"foo_1\" requires the features: `b`, `c`
+ bin \"foo_2\" requires the features: `a`
+ example \"foo_3\" requires the features: `b`, `c`
+ example \"foo_4\" requires the features: `a`
+Consider enabling some of the needed features by passing, e.g., `--features=\"b c\"`
+",
+ )
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "foo_1");
+ assert_has_not_installed_exe(cargo_home(), "foo_2");
+ assert_has_not_installed_exe(cargo_home(), "foo_3");
+ assert_has_not_installed_exe(cargo_home(), "foo_4");
+}
+
+#[cargo_test]
+fn dep_feature_in_toml() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar", features = ["a"] }
+
+ [[bin]]
+ name = "foo"
+ required-features = ["bar/a"]
+
+ [[example]]
+ name = "foo"
+ required-features = ["bar/a"]
+
+ [[test]]
+ name = "foo"
+ required-features = ["bar/a"]
+
+ [[bench]]
+ name = "foo"
+ required-features = ["bar/a"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("examples/foo.rs", "fn main() {}")
+ .file("tests/foo.rs", "#[test]\nfn test() {}")
+ .file(
+ "benches/foo.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ #[bench]
+ fn bench(_: &mut test::Bencher) {
+ }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+
+ // bin
+ p.cargo("build --bin=foo").run();
+ assert!(p.bin("foo").is_file());
+
+ // example
+ p.cargo("build --example=foo").run();
+ assert!(p.bin("examples/foo").is_file());
+
+ // test
+ p.cargo("test --test=foo")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test test ... ok")
+ .run();
+
+ // bench
+ if is_nightly() {
+ p.cargo("bench --bench=foo")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test bench ... bench: [..]")
+ .run();
+ }
+
+ // install
+ p.cargo("install").run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ p.cargo("uninstall foo").run();
+}
+
+#[cargo_test]
+fn dep_feature_in_cmd_line() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [[bin]]
+ name = "foo"
+ required-features = ["bar/a"]
+
+ [[example]]
+ name = "foo"
+ required-features = ["bar/a"]
+
+ [[test]]
+ name = "foo"
+ required-features = ["bar/a"]
+
+ [[bench]]
+ name = "foo"
+ required-features = ["bar/a"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("examples/foo.rs", "fn main() {}")
+ .file(
+ "tests/foo.rs",
+ r#"
+ #[test]
+ fn bin_is_built() {
+ let s = format!("target/debug/foo{}", std::env::consts::EXE_SUFFIX);
+ let p = std::path::Path::new(&s);
+ assert!(p.exists(), "foo does not exist");
+ }
+ "#,
+ )
+ .file(
+ "benches/foo.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ #[bench]
+ fn bench(_: &mut test::Bencher) {
+ }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ // This is a no-op
+ p.cargo("build").with_stderr("[FINISHED] dev [..]").run();
+ assert!(!p.bin("foo").is_file());
+
+ // bin
+ p.cargo("build --bin=foo")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: target `foo` in package `foo` requires the features: `bar/a`
+Consider enabling them by passing, e.g., `--features=\"bar/a\"`
+",
+ )
+ .run();
+
+ p.cargo("build --bin=foo --features bar/a").run();
+ assert!(p.bin("foo").is_file());
+
+ // example
+ p.cargo("build --example=foo")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: target `foo` in package `foo` requires the features: `bar/a`
+Consider enabling them by passing, e.g., `--features=\"bar/a\"`
+",
+ )
+ .run();
+
+ p.cargo("build --example=foo --features bar/a").run();
+ assert!(p.bin("examples/foo").is_file());
+
+ // test
+ // This is a no-op, since no tests are enabled
+ p.cargo("test")
+ .with_stderr("[FINISHED] test [unoptimized + debuginfo] target(s) in [..]")
+ .with_stdout("")
+ .run();
+
+ // Delete the target directory so this can check if the main.rs gets built.
+ p.build_dir().rm_rf();
+ p.cargo("test --test=foo --features bar/a")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test bin_is_built ... ok")
+ .run();
+
+ // bench
+ if is_nightly() {
+ p.cargo("bench")
+ .with_stderr("[FINISHED] bench [optimized] target(s) in [..]")
+ .with_stdout("")
+ .run();
+
+ p.cargo("bench --bench=foo --features bar/a")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test bench ... bench: [..]")
+ .run();
+ }
+
+ // install
+ p.cargo("install --path .")
+ .with_stderr(
+ "\
+[INSTALLING] foo v0.0.1 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[WARNING] none of the package's binaries are available for install using the selected features
+ bin \"foo\" requires the features: `bar/a`
+ example \"foo\" requires the features: `bar/a`
+Consider enabling some of the needed features by passing, e.g., `--features=\"bar/a\"`
+",
+ )
+ .run();
+ assert_has_not_installed_exe(cargo_home(), "foo");
+
+ p.cargo("install --features bar/a").run();
+ assert_has_installed_exe(cargo_home(), "foo");
+ p.cargo("uninstall foo").run();
+}
+
+#[cargo_test]
+fn test_skips_compiling_bin_with_missing_required_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ a = []
+
+ [[bin]]
+ name = "bin_foo"
+ path = "src/bin/foo.rs"
+ required-features = ["a"]
+ "#,
+ )
+ .file("src/bin/foo.rs", "extern crate bar; fn main() {}")
+ .file("tests/foo.rs", "")
+ .file("benches/foo.rs", "")
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("running 0 tests")
+ .run();
+
+ p.cargo("test --features a -j 1")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+error[E0463]: can't find crate for `bar`",
+ )
+ .run();
+
+ if is_nightly() {
+ p.cargo("bench")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("running 0 tests")
+ .run();
+
+ p.cargo("bench --features a -j 1")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+error[E0463]: can't find crate for `bar`",
+ )
+ .run();
+ }
+}
+
+#[cargo_test]
+fn run_default() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = []
+ a = []
+
+ [[bin]]
+ name = "foo"
+ required-features = ["a"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "extern crate foo; fn main() {}")
+ .build();
+
+ p.cargo("run")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: target `foo` in package `foo` requires the features: `a`
+Consider enabling them by passing, e.g., `--features=\"a\"`
+",
+ )
+ .run();
+
+ p.cargo("run --features a").run();
+}
+
+#[cargo_test]
+fn run_default_multiple_required_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ default = ["a"]
+ a = []
+ b = []
+
+ [[bin]]
+ name = "foo1"
+ path = "src/foo1.rs"
+ required-features = ["a"]
+
+ [[bin]]
+ name = "foo3"
+ path = "src/foo3.rs"
+ required-features = ["b"]
+
+ [[bin]]
+ name = "foo2"
+ path = "src/foo2.rs"
+ required-features = ["b"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/foo1.rs", "extern crate foo; fn main() {}")
+ .file("src/foo3.rs", "extern crate foo; fn main() {}")
+ .file("src/foo2.rs", "extern crate foo; fn main() {}")
+ .build();
+
+ p.cargo("run")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: `cargo run` could not determine which binary to run[..]
+available binaries: foo1, foo2, foo3",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn renamed_required_features() {
+ // Test that required-features uses renamed package feature names.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [[bin]]
+ name = "x"
+ required-features = ["a1/f1"]
+
+ [dependencies]
+ a1 = {path="a1", package="a"}
+ a2 = {path="a2", package="a"}
+ "#,
+ )
+ .file(
+ "src/bin/x.rs",
+ r#"
+ fn main() {
+ a1::f();
+ a2::f();
+ }
+ "#,
+ )
+ .file(
+ "a1/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [features]
+ f1 = []
+ "#,
+ )
+ .file(
+ "a1/src/lib.rs",
+ r#"
+ pub fn f() {
+ if cfg!(feature="f1") {
+ println!("a1 f1");
+ }
+ }
+ "#,
+ )
+ .file(
+ "a2/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.2.0"
+
+ [features]
+ f2 = []
+ "#,
+ )
+ .file(
+ "a2/src/lib.rs",
+ r#"
+ pub fn f() {
+ if cfg!(feature="f2") {
+ println!("a2 f2");
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] target `x` in package `foo` requires the features: `a1/f1`
+Consider enabling them by passing, e.g., `--features=\"a1/f1\"`
+",
+ )
+ .run();
+
+ p.cargo("build --features a1/f1").run();
+ p.rename_run("x", "x_with_f1").with_stdout("a1 f1").run();
+
+ p.cargo("build --features a1/f1,a2/f2").run();
+ p.rename_run("x", "x_with_f1_f2")
+ .with_stdout("a1 f1\na2 f2")
+ .run();
+}
+
+#[cargo_test]
+fn truncated_install_warning_message() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2021"
+
+ [features]
+ feature1 = []
+ feature2 = []
+ feature3 = []
+ feature4 = []
+ feature5 = []
+
+ [[bin]]
+ name = "foo1"
+ required-features = ["feature1", "feature2", "feature3"]
+
+ [[bin]]
+ name = "foo2"
+ required-features = ["feature2"]
+
+ [[bin]]
+ name = "foo3"
+ required-features = ["feature3"]
+
+ [[bin]]
+ name = "foo4"
+ required-features = ["feature4", "feature1"]
+
+ [[bin]]
+ name = "foo5"
+ required-features = ["feature1", "feature2", "feature3", "feature4", "feature5"]
+
+ [[bin]]
+ name = "foo6"
+ required-features = ["feature1", "feature2", "feature3", "feature4", "feature5"]
+
+ [[bin]]
+ name = "foo7"
+ required-features = ["feature1", "feature2", "feature3", "feature4", "feature5"]
+
+ [[bin]]
+ name = "foo8"
+ required-features = ["feature1", "feature2", "feature3", "feature4", "feature5"]
+
+ [[bin]]
+ name = "foo9"
+ required-features = ["feature1", "feature2", "feature3", "feature4", "feature5"]
+
+ [[bin]]
+ name = "foo10"
+ required-features = ["feature1", "feature2", "feature3", "feature4", "feature5"]
+
+ [[example]]
+ name = "example1"
+ required-features = ["feature1", "feature2"]
+ "#,
+ )
+ .file("src/bin/foo1.rs", "fn main() {}")
+ .file("src/bin/foo2.rs", "fn main() {}")
+ .file("src/bin/foo3.rs", "fn main() {}")
+ .file("src/bin/foo4.rs", "fn main() {}")
+ .file("src/bin/foo5.rs", "fn main() {}")
+ .file("src/bin/foo6.rs", "fn main() {}")
+ .file("src/bin/foo7.rs", "fn main() {}")
+ .file("src/bin/foo8.rs", "fn main() {}")
+ .file("src/bin/foo9.rs", "fn main() {}")
+ .file("src/bin/foo10.rs", "fn main() {}")
+ .file("examples/example1.rs", "fn main() {}")
+ .build();
+
+ p.cargo("install --path .").with_stderr("\
+[INSTALLING] foo v0.1.0 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[WARNING] none of the package's binaries are available for install using the selected features
+ bin \"foo1\" requires the features: `feature1`, `feature2`, `feature3`
+ bin \"foo2\" requires the features: `feature2`
+ bin \"foo3\" requires the features: `feature3`
+ bin \"foo4\" requires the features: `feature4`, `feature1`
+ bin \"foo5\" requires the features: `feature1`, `feature2`, `feature3`, `feature4`, `feature5`
+ bin \"foo6\" requires the features: `feature1`, `feature2`, `feature3`, `feature4`, `feature5`
+ bin \"foo7\" requires the features: `feature1`, `feature2`, `feature3`, `feature4`, `feature5`
+4 more targets also requires features not enabled. See them in the Cargo.toml file.
+Consider enabling some of the needed features by passing, e.g., `--features=\"feature1 feature2 feature3\"`").run();
+}
diff --git a/src/tools/cargo/tests/testsuite/run.rs b/src/tools/cargo/tests/testsuite/run.rs
new file mode 100644
index 000000000..aa210d6ae
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/run.rs
@@ -0,0 +1,1509 @@
+//! Tests for the `cargo run` command.
+
+use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, project, Project};
+use cargo_util::paths::dylib_path_envvar;
+
+#[cargo_test]
+fn simple() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ p.cargo("run")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`",
+ )
+ .with_stdout("hello")
+ .run();
+ assert!(p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn quiet_arg() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ p.cargo("run -q").with_stderr("").with_stdout("hello").run();
+
+ p.cargo("run --quiet")
+ .with_stderr("")
+ .with_stdout("hello")
+ .run();
+}
+
+#[cargo_test]
+fn quiet_arg_and_verbose_arg() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ p.cargo("run -q -v")
+ .with_status(101)
+ .with_stderr("[ERROR] cannot set both --verbose and --quiet")
+ .run();
+}
+
+#[cargo_test]
+fn quiet_arg_and_verbose_config() {
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [term]
+ verbose = true
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ p.cargo("run -q").with_stderr("").with_stdout("hello").run();
+}
+
+#[cargo_test]
+fn verbose_arg_and_quiet_config() {
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [term]
+ quiet = true
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ p.cargo("run -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`",
+ )
+ .with_stdout("hello")
+ .run();
+}
+
+#[cargo_test]
+fn quiet_config_alone() {
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [term]
+ quiet = true
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ p.cargo("run").with_stderr("").with_stdout("hello").run();
+}
+
+#[cargo_test]
+fn verbose_config_alone() {
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [term]
+ verbose = true
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ p.cargo("run")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`",
+ )
+ .with_stdout("hello")
+ .run();
+}
+
+#[cargo_test]
+fn quiet_config_and_verbose_config() {
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [term]
+ verbose = true
+ quiet = true
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ p.cargo("run")
+ .with_status(101)
+ .with_stderr("[ERROR] cannot set both `term.verbose` and `term.quiet`")
+ .run();
+}
+
+#[cargo_test]
+fn simple_with_args() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ assert_eq!(std::env::args().nth(1).unwrap(), "hello");
+ assert_eq!(std::env::args().nth(2).unwrap(), "world");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run hello world").run();
+}
+
+#[cfg(unix)]
+#[cargo_test]
+fn simple_with_non_utf8_args() {
+ use std::os::unix::ffi::OsStrExt;
+
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ use std::ffi::OsStr;
+ use std::os::unix::ffi::OsStrExt;
+
+ fn main() {
+ assert_eq!(std::env::args_os().nth(1).unwrap(), OsStr::from_bytes(b"hello"));
+ assert_eq!(std::env::args_os().nth(2).unwrap(), OsStr::from_bytes(b"ab\xffcd"));
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .arg("hello")
+ .arg(std::ffi::OsStr::from_bytes(b"ab\xFFcd"))
+ .run();
+}
+
+#[cargo_test]
+fn exit_code() {
+ let p = project()
+ .file("src/main.rs", "fn main() { std::process::exit(2); }")
+ .build();
+
+ let mut output = String::from(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target[..]`
+",
+ );
+ if !cfg!(unix) {
+ output.push_str(
+ "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit [..]: 2)",
+ );
+ }
+ p.cargo("run").with_status(2).with_stderr(output).run();
+}
+
+#[cargo_test]
+fn exit_code_verbose() {
+ let p = project()
+ .file("src/main.rs", "fn main() { std::process::exit(2); }")
+ .build();
+
+ let mut output = String::from(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target[..]`
+",
+ );
+ if !cfg!(unix) {
+ output.push_str(
+ "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit [..]: 2)",
+ );
+ }
+
+ p.cargo("run -v").with_status(2).with_stderr(output).run();
+}
+
+#[cargo_test]
+fn no_main_file() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("run")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] a bin target must be available \
+ for `cargo run`\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn too_many_bins() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "")
+ .file("src/bin/b.rs", "")
+ .build();
+
+ // Using [..] here because the order is not stable
+ p.cargo("run")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] `cargo run` could not determine which binary to run. \
+ Use the `--bin` option to specify a binary, or the \
+ `default-run` manifest key.\
+ \navailable binaries: [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn specify_name() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "src/bin/a.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ fn main() { println!("hello a.rs"); }
+ "#,
+ )
+ .file(
+ "src/bin/b.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ fn main() { println!("hello b.rs"); }
+ "#,
+ )
+ .build();
+
+ p.cargo("run --bin a -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[RUNNING] `rustc [..] src/bin/a.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/a[EXE]`",
+ )
+ .with_stdout("hello a.rs")
+ .run();
+
+ p.cargo("run --bin b -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] src/bin/b.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/b[EXE]`",
+ )
+ .with_stdout("hello b.rs")
+ .run();
+}
+
+#[cargo_test]
+fn specify_default_run() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ default-run = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#)
+ .file("src/bin/b.rs", r#"fn main() { println!("hello B"); }"#)
+ .build();
+
+ p.cargo("run").with_stdout("hello A").run();
+ p.cargo("run --bin a").with_stdout("hello A").run();
+ p.cargo("run --bin b").with_stdout("hello B").run();
+}
+
+#[cargo_test]
+fn bogus_default_run() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ default-run = "b"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#)
+ .build();
+
+ p.cargo("run")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+ default-run target `b` not found
+
+ <tab>Did you mean `a`?
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn run_example() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("examples/a.rs", r#"fn main() { println!("example"); }"#)
+ .file("src/bin/a.rs", r#"fn main() { println!("bin"); }"#)
+ .build();
+
+ p.cargo("run --example a")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/examples/a[EXE]`",
+ )
+ .with_stdout("example")
+ .run();
+}
+
+#[cargo_test]
+fn run_library_example() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ [[example]]
+ name = "bar"
+ crate_type = ["lib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/bar.rs", "fn foo() {}")
+ .build();
+
+ p.cargo("run --example bar")
+ .with_status(101)
+ .with_stderr("[ERROR] example target `bar` is a library and cannot be executed")
+ .run();
+}
+
+#[cargo_test]
+fn run_bin_example() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ [[example]]
+ name = "bar"
+ crate_type = ["bin"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/bar.rs", r#"fn main() { println!("example"); }"#)
+ .build();
+
+ p.cargo("run --example bar")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/examples/bar[EXE]`",
+ )
+ .with_stdout("example")
+ .run();
+}
+
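+// Builds a fixture with one explicit `[[example]]` target plus an undeclared
+// `examples/a.rs`, so the tests below can probe example auto-discovery across
+// editions and `autoexamples` settings.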
+fn autodiscover_examples_project(rust_edition: &str, autoexamples: Option<bool>) -> Project {
+ let autoexamples = match autoexamples {
+ None => "".to_string(),
+        Some(enabled) => format!("autoexamples = {}", enabled),
+ };
+ project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ edition = "{rust_edition}"
+ {autoexamples}
+
+ [features]
+ magic = []
+
+ [[example]]
+ name = "do_magic"
+ required-features = ["magic"]
+ "#,
+ rust_edition = rust_edition,
+ autoexamples = autoexamples
+ ),
+ )
+ .file("examples/a.rs", r#"fn main() { println!("example"); }"#)
+ .file(
+ "examples/do_magic.rs",
+ r#"
+ fn main() { println!("magic example"); }
+ "#,
+ )
+ .build()
+}
+
+#[cargo_test]
+fn run_example_autodiscover_2015() {
+ let p = autodiscover_examples_project("2015", None);
+ p.cargo("run --example a")
+ .with_status(101)
+ .with_stderr(
+ "warning: \
+An explicit [[example]] section is specified in Cargo.toml which currently
+disables Cargo from automatically inferring other example targets.
+This inference behavior will change in the Rust 2018 edition and the following
+files will be included as a example target:
+
+* [..]a.rs
+
+This is likely to break cargo build or cargo test as these files may not be
+ready to be compiled as a example target today. You can future-proof yourself
+and disable this warning by adding `autoexamples = false` to your [package]
+section. You may also move the files to a location where Cargo would not
+automatically infer them to be a target, such as in subfolders.
+
+For more information on this warning you can consult
+https://github.com/rust-lang/cargo/issues/5330
+error: no example target named `a`.
+Available example targets:
+ do_magic
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn run_example_autodiscover_2015_with_autoexamples_enabled() {
+ let p = autodiscover_examples_project("2015", Some(true));
+ p.cargo("run --example a")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/examples/a[EXE]`",
+ )
+ .with_stdout("example")
+ .run();
+}
+
+#[cargo_test]
+fn run_example_autodiscover_2015_with_autoexamples_disabled() {
+ let p = autodiscover_examples_project("2015", Some(false));
+ p.cargo("run --example a")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: no example target named `a`.
+Available example targets:
+ do_magic
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn run_example_autodiscover_2018() {
+ let p = autodiscover_examples_project("2018", None);
+ p.cargo("run --example a")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/examples/a[EXE]`",
+ )
+ .with_stdout("example")
+ .run();
+}
+
+#[cargo_test]
+fn autobins_disables() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ autobins = false
+ "#,
+ )
+ .file("src/lib.rs", "pub mod bin;")
+ .file("src/bin/mod.rs", "// empty")
+ .build();
+
+ p.cargo("run")
+ .with_status(101)
+ .with_stderr("[ERROR] a bin target must be available for `cargo run`")
+ .run();
+}
+
+#[cargo_test]
+fn run_bins() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("examples/a.rs", r#"fn main() { println!("example"); }"#)
+ .file("src/bin/a.rs", r#"fn main() { println!("bin"); }"#)
+ .build();
+
+ p.cargo("run --bins")
+ .with_status(1)
+ .with_stderr_contains(
+ "\
+error: unexpected argument '--bins' found
+
+ tip: a similar argument exists: '--bin'",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn run_with_filename() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "src/bin/a.rs",
+ r#"
+ extern crate foo;
+ fn main() { println!("hello a.rs"); }
+ "#,
+ )
+ .file("examples/a.rs", r#"fn main() { println!("example"); }"#)
+ .build();
+
+ p.cargo("run --bin bin.rs")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no bin target named `bin.rs`.
+Available bin targets:
+ a
+
+",
+ )
+ .run();
+
+ p.cargo("run --bin a.rs")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no bin target named `a.rs`
+
+<tab>Did you mean `a`?",
+ )
+ .run();
+
+ p.cargo("run --example example.rs")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no example target named `example.rs`.
+Available example targets:
+ a
+
+",
+ )
+ .run();
+
+ p.cargo("run --example a.rs")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no example target named `a.rs`
+
+<tab>Did you mean `a`?",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn either_name_or_example() {
+ let p = project()
+ .file("src/bin/a.rs", r#"fn main() { println!("hello a.rs"); }"#)
+ .file("examples/b.rs", r#"fn main() { println!("hello b.rs"); }"#)
+ .build();
+
+ p.cargo("run --bin a --example b")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] `cargo run` can run at most one \
+ executable, but multiple were \
+ specified",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn one_bin_multiple_examples() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "src/bin/main.rs",
+ r#"fn main() { println!("hello main.rs"); }"#,
+ )
+ .file("examples/a.rs", r#"fn main() { println!("hello a.rs"); }"#)
+ .file("examples/b.rs", r#"fn main() { println!("hello b.rs"); }"#)
+ .build();
+
+ p.cargo("run")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/main[EXE]`",
+ )
+ .with_stdout("hello main.rs")
+ .run();
+}
+
+#[cargo_test]
+fn example_with_release_flag() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ version = "*"
+ path = "bar"
+ "#,
+ )
+ .file(
+ "examples/a.rs",
+ r#"
+ extern crate bar;
+
+ fn main() {
+ if cfg!(debug_assertions) {
+ println!("slow1")
+ } else {
+ println!("fast1")
+ }
+ bar::baz();
+ }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file(
+ "bar/src/bar.rs",
+ r#"
+ pub fn baz() {
+ if cfg!(debug_assertions) {
+ println!("slow2")
+ } else {
+ println!("fast2")
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run -v --release --example a")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.5.0 ([CWD]/bar)
+[RUNNING] `rustc --crate-name bar bar/src/bar.rs [..]--crate-type lib \
+ --emit=[..]link \
+ -C opt-level=3[..]\
+ -C metadata=[..] \
+ --out-dir [CWD]/target/release/deps \
+ -L dependency=[CWD]/target/release/deps`
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name a examples/a.rs [..]--crate-type bin \
+ --emit=[..]link \
+ -C opt-level=3[..]\
+ -C metadata=[..] \
+ --out-dir [CWD]/target/release/examples \
+ -L dependency=[CWD]/target/release/deps \
+ --extern bar=[CWD]/target/release/deps/libbar-[..].rlib`
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `target/release/examples/a[EXE]`
+",
+ )
+ .with_stdout(
+ "\
+fast1
+fast2",
+ )
+ .run();
+
+ p.cargo("run -v --example a")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.5.0 ([CWD]/bar)
+[RUNNING] `rustc --crate-name bar bar/src/bar.rs [..]--crate-type lib \
+ --emit=[..]link[..]\
+ -C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [CWD]/target/debug/deps \
+ -L dependency=[CWD]/target/debug/deps`
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name a examples/a.rs [..]--crate-type bin \
+ --emit=[..]link[..]\
+ -C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [CWD]/target/debug/examples \
+ -L dependency=[CWD]/target/debug/deps \
+ --extern bar=[CWD]/target/debug/deps/libbar-[..].rlib`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/examples/a[EXE]`
+",
+ )
+ .with_stdout(
+ "\
+slow1
+slow2",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn run_dylib_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"extern crate bar; fn main() { bar::bar(); }"#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "bar"
+ crate-type = ["dylib"]
+ "#,
+ )
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("run hello world").run();
+}
+
+#[cargo_test]
+fn run_with_bin_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "bar"
+ "#,
+ )
+ .file("bar/src/main.rs", r#"fn main() { println!("bar"); }"#)
+ .build();
+
+ p.cargo("run")
+ .with_stderr(
+ "\
+[WARNING] foo v0.0.1 ([CWD]) ignoring invalid dependency `bar` which is missing a lib target
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`",
+ )
+ .with_stdout("hello")
+ .run();
+}
+
+#[cargo_test]
+fn run_with_bin_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies.bar1]
+ path = "bar1"
+ [dependencies.bar2]
+ path = "bar2"
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file(
+ "bar1/Cargo.toml",
+ r#"
+ [package]
+ name = "bar1"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "bar1"
+ "#,
+ )
+ .file("bar1/src/main.rs", r#"fn main() { println!("bar1"); }"#)
+ .file(
+ "bar2/Cargo.toml",
+ r#"
+ [package]
+ name = "bar2"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "bar2"
+ "#,
+ )
+ .file("bar2/src/main.rs", r#"fn main() { println!("bar2"); }"#)
+ .build();
+
+ p.cargo("run")
+ .with_stderr(
+ "\
+[WARNING] foo v0.0.1 ([CWD]) ignoring invalid dependency `bar1` which is missing a lib target
+[WARNING] foo v0.0.1 ([CWD]) ignoring invalid dependency `bar2` which is missing a lib target
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`",
+ )
+ .with_stdout("hello")
+ .run();
+}
+
+#[cargo_test]
+fn run_with_bin_dep_in_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo1", "foo2"]
+ "#,
+ )
+ .file(
+ "foo1/Cargo.toml",
+ r#"
+ [package]
+ name = "foo1"
+ version = "0.0.1"
+
+ [dependencies.bar1]
+ path = "bar1"
+ "#,
+ )
+ .file("foo1/src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file(
+ "foo1/bar1/Cargo.toml",
+ r#"
+ [package]
+ name = "bar1"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "bar1"
+ "#,
+ )
+ .file(
+ "foo1/bar1/src/main.rs",
+ r#"fn main() { println!("bar1"); }"#,
+ )
+ .file(
+ "foo2/Cargo.toml",
+ r#"
+ [package]
+ name = "foo2"
+ version = "0.0.1"
+
+ [dependencies.bar2]
+ path = "bar2"
+ "#,
+ )
+ .file("foo2/src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file(
+ "foo2/bar2/Cargo.toml",
+ r#"
+ [package]
+ name = "bar2"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "bar2"
+ "#,
+ )
+ .file(
+ "foo2/bar2/src/main.rs",
+ r#"fn main() { println!("bar2"); }"#,
+ )
+ .build();
+
+ p.cargo("run")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] `cargo run` could not determine which binary to run[..]
+available binaries: bar1, bar2, foo1, foo2",
+ )
+ .run();
+
+ p.cargo("run --bin foo1")
+ .with_stderr(
+ "\
+[WARNING] foo1 v0.0.1 ([CWD]/foo1) ignoring invalid dependency `bar1` which is missing a lib target
+[WARNING] foo2 v0.0.1 ([CWD]/foo2) ignoring invalid dependency `bar2` which is missing a lib target
+[COMPILING] foo1 v0.0.1 ([CWD]/foo1)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo1[EXE]`",
+ )
+ .with_stdout("hello")
+ .run();
+}
+
+#[cargo_test]
+fn release_works() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() { if cfg!(debug_assertions) { panic!() } }
+ "#,
+ )
+ .build();
+
+ p.cargo("run --release")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `target/release/foo[EXE]`
+",
+ )
+ .run();
+ assert!(p.release_bin("foo").is_file());
+}
+
+#[cargo_test]
+fn release_short_works() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() { if cfg!(debug_assertions) { panic!() } }
+ "#,
+ )
+ .build();
+
+ p.cargo("run -r")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `target/release/foo[EXE]`
+",
+ )
+ .run();
+ assert!(p.release_bin("foo").is_file());
+}
+
+#[cargo_test]
+fn run_bin_different_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "bar"
+ "#,
+ )
+ .file("src/bar.rs", "fn main() {}")
+ .build();
+
+ p.cargo("run").run();
+}
+
+#[cargo_test]
+fn dashes_are_forwarded() {
+ let p = project()
+ .file(
+ "src/bin/bar.rs",
+ r#"
+ fn main() {
+ let s: Vec<String> = std::env::args().collect();
+ assert_eq!(s[1], "--");
+ assert_eq!(s[2], "a");
+ assert_eq!(s[3], "--");
+ assert_eq!(s[4], "b");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run -- -- a -- b").run();
+}
+
+#[cargo_test]
+fn run_from_executable_folder() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ let cwd = p.root().join("target").join("debug");
+ p.cargo("build").run();
+
+ p.cargo("run")
+ .cwd(cwd)
+ .with_stderr(
+ "[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\
+ [RUNNING] `./foo[EXE]`",
+ )
+ .with_stdout("hello")
+ .run();
+}
+
+#[cargo_test]
+fn run_with_library_paths() {
+ let p = project();
+
+ // Only link search directories within the target output directory are
+ // propagated through to dylib_path_envvar() (see #3366).
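+    // The directory names below deliberately contain awkward characters (a
+    // backslash and `=` signs) to exercise how the paths survive being joined
+    // into that environment variable.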
+ let mut dir1 = p.target_debug_dir();
+ dir1.push("foo\\backslash");
+
+ let mut dir2 = p.target_debug_dir();
+ dir2.push("dir=containing=equal=signs");
+
+ let p = p
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r##"
+ fn main() {{
+ println!(r#"cargo:rustc-link-search=native={}"#);
+ println!(r#"cargo:rustc-link-search={}"#);
+ }}
+ "##,
+ dir1.display(),
+ dir2.display()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r##"
+ fn main() {{
+ let search_path = std::env::var_os("{}").unwrap();
+ let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
+ println!("{{:#?}}", paths);
+ assert!(paths.contains(&r#"{}"#.into()));
+ assert!(paths.contains(&r#"{}"#.into()));
+ }}
+ "##,
+ dylib_path_envvar(),
+ dir1.display(),
+ dir2.display()
+ ),
+ )
+ .build();
+
+ p.cargo("run").run();
+}
+
+#[cargo_test]
+fn library_paths_sorted_alphabetically() {
+ let p = project();
+
+ let mut dir1 = p.target_debug_dir();
+ dir1.push("zzzzzzz");
+
+ let mut dir2 = p.target_debug_dir();
+ dir2.push("BBBBBBB");
+
+ let mut dir3 = p.target_debug_dir();
+ dir3.push("aaaaaaa");
+
+ let p = p
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r##"
+ fn main() {{
+ println!(r#"cargo:rustc-link-search=native={}"#);
+ println!(r#"cargo:rustc-link-search=native={}"#);
+ println!(r#"cargo:rustc-link-search=native={}"#);
+ }}
+ "##,
+ dir1.display(),
+ dir2.display(),
+ dir3.display()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r##"
+ fn main() {{
+ let search_path = std::env::var_os("{}").unwrap();
+ let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
+ // ASCII case-sensitive sort
+ assert_eq!("BBBBBBB", paths[0].file_name().unwrap().to_string_lossy());
+ assert_eq!("aaaaaaa", paths[1].file_name().unwrap().to_string_lossy());
+ assert_eq!("zzzzzzz", paths[2].file_name().unwrap().to_string_lossy());
+ }}
+ "##,
+ dylib_path_envvar()
+ ),
+ )
+ .build();
+
+ p.cargo("run").run();
+}
+
+#[cargo_test]
+fn fail_no_extra_verbose() {
+ let p = project()
+ .file("src/main.rs", "fn main() { std::process::exit(1); }")
+ .build();
+
+ p.cargo("run -q")
+ .with_status(1)
+ .with_stdout("")
+ .with_stderr("")
+ .run();
+}
+
+#[cargo_test]
+fn run_multiple_packages() {
+ let p = project()
+ .no_manifest()
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [workspace]
+
+ [dependencies]
+ d1 = { path = "d1" }
+ d2 = { path = "d2" }
+ d3 = { path = "../d3" } # outside of the workspace
+
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("foo/src/foo.rs", "fn main() { println!(\"foo\"); }")
+ .file("foo/d1/Cargo.toml", &basic_bin_manifest("d1"))
+ .file("foo/d1/src/lib.rs", "")
+ .file("foo/d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+ .file("foo/d2/Cargo.toml", &basic_bin_manifest("d2"))
+ .file("foo/d2/src/main.rs", "fn main() { println!(\"d2\"); }")
+ .file("d3/Cargo.toml", &basic_bin_manifest("d3"))
+        .file("d3/src/main.rs", "fn main() { println!(\"d3\"); }")
+ .build();
+
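+    // Helper: each invocation below re-runs `cargo run` from inside the `foo`
+    // member directory, adding only the package-selection flags under test.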
+ let cargo = || {
+ let mut process_builder = p.cargo("run");
+ process_builder.cwd("foo");
+ process_builder
+ };
+
+ cargo().arg("-p").arg("d1").with_stdout("d1").run();
+
+ cargo()
+ .arg("-p")
+ .arg("d2")
+ .arg("--bin")
+ .arg("d2")
+ .with_stdout("d2")
+ .run();
+
+ cargo().with_stdout("foo").run();
+
+ cargo()
+ .arg("-p")
+ .arg("d1")
+ .arg("-p")
+ .arg("d2")
+ .with_status(1)
+ .with_stderr_contains(
+ "error: the argument '--package [<SPEC>]' cannot be used multiple times",
+ )
+ .run();
+
+ cargo()
+ .arg("-p")
+ .arg("d3")
+ .with_status(101)
+ .with_stderr_contains("[ERROR] package(s) `d3` not found in workspace [..]")
+ .run();
+
+ cargo()
+ .arg("-p")
+ .arg("d*")
+ .with_status(101)
+ .with_stderr_contains(
+ "[ERROR] `cargo run` does not support glob pattern `d*` on package selection",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn explicit_bin_with_args() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ assert_eq!(std::env::args().nth(1).unwrap(), "hello");
+ assert_eq!(std::env::args().nth(2).unwrap(), "world");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run --bin foo hello world").run();
+}
+
+#[cargo_test]
+fn run_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_bin_manifest("a"))
+ .file("a/src/main.rs", r#"fn main() {println!("run-a");}"#)
+ .file("b/Cargo.toml", &basic_bin_manifest("b"))
+ .file("b/src/main.rs", r#"fn main() {println!("run-b");}"#)
+ .build();
+
+ p.cargo("run")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] `cargo run` could not determine which binary to run[..]
+available binaries: a, b",
+ )
+ .run();
+ p.cargo("run --bin a").with_stdout("run-a").run();
+}
+
+#[cargo_test]
+fn default_run_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ default-run = "a"
+ "#,
+ )
+ .file("a/src/main.rs", r#"fn main() {println!("run-a");}"#)
+ .file("b/Cargo.toml", &basic_bin_manifest("b"))
+ .file("b/src/main.rs", r#"fn main() {println!("run-b");}"#)
+ .build();
+
+ p.cargo("run").with_stdout("run-a").run();
+}
+
+#[cargo_test]
+#[cfg(target_os = "macos")]
+fn run_link_system_path_macos() {
+ use cargo_test_support::paths::{self, CargoPathExt};
+ use std::fs;
+ // Check that the default system library path is honored.
+ // First, build a shared library that will be accessed from
+ // DYLD_FALLBACK_LIBRARY_PATH.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ [lib]
+ crate-type = ["cdylib"]
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[no_mangle] pub extern fn something_shared() {}",
+ )
+ .build();
+ p.cargo("build").run();
+
+ // This is convoluted. Since this test can't modify things in /usr,
+ // this needs to dance around to check that things work.
+ //
+ // The default DYLD_FALLBACK_LIBRARY_PATH is:
+ // $(HOME)/lib:/usr/local/lib:/lib:/usr/lib
+ //
+ // This will make use of ~/lib in the path, but the default cc link
+ // path is /usr/lib:/usr/local/lib. So first need to build in one
+ // location, and then move it to ~/lib.
+ //
+ // 1. Build with rustc-link-search pointing to libfoo so the initial
+ // binary can be linked.
+ // 2. Move the library to ~/lib
+ // 3. Run `cargo run` to make sure it can still find the library in
+ // ~/lib.
+ //
+ // This should be equivalent to having the library in /usr/local/lib.
+ let p2 = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_bin_manifest("bar"))
+ .file(
+ "src/main.rs",
+ r#"
+ extern {
+ fn something_shared();
+ }
+ fn main() {
+ unsafe { something_shared(); }
+ }
+ "#,
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
+ fn main() {{
+ println!("cargo:rustc-link-lib=foo");
+ println!("cargo:rustc-link-search={}");
+ }}
+ "#,
+ p.target_debug_dir().display()
+ ),
+ )
+ .build();
+ p2.cargo("build").run();
+ p2.cargo("test").run();
+
+ let libdir = paths::home().join("lib");
+ fs::create_dir(&libdir).unwrap();
+ fs::rename(
+ p.target_debug_dir().join("libfoo.dylib"),
+ libdir.join("libfoo.dylib"),
+ )
+ .unwrap();
+ p.root().rm_rf();
+ const VAR: &str = "DYLD_FALLBACK_LIBRARY_PATH";
+ // Reset DYLD_FALLBACK_LIBRARY_PATH so that we don't inherit anything that
+ // was set by the cargo that invoked the test.
+ p2.cargo("run").env_remove(VAR).run();
+ p2.cargo("test").env_remove(VAR).run();
+ // Ensure this still works when DYLD_FALLBACK_LIBRARY_PATH has
+ // a value set.
+ p2.cargo("run").env(VAR, &libdir).run();
+ p2.cargo("test").env(VAR, &libdir).run();
+}
diff --git a/src/tools/cargo/tests/testsuite/rust_version.rs b/src/tools/cargo/tests/testsuite/rust_version.rs
new file mode 100644
index 000000000..91711cf1a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/rust_version.rs
@@ -0,0 +1,194 @@
+//! Tests for targets with `rust-version`.
+
+use cargo_test_support::{project, registry::Package};
+
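+// A `rust-version` far below any released toolchain always satisfies the MSRV
+// check, so both the plain `check` and `--ignore-rust-version` runs succeed.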
+#[cargo_test]
+fn rust_version_satisfied() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ rust-version = "1.1.1"
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check").run();
+ p.cargo("check --ignore-rust-version").run();
+}
+
+#[cargo_test]
+fn rust_version_bad_caret() {
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ rust-version = "^1.43"
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build()
+ .cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "error: failed to parse manifest at `[..]`\n\n\
+ Caused by:\n `rust-version` must be a value like \"1.32\"",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rust_version_bad_pre_release() {
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ rust-version = "1.43-beta.1"
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build()
+ .cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "error: failed to parse manifest at `[..]`\n\n\
+ Caused by:\n `rust-version` must be a value like \"1.32\"",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rust_version_bad_nonsense() {
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ rust-version = "foodaddle"
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build()
+ .cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "error: failed to parse manifest at `[..]`\n\n\
+ Caused by:\n `rust-version` must be a value like \"1.32\"",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rust_version_too_high() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ rust-version = "1.9876.0"
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "error: package `foo v0.0.1 ([..])` cannot be built because it requires \
+ rustc 1.9876.0 or newer, while the currently active rustc version is [..]\n\n",
+ )
+ .run();
+ p.cargo("check --ignore-rust-version").run();
+}
+
+#[cargo_test]
+fn rust_version_dependency_fails() {
+ Package::new("bar", "0.0.1")
+ .rust_version("1.2345.0")
+ .file("src/lib.rs", "fn other_stuff() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ [dependencies]
+ bar = "0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main(){}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ " Updating `[..]` index\n \
+ Downloading crates ...\n \
+ Downloaded bar v0.0.1 (registry `[..]`)\n\
+ error: package `bar v0.0.1` cannot be built because it requires \
+ rustc 1.2345.0 or newer, while the currently active rustc version is [..]\n\
+ Either upgrade to rustc 1.2345.0 or newer, or use\n\
+ cargo update -p bar@0.0.1 --precise ver\n\
+ where `ver` is the latest version of `bar` supporting rustc [..]",
+ )
+ .run();
+ p.cargo("check --ignore-rust-version").run();
+}
+
+#[cargo_test]
+fn rust_version_older_than_edition() {
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ rust-version = "1.1"
+ edition = "2018"
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build()
+ .cargo("check")
+ .with_status(101)
+ .with_stderr_contains(" rust-version 1.1 is older than first version (1.31.0) required by the specified edition (2018)",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/rustc.rs b/src/tools/cargo/tests/testsuite/rustc.rs
new file mode 100644
index 000000000..65e0740f8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/rustc.rs
@@ -0,0 +1,794 @@
+//! Tests for the `cargo rustc` command.
+
+use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project};
+
+const CARGO_RUSTC_ERROR: &str =
+ "[ERROR] extra arguments to `rustc` can only be passed to one target, consider filtering
+the package by passing, e.g., `--lib` or `--bin NAME` to specify a single target";
+
+#[cargo_test]
+fn build_lib_for_foo() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("rustc --lib -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]-C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn lib() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("rustc --lib -v -- -C debug-assertions=off")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]-C debuginfo=2 \
+ -C debug-assertions=off \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_main_and_allow_unstable_options() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("rustc -v --bin foo -- -C debug-assertions")
+ .with_stderr(format!(
+ "\
+[COMPILING] {name} v{version} ([CWD])
+[RUNNING] `rustc --crate-name {name} src/lib.rs [..]--crate-type lib \
+ --emit=[..]link[..]-C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps`
+[RUNNING] `rustc --crate-name {name} src/main.rs [..]--crate-type bin \
+ --emit=[..]link[..]-C debuginfo=2 \
+ -C debug-assertions \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency=[CWD]/target/debug/deps \
+ --extern {name}=[CWD]/target/debug/deps/lib{name}-[..].rlib`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ name = "foo",
+ version = "0.0.1"
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn fails_when_trying_to_build_main_and_lib_with_args() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("rustc -v -- -C debug-assertions")
+ .with_status(101)
+ .with_stderr(CARGO_RUSTC_ERROR)
+ .run();
+}
+
+#[cargo_test]
+fn build_with_args_to_one_of_multiple_binaries() {
+ let p = project()
+ .file("src/bin/foo.rs", "fn main() {}")
+ .file("src/bin/bar.rs", "fn main() {}")
+ .file("src/bin/baz.rs", "fn main() {}")
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("rustc -v --bin bar -- -C debug-assertions")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]\
+ -C debuginfo=2 -C metadata=[..] \
+ --out-dir [..]`
+[RUNNING] `rustc --crate-name bar src/bin/bar.rs [..]--crate-type bin --emit=[..]link[..]\
+ -C debuginfo=2 -C debug-assertions [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fails_with_args_to_all_binaries() {
+ let p = project()
+ .file("src/bin/foo.rs", "fn main() {}")
+ .file("src/bin/bar.rs", "fn main() {}")
+ .file("src/bin/baz.rs", "fn main() {}")
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("rustc -v -- -C debug-assertions")
+ .with_status(101)
+ .with_stderr(CARGO_RUSTC_ERROR)
+ .run();
+}
+
+#[cargo_test]
+fn fails_with_crate_type_to_multi_binaries() {
+ let p = project()
+ .file("src/bin/foo.rs", "fn main() {}")
+ .file("src/bin/bar.rs", "fn main() {}")
+ .file("src/bin/baz.rs", "fn main() {}")
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("rustc --crate-type lib")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] crate types to rustc can only be passed to one target, consider filtering
+the package by passing, e.g., `--lib` or `--example` to specify a single target",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fails_with_crate_type_to_multi_examples() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex1"
+ crate-type = ["rlib"]
+ [[example]]
+ name = "ex2"
+ crate-type = ["rlib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex1.rs", "")
+ .file("examples/ex2.rs", "")
+ .build();
+
+ p.cargo("rustc -v --example ex1 --example ex2 --crate-type lib,cdylib")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] crate types to rustc can only be passed to one target, consider filtering
+the package by passing, e.g., `--lib` or `--example` to specify a single target",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fails_with_crate_type_to_binary() {
+ let p = project().file("src/bin/foo.rs", "fn main() {}").build();
+
+ p.cargo("rustc --crate-type lib")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] crate types can only be specified for libraries and example libraries.
+Binaries, tests, and benchmarks are always the `bin` crate type",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_with_crate_type_for_foo() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("rustc -v --crate-type cdylib")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type cdylib [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_with_crate_type_for_foo_with_deps() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate a;
+ pub fn foo() { a::hello(); }
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("a/src/lib.rs", "pub fn hello() {}")
+ .build();
+
+ p.cargo("rustc -v --crate-type cdylib")
+ .with_stderr(
+ "\
+[COMPILING] a v0.1.0 ([CWD]/a)
+[RUNNING] `rustc --crate-name a a/src/lib.rs [..]--crate-type lib [..]
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type cdylib [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_with_crate_types_for_foo() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("rustc -v --crate-type lib,cdylib")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib,cdylib [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_with_crate_type_to_example() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex"
+ crate-type = ["rlib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "")
+ .build();
+
+ p.cargo("rustc -v --example ex --crate-type cdylib")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..]
+[RUNNING] `rustc --crate-name ex examples/ex.rs [..]--crate-type cdylib [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_with_crate_types_to_example() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex"
+ crate-type = ["rlib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex.rs", "")
+ .build();
+
+ p.cargo("rustc -v --example ex --crate-type lib,cdylib")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..]
+[RUNNING] `rustc --crate-name ex examples/ex.rs [..]--crate-type lib,cdylib [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_with_crate_types_to_one_of_multi_examples() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[example]]
+ name = "ex1"
+ crate-type = ["rlib"]
+ [[example]]
+ name = "ex2"
+ crate-type = ["rlib"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/ex1.rs", "")
+ .file("examples/ex2.rs", "")
+ .build();
+
+ p.cargo("rustc -v --example ex1 --crate-type lib,cdylib")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..]
+[RUNNING] `rustc --crate-name ex1 examples/ex1.rs [..]--crate-type lib,cdylib [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_with_args_to_one_of_multiple_tests() {
+ let p = project()
+ .file("tests/foo.rs", r#" "#)
+ .file("tests/bar.rs", r#" "#)
+ .file("tests/baz.rs", r#" "#)
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("rustc -v --test bar -- -C debug-assertions")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]\
+ -C debuginfo=2 -C metadata=[..] \
+ --out-dir [..]`
+[RUNNING] `rustc --crate-name bar tests/bar.rs [..]--emit=[..]link[..]-C debuginfo=2 \
+ -C debug-assertions [..]--test[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_foo_with_bar_dependency() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }")
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ foo.cargo("rustc -v -- -C debug-assertions")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.1.0 ([..])
+[RUNNING] `[..] -C debuginfo=2 [..]`
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `[..] -C debuginfo=2 -C debug-assertions [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn build_only_bar_dependency() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }")
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ foo.cargo("rustc -v -p bar -- -C debug-assertions")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.1.0 ([..])
+[RUNNING] `rustc --crate-name bar [..]--crate-type lib [..] -C debug-assertions [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn targets_selected_default() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("rustc -v")
+ // bin
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \
+ --emit=[..]link[..]",
+ )
+ // bench
+ .with_stderr_does_not_contain(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link \
+ -C opt-level=3 --test [..]",
+ )
+ // unit test
+ .with_stderr_does_not_contain(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link \
+ -C debuginfo=2 --test [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn targets_selected_all() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("rustc -v --all-targets")
+ // bin
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \
+ --emit=[..]link[..]",
+ )
+ // unit test
+ .with_stderr_contains(
+ "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\
+ -C debuginfo=2 --test [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fail_with_multiple_packages() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+
+ [dependencies.baz]
+ path = "../baz"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(flag = "1") { println!("Yeah from bar!"); }
+ }
+ "#,
+ )
+ .build();
+
+ let _baz = project()
+ .at("baz")
+ .file("Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(flag = "1") { println!("Yeah from baz!"); }
+ }
+ "#,
+ )
+ .build();
+
+ foo.cargo("rustc -v -p bar -p baz")
+ .with_status(1)
+ .with_stderr_contains(
+ "\
+error: the argument '--package [<SPEC>]' cannot be used multiple times
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fail_with_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }")
+ .build();
+
+ p.cargo("rustc -p '*z'")
+ .with_status(101)
+ .with_stderr("[ERROR] Glob patterns on package selection are not supported.")
+ .run();
+}
+
+#[cargo_test]
+fn rustc_with_other_profile() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dev-dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(test)] extern crate a;
+
+ #[test]
+ fn foo() {}
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("rustc --profile test").run();
+}
+
+#[cargo_test]
+fn rustc_fingerprint() {
+ // Verify that the fingerprint includes the rustc args.
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("rustc -v -- -C debug-assertions")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[RUNNING] `rustc [..]-C debug-assertions [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("rustc -v -- -C debug-assertions")
+ .with_stderr(
+ "\
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("rustc -v")
+ .with_stderr_does_not_contain("-C debug-assertions")
+ .with_stderr(
+ "\
+[DIRTY] foo [..]: the profile configuration changed
+[COMPILING] foo [..]
+[RUNNING] `rustc [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("rustc -v")
+ .with_stderr(
+ "\
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustc_test_with_implicit_bin() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(foo)]
+ fn f() { compile_fail!("Foo shouldn't be set."); }
+ fn main() {}
+ "#,
+ )
+ .file(
+ "tests/test1.rs",
+ r#"
+ #[cfg(not(foo))]
+ fn f() { compile_fail!("Foo should be set."); }
+ "#,
+ )
+ .build();
+
+ p.cargo("rustc --test test1 -v -- --cfg foo")
+ .with_stderr_contains(
+ "\
+[RUNNING] `rustc --crate-name test1 tests/test1.rs [..] --cfg foo [..]
+",
+ )
+ .with_stderr_contains(
+ "\
+[RUNNING] `rustc --crate-name foo src/main.rs [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustc_with_print_cfg_single_target() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", r#"fn main() {} "#)
+ .build();
+
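+    // `cargo rustc --print` requires `-Z unstable-options`, so the test
+    // masquerades as a nightly cargo to have the flag accepted.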
+ p.cargo("rustc -Z unstable-options --target x86_64-pc-windows-msvc --print cfg")
+ .masquerade_as_nightly_cargo(&["print"])
+ .with_stdout_contains("debug_assertions")
+ .with_stdout_contains("target_arch=\"x86_64\"")
+ .with_stdout_contains("target_endian=\"little\"")
+ .with_stdout_contains("target_env=\"msvc\"")
+ .with_stdout_contains("target_family=\"windows\"")
+ .with_stdout_contains("target_os=\"windows\"")
+ .with_stdout_contains("target_pointer_width=\"64\"")
+ .with_stdout_contains("target_vendor=\"pc\"")
+ .with_stdout_contains("windows")
+ .run();
+}
+
+#[cargo_test]
+fn rustc_with_print_cfg_multiple_targets() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", r#"fn main() {} "#)
+ .build();
+
+ p.cargo("rustc -Z unstable-options --target x86_64-pc-windows-msvc --target i686-unknown-linux-gnu --print cfg")
+ .masquerade_as_nightly_cargo(&["print"])
+ .with_stdout_contains("debug_assertions")
+ .with_stdout_contains("target_arch=\"x86_64\"")
+ .with_stdout_contains("target_endian=\"little\"")
+ .with_stdout_contains("target_env=\"msvc\"")
+ .with_stdout_contains("target_family=\"windows\"")
+ .with_stdout_contains("target_os=\"windows\"")
+ .with_stdout_contains("target_pointer_width=\"64\"")
+ .with_stdout_contains("target_vendor=\"pc\"")
+ .with_stdout_contains("windows")
+ .with_stdout_contains("target_env=\"gnu\"")
+ .with_stdout_contains("target_family=\"unix\"")
+ .with_stdout_contains("target_pointer_width=\"32\"")
+ .with_stdout_contains("target_vendor=\"unknown\"")
+ .with_stdout_contains("target_os=\"linux\"")
+ .with_stdout_contains("unix")
+ .run();
+}
+
+#[cargo_test]
+fn rustc_with_print_cfg_rustflags_env_var() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", r#"fn main() {} "#)
+ .build();
+
+ p.cargo("rustc -Z unstable-options --target x86_64-pc-windows-msvc --print cfg")
+ .masquerade_as_nightly_cargo(&["print"])
+ .env("RUSTFLAGS", "-C target-feature=+crt-static")
+ .with_stdout_contains("debug_assertions")
+ .with_stdout_contains("target_arch=\"x86_64\"")
+ .with_stdout_contains("target_endian=\"little\"")
+ .with_stdout_contains("target_env=\"msvc\"")
+ .with_stdout_contains("target_family=\"windows\"")
+ .with_stdout_contains("target_feature=\"crt-static\"")
+ .with_stdout_contains("target_os=\"windows\"")
+ .with_stdout_contains("target_pointer_width=\"64\"")
+ .with_stdout_contains("target_vendor=\"pc\"")
+ .with_stdout_contains("windows")
+ .run();
+}
+
+#[cargo_test]
+fn rustc_with_print_cfg_config_toml() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ ".cargo/config.toml",
+ r#"
+[target.x86_64-pc-windows-msvc]
+rustflags = ["-C", "target-feature=+crt-static"]
+"#,
+ )
+ .file("src/main.rs", r#"fn main() {} "#)
+ .build();
+
+ p.cargo("rustc -Z unstable-options --target x86_64-pc-windows-msvc --print cfg")
+ .masquerade_as_nightly_cargo(&["print"])
+ .env("RUSTFLAGS", "-C target-feature=+crt-static")
+ .with_stdout_contains("debug_assertions")
+ .with_stdout_contains("target_arch=\"x86_64\"")
+ .with_stdout_contains("target_endian=\"little\"")
+ .with_stdout_contains("target_env=\"msvc\"")
+ .with_stdout_contains("target_family=\"windows\"")
+ .with_stdout_contains("target_feature=\"crt-static\"")
+ .with_stdout_contains("target_os=\"windows\"")
+ .with_stdout_contains("target_pointer_width=\"64\"")
+ .with_stdout_contains("target_vendor=\"pc\"")
+ .with_stdout_contains("windows")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/rustc_info_cache.rs b/src/tools/cargo/tests/testsuite/rustc_info_cache.rs
new file mode 100644
index 000000000..9747fa357
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/rustc_info_cache.rs
@@ -0,0 +1,186 @@
+//! Tests for the cache file for the rustc version info.
+
+use cargo_test_support::{basic_bin_manifest, paths::CargoPathExt};
+use cargo_test_support::{basic_manifest, project};
+use std::env;
+
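+// Log fragments emitted by cargo's rustc-info caching; the builds below
+// surface them with `CARGO_LOG=cargo::util::rustc=debug`.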
+const MISS: &str = "[..] rustc info cache miss[..]";
+const HIT: &str = "[..]rustc info cache hit[..]";
+const UPDATE: &str = "[..]updated rustc info cache[..]";
+
+#[cargo_test]
+fn rustc_info_cache() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ p.cargo("build")
+ .env("CARGO_LOG", "cargo::util::rustc=debug")
+ .with_stderr_contains("[..]failed to read rustc info cache[..]")
+ .with_stderr_contains(MISS)
+ .with_stderr_does_not_contain(HIT)
+ .with_stderr_contains(UPDATE)
+ .run();
+
+ p.cargo("build")
+ .env("CARGO_LOG", "cargo::util::rustc=debug")
+ .with_stderr_contains("[..]reusing existing rustc info cache[..]")
+ .with_stderr_contains(HIT)
+ .with_stderr_does_not_contain(MISS)
+ .with_stderr_does_not_contain(UPDATE)
+ .run();
+
+ p.cargo("build")
+ .env("CARGO_LOG", "cargo::util::rustc=debug")
+ .env("CARGO_CACHE_RUSTC_INFO", "0")
+ .with_stderr_contains("[..]rustc info cache disabled[..]")
+ .with_stderr_does_not_contain(UPDATE)
+ .run();
+
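+    // Build a trivial pass-through "compiler" at a different path so cargo
+    // treats it as a new rustc and has to recreate the info cache.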
+ let other_rustc = {
+ let p = project()
+ .at("compiler")
+ .file("Cargo.toml", &basic_manifest("compiler", "0.1.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ use std::process::Command;
+ use std::env;
+
+ fn main() {
+ let mut cmd = Command::new("rustc");
+ for arg in env::args_os().skip(1) {
+ cmd.arg(arg);
+ }
+ std::process::exit(cmd.status().unwrap().code().unwrap());
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build").run();
+
+ p.root()
+ .join("target/debug/compiler")
+ .with_extension(env::consts::EXE_EXTENSION)
+ };
+
+ p.cargo("build")
+ .env("CARGO_LOG", "cargo::util::rustc=debug")
+ .env("RUSTC", other_rustc.display().to_string())
+ .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]")
+ .with_stderr_contains(MISS)
+ .with_stderr_does_not_contain(HIT)
+ .with_stderr_contains(UPDATE)
+ .run();
+
+ p.cargo("build")
+ .env("CARGO_LOG", "cargo::util::rustc=debug")
+ .env("RUSTC", other_rustc.display().to_string())
+ .with_stderr_contains("[..]reusing existing rustc info cache[..]")
+ .with_stderr_contains(HIT)
+ .with_stderr_does_not_contain(MISS)
+ .with_stderr_does_not_contain(UPDATE)
+ .run();
+
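+    // Pushing the compiler's mtime into the future should invalidate the
+    // cached info once more.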
+ other_rustc.move_into_the_future();
+
+ p.cargo("build")
+ .env("CARGO_LOG", "cargo::util::rustc=debug")
+ .env("RUSTC", other_rustc.display().to_string())
+ .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]")
+ .with_stderr_contains(MISS)
+ .with_stderr_does_not_contain(HIT)
+ .with_stderr_contains(UPDATE)
+ .run();
+
+ p.cargo("build")
+ .env("CARGO_LOG", "cargo::util::rustc=debug")
+ .env("RUSTC", other_rustc.display().to_string())
+ .with_stderr_contains("[..]reusing existing rustc info cache[..]")
+ .with_stderr_contains(HIT)
+ .with_stderr_does_not_contain(MISS)
+ .with_stderr_does_not_contain(UPDATE)
+ .run();
+}
+
+#[cargo_test]
+fn rustc_info_cache_with_wrappers() {
+ let wrapper_project = project()
+ .at("wrapper")
+ .file("Cargo.toml", &basic_bin_manifest("wrapper"))
+ .file("src/main.rs", r#"fn main() { }"#)
+ .build();
+ let wrapper = wrapper_project.bin("wrapper");
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "test"
+ version = "0.0.0"
+ authors = []
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .build();
+
+ for &wrapper_env in ["RUSTC_WRAPPER", "RUSTC_WORKSPACE_WRAPPER"].iter() {
+ p.cargo("clean").with_status(0).run();
+ wrapper_project.change_file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let mut args = std::env::args_os();
+ let _me = args.next().unwrap();
+ let rustc = args.next().unwrap();
+ let status = std::process::Command::new(rustc).args(args).status().unwrap();
+ std::process::exit(if status.success() { 0 } else { 1 })
+ }
+ "#,
+ );
+ wrapper_project.cargo("build").with_status(0).run();
+
+ p.cargo("build")
+ .env("CARGO_LOG", "cargo::util::rustc=debug")
+ .env(wrapper_env, &wrapper)
+ .with_stderr_contains("[..]failed to read rustc info cache[..]")
+ .with_stderr_contains(MISS)
+ .with_stderr_contains(UPDATE)
+ .with_stderr_does_not_contain(HIT)
+ .with_status(0)
+ .run();
+ p.cargo("build")
+ .env("CARGO_LOG", "cargo::util::rustc=debug")
+ .env(wrapper_env, &wrapper)
+ .with_stderr_contains("[..]reusing existing rustc info cache[..]")
+ .with_stderr_contains(HIT)
+ .with_stderr_does_not_contain(UPDATE)
+ .with_stderr_does_not_contain(MISS)
+ .with_status(0)
+ .run();
+
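+        // Swap in a wrapper that panics: every rustc invocation now fails, so
+        // the remaining builds exit with 101 while still exercising the cache
+        // hit/miss logging.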
+ wrapper_project.change_file("src/main.rs", r#"fn main() { panic!() }"#);
+ wrapper_project.cargo("build").with_status(0).run();
+
+ p.cargo("build")
+ .env("CARGO_LOG", "cargo::util::rustc=debug")
+ .env(wrapper_env, &wrapper)
+ .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]")
+ .with_stderr_contains(MISS)
+ .with_stderr_contains(UPDATE)
+ .with_stderr_does_not_contain(HIT)
+ .with_status(101)
+ .run();
+ p.cargo("build")
+ .env("CARGO_LOG", "cargo::util::rustc=debug")
+ .env(wrapper_env, &wrapper)
+ .with_stderr_contains("[..]reusing existing rustc info cache[..]")
+ .with_stderr_contains(HIT)
+ .with_stderr_does_not_contain(UPDATE)
+ .with_stderr_does_not_contain(MISS)
+ .with_status(101)
+ .run();
+ }
+}
diff --git a/src/tools/cargo/tests/testsuite/rustdoc.rs b/src/tools/cargo/tests/testsuite/rustdoc.rs
new file mode 100644
index 000000000..5650f3e0a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/rustdoc.rs
@@ -0,0 +1,252 @@
+//! Tests for the `cargo rustdoc` command.
+
+use cargo_test_support::{basic_manifest, cross_compile, project};
+
+#[cargo_test]
+fn rustdoc_simple() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("rustdoc -v")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\
+ -o [CWD]/target/doc \
+ [..] \
+ -L dependency=[CWD]/target/debug/deps [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustdoc_args() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("rustdoc -v -- --cfg=foo")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\
+ -o [CWD]/target/doc \
+ [..] \
+ --cfg=foo \
+ -C metadata=[..] \
+ -L dependency=[CWD]/target/debug/deps [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustdoc_binary_args_passed() {
+ let p = project().file("src/main.rs", "").build();
+
+ p.cargo("rustdoc -v")
+ .arg("--")
+ .arg("--markdown-no-toc")
+ .with_stderr_contains("[RUNNING] `rustdoc [..] --markdown-no-toc[..]`")
+ .run();
+}
+
+#[cargo_test]
+fn rustdoc_foo_with_bar_dependency() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/lib.rs", "extern crate bar; pub fn foo() {}")
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ foo.cargo("rustdoc -v -- --cfg=foo")
+ .with_stderr(
+ "\
+[CHECKING] bar v0.0.1 ([..])
+[RUNNING] `rustc [..]bar/src/lib.rs [..]`
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\
+ -o [CWD]/target/doc \
+ [..] \
+ --cfg=foo \
+ -C metadata=[..] \
+ -L dependency=[CWD]/target/debug/deps \
+ --extern [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustdoc_only_bar_dependency() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }")
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ foo.cargo("rustdoc -v -p bar -- --cfg=foo")
+ .with_stderr(
+ "\
+[DOCUMENTING] bar v0.0.1 ([..])
+[RUNNING] `rustdoc [..]--crate-name bar [..]bar/src/lib.rs [..]\
+ -o [CWD]/target/doc \
+ [..] \
+ --cfg=foo \
+ -C metadata=[..] \
+ -L dependency=[CWD]/target/debug/deps [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustdoc_same_name_documents_lib() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("rustdoc -v -- --cfg=foo")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([..])
+[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\
+ -o [CWD]/target/doc \
+ [..] \
+ --cfg=foo \
+ -C metadata=[..] \
+ -L dependency=[CWD]/target/debug/deps [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ quux = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("rustdoc --verbose --features quux")
+ .with_stderr_contains("[..]feature=[..]quux[..]")
+ .run();
+}
+
+#[cargo_test]
+fn proc_macro_crate_type() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ proc-macro = true
+
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("rustdoc --verbose")
+ .with_stderr_contains(
+ "\
+[RUNNING] `rustdoc --crate-type proc-macro [..]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustdoc_target() {
+ if cross_compile::disabled() {
+ return;
+ }
+
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("rustdoc --verbose --target")
+ .arg(cross_compile::alternate())
+ .with_stderr(format!(
+ "\
+[DOCUMENTING] foo v0.0.1 ([..])
+[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\
+ --target {target} \
+ -o [CWD]/target/{target}/doc \
+ [..] \
+ -L dependency=[CWD]/target/{target}/debug/deps \
+ -L dependency=[CWD]/target/debug/deps[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ target = cross_compile::alternate()
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn fail_with_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }")
+ .build();
+
+ p.cargo("rustdoc -p '*z'")
+ .with_status(101)
+ .with_stderr("[ERROR] Glob patterns on package selection are not supported.")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/rustdoc_extern_html.rs b/src/tools/cargo/tests/testsuite/rustdoc_extern_html.rs
new file mode 100644
index 000000000..b18358d1c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/rustdoc_extern_html.rs
@@ -0,0 +1,426 @@
+//! Tests for the -Zrustdoc-map feature.
+
+use cargo_test_support::registry::{self, Package};
+use cargo_test_support::{paths, project, Project};
+
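+// Publishes a `bar` crate to the test registry and returns a project that
+// depends on it; this is the fixture used by the `-Zrustdoc-map` tests below.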
+fn basic_project() -> Project {
+ Package::new("bar", "1.0.0")
+ .file("src/lib.rs", "pub struct Straw;")
+ .publish();
+
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn myfun() -> Option<bar::Straw> {
+ None
+ }
+ "#,
+ )
+ .build()
+}
+
+#[cargo_test]
+fn ignores_on_stable() {
+ // Requires -Zrustdoc-map to use.
+ let p = basic_project();
+ p.cargo("doc -v --no-deps")
+ .with_stderr_does_not_contain("[..]--extern-html-root-url[..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--extern-html-root-url is unstable")]
+fn simple() {
+ // Basic test that it works with crates.io.
+ let p = basic_project();
+ p.cargo("doc -v --no-deps -Zrustdoc-map")
+ .masquerade_as_nightly_cargo(&["rustdoc-map"])
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..]--crate-name foo [..]bar=https://docs.rs/bar/1.0.0/[..]",
+ )
+ .run();
+ let myfun = p.read_file("target/doc/foo/fn.myfun.html");
+ assert!(myfun.contains(r#"href="https://docs.rs/bar/1.0.0/bar/struct.Straw.html""#));
+}
+
+#[ignore = "Broken, temporarily disabled until https://github.com/rust-lang/rust/pull/82776 is resolved."]
+#[cargo_test]
+// #[cargo_test(nightly, reason = "--extern-html-root-url is unstable")]
+fn std_docs() {
+ // Mapping std docs somewhere else.
+ // For local developers, skip this test if docs aren't installed.
+ let docs = std::path::Path::new(&paths::sysroot()).join("share/doc/rust/html");
+ if !docs.exists() {
+ if cargo_util::is_ci() {
+ panic!("std docs are not installed, check that the rust-docs component is installed");
+ } else {
+ eprintln!(
+ "documentation not found at {}, \
+ skipping test (run `rustup component add rust-docs` to install)",
+ docs.display()
+ );
+ return;
+ }
+ }
+ let p = basic_project();
+ p.change_file(
+ ".cargo/config",
+ r#"
+ [doc.extern-map]
+ std = "local"
+ "#,
+ );
+ p.cargo("doc -v --no-deps -Zrustdoc-map")
+ .masquerade_as_nightly_cargo(&["rustdoc-map"])
+ .with_stderr_contains("[RUNNING] `rustdoc [..]--crate-name foo [..]std=file://[..]")
+ .run();
+ let myfun = p.read_file("target/doc/foo/fn.myfun.html");
+ assert!(myfun.contains(r#"share/doc/rust/html/core/option/enum.Option.html""#));
+
+ p.change_file(
+ ".cargo/config",
+ r#"
+ [doc.extern-map]
+ std = "https://example.com/rust/"
+ "#,
+ );
+ p.cargo("doc -v --no-deps -Zrustdoc-map")
+ .masquerade_as_nightly_cargo(&["rustdoc-map"])
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..]--crate-name foo [..]std=https://example.com/rust/[..]",
+ )
+ .run();
+ let myfun = p.read_file("target/doc/foo/fn.myfun.html");
+ assert!(myfun.contains(r#"href="https://example.com/rust/core/option/enum.Option.html""#));
+}
+
+#[cargo_test(nightly, reason = "--extern-html-root-url is unstable")]
+fn renamed_dep() {
+ // Handles renamed dependencies.
+ Package::new("bar", "1.0.0")
+ .file("src/lib.rs", "pub struct Straw;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ groovy = { version = "1.0", package = "bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn myfun() -> Option<groovy::Straw> {
+ None
+ }
+ "#,
+ )
+ .build();
+ p.cargo("doc -v --no-deps -Zrustdoc-map")
+ .masquerade_as_nightly_cargo(&["rustdoc-map"])
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..]--crate-name foo [..]bar=https://docs.rs/bar/1.0.0/[..]",
+ )
+ .run();
+ let myfun = p.read_file("target/doc/foo/fn.myfun.html");
+ assert!(myfun.contains(r#"href="https://docs.rs/bar/1.0.0/bar/struct.Straw.html""#));
+}
+
+#[cargo_test(nightly, reason = "--extern-html-root-url is unstable")]
+fn lib_name() {
+ // Handles lib name != package name.
+ Package::new("bar", "1.0.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "1.0.0"
+
+ [lib]
+ name = "rumpelstiltskin"
+ "#,
+ )
+ .file("src/lib.rs", "pub struct Straw;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn myfun() -> Option<rumpelstiltskin::Straw> {
+ None
+ }
+ "#,
+ )
+ .build();
+ p.cargo("doc -v --no-deps -Zrustdoc-map")
+ .masquerade_as_nightly_cargo(&["rustdoc-map"])
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..]--crate-name foo [..]rumpelstiltskin=https://docs.rs/bar/1.0.0/[..]",
+ )
+ .run();
+ let myfun = p.read_file("target/doc/foo/fn.myfun.html");
+ assert!(myfun.contains(r#"href="https://docs.rs/bar/1.0.0/rumpelstiltskin/struct.Straw.html""#));
+}
+
+#[cargo_test(nightly, reason = "--extern-html-root-url is unstable")]
+fn alt_registry() {
+ // Supports other registry names.
+ registry::alt_init();
+ Package::new("bar", "1.0.0")
+ .alternative(true)
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate baz;
+ pub struct Queen;
+ pub use baz::King;
+ "#,
+ )
+ .registry_dep("baz", "1.0")
+ .publish();
+ Package::new("baz", "1.0.0")
+ .alternative(true)
+ .file("src/lib.rs", "pub struct King;")
+ .publish();
+ Package::new("grimm", "1.0.0")
+ .file("src/lib.rs", "pub struct Gold;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ bar = { version = "1.0", registry="alternative" }
+ grimm = "1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn queen() -> bar::Queen { bar::Queen }
+ pub fn king() -> bar::King { bar::King }
+ pub fn gold() -> grimm::Gold { grimm::Gold }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [doc.extern-map.registries]
+ alternative = "https://example.com/{pkg_name}/{version}/"
+ crates-io = "https://docs.rs/"
+ "#,
+ )
+ .build();
+ p.cargo("doc -v --no-deps -Zrustdoc-map")
+ .masquerade_as_nightly_cargo(&["rustdoc-map"])
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..]--crate-name foo \
+ [..]bar=https://example.com/bar/1.0.0/[..]grimm=https://docs.rs/grimm/1.0.0/[..]",
+ )
+ .run();
+ let queen = p.read_file("target/doc/foo/fn.queen.html");
+ assert!(queen.contains(r#"href="https://example.com/bar/1.0.0/bar/struct.Queen.html""#));
+ // The king example fails to link. Rustdoc seems to want the origin crate
+ // name (baz) for re-exports; given the many open issues around rustdoc
+ // re-exports, this looks like a rustdoc limitation. Alternatively, Cargo
+ // could provide mappings for all transitive dependencies to fix this.
+ let king = p.read_file("target/doc/foo/fn.king.html");
+ assert!(king.contains(r#"-&gt; King"#));
+
+ let gold = p.read_file("target/doc/foo/fn.gold.html");
+ assert!(gold.contains(r#"href="https://docs.rs/grimm/1.0.0/grimm/struct.Gold.html""#));
+}
+
+#[cargo_test(nightly, reason = "--extern-html-root-url is unstable")]
+fn multiple_versions() {
+ // What happens when there are multiple versions.
+ // NOTE: This is currently broken behavior. Rustdoc does not provide a way
+ // to match renamed dependencies.
+ Package::new("bar", "1.0.0")
+ .file("src/lib.rs", "pub struct Spin;")
+ .publish();
+ Package::new("bar", "2.0.0")
+ .file("src/lib.rs", "pub struct Straw;")
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ bar = "1.0"
+ bar2 = {version="2.0", package="bar"}
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ pub fn fn1() -> bar::Spin {bar::Spin}
+ pub fn fn2() -> bar2::Straw {bar2::Straw}
+ ",
+ )
+ .build();
+ p.cargo("doc -v --no-deps -Zrustdoc-map")
+ .masquerade_as_nightly_cargo(&["rustdoc-map"])
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..]--crate-name foo \
+ [..]bar=https://docs.rs/bar/1.0.0/[..]bar=https://docs.rs/bar/2.0.0/[..]",
+ )
+ .run();
+ let fn1 = p.read_file("target/doc/foo/fn.fn1.html");
+ // This should be 1.0.0; rustdoc seems to use the last entry when there
+ // are duplicates.
+ assert!(fn1.contains(r#"href="https://docs.rs/bar/2.0.0/bar/struct.Spin.html""#));
+ let fn2 = p.read_file("target/doc/foo/fn.fn2.html");
+ assert!(fn2.contains(r#"href="https://docs.rs/bar/2.0.0/bar/struct.Straw.html""#));
+}
+
+#[cargo_test(nightly, reason = "--extern-html-root-url is unstable")]
+fn rebuilds_when_changing() {
+ // Make sure it rebuilds if the map changes.
+ let p = basic_project();
+ p.cargo("doc -v --no-deps -Zrustdoc-map")
+ .masquerade_as_nightly_cargo(&["rustdoc-map"])
+ .with_stderr_contains("[..]--extern-html-root-url[..]")
+ .run();
+
+ // This also tests that the map for docs.rs can be overridden.
+ p.change_file(
+ ".cargo/config",
+ r#"
+ [doc.extern-map.registries]
+ crates-io = "https://example.com/"
+ "#,
+ );
+ p.cargo("doc -v --no-deps -Zrustdoc-map")
+ .masquerade_as_nightly_cargo(&["rustdoc-map"])
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..]--extern-html-root-url [..]bar=https://example.com/bar/1.0.0/[..]",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--extern-html-root-url is unstable")]
+fn alt_sparse_registry() {
+ // Supports other registry names.
+
+ registry::init();
+ let _registry = registry::RegistryBuilder::new()
+ .http_index()
+ .alternative()
+ .build();
+
+ Package::new("bar", "1.0.0")
+ .alternative(true)
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate baz;
+ pub struct Queen;
+ pub use baz::King;
+ "#,
+ )
+ .registry_dep("baz", "1.0")
+ .publish();
+ Package::new("baz", "1.0.0")
+ .alternative(true)
+ .file("src/lib.rs", "pub struct King;")
+ .publish();
+ Package::new("grimm", "1.0.0")
+ .file("src/lib.rs", "pub struct Gold;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ bar = { version = "1.0", registry="alternative" }
+ grimm = "1.0"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn queen() -> bar::Queen { bar::Queen }
+ pub fn king() -> bar::King { bar::King }
+ pub fn gold() -> grimm::Gold { grimm::Gold }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [doc.extern-map.registries]
+ alternative = "https://example.com/{pkg_name}/{version}/"
+ crates-io = "https://docs.rs/"
+ "#,
+ )
+ .build();
+ p.cargo("doc -v --no-deps -Zrustdoc-map")
+ .masquerade_as_nightly_cargo(&["rustdoc-map"])
+ .with_stderr_contains(
+ "[RUNNING] `rustdoc [..]--crate-name foo \
+ [..]bar=https://example.com/bar/1.0.0/[..]grimm=https://docs.rs/grimm/1.0.0/[..]",
+ )
+ .run();
+ let queen = p.read_file("target/doc/foo/fn.queen.html");
+ assert!(queen.contains(r#"href="https://example.com/bar/1.0.0/bar/struct.Queen.html""#));
+ // The king example fails to link. Rustdoc seems to want the origin crate
+ // name (baz) for re-exports; given the many open issues around rustdoc
+ // re-exports, this looks like a rustdoc limitation. Alternatively, Cargo
+ // could provide mappings for all transitive dependencies to fix this.
+ let king = p.read_file("target/doc/foo/fn.king.html");
+ assert!(king.contains(r#"-&gt; King"#));
+
+ let gold = p.read_file("target/doc/foo/fn.gold.html");
+ assert!(gold.contains(r#"href="https://docs.rs/grimm/1.0.0/grimm/struct.Gold.html""#));
+}
diff --git a/src/tools/cargo/tests/testsuite/rustdocflags.rs b/src/tools/cargo/tests/testsuite/rustdocflags.rs
new file mode 100644
index 000000000..6992961ce
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/rustdocflags.rs
@@ -0,0 +1,155 @@
+//! Tests for setting custom rustdoc flags.
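+//!
+//! Extra rustdoc flags can come from the `RUSTDOCFLAGS` environment variable
+//! or from the `build.rustdocflags` config value; both are covered below.
+//! A minimal sketch of the config form (the flag is only illustrative):
+//!
+//! ```toml
+//! [build]
+//! rustdocflags = ["--cfg", "foo"]
+//! ```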
+
+use cargo_test_support::project;
+
+#[cargo_test]
+fn parses_env() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("doc -v")
+ .env("RUSTDOCFLAGS", "--cfg=foo")
+ .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg=foo[..]`")
+ .run();
+}
+
+#[cargo_test]
+fn parses_config() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustdocflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+
+ p.cargo("doc -v")
+ .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg foo[..]`")
+ .run();
+}
+
+#[cargo_test]
+fn bad_flags() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("doc")
+ .env("RUSTDOCFLAGS", "--bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+}
+
+#[cargo_test]
+fn rerun() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo").run();
+ p.cargo("doc")
+ .env("RUSTDOCFLAGS", "--cfg=foo")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+ p.cargo("doc")
+ .env("RUSTDOCFLAGS", "--cfg=bar")
+ .with_stderr(
+ "\
+[DOCUMENTING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn rustdocflags_passed_to_rustdoc_through_cargo_test() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ //! ```
+ //! assert!(cfg!(do_not_choke));
+ //! ```
+ "#,
+ )
+ .build();
+
+ p.cargo("test --doc")
+ .env("RUSTDOCFLAGS", "--cfg do_not_choke")
+ .run();
+}
+
+#[cargo_test]
+fn rustdocflags_passed_to_rustdoc_through_cargo_test_only_once() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("test --doc")
+ .env("RUSTDOCFLAGS", "--markdown-no-toc")
+ .run();
+}
+
+#[cargo_test]
+fn rustdocflags_misspelled() {
+ let p = project().file("src/main.rs", "fn main() { }").build();
+
+ p.cargo("doc")
+ .env("RUSTDOC_FLAGS", "foo")
+ .with_stderr_contains("[WARNING] Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?")
+ .run();
+}
+
+#[cargo_test]
+fn whitespace() {
+ // Checks behavior of different whitespace characters.
+ let p = project().file("src/lib.rs", "").build();
+
+ // "too many operands"
+ p.cargo("doc")
+ .env("RUSTDOCFLAGS", "--crate-version this has spaces")
+ .with_stderr_contains("[ERROR] could not document `foo`")
+ .with_status(101)
+ .run();
+
+ const SPACED_VERSION: &str = "a\nb\tc\u{00a0}d";
+ p.cargo("doc")
+ .env_remove("__CARGO_TEST_FORCE_ARGFILE") // Not applicable for argfile.
+ .env(
+ "RUSTDOCFLAGS",
+ format!("--crate-version {}", SPACED_VERSION),
+ )
+ .run();
+
+ let contents = p.read_file("target/doc/foo/index.html");
+ assert!(contents.contains(SPACED_VERSION));
+}
+
+#[cargo_test]
+fn not_affected_by_target_rustflags() {
+ let cfg = if cfg!(windows) { "windows" } else { "unix" };
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.'cfg({cfg})']
+ rustflags = ["-D", "missing-docs"]
+
+ [build]
+ rustdocflags = ["--cfg", "foo"]
+ "#,
+ ),
+ )
+ .build();
+
+ // `cargo build` should fail due to missing docs.
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr_contains("[RUNNING] `rustc [..] -D missing-docs[..]`")
+ .run();
+
+ // `cargo doc` shouldn't fail.
+ p.cargo("doc -v")
+ .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg foo[..]`")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/rustflags.rs b/src/tools/cargo/tests/testsuite/rustflags.rs
new file mode 100644
index 000000000..6677beb04
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/rustflags.rs
@@ -0,0 +1,1673 @@
+//! Tests for setting custom rustc flags.
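+//!
+//! Extra rustc flags can come from several places, all exercised below: the
+//! `RUSTFLAGS` environment variable, `build.rustflags`,
+//! `target.<triple>.rustflags`, `target.'cfg(...)'.rustflags`, and the
+//! unstable `host.<triple>.rustflags` table. A minimal sketch of the config
+//! forms (the flags are only illustrative):
+//!
+//! ```toml
+//! [build]
+//! rustflags = ["--cfg", "foo"]
+//!
+//! [target.'cfg(unix)']
+//! rustflags = ["--cfg", "bar"]
+//! ```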
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{
+ basic_lib_manifest, basic_manifest, paths, project, project_in_home, rustc_host,
+};
+use std::fs;
+
+#[cargo_test]
+fn env_rustflags_normal_source() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .file("tests/c.rs", "#[test] fn f() { }")
+ .file(
+ "benches/d.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run1(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .build();
+
+ // Use RUSTFLAGS to pass an argument that will generate an error
+ p.cargo("check --lib")
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --bin=a")
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --example=b")
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("test")
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("bench")
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+}
+
+#[cargo_test]
+fn env_rustflags_build_script() {
+ // RUSTFLAGS should be passed to rustc for build scripts
+ // when --target is not specified.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { assert!(cfg!(foo)); }
+ "#,
+ )
+ .build();
+
+ p.cargo("check").env("RUSTFLAGS", "--cfg foo").run();
+}
+
+#[cargo_test]
+fn env_rustflags_build_script_dep() {
+ // RUSTFLAGS should be passed to rustc for build scripts
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+
+ [build-dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file(
+ "src/lib.rs",
+ r#"
+ fn bar() { }
+ #[cfg(not(foo))]
+ fn bar() { }
+ "#,
+ )
+ .build();
+
+ foo.cargo("check").env("RUSTFLAGS", "--cfg foo").run();
+}
+
+#[cargo_test]
+fn env_rustflags_plugin() {
+ // RUSTFLAGS should be passed to rustc for plugins
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ plugin = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ fn main() { }
+ #[cfg(not(foo))]
+ fn main() { }
+ "#,
+ )
+ .build();
+
+ p.cargo("check").env("RUSTFLAGS", "--cfg foo").run();
+}
+
+#[cargo_test]
+fn env_rustflags_plugin_dep() {
+ // RUSTFLAGS should be passed to rustc for plugins
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ plugin = true
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/lib.rs", "fn foo() {}")
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_lib_manifest("bar"))
+ .file(
+ "src/lib.rs",
+ r#"
+ fn bar() { }
+ #[cfg(not(foo))]
+ fn bar() { }
+ "#,
+ )
+ .build();
+
+ foo.cargo("check").env("RUSTFLAGS", "--cfg foo").run();
+}
+
+#[cargo_test]
+fn env_rustflags_normal_source_with_target() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .file("tests/c.rs", "#[test] fn f() { }")
+ .file(
+ "benches/d.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run1(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .build();
+
+ let host = &rustc_host();
+
+ // Use RUSTFLAGS to pass an argument that will generate an error
+ p.cargo("check --lib --target")
+ .arg(host)
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --bin=a --target")
+ .arg(host)
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --example=b --target")
+ .arg(host)
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("test --target")
+ .arg(host)
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("bench --target")
+ .arg(host)
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+}
+
+#[cargo_test]
+fn env_rustflags_build_script_with_target() {
+ // RUSTFLAGS should not be passed to rustc for build scripts
+ // when --target is specified.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { assert!(!cfg!(foo)); }
+ "#,
+ )
+ .build();
+
+ let host = rustc_host();
+ p.cargo("check --target")
+ .arg(host)
+ .env("RUSTFLAGS", "--cfg foo")
+ .run();
+}
+
+#[cargo_test]
+fn env_rustflags_build_script_with_target_doesnt_apply_to_host_kind() {
+ // RUSTFLAGS should *not* be passed to rustc for build scripts when --target is specified as the
+ // host triple even if `target-applies-to-host` is enabled, to match legacy Cargo behavior.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { assert!(!cfg!(foo)); }
+ "#,
+ )
+ .file(
+ ".cargo/config.toml",
+ r#"
+ target-applies-to-host = true
+ "#,
+ )
+ .build();
+
+ let host = rustc_host();
+ p.cargo("check --target")
+ .masquerade_as_nightly_cargo(&["target-applies-to-host"])
+ .arg(host)
+ .arg("-Ztarget-applies-to-host")
+ .env("RUSTFLAGS", "--cfg foo")
+ .run();
+}
+
+#[cargo_test]
+fn env_rustflags_build_script_dep_with_target() {
+ // RUSTFLAGS should not be passed to rustc for build scripts
+ // when --target is specified.
+ // In this test if --cfg foo is passed the build will fail.
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+
+ [build-dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file(
+ "src/lib.rs",
+ r#"
+ fn bar() { }
+ #[cfg(foo)]
+ fn bar() { }
+ "#,
+ )
+ .build();
+
+ let host = rustc_host();
+ foo.cargo("check --target")
+ .arg(host)
+ .env("RUSTFLAGS", "--cfg foo")
+ .run();
+}
+
+#[cargo_test]
+fn env_rustflags_plugin_with_target() {
+ // RUSTFLAGS should not be passed to rustc for plugins
+ // when --target is specified.
+ // In this test if --cfg foo is passed the build will fail.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ plugin = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ fn main() { }
+ #[cfg(foo)]
+ fn main() { }
+ "#,
+ )
+ .build();
+
+ let host = rustc_host();
+ p.cargo("check --target")
+ .arg(host)
+ .env("RUSTFLAGS", "--cfg foo")
+ .run();
+}
+
+#[cargo_test]
+fn env_rustflags_plugin_dep_with_target() {
+ // RUSTFLAGS should not be passed to rustc for plugins
+ // when --target is specified.
+ // In this test if --cfg foo is passed the build will fail.
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ plugin = true
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/lib.rs", "fn foo() {}")
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_lib_manifest("bar"))
+ .file(
+ "src/lib.rs",
+ r#"
+ fn bar() { }
+ #[cfg(foo)]
+ fn bar() { }
+ "#,
+ )
+ .build();
+
+ let host = rustc_host();
+ foo.cargo("check --target")
+ .arg(host)
+ .env("RUSTFLAGS", "--cfg foo")
+ .run();
+}
+
+#[cargo_test]
+fn env_rustflags_recompile() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("check").run();
+ // Setting RUSTFLAGS forces a recompile
+ p.cargo("check")
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+}
+
+#[cargo_test]
+fn env_rustflags_recompile2() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("check").env("RUSTFLAGS", "--cfg foo").run();
+ // Setting RUSTFLAGS forces a recompile
+ p.cargo("check")
+ .env("RUSTFLAGS", "-Z bogus")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+}
+
+#[cargo_test]
+fn env_rustflags_no_recompile() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("check").env("RUSTFLAGS", "--cfg foo").run();
+ p.cargo("check")
+ .env("RUSTFLAGS", "--cfg foo")
+ .with_stdout("")
+ .run();
+}
+
+#[cargo_test]
+fn build_rustflags_normal_source() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .file("tests/c.rs", "#[test] fn f() { }")
+ .file(
+ "benches/d.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run1(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["-Z", "bogus"]
+ "#,
+ )
+ .build();
+
+ p.cargo("check --lib")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --bin=a")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --example=b")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("test")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("bench")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+}
+
+#[cargo_test]
+fn build_rustflags_build_script() {
+ // build.rustflags should be passed to rustc for build scripts
+ // when --target is not specified.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { assert!(cfg!(foo)); }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn build_rustflags_build_script_dep() {
+ // build.rustflags should be passed to rustc for build scripts
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+
+ [build-dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file(
+ "src/lib.rs",
+ r#"
+ fn bar() { }
+ #[cfg(not(foo))]
+ fn bar() { }
+ "#,
+ )
+ .build();
+
+ foo.cargo("check").run();
+}
+
+#[cargo_test]
+fn build_rustflags_plugin() {
+ // build.rustflags should be passed to rustc for plugins
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ plugin = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ fn main() { }
+ #[cfg(not(foo))]
+ fn main() { }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn build_rustflags_plugin_dep() {
+ // build.rustflags should be passed to rustc for plugins
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ plugin = true
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/lib.rs", "fn foo() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_lib_manifest("bar"))
+ .file(
+ "src/lib.rs",
+ r#"
+ fn bar() { }
+ #[cfg(not(foo))]
+ fn bar() { }
+ "#,
+ )
+ .build();
+
+ foo.cargo("check").run();
+}
+
+#[cargo_test]
+fn build_rustflags_normal_source_with_target() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .file("tests/c.rs", "#[test] fn f() { }")
+ .file(
+ "benches/d.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run1(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["-Z", "bogus"]
+ "#,
+ )
+ .build();
+
+ let host = &rustc_host();
+
+ // Use build.rustflags to pass an argument that will generate an error
+ p.cargo("check --lib --target")
+ .arg(host)
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --bin=a --target")
+ .arg(host)
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --example=b --target")
+ .arg(host)
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("test --target")
+ .arg(host)
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("bench --target")
+ .arg(host)
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+}
+
+#[cargo_test]
+fn build_rustflags_build_script_with_target() {
+ // build.rustflags should not be passed to rustc for build scripts
+ // when --target is specified.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { assert!(!cfg!(foo)); }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+
+ let host = rustc_host();
+ p.cargo("check --target").arg(host).run();
+}
+
+#[cargo_test]
+fn build_rustflags_build_script_dep_with_target() {
+ // build.rustflags should not be passed to rustc for build scripts
+ // when --target is specified.
+ // In this test if --cfg foo is passed the build will fail.
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+
+ [build-dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file(
+ "src/lib.rs",
+ r#"
+ fn bar() { }
+ #[cfg(foo)]
+ fn bar() { }
+ "#,
+ )
+ .build();
+
+ let host = rustc_host();
+ foo.cargo("check --target").arg(host).run();
+}
+
+#[cargo_test]
+fn build_rustflags_plugin_with_target() {
+ // build.rustflags should not be passed to rustc for plugins
+ // when --target is specified.
+ // In this test if --cfg foo is passed the build will fail.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ plugin = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ fn main() { }
+ #[cfg(foo)]
+ fn main() { }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+
+ let host = rustc_host();
+ p.cargo("check --target").arg(host).run();
+}
+
+#[cargo_test]
+fn build_rustflags_plugin_dep_with_target() {
+ // build.rustflags should not be passed to rustc for plugins
+ // when --target is specified.
+ // In this test if --cfg foo is passed the build will fail.
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ plugin = true
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/lib.rs", "fn foo() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+ let _bar = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_lib_manifest("bar"))
+ .file(
+ "src/lib.rs",
+ r#"
+ fn bar() { }
+ #[cfg(foo)]
+ fn bar() { }
+ "#,
+ )
+ .build();
+
+ let host = rustc_host();
+ foo.cargo("check --target").arg(host).run();
+}
+
+#[cargo_test]
+fn build_rustflags_recompile() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("check").run();
+
+ // Setting RUSTFLAGS forces a recompile
+ let config = r#"
+ [build]
+ rustflags = ["-Z", "bogus"]
+ "#;
+ let config_file = paths::root().join("foo/.cargo/config");
+ fs::create_dir_all(config_file.parent().unwrap()).unwrap();
+ fs::write(config_file, config).unwrap();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+}
+
+#[cargo_test]
+fn build_rustflags_recompile2() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("check").env("RUSTFLAGS", "--cfg foo").run();
+
+ // Setting RUSTFLAGS forces a recompile
+ let config = r#"
+ [build]
+ rustflags = ["-Z", "bogus"]
+ "#;
+ let config_file = paths::root().join("foo/.cargo/config");
+ fs::create_dir_all(config_file.parent().unwrap()).unwrap();
+ fs::write(config_file, config).unwrap();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+}
+
+#[cargo_test]
+fn build_rustflags_no_recompile() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+
+ p.cargo("check").env("RUSTFLAGS", "--cfg foo").run();
+ p.cargo("check")
+ .env("RUSTFLAGS", "--cfg foo")
+ .with_stdout("")
+ .run();
+}
+
+#[cargo_test]
+fn build_rustflags_with_home_config() {
+ // We need a config file inside the home directory
+ let home = paths::home();
+ let home_config = home.join(".cargo");
+ fs::create_dir(&home_config).unwrap();
+ fs::write(
+ &home_config.join("config"),
+ r#"
+ [build]
+ rustflags = ["-Cllvm-args=-x86-asm-syntax=intel"]
+ "#,
+ )
+ .unwrap();
+
+ // And we need the project to be inside the home directory
+ // so that the config walk finds the home config twice.
+ let p = project_in_home("foo").file("src/lib.rs", "").build();
+
+ p.cargo("check -v").run();
+}
+
+#[cargo_test]
+fn target_rustflags_normal_source() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .file("tests/c.rs", "#[test] fn f() { }")
+ .file(
+ "benches/d.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run1(_ben: &mut test::Bencher) { }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ "
+ [target.{}]
+ rustflags = [\"-Z\", \"bogus\"]
+ ",
+ rustc_host()
+ ),
+ )
+ .build();
+
+ p.cargo("check --lib")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --bin=a")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --example=b")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("test")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("bench")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+}
+
+#[cargo_test]
+fn target_rustflags_also_for_build_scripts() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { assert!(cfg!(foo)); }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ "
+ [target.{}]
+ rustflags = [\"--cfg=foo\"]
+ ",
+ rustc_host()
+ ),
+ )
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn target_rustflags_not_for_build_scripts_with_target() {
+ let host = rustc_host();
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { assert!(!cfg!(foo)); }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ "
+ [target.{}]
+ rustflags = [\"--cfg=foo\"]
+ ",
+ host
+ ),
+ )
+ .build();
+
+ p.cargo("check --target").arg(host).run();
+
+ // Enabling -Ztarget-applies-to-host should not make a difference without the config setting
+ p.cargo("check --target")
+ .arg(host)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host"])
+ .arg("-Ztarget-applies-to-host")
+ .run();
+
+ // Even with the setting, the rustflags from `target.` should not apply, to match the legacy
+ // Cargo behavior.
+ p.change_file(
+ ".cargo/config",
+ &format!(
+ "
+ target-applies-to-host = true
+
+ [target.{}]
+ rustflags = [\"--cfg=foo\"]
+ ",
+ host
+ ),
+ );
+ p.cargo("check --target")
+ .arg(host)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host"])
+ .arg("-Ztarget-applies-to-host")
+ .run();
+}
+
+#[cargo_test]
+fn build_rustflags_for_build_scripts() {
+ let host = rustc_host();
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { assert!(cfg!(foo)); }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ "
+ [build]
+ rustflags = [\"--cfg=foo\"]
+ ",
+ )
+ .build();
+
+ // With "legacy" behavior, build.rustflags should apply to build scripts without --target
+ p.cargo("check").run();
+
+ // But should _not_ apply _with_ --target
+ p.cargo("check --target")
+ .arg(host)
+ .with_status(101)
+ .with_stderr_contains("[..]assertion failed[..]")
+ .run();
+
+ // Enabling -Ztarget-applies-to-host should not make a difference without the config setting
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["target-applies-to-host"])
+ .arg("-Ztarget-applies-to-host")
+ .run();
+ p.cargo("check --target")
+ .arg(host)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host"])
+ .arg("-Ztarget-applies-to-host")
+ .with_status(101)
+ .with_stderr_contains("[..]assertion failed[..]")
+ .run();
+
+ // When target-applies-to-host is set to false, though, the "proper" behavior
+ // applies: host artifacts only pick up flags from [host].
+ p.change_file(
+ ".cargo/config",
+ "
+ target-applies-to-host = false
+
+ [build]
+ rustflags = [\"--cfg=foo\"]
+ ",
+ );
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["target-applies-to-host"])
+ .arg("-Ztarget-applies-to-host")
+ .with_status(101)
+ .with_stderr_contains("[..]assertion failed[..]")
+ .run();
+ p.cargo("check --target")
+ .arg(host)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host"])
+ .arg("-Ztarget-applies-to-host")
+ .with_status(101)
+ .with_stderr_contains("[..]assertion failed[..]")
+ .run();
+}
+
+#[cargo_test]
+fn host_rustflags_for_build_scripts() {
+ let host = rustc_host();
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ // Ensure that --cfg=foo is passed.
+ fn main() { assert!(cfg!(foo)); }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ "
+ target-applies-to-host = false
+
+ [host.{}]
+ rustflags = [\"--cfg=foo\"]
+ ",
+ host
+ ),
+ )
+ .build();
+
+ p.cargo("check --target")
+ .arg(host)
+ .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"])
+ .arg("-Ztarget-applies-to-host")
+ .arg("-Zhost-config")
+ .run();
+}
+
+// target.{}.rustflags takes precedence over build.rustflags
+#[cargo_test]
+fn target_rustflags_precedence() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .file("tests/c.rs", "#[test] fn f() { }")
+ .file(
+ ".cargo/config",
+ &format!(
+ "
+ [build]
+ rustflags = [\"--cfg\", \"foo\"]
+
+ [target.{}]
+ rustflags = [\"-Z\", \"bogus\"]
+ ",
+ rustc_host()
+ ),
+ )
+ .build();
+
+ p.cargo("check --lib")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --bin=a")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("check --example=b")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("test")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+ p.cargo("bench")
+ .with_status(101)
+ .with_stderr_contains("[..]bogus[..]")
+ .run();
+}
+
+#[cargo_test]
+fn cfg_rustflags_normal_source() {
+ let p = project()
+ .file("src/lib.rs", "pub fn t() {}")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .file("tests/c.rs", "#[test] fn f() { }")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.'cfg({})']
+ rustflags = ["--cfg", "bar"]
+ "#,
+ if rustc_host().contains("-windows-") {
+ "windows"
+ } else {
+ "not(windows)"
+ }
+ ),
+ )
+ .build();
+
+ p.cargo("build --lib -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build --bin=a -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build --example=b -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("test --no-run -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
+[EXECUTABLE] `[..]/target/debug/deps/a-[..][EXE]`
+[EXECUTABLE] `[..]/target/debug/deps/c-[..][EXE]`
+",
+ )
+ .run();
+
+ p.cargo("bench --no-run -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] bench [optimized] target(s) in [..]
+[EXECUTABLE] `[..]/target/release/deps/foo-[..][EXE]`
+[EXECUTABLE] `[..]/target/release/deps/a-[..][EXE]`
+",
+ )
+ .run();
+}
+
+// target.'cfg(...)'.rustflags takes precedence over build.rustflags
+#[cargo_test]
+fn cfg_rustflags_precedence() {
+ let p = project()
+ .file("src/lib.rs", "pub fn t() {}")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .file("tests/c.rs", "#[test] fn f() { }")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+
+ [target.'cfg({})']
+ rustflags = ["--cfg", "bar"]
+ "#,
+ if rustc_host().contains("-windows-") {
+ "windows"
+ } else {
+ "not(windows)"
+ }
+ ),
+ )
+ .build();
+
+ p.cargo("build --lib -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build --bin=a -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("build --example=b -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("test --no-run -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
+[EXECUTABLE] `[..]/target/debug/deps/a-[..][EXE]`
+[EXECUTABLE] `[..]/target/debug/deps/c-[..][EXE]`
+",
+ )
+ .run();
+
+ p.cargo("bench --no-run -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] bench [optimized] target(s) in [..]
+[EXECUTABLE] `[..]/target/release/deps/foo-[..][EXE]`
+[EXECUTABLE] `[..]/target/release/deps/a-[..][EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn target_rustflags_string_and_array_form1() {
+ let p1 = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+
+ p1.cargo("check -v")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg foo[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ let p2 = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = "--cfg foo"
+ "#,
+ )
+ .build();
+
+ p2.cargo("check -v")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg foo[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn target_rustflags_string_and_array_form2() {
+ let p1 = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ rustc_host()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p1.cargo("check -v")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg foo[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ let p2 = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}]
+ rustflags = "--cfg foo"
+ "#,
+ rustc_host()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p2.cargo("check -v")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg foo[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn two_matching_in_config() {
+ let p1 = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [target.'cfg(unix)']
+ rustflags = ["--cfg", 'foo="a"']
+ [target.'cfg(windows)']
+ rustflags = ["--cfg", 'foo="a"']
+ [target.'cfg(target_pointer_width = "32")']
+ rustflags = ["--cfg", 'foo="b"']
+ [target.'cfg(target_pointer_width = "64")']
+ rustflags = ["--cfg", 'foo="b"']
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(foo = "a") {
+ println!("a");
+ } else if cfg!(foo = "b") {
+ println!("b");
+ } else {
+ panic!()
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p1.cargo("run").run();
+ p1.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[cargo_test]
+fn env_rustflags_misspelled() {
+ let p = project().file("src/main.rs", "fn main() { }").build();
+
+ for cmd in &["check", "build", "run", "test", "bench"] {
+ p.cargo(cmd)
+ .env("RUST_FLAGS", "foo")
+ .with_stderr_contains("[WARNING] Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?")
+ .run();
+ }
+}
+
+#[cargo_test]
+fn env_rustflags_misspelled_build_script() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() { }")
+ .build();
+
+ p.cargo("check")
+ .env("RUST_FLAGS", "foo")
+ .with_stderr_contains("[WARNING] Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?")
+ .run();
+}
+
+#[cargo_test]
+fn remap_path_prefix_ignored() {
+ // Ensure that --remap-path-prefix does not affect metadata hash.
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("build").run();
+ let rlibs = p
+ .glob("target/debug/deps/*.rlib")
+ .collect::<Result<Vec<_>, _>>()
+ .unwrap();
+ assert_eq!(rlibs.len(), 1);
+ p.cargo("clean").run();
+
+ let check_metadata_same = || {
+ let rlibs2 = p
+ .glob("target/debug/deps/*.rlib")
+ .collect::<Result<Vec<_>, _>>()
+ .unwrap();
+ assert_eq!(rlibs, rlibs2);
+ };
+
+ p.cargo("build")
+ .env(
+ "RUSTFLAGS",
+ "--remap-path-prefix=/abc=/zoo --remap-path-prefix /spaced=/zoo",
+ )
+ .run();
+ check_metadata_same();
+
+ p.cargo("clean").run();
+ p.cargo("rustc -- --remap-path-prefix=/abc=/zoo --remap-path-prefix /spaced=/zoo")
+ .run();
+ check_metadata_same();
+}
+
+#[cargo_test]
+fn remap_path_prefix_works() {
+ // Check that remap-path-prefix works.
+ Package::new("bar", "0.1.0")
+ .file("src/lib.rs", "pub fn f() -> &'static str { file!() }")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ println!("{}", bar::f());
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .env(
+ "RUSTFLAGS",
+ format!("--remap-path-prefix={}=/foo", paths::root().display()),
+ )
+ .with_stdout("/foo/home/.cargo/registry/src/[..]/bar-0.1.0/src/lib.rs")
+ .run();
+}
+
+#[cargo_test]
+fn host_config_rustflags_with_target() {
+ // regression test for https://github.com/rust-lang/cargo/issues/10206
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("build.rs.rs", "fn main() { assert!(cfg!(foo)); }")
+ .file(".cargo/config.toml", "target-applies-to-host = false")
+ .build();
+
+ p.cargo("check")
+ .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"])
+ .arg("-Zhost-config")
+ .arg("-Ztarget-applies-to-host")
+ .arg("-Zunstable-options")
+ .arg("--config")
+ .arg("host.rustflags=[\"--cfg=foo\"]")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/search.rs b/src/tools/cargo/tests/testsuite/search.rs
new file mode 100644
index 000000000..1f6f40327
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/search.rs
@@ -0,0 +1,192 @@
+//! Tests for the `cargo search` command.
+
+use cargo_test_support::cargo_process;
+use cargo_test_support::paths;
+use cargo_test_support::registry::{RegistryBuilder, Response};
+use std::collections::HashSet;
+
+const SEARCH_API_RESPONSE: &[u8] = br#"
+{
+ "crates": [{
+ "created_at": "2014-11-16T20:17:35Z",
+ "description": "Design by contract style assertions for Rust",
+ "documentation": null,
+ "downloads": 2,
+ "homepage": null,
+ "id": "hoare",
+ "keywords": [],
+ "license": null,
+ "links": {
+ "owners": "/api/v1/crates/hoare/owners",
+ "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies",
+ "version_downloads": "/api/v1/crates/hoare/downloads",
+ "versions": "/api/v1/crates/hoare/versions"
+ },
+ "max_version": "0.1.1",
+ "name": "hoare",
+ "repository": "https://github.com/nick29581/libhoare",
+ "updated_at": "2014-11-20T21:49:21Z",
+ "versions": null
+ },
+ {
+ "id": "postgres",
+ "name": "postgres",
+ "updated_at": "2020-05-01T23:17:54.335921+00:00",
+ "versions": null,
+ "keywords": null,
+ "categories": null,
+ "badges": [
+ {
+ "badge_type": "circle-ci",
+ "attributes": {
+ "repository": "sfackler/rust-postgres",
+ "branch": null
+ }
+ }
+ ],
+ "created_at": "2014-11-24T02:34:44.756689+00:00",
+ "downloads": 535491,
+ "recent_downloads": 88321,
+ "max_version": "0.17.3",
+ "newest_version": "0.17.3",
+ "description": "A native, synchronous PostgreSQL client",
+ "homepage": null,
+ "documentation": null,
+ "repository": "https://github.com/sfackler/rust-postgres",
+ "links": {
+ "version_downloads": "/api/v1/crates/postgres/downloads",
+ "versions": "/api/v1/crates/postgres/versions",
+ "owners": "/api/v1/crates/postgres/owners",
+ "owner_team": "/api/v1/crates/postgres/owner_team",
+ "owner_user": "/api/v1/crates/postgres/owner_user",
+ "reverse_dependencies": "/api/v1/crates/postgres/reverse_dependencies"
+ },
+ "exact_match": true
+ }
+ ],
+ "meta": {
+ "total": 2
+ }
+}"#;
+
+const SEARCH_RESULTS: &str = "\
+hoare = \"0.1.1\" # Design by contract style assertions for Rust
+postgres = \"0.17.3\" # A native, synchronous PostgreSQL client
+";
+
+#[must_use]
+fn setup() -> RegistryBuilder {
+ RegistryBuilder::new()
+ .http_api()
+ .add_responder("/api/v1/crates", |_, _| Response {
+ code: 200,
+ headers: vec![],
+ body: SEARCH_API_RESPONSE.to_vec(),
+ })
+}
+
+#[cargo_test]
+fn not_update() {
+ let registry = setup().build();
+
+ use cargo::core::{Shell, Source, SourceId};
+ use cargo::sources::RegistrySource;
+ use cargo::util::Config;
+
+ let sid = SourceId::for_registry(registry.index_url()).unwrap();
+ let cfg = Config::new(
+ Shell::from_write(Box::new(Vec::new())),
+ paths::root(),
+ paths::home().join(".cargo"),
+ );
+ let lock = cfg.acquire_package_cache_lock().unwrap();
+ let mut regsrc = RegistrySource::remote(sid, &HashSet::new(), &cfg).unwrap();
+ regsrc.invalidate_cache();
+ regsrc.block_until_ready().unwrap();
+ drop(lock);
+
+ cargo_process("search postgres")
+ .replace_crates_io(registry.index_url())
+ .with_stdout_contains(SEARCH_RESULTS)
+ .with_stderr("") // without "Updating ... index"
+ .run();
+}
+
+#[cargo_test]
+fn replace_default() {
+ let registry = setup().build();
+
+ cargo_process("search postgres")
+ .replace_crates_io(registry.index_url())
+ .with_stdout_contains(SEARCH_RESULTS)
+ .with_stderr_contains("[..]Updating [..] index")
+ .run();
+}
+
+#[cargo_test]
+fn simple() {
+ let registry = setup().build();
+
+ cargo_process("search postgres --index")
+ .arg(registry.index_url().as_str())
+ .with_stdout_contains(SEARCH_RESULTS)
+ .run();
+}
+
+#[cargo_test]
+fn multiple_query_params() {
+ let registry = setup().build();
+
+ cargo_process("search postgres sql --index")
+ .arg(registry.index_url().as_str())
+ .with_stdout_contains(SEARCH_RESULTS)
+ .run();
+}
+
+#[cargo_test]
+fn ignore_quiet() {
+ let registry = setup().build();
+
+ cargo_process("search -q postgres")
+ .replace_crates_io(registry.index_url())
+ .with_stdout_contains(SEARCH_RESULTS)
+ .run();
+}
+
+#[cargo_test]
+fn colored_results() {
+ let registry = setup().build();
+
+ cargo_process("search --color=never postgres")
+ .replace_crates_io(registry.index_url())
+ .with_stdout_does_not_contain("[..]\x1b[[..]")
+ .run();
+
+ cargo_process("search --color=always postgres")
+ .replace_crates_io(registry.index_url())
+ .with_stdout_contains("[..]\x1b[[..]")
+ .run();
+}
+
+#[cargo_test]
+fn auth_required_failure() {
+ let server = setup().auth_required().no_configure_token().build();
+
+ cargo_process("-Zregistry-auth search postgres")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .replace_crates_io(server.index_url())
+ .with_status(101)
+ .with_stderr_contains("[ERROR] no token found, please run `cargo login`")
+ .run();
+}
+
+#[cargo_test]
+fn auth_required() {
+ let server = setup().auth_required().build();
+
+ cargo_process("-Zregistry-auth search postgres")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .replace_crates_io(server.index_url())
+ .with_stdout_contains(SEARCH_RESULTS)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/shell_quoting.rs b/src/tools/cargo/tests/testsuite/shell_quoting.rs
new file mode 100644
index 000000000..bff333389
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/shell_quoting.rs
@@ -0,0 +1,37 @@
+//! This file tests that when the commands being run are shown
+//! in the output, their arguments are quoted properly
+//! so that the command can be run in a terminal.
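+//!
+//! For example, the reported command should contain the single-quoted form
+//! `--cfg 'feature="default"'` rather than the raw `--cfg feature="default"`,
+//! which a shell would mangle.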
+
+use cargo_test_support::project;
+
+#[cargo_test]
+fn features_are_quoted() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = ["mikeyhew@example.com"]
+
+ [features]
+ some_feature = []
+ default = ["some_feature"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {error}")
+ .build();
+
+ p.cargo("check -v")
+ .env("MSYSTEM", "1")
+ .with_status(101)
+ .with_stderr_contains(
+ r#"[RUNNING] `rustc [..] --cfg 'feature="default"' --cfg 'feature="some_feature"' [..]`"#
+ ).with_stderr_contains(
+ r#"
+Caused by:
+ process didn't exit successfully: [..] --cfg 'feature="default"' --cfg 'feature="some_feature"' [..]"#
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/source_replacement.rs b/src/tools/cargo/tests/testsuite/source_replacement.rs
new file mode 100644
index 000000000..24f2ca3e3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/source_replacement.rs
@@ -0,0 +1,250 @@
+//! Tests for `[source]` table (source replacement).
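+//!
+//! Source replacement redirects one source (here crates.io) to another
+//! registry. The tests below set it up with a config of this shape, where
+//! `alternative` is the test registry created by the harness:
+//!
+//! ```toml
+//! [source.crates-io]
+//! replace-with = 'alternative'
+//! ```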
+
+use std::fs;
+
+use cargo_test_support::registry::{Package, RegistryBuilder, TestRegistry};
+use cargo_test_support::{cargo_process, paths, project, t};
+
+fn setup_replacement(config: &str) -> TestRegistry {
+ let crates_io = RegistryBuilder::new()
+ .no_configure_registry()
+ .http_api()
+ .build();
+
+ let root = paths::root();
+ t!(fs::create_dir(&root.join(".cargo")));
+ t!(fs::write(root.join(".cargo/config"), config,));
+ crates_io
+}
+
+#[cargo_test]
+fn crates_io_token_not_sent_to_replacement() {
+ // verifies that the crates.io token is not sent to a replacement registry during publish.
+ let crates_io = setup_replacement(
+ r#"
+ [source.crates-io]
+ replace-with = 'alternative'
+ "#,
+ );
+ let _alternative = RegistryBuilder::new()
+ .alternative()
+ .http_api()
+ .no_configure_token()
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify --registry crates-io")
+ .replace_crates_io(crates_io.index_url())
+ .with_stderr_contains("[UPDATING] crates.io index")
+ .run();
+}
+
+#[cargo_test]
+fn token_sent_to_correct_registry() {
+ // verifies that the crates.io token is not sent to a replacement registry during yank.
+ let crates_io = setup_replacement(
+ r#"
+ [source.crates-io]
+ replace-with = 'alternative'
+ "#,
+ );
+ let _alternative = RegistryBuilder::new().alternative().http_api().build();
+
+ cargo_process("yank foo@0.0.1 --registry crates-io")
+ .replace_crates_io(crates_io.index_url())
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[YANK] foo@0.0.1
+",
+ )
+ .run();
+
+ cargo_process("yank foo@0.0.1 --registry alternative")
+ .replace_crates_io(crates_io.index_url())
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[YANK] foo@0.0.1
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn ambiguous_registry() {
+ // verifies that an error is issued when a source-replacement is configured
+ // and no --registry argument is given.
+ let crates_io = setup_replacement(
+ r#"
+ [source.crates-io]
+ replace-with = 'alternative'
+ "#,
+ );
+ let _alternative = RegistryBuilder::new()
+ .alternative()
+ .http_api()
+ .no_configure_token()
+ .build();
+
+ cargo_process("yank foo@0.0.1")
+ .replace_crates_io(crates_io.index_url())
+ .with_status(101)
+ .with_stderr(
+ "\
+error: crates-io is replaced with remote registry alternative;
+include `--registry alternative` or `--registry crates-io`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn yank_with_default_crates_io() {
+ // verifies that no error is given when registry.default is used.
+ let crates_io = setup_replacement(
+ r#"
+ [source.crates-io]
+ replace-with = 'alternative'
+
+ [registry]
+ default = 'crates-io'
+ "#,
+ );
+ let _alternative = RegistryBuilder::new().alternative().http_api().build();
+
+ cargo_process("yank foo@0.0.1")
+ .replace_crates_io(crates_io.index_url())
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[YANK] foo@0.0.1
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn yank_with_default_alternative() {
+ // verifies that no error is given when registry.default is an alt registry.
+ let crates_io = setup_replacement(
+ r#"
+ [source.crates-io]
+ replace-with = 'alternative'
+
+ [registry]
+ default = 'alternative'
+ "#,
+ );
+ let _alternative = RegistryBuilder::new().alternative().http_api().build();
+
+ cargo_process("yank foo@0.0.1")
+ .replace_crates_io(crates_io.index_url())
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[YANK] foo@0.0.1
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish_with_replacement() {
+ // verifies that publishing with a source replacement sends the crate to crates.io
+ // and only uses the replacement registry for the verification build.
+ let crates_io = setup_replacement(
+ r#"
+ [source.crates-io]
+ replace-with = 'alternative'
+ "#,
+ );
+ let _alternative = RegistryBuilder::new()
+ .alternative()
+ .http_api()
+ .no_configure_token()
+ .build();
+
+ // Publish bar only to alternative. This tests that the publish verification build
+ // does use the source replacement.
+ Package::new("bar", "1.0.0").alternative(true).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Verifies that the crates.io index is used to find the publishing endpoint
+ // and that the crate is sent to crates.io. The source replacement is only used
+ // for the verification step.
+ p.cargo("publish --registry crates-io")
+ .replace_crates_io(crates_io.index_url())
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[WARNING] manifest has no documentation, homepage or repository.
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([..])
+[VERIFYING] foo v0.0.1 ([..])
+[UPDATING] `alternative` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.0 (registry `alternative`)
+[COMPILING] bar v1.0.0
+[COMPILING] foo v0.0.1 ([..]foo-0.0.1)
+[FINISHED] dev [..]
+[PACKAGED] [..]
+[UPLOADING] foo v0.0.1 ([..])
+[UPLOADED] foo v0.0.1 to registry `crates-io`
+note: Waiting for `foo v0.0.1` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] foo v0.0.1 at registry `crates-io`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn undefined_default() {
+ // verifies that an error is given when registry.default names a registry with no configured index.
+ let crates_io = setup_replacement(
+ r#"
+ [registry]
+ default = 'undefined'
+ "#,
+ );
+
+ cargo_process("yank foo@0.0.1")
+ .replace_crates_io(crates_io.index_url())
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] no index found for registry: `undefined`
+",
+ )
+ .run();
+}
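
Editor's note: the tests above all hinge on the same `[source]` replacement table that `setup_replacement` writes into `.cargo/config`. As a point of reference, here is a minimal sketch (not part of the test suite) of producing such a config by hand; the registry name `alternative` mirrors the tests, while the sparse index URL is a hypothetical placeholder, and `replace-with` may name either another `[source]` entry or a registry from `[registries]`.

use std::fs;
use std::io;
use std::path::Path;

/// Sketch only: writes a Cargo config that replaces the crates-io source with a
/// registry named `alternative`, the same shape the tests above create via
/// `setup_replacement` (which uses a local test registry rather than a real URL).
fn write_source_replacement(cargo_home: &Path) -> io::Result<()> {
    let config = r#"
[source.crates-io]
replace-with = 'alternative'

# Hypothetical registry definition; the tests configure a local test registry instead.
[registries.alternative]
index = "sparse+https://example.com/index/"
"#;
    fs::create_dir_all(cargo_home)?;
    fs::write(cargo_home.join("config.toml"), config)
}

With that config in place, commands such as `cargo yank` must be told which registry they target (hence the `--registry crates-io` / `--registry alternative` disambiguation the `ambiguous_registry` test asserts on).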
diff --git a/src/tools/cargo/tests/testsuite/ssh.rs b/src/tools/cargo/tests/testsuite/ssh.rs
new file mode 100644
index 000000000..d1701d32d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/ssh.rs
@@ -0,0 +1,592 @@
+//! Network tests for SSH connections.
+//!
+//! Note that these tests will generally require setting CARGO_CONTAINER_TESTS
+//! or CARGO_PUBLIC_NETWORK_TESTS.
+//!
+//! NOTE: The container tests almost certainly won't work on Windows.
+
+use cargo_test_support::containers::{Container, ContainerHandle, MkFile};
+use cargo_test_support::git::cargo_uses_gitoxide;
+use cargo_test_support::{paths, process, project, Project};
+use std::fs;
+use std::io::Write;
+use std::path::PathBuf;
+
+fn ssh_repo_url(container: &ContainerHandle, name: &str) -> String {
+ let port = container.port_mappings[&22];
+ format!("ssh://testuser@127.0.0.1:{port}/repos/{name}.git")
+}
+
+/// The path to the client's private key.
+fn key_path() -> PathBuf {
+ paths::home().join(".ssh/id_ed25519")
+}
+
+/// Generates the SSH keys for authenticating into the container.
+fn gen_ssh_keys() -> String {
+ let path = key_path();
+ process("ssh-keygen")
+ .args(&["-t", "ed25519", "-N", "", "-f"])
+ .arg(&path)
+ .exec_with_output()
+ .unwrap();
+ let pub_key = path.with_extension("pub");
+ fs::read_to_string(pub_key).unwrap()
+}
+
+/// Handler for running ssh-agent for SSH authentication.
+///
+/// Be sure to set `SSH_AUTH_SOCK` when running a process in order to use the
+/// agent. Keys will need to be copied into the container with the
+/// `authorized_keys()` method.
+struct Agent {
+ sock: PathBuf,
+ pid: String,
+ ssh_dir: PathBuf,
+ pub_key: String,
+}
+
+impl Agent {
+ fn launch() -> Agent {
+ let ssh_dir = paths::home().join(".ssh");
+ fs::create_dir(&ssh_dir).unwrap();
+ let pub_key = gen_ssh_keys();
+
+ let sock = paths::root().join("agent");
+ let output = process("ssh-agent")
+ .args(&["-s", "-a"])
+ .arg(&sock)
+ .exec_with_output()
+ .unwrap();
+ let stdout = std::str::from_utf8(&output.stdout).unwrap();
+ let start = stdout.find("SSH_AGENT_PID=").unwrap() + 14;
+ let end = &stdout[start..].find(';').unwrap();
+ let pid = (&stdout[start..start + end]).to_string();
+ eprintln!("SSH_AGENT_PID={pid}");
+ process("ssh-add")
+ .arg(key_path())
+ .env("SSH_AUTH_SOCK", &sock)
+ .exec_with_output()
+ .unwrap();
+ Agent {
+ sock,
+ pid,
+ ssh_dir,
+ pub_key,
+ }
+ }
+
+ /// Returns a `MkFile` which can be passed into the `Container` builder to
+ /// copy an `authorized_keys` file containing this agent's public key.
+ fn authorized_keys(&self) -> MkFile {
+ MkFile::path("home/testuser/.ssh/authorized_keys")
+ .contents(self.pub_key.as_bytes())
+ .mode(0o600)
+ .uid(100)
+ .gid(101)
+ }
+}
+
+impl Drop for Agent {
+ fn drop(&mut self) {
+ if let Err(e) = process("ssh-agent")
+ .args(&["-k", "-a"])
+ .arg(&self.sock)
+ .env("SSH_AGENT_PID", &self.pid)
+ .exec_with_output()
+ {
+ eprintln!("failed to stop ssh-agent: {e:?}");
+ }
+ }
+}
+
+/// Common project used for several tests.
+fn foo_bar_project(url: &str) -> Project {
+ project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {{ git = "{url}" }}
+ "#
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build()
+}
+
+#[cargo_test(container_test)]
+fn no_known_host() {
+ // When the host key is not known, it should show an error.
+ let sshd = Container::new("sshd").launch();
+ let url = ssh_repo_url(&sshd, "bar");
+ let p = foo_bar_project(&url);
+ p.cargo("fetch")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] git repository `ssh://testuser@127.0.0.1:[..]/repos/bar.git`
+error: failed to get `bar` as a dependency of package `foo v0.1.0 ([ROOT]/foo)`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update ssh://testuser@127.0.0.1:[..]/repos/bar.git
+
+Caused by:
+ failed to clone into: [ROOT]/home/.cargo/git/db/bar-[..]
+
+Caused by:
+ error: unknown SSH host key
+ The SSH host key for `[127.0.0.1]:[..]` is not known and cannot be validated.
+
+ To resolve this issue, add the host key to the `net.ssh.known-hosts` array in \
+ your Cargo configuration (such as [ROOT]/home/.cargo/config.toml) or in your \
+ OpenSSH known_hosts file at [ROOT]/home/.ssh/known_hosts
+
+ The key to add is:
+
+ [127.0.0.1]:[..] ecdsa-sha2-nistp256 AAAA[..]
+
+ The ECDSA key fingerprint is: SHA256:[..]
+ This fingerprint should be validated with the server administrator that it is correct.
+
+ See https://doc.rust-lang.org/stable/cargo/appendix/git-authentication.html#ssh-known-hosts \
+ for more information.
+",
+ )
+ .run();
+}
+
+#[cargo_test(container_test)]
+fn known_host_works() {
+ // The key displayed in the error message should work when added to known_hosts.
+ let agent = Agent::launch();
+ let sshd = Container::new("sshd")
+ .file(agent.authorized_keys())
+ .launch();
+ let url = ssh_repo_url(&sshd, "bar");
+ let p = foo_bar_project(&url);
+ let output = p
+ .cargo("fetch")
+ .env("SSH_AUTH_SOCK", &agent.sock)
+ .build_command()
+ .output()
+ .unwrap();
+ let stderr = std::str::from_utf8(&output.stderr).unwrap();
+
+ // Validate the fingerprint while we're here.
+ let fingerprint = stderr
+ .lines()
+ .find(|line| line.starts_with(" The ECDSA key fingerprint"))
+ .unwrap()
+ .trim();
+ let fingerprint = &fingerprint[30..];
+ let finger_out = sshd.exec(&["ssh-keygen", "-l", "-f", "/etc/ssh/ssh_host_ecdsa_key.pub"]);
+ let gen_finger = std::str::from_utf8(&finger_out.stdout).unwrap();
+ // <key-size> <fingerprint> <comments…>
+ let gen_finger = gen_finger.split_whitespace().nth(1).unwrap();
+ assert_eq!(fingerprint, gen_finger);
+
+ // Add the key to known_hosts, and try again.
+ let key = stderr
+ .lines()
+ .find(|line| line.starts_with(" [127.0.0.1]:"))
+ .unwrap()
+ .trim();
+ fs::write(agent.ssh_dir.join("known_hosts"), key).unwrap();
+ p.cargo("fetch")
+ .env("SSH_AUTH_SOCK", &agent.sock)
+ .with_stderr("[UPDATING] git repository `ssh://testuser@127.0.0.1:[..]/repos/bar.git`")
+ .run();
+}
+
+#[cargo_test(container_test)]
+fn same_key_different_hostname() {
+ // The error message should mention if an identical key was found.
+ let agent = Agent::launch();
+ let sshd = Container::new("sshd").launch();
+
+ let hostkey = sshd.read_file("/etc/ssh/ssh_host_ecdsa_key.pub");
+ let known_hosts = format!("example.com {hostkey}");
+ fs::write(agent.ssh_dir.join("known_hosts"), known_hosts).unwrap();
+
+ let url = ssh_repo_url(&sshd, "bar");
+ let p = foo_bar_project(&url);
+ p.cargo("fetch")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] git repository `ssh://testuser@127.0.0.1:[..]/repos/bar.git`
+error: failed to get `bar` as a dependency of package `foo v0.1.0 ([ROOT]/foo)`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update ssh://testuser@127.0.0.1:[..]/repos/bar.git
+
+Caused by:
+ failed to clone into: [ROOT]/home/.cargo/git/db/bar-[..]
+
+Caused by:
+ error: unknown SSH host key
+ The SSH host key for `[127.0.0.1]:[..]` is not known and cannot be validated.
+
+ To resolve this issue, add the host key to the `net.ssh.known-hosts` array in \
+ your Cargo configuration (such as [ROOT]/home/.cargo/config.toml) or in your \
+ OpenSSH known_hosts file at [ROOT]/home/.ssh/known_hosts
+
+ The key to add is:
+
+ [127.0.0.1]:[..] ecdsa-sha2-nistp256 AAAA[..]
+
+ The ECDSA key fingerprint is: SHA256:[..]
+ This fingerprint should be validated with the server administrator that it is correct.
+ Note: This host key was found, but is associated with a different host:
+ [ROOT]/home/.ssh/known_hosts line 1: example.com
+
+ See https://doc.rust-lang.org/stable/cargo/appendix/git-authentication.html#ssh-known-hosts \
+ for more information.
+",
+ )
+ .run();
+}
+
+#[cargo_test(container_test)]
+fn known_host_without_port() {
+ // A known_hosts entry without a port should match a connection to a non-standard port.
+ let agent = Agent::launch();
+ let sshd = Container::new("sshd")
+ .file(agent.authorized_keys())
+ .launch();
+
+ let hostkey = sshd.read_file("/etc/ssh/ssh_host_ecdsa_key.pub");
+ // The important part of this test is that this line does not have a port.
+ let known_hosts = format!("127.0.0.1 {hostkey}");
+ fs::write(agent.ssh_dir.join("known_hosts"), known_hosts).unwrap();
+ let url = ssh_repo_url(&sshd, "bar");
+ let p = foo_bar_project(&url);
+ p.cargo("fetch")
+ .env("SSH_AUTH_SOCK", &agent.sock)
+ .with_stderr("[UPDATING] git repository `ssh://testuser@127.0.0.1:[..]/repos/bar.git`")
+ .run();
+}
+
+#[cargo_test(container_test)]
+fn hostname_case_insensitive() {
+ // hostname checking should be case-insensitive.
+ let agent = Agent::launch();
+ let sshd = Container::new("sshd")
+ .file(agent.authorized_keys())
+ .launch();
+
+ // Consider using `gethostname-rs` instead?
+ let hostname = process("hostname").exec_with_output().unwrap();
+ let hostname = std::str::from_utf8(&hostname.stdout).unwrap().trim();
+ let inv_hostname = if hostname.chars().any(|c| c.is_lowercase()) {
+ hostname.to_uppercase()
+ } else {
+ // There should be *some* chars in the name.
+ assert!(hostname.chars().any(|c| c.is_uppercase()));
+ hostname.to_lowercase()
+ };
+ eprintln!("converted {hostname} to {inv_hostname}");
+
+ let hostkey = sshd.read_file("/etc/ssh/ssh_host_ecdsa_key.pub");
+ let known_hosts = format!("{inv_hostname} {hostkey}");
+ fs::write(agent.ssh_dir.join("known_hosts"), known_hosts).unwrap();
+ let port = sshd.port_mappings[&22];
+ let url = format!("ssh://testuser@{hostname}:{port}/repos/bar.git");
+ let p = foo_bar_project(&url);
+ p.cargo("fetch")
+ .env("SSH_AUTH_SOCK", &agent.sock)
+ .with_stderr(&format!(
+ "[UPDATING] git repository `ssh://testuser@{hostname}:{port}/repos/bar.git`"
+ ))
+ .run();
+}
+
+#[cargo_test(container_test)]
+fn invalid_key_error() {
+ // An error is shown when a known_hosts entry doesn't match.
+ let agent = Agent::launch();
+ let sshd = Container::new("sshd")
+ .file(agent.authorized_keys())
+ .launch();
+
+ let port = sshd.port_mappings[&22];
+ let known_hosts = format!(
+ "[127.0.0.1]:{port} ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLqLMclVr7MDuaVsm3sEnnq2OrGxTFiHSw90wd6N14BU8xVC9cZldC3rJ58Wmw6bEVKPjk7foNG0lHwS5bCKX+U=\n"
+ );
+ fs::write(agent.ssh_dir.join("known_hosts"), known_hosts).unwrap();
+ let url = ssh_repo_url(&sshd, "bar");
+ let p = foo_bar_project(&url);
+ p.cargo("fetch")
+ .env("SSH_AUTH_SOCK", &agent.sock)
+ .with_status(101)
+ .with_stderr(&format!("\
+[UPDATING] git repository `ssh://testuser@127.0.0.1:{port}/repos/bar.git`
+error: failed to get `bar` as a dependency of package `foo v0.1.0 ([ROOT]/foo)`
+
+Caused by:
+ failed to load source for dependency `bar`
+
+Caused by:
+ Unable to update ssh://testuser@127.0.0.1:{port}/repos/bar.git
+
+Caused by:
+ failed to clone into: [ROOT]/home/.cargo/git/db/bar-[..]
+
+Caused by:
+ error: SSH host key has changed for `[127.0.0.1]:{port}`
+ *********************************
+ * WARNING: HOST KEY HAS CHANGED *
+ *********************************
+ This may be caused by a man-in-the-middle attack, or the server may have changed its host key.
+
+ The ECDSA fingerprint for the key from the remote host is:
+ SHA256:[..]
+
+ You are strongly encouraged to contact the server administrator for `[127.0.0.1]:{port}` \
+ to verify that this new key is correct.
+
+ If you can verify that the server has a new key, you can resolve this error by \
+ removing the old ecdsa-sha2-nistp256 key for `[127.0.0.1]:{port}` located at \
+ [ROOT]/home/.ssh/known_hosts line 1, and adding the new key to the \
+ `net.ssh.known-hosts` array in your Cargo configuration (such as \
+ [ROOT]/home/.cargo/config.toml) or in your OpenSSH known_hosts file at \
+ [ROOT]/home/.ssh/known_hosts
+
+ The key provided by the remote host is:
+
+ [127.0.0.1]:{port} ecdsa-sha2-nistp256 [..]
+
+ See https://doc.rust-lang.org/stable/cargo/appendix/git-authentication.html#ssh-known-hosts for more information.
+"))
+ .run();
+ // Add the key, it should work even with the old key left behind.
+ let hostkey = sshd.read_file("/etc/ssh/ssh_host_ecdsa_key.pub");
+ let known_hosts_path = agent.ssh_dir.join("known_hosts");
+ let mut f = fs::OpenOptions::new()
+ .append(true)
+ .open(known_hosts_path)
+ .unwrap();
+ write!(f, "[127.0.0.1]:{port} {hostkey}").unwrap();
+ drop(f);
+ p.cargo("fetch")
+ .env("SSH_AUTH_SOCK", &agent.sock)
+ .with_stderr("[UPDATING] git repository `ssh://testuser@127.0.0.1:[..]/repos/bar.git`")
+ .run();
+}
+
+// For unknown reasons, this test occasionally fails on Windows with a
+// LIBSSH2_ERROR_KEY_EXCHANGE_FAILURE error:
+// failed to start SSH session: Unable to exchange encryption keys; class=Ssh (23)
+#[cargo_test(public_network_test, ignore_windows = "test is flaky on windows")]
+fn invalid_github_key() {
+ // A key for github.com in known_hosts should override the built-in key.
+ // This uses a bogus key which should result in an error.
+ let ssh_dir = paths::home().join(".ssh");
+ fs::create_dir(&ssh_dir).unwrap();
+ let known_hosts = "\
+ github.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLqLMclVr7MDuaVsm3sEnnq2OrGxTFiHSw90wd6N14BU8xVC9cZldC3rJ58Wmw6bEVKPjk7foNG0lHwS5bCKX+U=\n\
+ github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDgi+8rMcyFCBq5y7BXrb2aaYGhMjlU3QDy7YDvtNL5KSecYOsaqQHaXr87Bbx0EEkgbhK4kVMkmThlCoNITQS9Vc3zIMQ+Tg6+O4qXx719uCzywl50Tb5tDqPGMj54jcq3VUiu/dvse0yeehyvzoPNWewgGWLx11KI4A4wOwMnc6guhculEWe9DjGEjUQ34lPbmdfu/Hza7ZVu/RhgF/wc43uzXWB2KpMEqtuY1SgRlCZqTASoEtfKZi0AuM7AEdOwE5aTotS4CQZHWimb1bMFpF4DAq92CZ8Jhrm4rWETbO29WmjviCJEA3KNQyd3oA7H9AE9z/22PJaVEmjiZZ+wyLgwyIpOlsnHYNEdGeQMQ4SgLRkARLwcnKmByv1AAxsBW4LI3Os4FpwxVPdXHcBebydtvxIsbtUVkkq99nbsIlnSRFSTvb0alrdzRuKTdWpHtN1v9hagFqmeCx/kJfH76NXYBbtaWZhSOnxfEbhLYuOb+IS4jYzHAIkzy9FjVuk=\n\
+ ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEeMB6BUAW6FfvfLxRO3kGASe0yXnrRT4kpqncsup2b2\n";
+ fs::write(ssh_dir.join("known_hosts"), known_hosts).unwrap();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = { git = "ssh://git@github.com/rust-lang/bitflags.git", tag = "1.3.2" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch")
+ .with_status(101)
+ .with_stderr_contains(if cargo_uses_gitoxide() {
+ " git@github.com: Permission denied (publickey)."
+ } else {
+ " error: SSH host key has changed for `github.com`"
+ })
+ .run();
+}
+
+// For unknown reasons, this test occasionally fails on Windows with a
+// LIBSSH2_ERROR_KEY_EXCHANGE_FAILURE error:
+// failed to start SSH session: Unable to exchange encryption keys; class=Ssh (23)
+#[cargo_test(public_network_test, ignore_windows = "test is flaky on windows")]
+fn bundled_github_works() {
+ // The bundled key for github.com works.
+ //
+ // Use a bogus auth sock to force an authentication error.
+ // On Windows, if the agent service is running, it could allow a
+ // successful authentication.
+ //
+ // If the bundled hostkey did not work, it would result in an "unknown SSH
+ // host key" instead.
+ let bogus_auth_sock = paths::home().join("ssh_auth_sock");
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = { git = "ssh://git@github.com/rust-lang/bitflags.git", tag = "1.3.2" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ let shared_stderr = "\
+[UPDATING] git repository `ssh://git@github.com/rust-lang/bitflags.git`
+error: failed to get `bitflags` as a dependency of package `foo v0.1.0 ([ROOT]/foo)`
+
+Caused by:
+ failed to load source for dependency `bitflags`
+
+Caused by:
+ Unable to update ssh://git@github.com/rust-lang/bitflags.git?tag=1.3.2
+
+Caused by:
+ failed to clone into: [ROOT]/home/.cargo/git/db/bitflags-[..]
+
+Caused by:
+ failed to authenticate when downloading repository
+
+ *";
+ let expected = if cargo_uses_gitoxide() {
+ format!(
+ "{shared_stderr} attempted to find username/password via `credential.helper`, but maybe the found credentials were incorrect
+
+ if the git CLI succeeds then `net.git-fetch-with-cli` may help here
+ https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli
+
+Caused by:
+ Credentials provided for \"ssh://git@github.com/rust-lang/bitflags.git\" were not accepted by the remote
+
+Caused by:
+ git@github.com: Permission denied (publickey).
+"
+ )
+ } else {
+ format!(
+ "{shared_stderr} attempted ssh-agent authentication, but no usernames succeeded: `git`
+
+ if the git CLI succeeds then `net.git-fetch-with-cli` may help here
+ https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli
+
+Caused by:
+ no authentication methods succeeded
+"
+ )
+ };
+ p.cargo("fetch")
+ .env("SSH_AUTH_SOCK", &bogus_auth_sock)
+ .with_status(101)
+ .with_stderr(&expected)
+ .run();
+
+ let shared_stderr = "\
+[UPDATING] git repository `ssh://git@github.com:22/rust-lang/bitflags.git`
+error: failed to get `bitflags` as a dependency of package `foo v0.1.0 ([ROOT]/foo)`
+
+Caused by:
+ failed to load source for dependency `bitflags`
+
+Caused by:
+ Unable to update ssh://git@github.com:22/rust-lang/bitflags.git?tag=1.3.2
+
+Caused by:
+ failed to clone into: [ROOT]/home/.cargo/git/db/bitflags-[..]
+
+Caused by:
+ failed to authenticate when downloading repository
+
+ *";
+
+ let expected = if cargo_uses_gitoxide() {
+ format!(
+ "{shared_stderr} attempted to find username/password via `credential.helper`, but maybe the found credentials were incorrect
+
+ if the git CLI succeeds then `net.git-fetch-with-cli` may help here
+ https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli
+
+Caused by:
+ Credentials provided for \"ssh://git@github.com:22/rust-lang/bitflags.git\" were not accepted by the remote
+
+Caused by:
+ git@github.com: Permission denied (publickey).
+"
+ )
+ } else {
+ format!(
+ "{shared_stderr} attempted ssh-agent authentication, but no usernames succeeded: `git`
+
+ if the git CLI succeeds then `net.git-fetch-with-cli` may help here
+ https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli
+
+Caused by:
+ no authentication methods succeeded
+"
+ )
+ };
+
+ // Explicit :22 should also work with bundled.
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = { git = "ssh://git@github.com:22/rust-lang/bitflags.git", tag = "1.3.2" }
+ "#,
+ );
+ p.cargo("fetch")
+ .env("SSH_AUTH_SOCK", &bogus_auth_sock)
+ .with_status(101)
+ .with_stderr(&expected)
+ .run();
+}
+
+#[cargo_test(container_test)]
+fn ssh_key_in_config() {
+ // A known-hosts entry in the Cargo config works.
+ let agent = Agent::launch();
+ let sshd = Container::new("sshd")
+ .file(agent.authorized_keys())
+ .launch();
+ let hostkey = sshd.read_file("/etc/ssh/ssh_host_ecdsa_key.pub");
+ let url = ssh_repo_url(&sshd, "bar");
+ let p = foo_bar_project(&url);
+ p.change_file(
+ ".cargo/config.toml",
+ &format!(
+ r#"
+ [net.ssh]
+ known-hosts = ['127.0.0.1 {}']
+ "#,
+ hostkey.trim()
+ ),
+ );
+ p.cargo("fetch")
+ .env("SSH_AUTH_SOCK", &agent.sock)
+ .with_stderr("[UPDATING] git repository `ssh://testuser@127.0.0.1:[..]/repos/bar.git`")
+ .run();
+}
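
Editor's note: the `ssh_key_in_config` test above boils down to pinning the server's host key in the `net.ssh.known-hosts` array of the Cargo config. A minimal sketch of building that config string, mirroring the test; `hostkey` is assumed to hold the contents of the server's public-key file (for example `ssh_host_ecdsa_key.pub`):

/// Sketch only: formats a Cargo config snippet whose `net.ssh.known-hosts`
/// entry pins the host key for 127.0.0.1, as `ssh_key_in_config` does above.
fn known_hosts_config(hostkey: &str) -> String {
    format!(
        "[net.ssh]\nknown-hosts = ['127.0.0.1 {}']\n",
        hostkey.trim()
    )
}

The same key material can instead be appended to `~/.ssh/known_hosts`, which is the path the `known_host_works` and `invalid_key_error` tests exercise.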
diff --git a/src/tools/cargo/tests/testsuite/standard_lib.rs b/src/tools/cargo/tests/testsuite/standard_lib.rs
new file mode 100644
index 000000000..d3be303ea
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/standard_lib.rs
@@ -0,0 +1,657 @@
+//! Tests for building the standard library (-Zbuild-std).
+//!
+//! These tests all use a "mock" standard library so that we don't have to
+//! rebuild the real one. There is a separate integration test `build-std`
+//! which builds the real thing, but that should be avoided if possible.
+
+use cargo_test_support::registry::{Dependency, Package};
+use cargo_test_support::ProjectBuilder;
+use cargo_test_support::{paths, project, rustc_host, Execs};
+use std::path::{Path, PathBuf};
+
+struct Setup {
+ rustc_wrapper: PathBuf,
+ real_sysroot: String,
+}
+
+fn setup() -> Setup {
+ // Our mock sysroot requires a few packages from crates.io, so make sure
+ // they're "published" to crates.io. Also edit their code a bit to make sure
+ // that they have access to our custom crates with custom apis.
+ Package::new("registry-dep-using-core", "1.0.0")
+ .file(
+ "src/lib.rs",
+ "
+ #![no_std]
+
+ #[cfg(feature = \"mockbuild\")]
+ pub fn custom_api() {
+ }
+
+ #[cfg(not(feature = \"mockbuild\"))]
+ pub fn non_sysroot_api() {
+ core::custom_api();
+ }
+ ",
+ )
+ .add_dep(Dependency::new("rustc-std-workspace-core", "*").optional(true))
+ .feature("mockbuild", &["rustc-std-workspace-core"])
+ .publish();
+ Package::new("registry-dep-using-alloc", "1.0.0")
+ .file(
+ "src/lib.rs",
+ "
+ #![no_std]
+
+ extern crate alloc;
+
+ #[cfg(feature = \"mockbuild\")]
+ pub fn custom_api() {
+ }
+
+ #[cfg(not(feature = \"mockbuild\"))]
+ pub fn non_sysroot_api() {
+ core::custom_api();
+ alloc::custom_api();
+ }
+ ",
+ )
+ .add_dep(Dependency::new("rustc-std-workspace-core", "*").optional(true))
+ .add_dep(Dependency::new("rustc-std-workspace-alloc", "*").optional(true))
+ .feature(
+ "mockbuild",
+ &["rustc-std-workspace-core", "rustc-std-workspace-alloc"],
+ )
+ .publish();
+ Package::new("registry-dep-using-std", "1.0.0")
+ .file(
+ "src/lib.rs",
+ "
+ #[cfg(feature = \"mockbuild\")]
+ pub fn custom_api() {
+ }
+
+ #[cfg(not(feature = \"mockbuild\"))]
+ pub fn non_sysroot_api() {
+ std::custom_api();
+ }
+ ",
+ )
+ .add_dep(Dependency::new("rustc-std-workspace-std", "*").optional(true))
+ .feature("mockbuild", &["rustc-std-workspace-std"])
+ .publish();
+
+ let p = ProjectBuilder::new(paths::root().join("rustc-wrapper"))
+ .file(
+ "src/main.rs",
+ r#"
+ use std::process::Command;
+ use std::env;
+ fn main() {
+ let mut args = env::args().skip(1).collect::<Vec<_>>();
+
+ let is_sysroot_crate = env::var_os("RUSTC_BOOTSTRAP").is_some();
+ if is_sysroot_crate {
+ args.push("--sysroot".to_string());
+ args.push(env::var("REAL_SYSROOT").unwrap());
+ } else if args.iter().any(|arg| arg == "--target") {
+ // build-std target unit
+ args.push("--sysroot".to_string());
+ args.push("/path/to/nowhere".to_string());
+ } else {
+ // host unit, do not use sysroot
+ }
+
+ let ret = Command::new(&args[0]).args(&args[1..]).status().unwrap();
+ std::process::exit(ret.code().unwrap_or(1));
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build").run();
+
+ Setup {
+ rustc_wrapper: p.bin("foo"),
+ real_sysroot: paths::sysroot(),
+ }
+}
+
+fn enable_build_std(e: &mut Execs, setup: &Setup) {
+ // First up, force Cargo to use our "mock sysroot" which mimics what
+ // libstd looks like upstream.
+ let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/testsuite/mock-std");
+ e.env("__CARGO_TESTS_ONLY_SRC_ROOT", &root);
+
+ e.masquerade_as_nightly_cargo(&["build-std"]);
+
+ // We do various shenanigans to ensure our "mock sysroot" actually links
+ // with the real sysroot, so we don't have to actually recompile std for
+ // each test. Perform all that logic here, namely:
+ //
+ // * RUSTC_WRAPPER - uses our shim executable built above to control rustc
+ // * REAL_SYSROOT - used by the shim executable to swap out to the real
+ // sysroot temporarily for some compilations
+ // * RUST{,DOC}FLAGS - an extra `-L` argument to ensure we can always load
+ // crates from the sysroot, but only indirectly through other crates.
+ e.env("RUSTC_WRAPPER", &setup.rustc_wrapper);
+ e.env("REAL_SYSROOT", &setup.real_sysroot);
+ let libdir = format!("/lib/rustlib/{}/lib", rustc_host());
+ e.env(
+ "RUSTFLAGS",
+ format!("-Ldependency={}{}", setup.real_sysroot, libdir),
+ );
+ e.env(
+ "RUSTDOCFLAGS",
+ format!("-Ldependency={}{}", setup.real_sysroot, libdir),
+ );
+}
+
+// Helper methods used in the tests below
+trait BuildStd: Sized {
+ fn build_std(&mut self, setup: &Setup) -> &mut Self;
+ fn build_std_arg(&mut self, setup: &Setup, arg: &str) -> &mut Self;
+ fn target_host(&mut self) -> &mut Self;
+}
+
+impl BuildStd for Execs {
+ fn build_std(&mut self, setup: &Setup) -> &mut Self {
+ enable_build_std(self, setup);
+ self.arg("-Zbuild-std");
+ self
+ }
+
+ fn build_std_arg(&mut self, setup: &Setup, arg: &str) -> &mut Self {
+ enable_build_std(self, setup);
+ self.arg(format!("-Zbuild-std={}", arg));
+ self
+ }
+
+ fn target_host(&mut self) -> &mut Self {
+ self.arg("--target").arg(rustc_host());
+ self
+ }
+}
+
+#[cargo_test(build_std_mock)]
+fn basic() {
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/main.rs",
+ "
+ fn main() {
+ std::custom_api();
+ foo::f();
+ }
+
+ #[test]
+ fn smoke_bin_unit() {
+ std::custom_api();
+ foo::f();
+ }
+ ",
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate alloc;
+ extern crate proc_macro;
+
+ /// ```
+ /// foo::f();
+ /// ```
+ pub fn f() {
+ core::custom_api();
+ std::custom_api();
+ alloc::custom_api();
+ proc_macro::custom_api();
+ }
+
+ #[test]
+ fn smoke_lib_unit() {
+ std::custom_api();
+ f();
+ }
+ ",
+ )
+ .file(
+ "tests/smoke.rs",
+ "
+ #[test]
+ fn smoke_integration() {
+ std::custom_api();
+ foo::f();
+ }
+ ",
+ )
+ .build();
+
+ p.cargo("check -v").build_std(&setup).target_host().run();
+ p.cargo("build").build_std(&setup).target_host().run();
+ p.cargo("run").build_std(&setup).target_host().run();
+ p.cargo("test").build_std(&setup).target_host().run();
+}
+
+#[cargo_test(build_std_mock)]
+fn simple_lib_std() {
+ let setup = setup();
+
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("build -v")
+ .build_std(&setup)
+ .target_host()
+ .with_stderr_contains("[RUNNING] `[..]--crate-name std [..]`")
+ .run();
+ // Check freshness.
+ p.change_file("src/lib.rs", " ");
+ p.cargo("build -v")
+ .build_std(&setup)
+ .target_host()
+ .with_stderr_contains("[FRESH] std[..]")
+ .run();
+}
+
+#[cargo_test(build_std_mock)]
+fn simple_bin_std() {
+ let setup = setup();
+
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("run -v").build_std(&setup).target_host().run();
+}
+
+#[cargo_test(build_std_mock)]
+fn lib_nostd() {
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #![no_std]
+ pub fn foo() {
+ assert_eq!(u8::MIN, 0);
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build -v --lib")
+ .build_std_arg(&setup, "core")
+ .target_host()
+ .with_stderr_does_not_contain("[..]libstd[..]")
+ .run();
+}
+
+#[cargo_test(build_std_mock)]
+fn check_core() {
+ let setup = setup();
+
+ let p = project()
+ .file("src/lib.rs", "#![no_std] fn unused_fn() {}")
+ .build();
+
+ p.cargo("check -v")
+ .build_std_arg(&setup, "core")
+ .target_host()
+ .with_stderr_contains("[WARNING] [..]unused_fn[..]")
+ .run();
+}
+
+#[cargo_test(build_std_mock)]
+fn depend_same_as_std() {
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn f() {
+ registry_dep_using_core::non_sysroot_api();
+ registry_dep_using_alloc::non_sysroot_api();
+ registry_dep_using_std::non_sysroot_api();
+ }
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [dependencies]
+ registry-dep-using-core = "1.0"
+ registry-dep-using-alloc = "1.0"
+ registry-dep-using-std = "1.0"
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v").build_std(&setup).target_host().run();
+}
+
+#[cargo_test(build_std_mock)]
+fn test() {
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(test)]
+ mod tests {
+ #[test]
+ fn it_works() {
+ assert_eq!(2 + 2, 4);
+ }
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("test -v")
+ .build_std(&setup)
+ .target_host()
+ .with_stdout_contains("test tests::it_works ... ok")
+ .run();
+}
+
+#[cargo_test(build_std_mock)]
+fn target_proc_macro() {
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate proc_macro;
+ pub fn f() {
+ let _ts = proc_macro::TokenStream::new();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v").build_std(&setup).target_host().run();
+}
+
+#[cargo_test(build_std_mock)]
+fn bench() {
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+
+ #[bench]
+ fn b1(b: &mut test::Bencher) {
+ b.iter(|| ())
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("bench -v").build_std(&setup).target_host().run();
+}
+
+#[cargo_test(build_std_mock)]
+fn doc() {
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ /// Doc
+ pub fn f() -> Result<(), ()> {Ok(())}
+ "#,
+ )
+ .build();
+
+ p.cargo("doc -v").build_std(&setup).target_host().run();
+}
+
+#[cargo_test(build_std_mock)]
+fn check_std() {
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ extern crate core;
+ extern crate alloc;
+ extern crate proc_macro;
+ pub fn f() {}
+ ",
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "tests/t1.rs",
+ r#"
+ #[test]
+ fn t1() {
+ assert_eq!(1, 2);
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("check -v --all-targets")
+ .build_std(&setup)
+ .target_host()
+ .run();
+ p.cargo("check -v --all-targets --profile=test")
+ .build_std(&setup)
+ .target_host()
+ .run();
+}
+
+#[cargo_test(build_std_mock)]
+fn doctest() {
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ /// Doc
+ /// ```
+ /// std::custom_api();
+ /// ```
+ pub fn f() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("test --doc -v -Zdoctest-xcompile")
+ .build_std(&setup)
+ .with_stdout_contains("test src/lib.rs - f [..] ... ok")
+ .target_host()
+ .run();
+}
+
+#[cargo_test(build_std_mock)]
+fn no_implicit_alloc() {
+ // Demonstrate that alloc is not implicitly in scope.
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn f() {
+ let _: Vec<i32> = alloc::vec::Vec::new();
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v")
+ .build_std(&setup)
+ .target_host()
+ .with_stderr_contains("[..]use of undeclared [..]`alloc`")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test(build_std_mock)]
+fn macro_expanded_shadow() {
+ // This tests a bug caused by the previous use of `--extern` to directly
+ // load sysroot crates. This necessitated the switch to `--sysroot` to
+ // retain existing behavior. See
+ // https://github.com/rust-lang/wg-cargo-std-aware/issues/40 for more
+ // detail.
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ macro_rules! a {
+ () => (extern crate std as alloc;)
+ }
+ a!();
+ "#,
+ )
+ .build();
+
+ p.cargo("build -v").build_std(&setup).target_host().run();
+}
+
+#[cargo_test(build_std_mock)]
+fn ignores_incremental() {
+ // Incremental is not really needed for std; make sure it is disabled.
+ // Incremental also tends to have bugs that affect std libraries more than
+ // any other crate.
+ let setup = setup();
+
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("build")
+ .env("CARGO_INCREMENTAL", "1")
+ .build_std(&setup)
+ .target_host()
+ .run();
+ let incremental: Vec<_> = p
+ .glob(format!("target/{}/debug/incremental/*", rustc_host()))
+ .map(|e| e.unwrap())
+ .collect();
+ assert_eq!(incremental.len(), 1);
+ assert!(incremental[0]
+ .file_name()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ .starts_with("foo-"));
+}
+
+#[cargo_test(build_std_mock)]
+fn cargo_config_injects_compiler_builtins() {
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #![no_std]
+ pub fn foo() {
+ assert_eq!(u8::MIN, 0);
+ }
+ "#,
+ )
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [unstable]
+ build-std = ['core']
+ "#,
+ )
+ .build();
+ let mut build = p.cargo("build -v --lib");
+ enable_build_std(&mut build, &setup);
+ build
+ .target_host()
+ .with_stderr_does_not_contain("[..]libstd[..]")
+ .run();
+}
+
+#[cargo_test(build_std_mock)]
+fn different_features() {
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ pub fn foo() {
+ std::conditional_function();
+ }
+ ",
+ )
+ .build();
+ p.cargo("build")
+ .build_std(&setup)
+ .arg("-Zbuild-std-features=feature1")
+ .target_host()
+ .run();
+}
+
+#[cargo_test(build_std_mock)]
+fn no_roots() {
+ // Checks for a bug where it would panic if there are no roots.
+ let setup = setup();
+
+ let p = project().file("tests/t1.rs", "").build();
+ p.cargo("build")
+ .build_std(&setup)
+ .target_host()
+ .with_stderr_contains("[FINISHED] [..]")
+ .run();
+}
+
+#[cargo_test(build_std_mock)]
+fn proc_macro_only() {
+ // Checks for a bug where it would panic when building a proc-macro-only package.
+ let setup = setup();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "pm"
+ version = "0.1.0"
+
+ [lib]
+ proc-macro = true
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("build")
+ .build_std(&setup)
+ .target_host()
+ .with_stderr_contains("[FINISHED] [..]")
+ .run();
+}
+
+#[cargo_test(build_std_mock)]
+fn fetch() {
+ let setup = setup();
+
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("fetch")
+ .build_std(&setup)
+ .target_host()
+ .with_stderr_contains("[DOWNLOADED] [..]")
+ .run();
+ p.cargo("build")
+ .build_std(&setup)
+ .target_host()
+ .with_stderr_does_not_contain("[DOWNLOADED] [..]")
+ .run();
+}
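
Editor's note: these build-std tests substitute a wrapper binary for the real compiler via RUSTC_WRAPPER. For orientation, here is a minimal pass-through sketch of such a wrapper (a simplification of the shim built in `setup()` above, not the shim itself): Cargo invokes the wrapper with the real `rustc` path as its first argument, followed by the original arguments, and the shim in `setup()` additionally injects `--sysroot` before delegating.

use std::env;
use std::process::{exit, Command};

// Sketch only: a pass-through RUSTC_WRAPPER. The real shim in `setup()` also
// swaps in the real or a bogus sysroot depending on whether it is compiling a
// sysroot crate; that logic is elided here.
fn main() {
    let mut args = env::args().skip(1);
    let rustc = args.next().expect("rustc path supplied by cargo");
    let status = Command::new(rustc)
        .args(args)
        .status()
        .expect("failed to spawn rustc");
    exit(status.code().unwrap_or(1));
}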
diff --git a/src/tools/cargo/tests/testsuite/test.rs b/src/tools/cargo/tests/testsuite/test.rs
new file mode 100644
index 000000000..add0a991f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/test.rs
@@ -0,0 +1,4820 @@
+//! Tests for the `cargo test` command.
+
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::registry::Package;
+use cargo_test_support::{
+ basic_bin_manifest, basic_lib_manifest, basic_manifest, cargo_exe, project,
+};
+use cargo_test_support::{cross_compile, paths};
+use cargo_test_support::{rustc_host, rustc_host_env, sleep_ms};
+use std::fs;
+
+#[cargo_test]
+fn cargo_test_simple() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn hello() -> &'static str {
+ "hello"
+ }
+
+ pub fn main() {
+ println!("{}", hello())
+ }
+
+ #[test]
+ fn test_hello() {
+ assert_eq!(hello(), "hello")
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("hello\n").run();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("test test_hello ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_test_release() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate bar;
+ pub fn foo() { bar::bar(); }
+
+ #[test]
+ fn test() { foo(); }
+ "#,
+ )
+ .file(
+ "tests/test.rs",
+ r#"
+ extern crate foo;
+
+ #[test]
+ fn test() { foo::foo(); }
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("test -v --release")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[RUNNING] [..] -C opt-level=3 [..]
+[COMPILING] foo v0.1.0 ([CWD])
+[RUNNING] [..] -C opt-level=3 [..]
+[RUNNING] [..] -C opt-level=3 [..]
+[RUNNING] [..] -C opt-level=3 [..]
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `[..]target/release/deps/foo-[..][EXE]`
+[RUNNING] `[..]target/release/deps/test-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]--test [..]lib.rs[..]`",
+ )
+ .with_stdout_contains_n("test test ... ok", 2)
+ .with_stdout_contains("running 0 tests")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_test_overflow_checks() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [[bin]]
+ name = "foo"
+
+ [profile.release]
+ overflow-checks = true
+ "#,
+ )
+ .file(
+ "src/foo.rs",
+ r#"
+ use std::panic;
+ pub fn main() {
+ let r = panic::catch_unwind(|| {
+ [1, i32::MAX].iter().sum::<i32>();
+ });
+ assert!(r.is_err());
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build --release").run();
+ assert!(p.release_bin("foo").is_file());
+
+ p.process(&p.release_bin("foo")).with_stdout("").run();
+}
+
+#[cargo_test]
+fn cargo_test_quiet_with_harness() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [[test]]
+ name = "foo"
+ path = "src/foo.rs"
+ harness = true
+ "#,
+ )
+ .file(
+ "src/foo.rs",
+ r#"
+ fn main() {}
+ #[test] fn test_hello() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("test -q")
+ .with_stdout(
+ "
+running 1 test
+.
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]
+
+",
+ )
+ .with_stderr("")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_test_quiet_no_harness() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [[bin]]
+ name = "foo"
+ test = false
+
+ [[test]]
+ name = "foo"
+ path = "src/main.rs"
+ harness = false
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {}
+ #[test] fn test_hello() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("test -q").with_stdout("").with_stderr("").run();
+}
+
+#[cargo_test]
+fn cargo_doc_test_quiet() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```
+ /// let result = foo::add(2, 3);
+ /// assert_eq!(result, 5);
+ /// ```
+ pub fn add(a: i32, b: i32) -> i32 {
+ a + b
+ }
+
+ /// ```
+ /// let result = foo::div(10, 2);
+ /// assert_eq!(result, 5);
+ /// ```
+ ///
+ /// # Panics
+ ///
+ /// The function panics if the second argument is zero.
+ ///
+ /// ```rust,should_panic
+ /// // panics on division by zero
+ /// foo::div(10, 0);
+ /// ```
+ pub fn div(a: i32, b: i32) -> i32 {
+ if b == 0 {
+ panic!("Divide-by-zero error");
+ }
+
+ a / b
+ }
+
+ #[test] fn test_hello() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("test -q")
+ .with_stdout(
+ "
+running 1 test
+.
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]
+
+
+running 3 tests
+...
+test result: ok. 3 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]
+
+",
+ )
+ .with_stderr("")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_test_verbose() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {}
+ #[test] fn test_hello() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("test -v hello")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] src/main.rs [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[CWD]/target/debug/deps/foo-[..] hello`
+",
+ )
+ .with_stdout_contains("test test_hello ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn many_similar_names() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ pub fn foo() {}
+ #[test] fn lib_test() {}
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
+ extern crate foo;
+ fn main() {}
+ #[test] fn bin_test() { foo::foo() }
+ ",
+ )
+ .file(
+ "tests/foo.rs",
+ r#"
+ extern crate foo;
+ #[test] fn test_test() { foo::foo() }
+ "#,
+ )
+ .build();
+
+ p.cargo("test -v")
+ .with_stdout_contains("test bin_test ... ok")
+ .with_stdout_contains("test lib_test ... ok")
+ .with_stdout_contains("test test_test ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_test_failing_test_in_bin() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn hello() -> &'static str {
+ "hello"
+ }
+
+ pub fn main() {
+ println!("{}", hello())
+ }
+
+ #[test]
+ fn test_hello() {
+ assert_eq!(hello(), "nope")
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("hello\n").run();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[ERROR] test failed, to rerun pass `--bin foo`",
+ )
+ .with_stdout_contains(
+ "
+running 1 test
+test test_hello ... FAILED
+
+failures:
+
+---- test_hello stdout ----
+[..]thread '[..]' panicked at 'assertion failed:[..]",
+ )
+ .with_stdout_contains("[..]`(left == right)`[..]")
+ .with_stdout_contains("[..]left: `\"hello\"`,[..]")
+ .with_stdout_contains("[..]right: `\"nope\"`[..]")
+ .with_stdout_contains("[..]src/main.rs:12[..]")
+ .with_stdout_contains(
+ "\
+failures:
+ test_hello
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn cargo_test_failing_test_in_test() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", r#"pub fn main() { println!("hello"); }"#)
+ .file(
+ "tests/footest.rs",
+ "#[test] fn test_hello() { assert!(false) }",
+ )
+ .build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+
+ p.process(&p.bin("foo")).with_stdout("hello\n").run();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[RUNNING] [..] (target/debug/deps/footest-[..][EXE])
+[ERROR] test failed, to rerun pass `--test footest`",
+ )
+ .with_stdout_contains("running 0 tests")
+ .with_stdout_contains(
+ "\
+running 1 test
+test test_hello ... FAILED
+
+failures:
+
+---- test_hello stdout ----
+[..]thread '[..]' panicked at 'assertion failed: false', \
+ tests/footest.rs:1[..]
+",
+ )
+ .with_stdout_contains(
+ "\
+failures:
+ test_hello
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn cargo_test_failing_test_in_lib() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "#[test] fn test_hello() { assert!(false) }")
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[ERROR] test failed, to rerun pass `--lib`",
+ )
+ .with_stdout_contains(
+ "\
+test test_hello ... FAILED
+
+failures:
+
+---- test_hello stdout ----
+[..]thread '[..]' panicked at 'assertion failed: false', \
+ src/lib.rs:1[..]
+",
+ )
+ .with_stdout_contains(
+ "\
+failures:
+ test_hello
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn test_with_lib_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "baz"
+ path = "src/main.rs"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ ///
+ /// ```rust
+ /// extern crate foo;
+ /// fn main() {
+ /// println!("{:?}", foo::foo());
+ /// }
+ /// ```
+ ///
+ pub fn foo(){}
+ #[test] fn lib_test() {}
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+
+ fn main() {}
+
+ #[test]
+ fn bin_test() {}
+ ",
+ )
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[RUNNING] [..] (target/debug/deps/baz-[..][EXE])
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test lib_test ... ok")
+ .with_stdout_contains("test bin_test ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 3)
+ .run();
+}
+
+#[cargo_test]
+fn test_with_deep_lib_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ #[cfg(test)]
+ extern crate bar;
+ /// ```
+ /// foo::foo();
+ /// ```
+ pub fn foo() {}
+
+ #[test]
+ fn bar_test() {
+ bar::bar();
+ }
+ ",
+ )
+ .build();
+ let _p2 = project()
+ .at("bar")
+ .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("src/lib.rs", "pub fn bar() {} #[test] fn foo_test() {}")
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 ([..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target[..])
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test bar_test ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 2)
+ .run();
+}
+
+#[cargo_test]
+fn external_test_explicit() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[test]]
+ name = "test"
+ path = "src/test.rs"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn get_hello() -> &'static str { "Hello" }
+
+ #[test]
+ fn internal_test() {}
+ "#,
+ )
+ .file(
+ "src/test.rs",
+ r#"
+ extern crate foo;
+
+ #[test]
+ fn external_test() { assert_eq!(foo::get_hello(), "Hello") }
+ "#,
+ )
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[RUNNING] [..] (target/debug/deps/test-[..][EXE])
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test internal_test ... ok")
+ .with_stdout_contains("test external_test ... ok")
+ .with_stdout_contains("running 0 tests")
+ .run();
+}
+
+#[cargo_test]
+fn external_test_named_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[test]]
+ name = "test"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("tests/test.rs", "#[test] fn foo() {}")
+ .build();
+
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn external_test_implicit() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn get_hello() -> &'static str { "Hello" }
+
+ #[test]
+ fn internal_test() {}
+ "#,
+ )
+ .file(
+ "tests/external.rs",
+ r#"
+ extern crate foo;
+
+ #[test]
+ fn external_test() { assert_eq!(foo::get_hello(), "Hello") }
+ "#,
+ )
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[RUNNING] [..] (target/debug/deps/external-[..][EXE])
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test internal_test ... ok")
+ .with_stdout_contains("test external_test ... ok")
+ .with_stdout_contains("running 0 tests")
+ .run();
+}
+
+#[cargo_test]
+fn dont_run_examples() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "examples/dont-run-me-i-will-fail.rs",
+ r#"
+ fn main() { panic!("Examples should not be run by 'cargo test'"); }
+ "#,
+ )
+ .build();
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn pass_through_escaped() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ /// ```rust
+ /// assert!(foo::foo());
+ /// ```
+ pub fn foo() -> bool {
+ true
+ }
+
+ /// ```rust
+ /// assert!(!foo::bar());
+ /// ```
+ pub fn bar() -> bool {
+ false
+ }
+
+ #[test] fn test_foo() {
+ assert!(foo());
+ }
+ #[test] fn test_bar() {
+ assert!(!bar());
+ }
+ ",
+ )
+ .build();
+
+ p.cargo("test -- bar")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[DOCTEST] foo
+",
+ )
+ .with_stdout_contains("running 1 test")
+ .with_stdout_contains("test test_bar ... ok")
+ .run();
+
+ p.cargo("test -- foo")
+ .with_stderr(
+ "\
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[DOCTEST] foo
+",
+ )
+ .with_stdout_contains("running 1 test")
+ .with_stdout_contains("test test_foo ... ok")
+ .run();
+
+ p.cargo("test -- foo bar")
+ .with_stderr(
+ "\
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[DOCTEST] foo
+",
+ )
+ .with_stdout_contains("running 2 tests")
+ .with_stdout_contains("test test_foo ... ok")
+ .with_stdout_contains("test test_bar ... ok")
+ .run();
+}
+
+// Unlike `pass_through_escaped`, doctests won't run when using `testname` as an optimization
+#[cargo_test]
+fn pass_through_testname() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "
+ /// ```rust
+ /// assert!(foo::foo());
+ /// ```
+ pub fn foo() -> bool {
+ true
+ }
+
+ /// ```rust
+ /// assert!(!foo::bar());
+ /// ```
+ pub fn bar() -> bool {
+ false
+ }
+
+ #[test] fn test_foo() {
+ assert!(foo());
+ }
+ #[test] fn test_bar() {
+ assert!(!bar());
+ }
+ ",
+ )
+ .build();
+
+ p.cargo("test bar")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+",
+ )
+ .with_stdout_contains("running 1 test")
+ .with_stdout_contains("test test_bar ... ok")
+ .run();
+
+ p.cargo("test foo")
+ .with_stderr(
+ "\
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+",
+ )
+ .with_stdout_contains("running 1 test")
+ .with_stdout_contains("test test_foo ... ok")
+ .run();
+
+ p.cargo("test foo -- bar")
+ .with_stderr(
+ "\
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+",
+ )
+ .with_stdout_contains("running 2 tests")
+ .with_stdout_contains("test test_foo ... ok")
+ .with_stdout_contains("test test_bar ... ok")
+ .run();
+}
+
+// Regression test for running cargo-test twice with
+// tests in an rlib
+#[cargo_test]
+fn cargo_test_twice() {
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file(
+ "src/foo.rs",
+ r#"
+ #![crate_type = "rlib"]
+
+ #[test]
+ fn dummy_test() { }
+ "#,
+ )
+ .build();
+
+ for _ in 0..2 {
+ p.cargo("test").run();
+ }
+}
+
+#[cargo_test]
+fn lib_bin_same_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "foo"
+ [[bin]]
+ name = "foo"
+ "#,
+ )
+ .file("src/lib.rs", "#[test] fn lib_test() {}")
+ .file(
+ "src/main.rs",
+ "
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+
+ #[test]
+ fn bin_test() {}
+ ",
+ )
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[DOCTEST] foo",
+ )
+ .with_stdout_contains_n("test [..] ... ok", 2)
+ .with_stdout_contains("running 0 tests")
+ .run();
+}
+
+#[cargo_test]
+fn lib_with_standard_name() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("syntax", "0.0.1"))
+ .file(
+ "src/lib.rs",
+ "
+ /// ```
+ /// syntax::foo();
+ /// ```
+ pub fn foo() {}
+
+ #[test]
+ fn foo_test() {}
+ ",
+ )
+ .file(
+ "tests/test.rs",
+ "
+ extern crate syntax;
+
+ #[test]
+ fn test() { syntax::foo() }
+ ",
+ )
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/syntax-[..][EXE])
+[RUNNING] [..] (target/debug/deps/test-[..][EXE])
+[DOCTEST] syntax",
+ )
+ .with_stdout_contains("test foo_test ... ok")
+ .with_stdout_contains("test test ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 3)
+ .run();
+}
+
+#[cargo_test]
+fn lib_with_standard_name2() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "syntax"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "syntax"
+ test = false
+ doctest = false
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file(
+ "src/main.rs",
+ "
+ extern crate syntax;
+
+ fn main() {}
+
+ #[test]
+ fn test() { syntax::foo() }
+ ",
+ )
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/syntax-[..][EXE])",
+ )
+ .with_stdout_contains("test test ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn lib_without_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "syntax"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ test = false
+ doctest = false
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file(
+ "src/main.rs",
+ "
+ extern crate syntax;
+
+ fn main() {}
+
+ #[test]
+ fn test() { syntax::foo() }
+ ",
+ )
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/syntax-[..][EXE])",
+ )
+ .with_stdout_contains("test test ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn bin_without_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "syntax"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ test = false
+ doctest = false
+
+ [[bin]]
+ path = "src/main.rs"
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file(
+ "src/main.rs",
+ "
+ extern crate syntax;
+
+ fn main() {}
+
+ #[test]
+ fn test() { syntax::foo() }
+ ",
+ )
+ .build();
+
+ p.cargo("test")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ binary target bin.name is required",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bench_without_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "syntax"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ test = false
+ doctest = false
+
+ [[bench]]
+ path = "src/bench.rs"
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file(
+ "src/main.rs",
+ "
+ extern crate syntax;
+
+ fn main() {}
+
+ #[test]
+ fn test() { syntax::foo() }
+ ",
+ )
+ .file(
+ "src/bench.rs",
+ "
+ #![feature(test)]
+ extern crate syntax;
+ extern crate test;
+
+ #[bench]
+ fn external_bench(_b: &mut test::Bencher) {}
+ ",
+ )
+ .build();
+
+ p.cargo("test")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ benchmark target bench.name is required",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_without_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "syntax"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ test = false
+ doctest = false
+
+ [[test]]
+ path = "src/test.rs"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() {}
+ pub fn get_hello() -> &'static str { "Hello" }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
+ extern crate syntax;
+
+ fn main() {}
+
+ #[test]
+ fn test() { syntax::foo() }
+ ",
+ )
+ .file(
+ "src/test.rs",
+ r#"
+ extern crate syntax;
+
+ #[test]
+ fn external_test() { assert_eq!(syntax::get_hello(), "Hello") }
+ "#,
+ )
+ .build();
+
+ p.cargo("test")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ test target test.name is required",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn example_without_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "syntax"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ test = false
+ doctest = false
+
+ [[example]]
+ path = "examples/example.rs"
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .file(
+ "src/main.rs",
+ "
+ extern crate syntax;
+
+ fn main() {}
+
+ #[test]
+ fn test() { syntax::foo() }
+ ",
+ )
+ .file(
+ "examples/example.rs",
+ r#"
+ extern crate syntax;
+
+ fn main() {
+ println!("example1");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("test")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ example target example.name is required",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bin_there_for_integration() {
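+ // The bin target is built by `cargo test`, so the integration test can spawn target/debug/foo and check its exit code.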
+ let p = project()
+ .file(
+ "src/main.rs",
+ "
+ fn main() { std::process::exit(101); }
+ #[test] fn main_test() {}
+ ",
+ )
+ .file(
+ "tests/foo.rs",
+ r#"
+ use std::process::Command;
+ #[test]
+ fn test_test() {
+ let status = Command::new("target/debug/foo").status().unwrap();
+ assert_eq!(status.code(), Some(101));
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("test -v")
+ .with_stdout_contains("test main_test ... ok")
+ .with_stdout_contains("test test_test ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_dylib() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "foo"
+ crate_type = ["dylib"]
+
+ [dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate bar as the_bar;
+
+ pub fn bar() { the_bar::baz(); }
+
+ #[test]
+ fn foo() { bar(); }
+ "#,
+ )
+ .file(
+ "tests/test.rs",
+ r#"
+ extern crate foo as the_foo;
+
+ #[test]
+ fn foo() { the_foo::bar(); }
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "bar"
+ crate_type = ["dylib"]
+ "#,
+ )
+ .file("bar/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[RUNNING] [..] (target/debug/deps/test-[..][EXE])",
+ )
+ .with_stdout_contains_n("test foo ... ok", 2)
+ .run();
+
+ p.root().move_into_the_past();
+ p.cargo("test")
+ .with_stderr(
+ "\
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[RUNNING] [..] (target/debug/deps/test-[..][EXE])",
+ )
+ .with_stdout_contains_n("test foo ... ok", 2)
+ .run();
+}
+
+#[cargo_test]
+fn test_twice_with_build_cmd() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "#[test] fn foo() {}")
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test foo ... ok")
+ .with_stdout_contains("running 0 tests")
+ .run();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test foo ... ok")
+ .with_stdout_contains("running 0 tests")
+ .run();
+}
+
+#[cargo_test]
+fn test_then_build() {
+ let p = project().file("src/lib.rs", "#[test] fn foo() {}").build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test foo ... ok")
+ .with_stdout_contains("running 0 tests")
+ .run();
+
+ p.cargo("build").with_stdout("").run();
+}
+
+#[cargo_test]
+fn test_no_run() {
+ let p = project()
+ .file("src/lib.rs", "#[test] fn foo() { panic!() }")
+ .build();
+
+ p.cargo("test --no-run")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[EXECUTABLE] unittests src/lib.rs (target/debug/deps/foo-[..][EXE])
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_no_run_emit_json() {
+ let p = project()
+ .file("src/lib.rs", "#[test] fn foo() { panic!() }")
+ .build();
+
+ p.cargo("test --no-run --message-format json")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_run_specific_bin_target() {
+ let prj = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name="bin1"
+ path="src/bin1.rs"
+
+ [[bin]]
+ name="bin2"
+ path="src/bin2.rs"
+ "#,
+ )
+ .file("src/bin1.rs", "#[test] fn test1() { }")
+ .file("src/bin2.rs", "#[test] fn test2() { }")
+ .build();
+
+ prj.cargo("test --bin bin2")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/bin2-[..][EXE])",
+ )
+ .with_stdout_contains("test test2 ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_run_implicit_bin_target() {
+ let prj = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name="mybin"
+ path="src/mybin.rs"
+ "#,
+ )
+ .file(
+ "src/mybin.rs",
+ "#[test] fn test_in_bin() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .file("tests/mytest.rs", "#[test] fn test_in_test() { }")
+ .file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
+ .file(
+ "examples/myexm.rs",
+ "#[test] fn test_in_exm() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .build();
+
+ prj.cargo("test --bins")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/mybin-[..][EXE])",
+ )
+ .with_stdout_contains("test test_in_bin ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_run_specific_test_target() {
+ let prj = project()
+ .file("src/bin/a.rs", "fn main() { }")
+ .file("src/bin/b.rs", "#[test] fn test_b() { } fn main() { }")
+ .file("tests/a.rs", "#[test] fn test_a() { }")
+ .file("tests/b.rs", "#[test] fn test_b() { }")
+ .build();
+
+ prj.cargo("test --test b")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/b-[..][EXE])",
+ )
+ .with_stdout_contains("test test_b ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_run_implicit_test_target() {
+ let prj = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name="mybin"
+ path="src/mybin.rs"
+ "#,
+ )
+ .file(
+ "src/mybin.rs",
+ "#[test] fn test_in_bin() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .file("tests/mytest.rs", "#[test] fn test_in_test() { }")
+ .file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
+ .file(
+ "examples/myexm.rs",
+ "fn main() { compile_error!(\"Don't build me!\"); }",
+ )
+ .build();
+
+ prj.cargo("test --tests")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/mybin-[..][EXE])
+[RUNNING] [..] (target/debug/deps/mytest-[..][EXE])",
+ )
+ .with_stdout_contains("test test_in_test ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_run_implicit_bench_target() {
+ let prj = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name="mybin"
+ path="src/mybin.rs"
+ "#,
+ )
+ .file(
+ "src/mybin.rs",
+ "#[test] fn test_in_bin() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .file("tests/mytest.rs", "#[test] fn test_in_test() { }")
+ .file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
+ .file(
+ "examples/myexm.rs",
+ "fn main() { compile_error!(\"Don't build me!\"); }",
+ )
+ .build();
+
+ prj.cargo("test --benches")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/mybin-[..][EXE])
+[RUNNING] [..] (target/debug/deps/mybench-[..][EXE])",
+ )
+ .with_stdout_contains("test test_in_bench ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_run_implicit_example_target() {
+ let prj = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "mybin"
+ path = "src/mybin.rs"
+
+ [[example]]
+ name = "myexm1"
+
+ [[example]]
+ name = "myexm2"
+ test = true
+ "#,
+ )
+ .file(
+ "src/mybin.rs",
+ "#[test] fn test_in_bin() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .file("tests/mytest.rs", "#[test] fn test_in_test() { }")
+ .file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
+ .file(
+ "examples/myexm1.rs",
+ "#[test] fn test_in_exm() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .file(
+ "examples/myexm2.rs",
+ "#[test] fn test_in_exm() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .build();
+
+ // Compiles myexm1 as normal, but does not run it.
+ prj.cargo("test -v")
+ .with_stderr_contains("[RUNNING] `rustc [..]myexm1.rs [..]--crate-type bin[..]")
+ .with_stderr_contains("[RUNNING] `rustc [..]myexm2.rs [..]--test[..]")
+ .with_stderr_does_not_contain("[RUNNING] [..]myexm1-[..]")
+ .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]")
+ .run();
+
+ // Only tests myexm2.
+ prj.cargo("test --tests")
+ .with_stderr_does_not_contain("[RUNNING] [..]myexm1-[..]")
+ .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]")
+ .run();
+
+ // Tests all examples.
+ prj.cargo("test --examples")
+ .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]")
+ .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]")
+ .run();
+
+ // Test an example, even without `test` set.
+ prj.cargo("test --example myexm1")
+ .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]")
+ .run();
+
+ // --all-targets also runs all examples.
+ prj.cargo("test --all-targets")
+ .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]")
+ .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]")
+ .run();
+}
+
+#[cargo_test]
+fn test_filtered_excludes_compiling_examples() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "mybin"
+ test = false
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[cfg(test)] mod tests { #[test] fn test_in_lib() { } }",
+ )
+ .file(
+ "src/bin/mybin.rs",
+ "#[test] fn test_in_bin() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .file("tests/mytest.rs", "#[test] fn test_in_test() { }")
+ .file(
+ "benches/mybench.rs",
+ "#[test] fn test_in_bench() { assert!(false) }",
+ )
+ .file(
+ "examples/myexm1.rs",
+ "#[test] fn test_in_exm() { assert!(false) }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .build();
+
+ p.cargo("test -v test_in_")
+ .with_stdout(
+ "
+running 1 test
+test tests::test_in_lib ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]
+
+
+running 1 test
+test test_in_test ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]
+
+",
+ )
+ .with_stderr_unordered(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..] --crate-type lib [..]`
+[RUNNING] `rustc --crate-name foo src/lib.rs [..] --test [..]`
+[RUNNING] `rustc --crate-name mybin src/bin/mybin.rs [..] --crate-type bin [..]`
+[RUNNING] `rustc --crate-name mytest tests/mytest.rs [..] --test [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[CWD]/target/debug/deps/foo-[..] test_in_`
+[RUNNING] `[CWD]/target/debug/deps/mytest-[..] test_in_`
+",
+ )
+ .with_stderr_does_not_contain("[RUNNING][..]rustc[..]myexm1[..]")
+ .with_stderr_does_not_contain("[RUNNING][..]deps/mybin-[..] test_in_")
+ .run();
+}
+
+#[cargo_test]
+fn test_no_harness() {
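+ // With harness = false the test target runs as a plain binary, so there is no libtest output to check.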
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "foo"
+ test = false
+
+ [[test]]
+ name = "bar"
+ path = "foo.rs"
+ harness = false
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("foo.rs", "fn main() {}")
+ .build();
+
+ p.cargo("test -- --nocapture")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/bar-[..][EXE])
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn selective_testing() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.d1]
+ path = "d1"
+ [dependencies.d2]
+ path = "d2"
+
+ [lib]
+ name = "foo"
+ doctest = false
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "d1"
+ doctest = false
+ "#,
+ )
+ .file("d1/src/lib.rs", "")
+ .file(
+ "d1/src/main.rs",
+ "#[allow(unused_extern_crates)] extern crate d1; fn main() {}",
+ )
+ .file(
+ "d2/Cargo.toml",
+ r#"
+ [package]
+ name = "d2"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "d2"
+ doctest = false
+ "#,
+ )
+ .file("d2/src/lib.rs", "")
+ .file(
+ "d2/src/main.rs",
+ "#[allow(unused_extern_crates)] extern crate d2; fn main() {}",
+ );
+ let p = p.build();
+
+ println!("d1");
+ p.cargo("test -p d1")
+ .with_stderr(
+ "\
+[COMPILING] d1 v0.0.1 ([CWD]/d1)
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/d1-[..][EXE])
+[RUNNING] [..] (target/debug/deps/d1-[..][EXE])",
+ )
+ .with_stdout_contains_n("running 0 tests", 2)
+ .run();
+
+ println!("d2");
+ p.cargo("test -p d2")
+ .with_stderr(
+ "\
+[COMPILING] d2 v0.0.1 ([CWD]/d2)
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/d2-[..][EXE])
+[RUNNING] [..] (target/debug/deps/d2-[..][EXE])",
+ )
+ .with_stdout_contains_n("running 0 tests", 2)
+ .run();
+
+ println!("whole");
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
+ )
+ .with_stdout_contains("running 0 tests")
+ .run();
+}
+
+#[cargo_test]
+fn almost_cyclic_but_not_quite() {
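+ // Dev-dependencies are allowed to depend back on the package itself; this must not be treated as a cycle.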
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dev-dependencies.b]
+ path = "b"
+ [dev-dependencies.c]
+ path = "c"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(test)] extern crate b;
+ #[cfg(test)] extern crate c;
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.foo]
+ path = ".."
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ "#,
+ )
+ .file("c/Cargo.toml", &basic_manifest("c", "0.0.1"))
+ .file("c/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn build_then_selective_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.b]
+ path = "b"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate b;",
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate b;
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ fn main() {}
+ "#,
+ )
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("build").run();
+ p.root().move_into_the_past();
+ p.cargo("test -p b").run();
+}
+
+#[cargo_test]
+fn example_dev_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dev-dependencies.bar]
+ path = "bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/e1.rs", "extern crate bar; fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ // make sure this file takes a while to compile
+ macro_rules! f0( () => (1) );
+ macro_rules! f1( () => ({(f0!()) + (f0!())}) );
+ macro_rules! f2( () => ({(f1!()) + (f1!())}) );
+ macro_rules! f3( () => ({(f2!()) + (f2!())}) );
+ macro_rules! f4( () => ({(f3!()) + (f3!())}) );
+ macro_rules! f5( () => ({(f4!()) + (f4!())}) );
+ macro_rules! f6( () => ({(f5!()) + (f5!())}) );
+ macro_rules! f7( () => ({(f6!()) + (f6!())}) );
+ macro_rules! f8( () => ({(f7!()) + (f7!())}) );
+ pub fn bar() {
+ f8!();
+ }
+ "#,
+ )
+ .build();
+ p.cargo("test").run();
+ p.cargo("run --example e1 --release -v").run();
+}
+
+#[cargo_test]
+fn selective_testing_with_docs() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.d1]
+ path = "d1"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```
+ /// not valid rust
+ /// ```
+ pub fn foo() {}
+ "#,
+ )
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "d1"
+ path = "d1.rs"
+ "#,
+ )
+ .file("d1/d1.rs", "");
+ let p = p.build();
+
+ p.cargo("test -p d1")
+ .with_stderr(
+ "\
+[COMPILING] d1 v0.0.1 ([CWD]/d1)
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/d1[..][EXE])
+[DOCTEST] d1",
+ )
+ .with_stdout_contains_n("running 0 tests", 2)
+ .run();
+}
+
+#[cargo_test]
+fn example_bin_same_name() {
+ let p = project()
+ .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#)
+ .file("examples/foo.rs", r#"fn main() { println!("example"); }"#)
+ .build();
+
+ p.cargo("test --no-run -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..]`
+[RUNNING] `rustc [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
+",
+ )
+ .run();
+
+ assert!(!p.bin("foo").is_file());
+ assert!(p.bin("examples/foo").is_file());
+
+ p.process(&p.bin("examples/foo"))
+ .with_stdout("example\n")
+ .run();
+
+ p.cargo("run")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..]",
+ )
+ .with_stdout("bin")
+ .run();
+ assert!(p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn test_with_example_twice() {
+ let p = project()
+ .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#)
+ .file("examples/foo.rs", r#"fn main() { println!("example"); }"#)
+ .build();
+
+ println!("first");
+ p.cargo("test -v").run();
+ assert!(p.bin("examples/foo").is_file());
+ println!("second");
+ p.cargo("test -v").run();
+ assert!(p.bin("examples/foo").is_file());
+}
+
+#[cargo_test]
+fn example_with_dev_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "foo"
+ test = false
+ doctest = false
+
+ [dev-dependencies.a]
+ path = "a"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "examples/ex.rs",
+ "#[allow(unused_extern_crates)] extern crate a; fn main() {}",
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("test -v")
+ .with_stderr(
+ "\
+[..]
+[..]
+[..]
+[..]
+[RUNNING] `rustc --crate-name ex [..] --extern a=[..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bin_is_preserved() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build -v").run();
+ assert!(p.bin("foo").is_file());
+
+ println!("test");
+ p.cargo("test -v").run();
+ assert!(p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn bad_example() {
+ let p = project().file("src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("run --example foo")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no example target named `foo`.
+
+",
+ )
+ .run();
+ p.cargo("run --bin foo")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] no bin target named `foo`.
+
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn doctest_feature() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ [features]
+ bar = []
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```rust
+ /// assert_eq!(foo::foo(), 1);
+ /// ```
+ #[cfg(feature = "bar")]
+ pub fn foo() -> i32 { 1 }
+ "#,
+ )
+ .build();
+
+ p.cargo("test --features bar")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo[..][EXE])
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("running 0 tests")
+ .with_stdout_contains("test [..] ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn dashes_to_underscores() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo-bar", "0.0.1"))
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```
+ /// assert_eq!(foo_bar::foo(), 1);
+ /// ```
+ pub fn foo() -> i32 { 1 }
+ "#,
+ )
+ .build();
+
+ p.cargo("test -v").run();
+}
+
+#[cargo_test]
+fn doctest_dev_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dev-dependencies]
+ b = { path = "b" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```
+ /// extern crate b;
+ /// ```
+ pub fn foo() {}
+ "#,
+ )
+ .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("test -v").run();
+}
+
+#[cargo_test]
+fn filter_no_doc_tests() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```
+ /// extern crate b;
+ /// ```
+ pub fn foo() {}
+ "#,
+ )
+ .file("tests/foo.rs", "")
+ .build();
+
+ p.cargo("test --test=foo")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo[..][EXE])",
+ )
+ .with_stdout_contains("running 0 tests")
+ .run();
+}
+
+#[cargo_test]
+fn dylib_doctest() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "foo"
+ crate-type = ["rlib", "dylib"]
+ test = false
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```
+ /// foo::foo();
+ /// ```
+ pub fn foo() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test [..] ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn dylib_doctest2() {
+ // Can't doc-test dylibs, as they're statically linked together.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "foo"
+ crate-type = ["dylib"]
+ test = false
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```
+ /// foo::foo();
+ /// ```
+ pub fn foo() {}
+ "#,
+ )
+ .build();
+
+ p.cargo("test").with_stdout("").run();
+}
+
+#[cargo_test]
+fn cyclic_dev_dep_doc_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dev-dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ //! ```
+ //! extern crate bar;
+ //! ```
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ foo = { path = ".." }
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ #[allow(unused_extern_crates)]
+ extern crate foo;
+ "#,
+ )
+ .build();
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[COMPILING] bar v0.0.1 ([..])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo[..][EXE])
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("running 0 tests")
+ .with_stdout_contains("test [..] ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn dev_dep_with_build_script() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dev-dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("examples/foo.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file("bar/build.rs", "fn main() {}")
+ .build();
+ p.cargo("test").run();
+}
+
+#[cargo_test]
+fn no_fail_fast() {
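+ // --no-fail-fast keeps running the remaining test targets after a failure and lists every failed target at the end.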
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn add_one(x: i32) -> i32{
+ x + 1
+ }
+
+ /// ```rust
+ /// use foo::sub_one;
+ /// assert_eq!(sub_one(101), 100);
+ /// ```
+ pub fn sub_one(x: i32) -> i32{
+ x - 1
+ }
+ "#,
+ )
+ .file(
+ "tests/test_add_one.rs",
+ r#"
+ extern crate foo;
+ use foo::*;
+
+ #[test]
+ fn add_one_test() {
+ assert_eq!(add_one(1), 2);
+ }
+
+ #[test]
+ fn fail_add_one_test() {
+ assert_eq!(add_one(1), 1);
+ }
+ "#,
+ )
+ .file(
+ "tests/test_sub_one.rs",
+ r#"
+ extern crate foo;
+ use foo::*;
+
+ #[test]
+ fn sub_one_test() {
+ assert_eq!(sub_one(1), 0);
+ }
+ "#,
+ )
+ .build();
+ p.cargo("test --no-fail-fast")
+ .with_status(101)
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] test [..]
+[RUNNING] unittests src/lib.rs (target/debug/deps/foo[..])
+[RUNNING] tests/test_add_one.rs (target/debug/deps/test_add_one[..])
+[ERROR] test failed, to rerun pass `--test test_add_one`
+[RUNNING] tests/test_sub_one.rs (target/debug/deps/test_sub_one[..])
+[DOCTEST] foo
+[ERROR] 1 target failed:
+ `--test test_add_one`
+",
+ )
+ .with_stdout_contains("running 0 tests")
+ .with_stdout_contains("test result: FAILED. [..]")
+ .with_stdout_contains("test sub_one_test ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 3)
+ .run();
+}
+
+#[cargo_test]
+fn test_multiple_packages() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.d1]
+ path = "d1"
+ [dependencies.d2]
+ path = "d2"
+
+ [lib]
+ name = "foo"
+ doctest = false
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "d1/Cargo.toml",
+ r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "d1"
+ doctest = false
+ "#,
+ )
+ .file("d1/src/lib.rs", "")
+ .file(
+ "d2/Cargo.toml",
+ r#"
+ [package]
+ name = "d2"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ name = "d2"
+ doctest = false
+ "#,
+ )
+ .file("d2/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("test -p d1 -p d2")
+ .with_stderr_contains("[RUNNING] [..] (target/debug/deps/d1-[..][EXE])")
+ .with_stderr_contains("[RUNNING] [..] (target/debug/deps/d2-[..][EXE])")
+ .with_stdout_contains_n("running 0 tests", 2)
+ .run();
+}
+
+#[cargo_test]
+fn bin_does_not_rebuild_tests() {
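+ // Editing src/main.rs should rebuild only the bin (normal and test variants), not the lib or the integration test.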
+ let p = project()
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .file("tests/foo.rs", "");
+ let p = p.build();
+
+ p.cargo("test -v").run();
+
+ sleep_ms(1000);
+ fs::write(p.root().join("src/main.rs"), "fn main() { 3; }").unwrap();
+
+ p.cargo("test -v --no-run")
+ .with_stderr(
+ "\
+[DIRTY] foo v0.0.1 ([..]): the file `src/main.rs` has changed ([..])
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] src/main.rs [..]`
+[RUNNING] `rustc [..] src/main.rs [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
+[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
+[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn selective_test_wonky_profile() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.release]
+ opt-level = 2
+
+ [dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("test -v --no-run --release -p foo -p a").run();
+}
+
+#[cargo_test]
+fn selective_test_optional_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a", optional = true }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("test -v --no-run --features a -p a")
+ .with_stderr(
+ "\
+[COMPILING] a v0.0.1 ([..])
+[RUNNING] `rustc [..] a/src/lib.rs [..]`
+[RUNNING] `rustc [..] a/src/lib.rs [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[EXECUTABLE] `[..]/target/debug/deps/a-[..][EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn only_test_docs() {
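+ // With --doc only the doctest runs; the broken #[test] function and the invalid tests/foo.rs are never compiled.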
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ #[test]
+ fn foo() {
+ let a: u32 = "hello";
+ }
+
+ /// ```
+ /// foo::bar();
+ /// println!("ok");
+ /// ```
+ pub fn bar() {
+ }
+ "#,
+ )
+ .file("tests/foo.rs", "this is not rust");
+ let p = p.build();
+
+ p.cargo("test --doc")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test [..] ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_panic_abort_with_dep() {
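+ // `cargo test` should still succeed when the dev profile sets panic = 'abort' and a dependency is present.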
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [profile.dev]
+ panic = 'abort'
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ extern crate bar;
+
+ #[test]
+ fn foo() {}
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+ .file("bar/src/lib.rs", "")
+ .build();
+ p.cargo("test -v").run();
+}
+
+#[cargo_test]
+fn cfg_test_even_with_no_harness() {
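+ // Even with harness = false, the target is compiled with cfg(test) and executed, so the cfg(test) main runs.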
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lib]
+ harness = false
+ doctest = false
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"#[cfg(test)] fn main() { println!("hello!"); }"#,
+ )
+ .build();
+ p.cargo("test -v")
+ .with_stdout("hello!\n")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn panic_abort_multiple() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+
+ [profile.release]
+ panic = 'abort'
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate a;",
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "")
+ .build();
+ p.cargo("test --release -v -p foo -p a").run();
+}
+
+#[cargo_test]
+fn pass_correct_cfgs_flags_to_rustdoc() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [features]
+ default = ["feature_a/default"]
+ nightly = ["feature_a/nightly"]
+
+ [dependencies.feature_a]
+ path = "libs/feature_a"
+ default-features = false
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(test)]
+ mod tests {
+ #[test]
+ fn it_works() {
+ assert!(true);
+ }
+ }
+ "#,
+ )
+ .file(
+ "libs/feature_a/Cargo.toml",
+ r#"
+ [package]
+ name = "feature_a"
+ version = "0.1.0"
+ authors = []
+
+ [features]
+ default = ["mock_serde_codegen"]
+ nightly = ["mock_serde_derive"]
+
+ [dependencies]
+ mock_serde_derive = { path = "../mock_serde_derive", optional = true }
+
+ [build-dependencies]
+ mock_serde_codegen = { path = "../mock_serde_codegen", optional = true }
+ "#,
+ )
+ .file(
+ "libs/feature_a/src/lib.rs",
+ r#"
+ #[cfg(feature = "mock_serde_derive")]
+ const MSG: &'static str = "This is safe";
+
+ #[cfg(feature = "mock_serde_codegen")]
+ const MSG: &'static str = "This is risky";
+
+ pub fn get() -> &'static str {
+ MSG
+ }
+ "#,
+ )
+ .file(
+ "libs/mock_serde_derive/Cargo.toml",
+ &basic_manifest("mock_serde_derive", "0.1.0"),
+ )
+ .file("libs/mock_serde_derive/src/lib.rs", "")
+ .file(
+ "libs/mock_serde_codegen/Cargo.toml",
+ &basic_manifest("mock_serde_codegen", "0.1.0"),
+ )
+ .file("libs/mock_serde_codegen/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("test --package feature_a --verbose")
+ .with_stderr_contains(
+ "\
+[DOCTEST] feature_a
+[RUNNING] `rustdoc [..]--test [..]mock_serde_codegen[..]`",
+ )
+ .run();
+
+ p.cargo("test --verbose")
+ .with_stderr_contains(
+ "\
+[DOCTEST] foo
+[RUNNING] `rustdoc [..]--test [..]feature_a[..]`",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_release_ignore_panic() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+
+ [profile.test]
+ panic = 'abort'
+ [profile.release]
+ panic = 'abort'
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate a;",
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "");
+ let p = p.build();
+ println!("test");
+ p.cargo("test -v").run();
+ println!("bench");
+ p.cargo("bench -v").run();
+}
+
+#[cargo_test]
+fn test_many_with_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ a = { path = "a" }
+
+ [features]
+ foo = []
+
+ [workspace]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("test -v -p a -p foo --features foo").run();
+}
+
+#[cargo_test]
+fn test_all_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "#[test] fn foo_test() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[test] fn bar_test() {}")
+ .build();
+
+ p.cargo("test --workspace")
+ .with_stdout_contains("test foo_test ... ok")
+ .with_stdout_contains("test bar_test ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_all_exclude() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[test] pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "#[test] pub fn baz() { assert!(false); }")
+ .build();
+
+ p.cargo("test --workspace --exclude baz")
+ .with_stdout_contains(
+ "running 1 test
+test bar ... ok",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_all_exclude_not_found() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[test] pub fn bar() {}")
+ .build();
+
+ p.cargo("test --workspace --exclude baz")
+ .with_stderr_contains("[WARNING] excluded package(s) `baz` not found in workspace [..]")
+ .with_stdout_contains(
+ "running 1 test
+test bar ... ok",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_all_exclude_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[test] pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "#[test] pub fn baz() { assert!(false); }")
+ .build();
+
+ p.cargo("test --workspace --exclude '*z'")
+ .with_stdout_contains(
+ "running 1 test
+test bar ... ok",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_all_exclude_glob_not_found() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[test] pub fn bar() {}")
+ .build();
+
+ p.cargo("test --workspace --exclude '*z'")
+ .with_stderr_contains(
+ "[WARNING] excluded package pattern(s) `*z` not found in workspace [..]",
+ )
+ .with_stdout_contains(
+ "running 1 test
+test bar ... ok",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_all_exclude_broken_glob() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ p.cargo("test --workspace --exclude '[*z'")
+ .with_status(101)
+ .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`")
+ .run();
+}
+
+#[cargo_test]
+fn test_all_virtual_manifest() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("a/src/lib.rs", "#[test] fn a() {}")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+ .file("b/src/lib.rs", "#[test] fn b() {}")
+ .build();
+
+ p.cargo("test --workspace")
+ .with_stdout_contains("running 1 test\ntest a ... ok")
+ .with_stdout_contains("running 1 test\ntest b ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_virtual_manifest_all_implied() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("a/src/lib.rs", "#[test] fn a() {}")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+ .file("b/src/lib.rs", "#[test] fn b() {}")
+ .build();
+
+ p.cargo("test")
+ .with_stdout_contains("running 1 test\ntest a ... ok")
+ .with_stdout_contains("running 1 test\ntest b ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_virtual_manifest_one_project() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[test] fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "#[test] fn baz() { assert!(false); }")
+ .build();
+
+ p.cargo("test -p bar")
+ .with_stdout_contains("running 1 test\ntest bar ... ok")
+ .with_stdout_does_not_contain("running 1 test\ntest baz ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_virtual_manifest_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[test] fn bar() { assert!(false); }")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "#[test] fn baz() {}")
+ .build();
+
+ p.cargo("test -p '*z'")
+ .with_stdout_does_not_contain("running 1 test\ntest bar ... ok")
+ .with_stdout_contains("running 1 test\ntest baz ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_virtual_manifest_glob_not_found() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[test] fn bar() {}")
+ .build();
+
+ p.cargo("test -p bar -p '*z'")
+ .with_status(101)
+ .with_stderr("[ERROR] package pattern(s) `*z` not found in workspace [..]")
+ .run();
+}
+
+#[cargo_test]
+fn test_virtual_manifest_broken_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "#[test] fn bar() {}")
+ .build();
+
+ p.cargo("test -p '[*z'")
+ .with_status(101)
+ .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`")
+ .run();
+}
+
+#[cargo_test]
+fn test_all_member_dependency_same_name() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ a = "0.1.0"
+ "#,
+ )
+ .file("a/src/lib.rs", "#[test] fn a() {}")
+ .build();
+
+ Package::new("a", "0.1.0").publish();
+
+ p.cargo("test --workspace")
+ .with_stdout_contains("test a ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn doctest_only_with_dev_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dev-dependencies]
+ b = { path = "b" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```
+ /// extern crate b;
+ ///
+ /// b::b();
+ /// ```
+ pub fn a() {}
+ "#,
+ )
+ .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+ .file("b/src/lib.rs", "pub fn b() {}")
+ .build();
+
+ p.cargo("test --doc -v").run();
+}
+
+#[cargo_test]
+fn test_many_targets() {
+ let p = project()
+ .file(
+ "src/bin/a.rs",
+ r#"
+ fn main() {}
+ #[test] fn bin_a() {}
+ "#,
+ )
+ .file(
+ "src/bin/b.rs",
+ r#"
+ fn main() {}
+ #[test] fn bin_b() {}
+ "#,
+ )
+ .file(
+ "src/bin/c.rs",
+ r#"
+ fn main() {}
+ #[test] fn bin_c() { panic!(); }
+ "#,
+ )
+ .file(
+ "examples/a.rs",
+ r#"
+ fn main() {}
+ #[test] fn example_a() {}
+ "#,
+ )
+ .file(
+ "examples/b.rs",
+ r#"
+ fn main() {}
+ #[test] fn example_b() {}
+ "#,
+ )
+ .file("examples/c.rs", "#[test] fn example_c() { panic!(); }")
+ .file("tests/a.rs", "#[test] fn test_a() {}")
+ .file("tests/b.rs", "#[test] fn test_b() {}")
+ .file("tests/c.rs", "does not compile")
+ .build();
+
+ p.cargo("test --verbose --bin a --bin b --example a --example b --test a --test b")
+ .with_stdout_contains("test bin_a ... ok")
+ .with_stdout_contains("test bin_b ... ok")
+ .with_stdout_contains("test test_a ... ok")
+ .with_stdout_contains("test test_b ... ok")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name a examples/a.rs [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name b examples/b.rs [..]`")
+ .run();
+}
+
+#[cargo_test]
+fn doctest_and_registry() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ b = { path = "b" }
+ c = { path = "c" }
+
+ [workspace]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+ .file(
+ "b/src/lib.rs",
+ "
+ /// ```
+ /// b::foo();
+ /// ```
+ pub fn foo() {}
+ ",
+ )
+ .file(
+ "c/Cargo.toml",
+ r#"
+ [package]
+ name = "c"
+ version = "0.1.0"
+
+ [dependencies]
+ b = "0.1"
+ "#,
+ )
+ .file("c/src/lib.rs", "")
+ .build();
+
+ Package::new("b", "0.1.0").publish();
+
+ p.cargo("test --workspace -v").run();
+}
+
+#[cargo_test]
+fn cargo_test_env() {
+ let src = format!(
+ r#"
+ #![crate_type = "rlib"]
+
+ #[test]
+ fn env_test() {{
+ use std::env;
+ eprintln!("{{}}", env::var("{}").unwrap());
+ }}
+ "#,
+ cargo::CARGO_ENV
+ );
+
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", &src)
+ .build();
+
+ let cargo = cargo_exe().canonicalize().unwrap();
+ p.cargo("test --lib -- --nocapture")
+ .with_stderr_contains(cargo.to_str().unwrap())
+ .with_stdout_contains("test env_test ... ok")
+ .run();
+
+ // Check that `cargo test` propagates the environment's $CARGO
+ let rustc = cargo_util::paths::resolve_executable("rustc".as_ref())
+ .unwrap()
+ .canonicalize()
+ .unwrap();
+ let rustc = rustc.to_str().unwrap();
+ p.cargo("test --lib -- --nocapture")
+ // we use rustc since $CARGO is only used if it points to a path that exists
+ .env(cargo::CARGO_ENV, rustc)
+ .with_stderr_contains(rustc)
+ .with_stdout_contains("test env_test ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn test_order() {
+ let p = project()
+ .file("src/lib.rs", "#[test] fn test_lib() {}")
+ .file("tests/a.rs", "#[test] fn test_a() {}")
+ .file("tests/z.rs", "#[test] fn test_z() {}")
+ .build();
+
+ p.cargo("test --workspace")
+ .with_stdout_contains(
+ "
+running 1 test
+test test_lib ... ok
+
+test result: ok. [..]
+
+
+running 1 test
+test test_a ... ok
+
+test result: ok. [..]
+
+
+running 1 test
+test test_z ... ok
+
+test result: ok. [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cyclic_dev() {
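+ // A package may list itself as a dev-dependency; `cargo test` should accept this without reporting a cycle.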
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dev-dependencies]
+ foo = { path = "." }
+ "#,
+ )
+ .file("src/lib.rs", "#[test] fn test_lib() {}")
+ .file("tests/foo.rs", "extern crate foo;")
+ .build();
+
+ p.cargo("test --workspace").run();
+}
+
+#[cargo_test]
+fn publish_a_crate_without_tests() {
+ Package::new("testless", "0.1.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "testless"
+ version = "0.1.0"
+ exclude = ["tests/*"]
+
+ [[test]]
+ name = "a_test"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ // In real life, the package will have a test,
+ // which would be excluded from the .crate file by the
+ // `exclude` field. Our test harness does not honor
+ // exclude though, so let's just not add the file!
+ // .file("tests/a_test.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ testless = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("test").run();
+ p.cargo("test --package testless").run();
+}
+
+#[cargo_test]
+fn find_dependency_of_proc_macro_dependency_with_target() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["root", "proc_macro_dep"]
+ "#,
+ )
+ .file(
+ "root/Cargo.toml",
+ r#"
+ [package]
+ name = "root"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ proc_macro_dep = { path = "../proc_macro_dep" }
+ "#,
+ )
+ .file(
+ "root/src/lib.rs",
+ r#"
+ #[macro_use]
+ extern crate proc_macro_dep;
+
+ #[derive(Noop)]
+ pub struct X;
+ "#,
+ )
+ .file(
+ "proc_macro_dep/Cargo.toml",
+ r#"
+ [package]
+ name = "proc_macro_dep"
+ version = "0.1.0"
+ authors = []
+
+ [lib]
+ proc-macro = true
+
+ [dependencies]
+ baz = "^0.1"
+ "#,
+ )
+ .file(
+ "proc_macro_dep/src/lib.rs",
+ r#"
+ extern crate baz;
+ extern crate proc_macro;
+ use proc_macro::TokenStream;
+
+ #[proc_macro_derive(Noop)]
+ pub fn noop(_input: TokenStream) -> TokenStream {
+ "".parse().unwrap()
+ }
+ "#,
+ )
+ .build();
+ Package::new("bar", "0.1.0").publish();
+ Package::new("baz", "0.1.0")
+ .dep("bar", "0.1")
+ .file("src/lib.rs", "extern crate bar;")
+ .publish();
+ p.cargo("test --workspace --target").arg(rustc_host()).run();
+}
+
+#[cargo_test]
+fn test_hint_not_masked_by_doctest() {
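+ // The rerun hint for the failing integration test must still be printed even though the doctest passes.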
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```
+ /// assert_eq!(1, 1);
+ /// ```
+ pub fn this_works() {}
+ "#,
+ )
+ .file(
+ "tests/integ.rs",
+ r#"
+ #[test]
+ fn this_fails() {
+ panic!();
+ }
+ "#,
+ )
+ .build();
+ p.cargo("test --no-fail-fast")
+ .with_status(101)
+ .with_stdout_contains("test this_fails ... FAILED")
+ .with_stdout_contains("[..]this_works (line [..]ok")
+ .with_stderr_contains("[ERROR] test failed, to rerun pass `--test integ`")
+ .run();
+}
+
+#[cargo_test]
+fn test_hint_workspace_virtual() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b", "c"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("a/src/lib.rs", "#[test] fn t1() {}")
+ .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+ .file("b/src/lib.rs", "#[test] fn t1() {assert!(false)}")
+ .file("c/Cargo.toml", &basic_manifest("c", "0.1.0"))
+ .file(
+ "c/src/lib.rs",
+ r#"
+ /// ```rust
+ /// assert_eq!(1, 2);
+ /// ```
+ pub fn foo() {}
+ "#,
+ )
+ .file(
+ "c/src/main.rs",
+ r#"
+ fn main() {}
+
+ #[test]
+ fn from_main() { assert_eq!(1, 2); }
+ "#,
+ )
+ .file(
+ "c/tests/t1.rs",
+ r#"
+ #[test]
+ fn from_int_test() { assert_eq!(1, 2); }
+ "#,
+ )
+ .file(
+ "c/examples/ex1.rs",
+ r#"
+ fn main() {}
+
+ #[test]
+ fn from_example() { assert_eq!(1, 2); }
+ "#,
+ )
+ // This does not use #[bench] since it is unstable. #[test] works just
+ // the same for our purpose of checking the hint.
+ .file(
+ "c/benches/b1.rs",
+ r#"
+ #[test]
+ fn from_bench() { assert_eq!(1, 2); }
+ "#,
+ )
+ .build();
+
+ // This depends on Units being sorted so that `b` fails first.
+ p.cargo("test")
+ .with_stderr_unordered(
+ "\
+[COMPILING] c v0.1.0 [..]
+[COMPILING] a v0.1.0 [..]
+[COMPILING] b v0.1.0 [..]
+[FINISHED] test [..]
+[RUNNING] unittests src/lib.rs (target/debug/deps/a[..])
+[RUNNING] unittests src/lib.rs (target/debug/deps/b[..])
+[ERROR] test failed, to rerun pass `-p b --lib`
+",
+ )
+ .with_status(101)
+ .run();
+ p.cargo("test")
+ .cwd("b")
+ .with_stderr(
+ "\
+[FINISHED] test [..]
+[RUNNING] unittests src/lib.rs ([ROOT]/foo/target/debug/deps/b[..])
+[ERROR] test failed, to rerun pass `--lib`
+",
+ )
+ .with_status(101)
+ .run();
+ p.cargo("test --no-fail-fast")
+ .with_stderr(
+ "\
+[FINISHED] test [..]
+[RUNNING] unittests src/lib.rs (target/debug/deps/a[..])
+[RUNNING] unittests src/lib.rs (target/debug/deps/b[..])
+[ERROR] test failed, to rerun pass `-p b --lib`
+[RUNNING] unittests src/lib.rs (target/debug/deps/c[..])
+[RUNNING] unittests src/main.rs (target/debug/deps/c[..])
+[ERROR] test failed, to rerun pass `-p c --bin c`
+[RUNNING] tests/t1.rs (target/debug/deps/t1[..])
+[ERROR] test failed, to rerun pass `-p c --test t1`
+[DOCTEST] a
+[DOCTEST] b
+[DOCTEST] c
+[ERROR] doctest failed, to rerun pass `-p c --doc`
+[ERROR] 4 targets failed:
+ `-p b --lib`
+ `-p c --bin c`
+ `-p c --test t1`
+ `-p c --doc`
+",
+ )
+ .with_status(101)
+ .run();
+ // Check others that are not in the default set.
+ p.cargo("test -p c --examples --benches --no-fail-fast")
+ .with_stderr(
+ "\
+[COMPILING] c v0.1.0 [..]
+[FINISHED] test [..]
+[RUNNING] unittests src/lib.rs (target/debug/deps/c[..])
+[RUNNING] unittests src/main.rs (target/debug/deps/c[..])
+[ERROR] test failed, to rerun pass `-p c --bin c`
+[RUNNING] benches/b1.rs (target/debug/deps/b1[..])
+[ERROR] test failed, to rerun pass `-p c --bench b1`
+[RUNNING] unittests examples/ex1.rs (target/debug/examples/ex1[..])
+[ERROR] test failed, to rerun pass `-p c --example ex1`
+[ERROR] 3 targets failed:
+ `-p c --bin c`
+ `-p c --bench b1`
+ `-p c --example ex1`
+",
+ )
+ .with_status(101)
+ .run()
+}
+
+#[cargo_test]
+fn test_hint_workspace_nonvirtual() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["a"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("a/src/lib.rs", "#[test] fn t1() {assert!(false)}")
+ .build();
+
+ p.cargo("test --workspace")
+ .with_stderr_contains("[ERROR] test failed, to rerun pass `-p a --lib`")
+ .with_status(101)
+ .run();
+ p.cargo("test -p a")
+ .with_stderr_contains("[ERROR] test failed, to rerun pass `-p a --lib`")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn json_artifact_includes_test_flag() {
+ // Verify that the JSON artifact output includes `test` flag.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.test]
+ opt-level = 1
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("test --lib -v --message-format=json")
+ .with_json(
+ r#"
+ {
+ "reason":"compiler-artifact",
+ "profile": {
+ "debug_assertions": true,
+ "debuginfo": 2,
+ "opt_level": "1",
+ "overflow_checks": true,
+ "test": true
+ },
+ "executable": "[..]/foo-[..]",
+ "features": [],
+ "package_id":"foo 0.0.1 ([..])",
+ "manifest_path": "[..]",
+ "target":{
+ "kind":["lib"],
+ "crate_types":["lib"],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "name":"foo",
+ "src_path":"[..]lib.rs",
+ "test": true
+ },
+ "filenames":"{...}",
+ "fresh": false
+ }
+
+ {"reason": "build-finished", "success": true}
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn json_artifact_includes_executable_for_library_tests() {
+ let p = project()
+ .file("src/main.rs", "fn main() { }")
+ .file("src/lib.rs", r#"#[test] fn lib_test() {}"#)
+ .build();
+
+ p.cargo("test --lib -v --no-run --message-format=json")
+ .with_json(
+ r#"
+ {
+ "executable": "[..]/foo/target/debug/deps/foo-[..][EXE]",
+ "features": [],
+ "filenames": "{...}",
+ "fresh": false,
+ "package_id": "foo 0.0.1 ([..])",
+ "manifest_path": "[..]",
+ "profile": "{...}",
+ "reason": "compiler-artifact",
+ "target": {
+ "crate_types": [ "lib" ],
+ "kind": [ "lib" ],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "name": "foo",
+ "src_path": "[..]/foo/src/lib.rs",
+ "test": true
+ }
+ }
+
+ {"reason": "build-finished", "success": true}
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn json_artifact_includes_executable_for_integration_tests() {
+ let p = project()
+ .file(
+ "tests/integration_test.rs",
+ r#"#[test] fn integration_test() {}"#,
+ )
+ .build();
+
+ p.cargo("test -v --no-run --message-format=json --test integration_test")
+ .with_json(
+ r#"
+ {
+ "executable": "[..]/foo/target/debug/deps/integration_test-[..][EXE]",
+ "features": [],
+ "filenames": "{...}",
+ "fresh": false,
+ "package_id": "foo 0.0.1 ([..])",
+ "manifest_path": "[..]",
+ "profile": "{...}",
+ "reason": "compiler-artifact",
+ "target": {
+ "crate_types": [ "bin" ],
+ "kind": [ "test" ],
+ "doc": false,
+ "doctest": false,
+ "edition": "2015",
+ "name": "integration_test",
+ "src_path": "[..]/foo/tests/integration_test.rs",
+ "test": true
+ }
+ }
+
+ {"reason": "build-finished", "success": true}
+ "#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_build_script_links() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ links = 'something'
+
+ [lib]
+ test = false
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("test --no-run").run();
+}
+
+#[cargo_test]
+fn doctest_skip_staticlib() {
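+ // A staticlib-only library cannot be doc-tested: `--doc` is an error, and a plain `cargo test` skips the doctest.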
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ crate-type = ["staticlib"]
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ //! ```
+ //! assert_eq!(1,2);
+ //! ```
+ "#,
+ )
+ .build();
+
+ p.cargo("test --doc")
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] doc tests are not supported for crate type(s) `staticlib` in package `foo`
+[ERROR] no library targets found in package `foo`",
+ )
+ .run();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[FINISHED] test [..]
+[RUNNING] [..] (target/debug/deps/foo-[..])",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn can_not_mix_doc_tests_and_regular_tests() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ "\
+/// ```
+/// assert_eq!(1, 1)
+/// ```
+pub fn foo() -> u8 { 1 }
+
+#[cfg(test)] mod tests {
+ #[test] fn it_works() { assert_eq!(2 + 2, 4); }
+}
+",
+ )
+ .build();
+
+ p.cargo("test")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..])
+[DOCTEST] foo
+",
+ )
+ .with_stdout(
+ "
+running 1 test
+test tests::it_works ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]
+
+
+running 1 test
+test src/lib.rs - foo (line 1) ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]
+\n",
+ )
+ .run();
+
+ p.cargo("test --lib")
+ .with_stderr(
+ "\
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..] (target/debug/deps/foo-[..])\n",
+ )
+ .with_stdout(
+ "
+running 1 test
+test tests::it_works ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]
+\n",
+ )
+ .run();
+
+ // This has been modified to attempt to diagnose spurious errors on CI.
+ // For some reason, this is recompiling the lib when it shouldn't. If the
+ // root cause is ever found, the changes here should be reverted.
+ // See https://github.com/rust-lang/cargo/issues/6887
+ p.cargo("test --doc -vv")
+ .with_stderr_does_not_contain("[COMPILING] foo [..]")
+ .with_stderr_contains("[DOCTEST] foo")
+ .with_stdout(
+ "
+running 1 test
+test src/lib.rs - foo (line 1) ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]
+
+",
+ )
+ .env("CARGO_LOG", "cargo=trace")
+ .run();
+
+ p.cargo("test --lib --doc")
+ .with_status(101)
+ .with_stderr("[ERROR] Can't mix --doc with other target selecting options\n")
+ .run();
+}
+
+#[cargo_test]
+fn can_not_no_run_doc_tests() {
+ let p = project()
+ .file(
+ "src/lib.rs",
+ r#"
+ /// ```
+ /// let _x = 1 + "foo";
+ /// ```
+ pub fn foo() -> u8 { 1 }
+ "#,
+ )
+ .build();
+
+ p.cargo("test --doc --no-run")
+ .with_status(101)
+ .with_stderr("[ERROR] Can't skip running doc tests with --no-run")
+ .run();
+}
+
+#[cargo_test]
+fn test_all_targets_lib() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("test --all-targets")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[FINISHED] test [..]
+[RUNNING] [..]foo[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_dep_with_dev() {
+ Package::new("devdep", "0.1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+
+ [dev-dependencies]
+ devdep = "0.1"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("test -p bar")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] package `bar` cannot be tested because it requires dev-dependencies \
+ and is not a member of the workspace",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zdoctest-xcompile is unstable")]
+fn cargo_test_doctest_xcompile_ignores() {
+    // -Zdoctest-xcompile also enables --enable-per-target-ignores, which
+ // allows the ignore-TARGET syntax.
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file(
+ "src/lib.rs",
+ r#"
+ ///```ignore-x86_64
+ ///assert!(cfg!(not(target_arch = "x86_64")));
+ ///```
+ pub fn foo() -> u8 {
+ 4
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+ #[cfg(not(target_arch = "x86_64"))]
+ p.cargo("test")
+ .with_stdout_contains(
+ "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]",
+ )
+ .run();
+ #[cfg(target_arch = "x86_64")]
+ p.cargo("test")
+ .with_status(101)
+ .with_stdout_contains(
+ "test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured; 0 filtered out[..]",
+ )
+ .run();
+
+ #[cfg(not(target_arch = "x86_64"))]
+ p.cargo("test -Zdoctest-xcompile")
+ .masquerade_as_nightly_cargo(&["doctest-xcompile"])
+ .with_stdout_contains(
+ "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]",
+ )
+ .run();
+
+ #[cfg(target_arch = "x86_64")]
+ p.cargo("test -Zdoctest-xcompile")
+ .masquerade_as_nightly_cargo(&["doctest-xcompile"])
+ .with_stdout_contains(
+ "test result: ok. 0 passed; 0 failed; 1 ignored; 0 measured; 0 filtered out[..]",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zdoctest-xcompile is unstable")]
+fn cargo_test_doctest_xcompile() {
+ if !cross_compile::can_run_on_host() {
+ return;
+ }
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file(
+ "src/lib.rs",
+ r#"
+
+ ///```
+ ///assert!(1 == 1);
+ ///```
+ pub fn foo() -> u8 {
+ 4
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build").run();
+ p.cargo(&format!("test --target {}", cross_compile::alternate()))
+ .with_stdout_contains("running 0 tests")
+ .run();
+ p.cargo(&format!(
+ "test --target {} -Zdoctest-xcompile",
+ cross_compile::alternate()
+ ))
+ .masquerade_as_nightly_cargo(&["doctest-xcompile"])
+ .with_stdout_contains(
+ "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zdoctest-xcompile is unstable")]
+fn cargo_test_doctest_xcompile_runner() {
+ if !cross_compile::can_run_on_host() {
+ return;
+ }
+
+ let runner = project()
+ .file("Cargo.toml", &basic_bin_manifest("runner"))
+ .file(
+ "src/main.rs",
+ r#"
+ pub fn main() {
+ eprintln!("this is a runner");
+ let args: Vec<String> = std::env::args().collect();
+ std::process::Command::new(&args[1]).spawn();
+ }
+ "#,
+ )
+ .build();
+
+ runner.cargo("build").run();
+ assert!(runner.bin("runner").is_file());
+ let runner_path = paths::root().join("runner");
+ fs::copy(&runner.bin("runner"), &runner_path).unwrap();
+
+ let config = paths::root().join(".cargo/config");
+
+ fs::create_dir_all(config.parent().unwrap()).unwrap();
+ // Escape Windows backslashes for TOML config.
+ let runner_str = runner_path.to_str().unwrap().replace('\\', "\\\\");
+ fs::write(
+ config,
+ format!(
+ r#"
+ [target.'cfg(target_arch = "{}")']
+ runner = "{}"
+ "#,
+ cross_compile::alternate_arch(),
+ runner_str
+ ),
+ )
+ .unwrap();
+
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file(
+ "src/lib.rs",
+ &format!(
+ r#"
+ ///```
+ ///assert!(cfg!(target_arch = "{}"));
+ ///```
+ pub fn foo() -> u8 {{
+ 4
+ }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ p.cargo("build").run();
+ p.cargo(&format!("test --target {}", cross_compile::alternate()))
+ .with_stdout_contains("running 0 tests")
+ .run();
+ p.cargo(&format!(
+ "test --target {} -Zdoctest-xcompile",
+ cross_compile::alternate()
+ ))
+ .masquerade_as_nightly_cargo(&["doctest-xcompile"])
+ .with_stdout_contains(
+ "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]",
+ )
+ .with_stderr_contains("this is a runner")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zdoctest-xcompile is unstable")]
+fn cargo_test_doctest_xcompile_no_runner() {
+ if !cross_compile::can_run_on_host() {
+ return;
+ }
+
+ let p = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file(
+ "src/lib.rs",
+ &format!(
+ r#"
+ ///```
+ ///assert!(cfg!(target_arch = "{}"));
+ ///```
+ pub fn foo() -> u8 {{
+ 4
+ }}
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .build();
+
+ p.cargo("build").run();
+ p.cargo(&format!("test --target {}", cross_compile::alternate()))
+ .with_stdout_contains("running 0 tests")
+ .run();
+ p.cargo(&format!(
+ "test --target {} -Zdoctest-xcompile",
+ cross_compile::alternate()
+ ))
+ .masquerade_as_nightly_cargo(&["doctest-xcompile"])
+ .with_stdout_contains(
+ "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]",
+ )
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zpanic-abort-tests in rustc is unstable")]
+fn panic_abort_tests() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+
+ [dependencies]
+ a = { path = 'a' }
+
+ [profile.dev]
+ panic = 'abort'
+ [profile.test]
+ panic = 'abort'
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[test]
+ fn foo() {
+ a::foo();
+ }
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_lib_manifest("a"))
+ .file("a/src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ p.cargo("test -Z panic-abort-tests -v")
+ .with_stderr_contains("[..]--crate-name a [..]-C panic=abort[..]")
+ .with_stderr_contains("[..]--crate-name foo [..]-C panic=abort[..]")
+ .with_stderr_contains("[..]--crate-name foo [..]-C panic=abort[..]--test[..]")
+ .masquerade_as_nightly_cargo(&["panic-abort-tests"])
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zpanic-abort-tests in rustc is unstable")]
+fn panic_abort_only_test() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+
+ [dependencies]
+ a = { path = 'a' }
+
+ [profile.test]
+ panic = 'abort'
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[test]
+ fn foo() {
+ a::foo();
+ }
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_lib_manifest("a"))
+ .file("a/src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ p.cargo("test -Z panic-abort-tests -v")
+ .with_stderr_contains("warning: `panic` setting is ignored for `test` profile")
+ .masquerade_as_nightly_cargo(&["panic-abort-tests"])
+ .run();
+}
+
+#[cargo_test(nightly, reason = "-Zpanic-abort-tests in rustc is unstable")]
+fn panic_abort_test_profile_inherits() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = 'foo'
+ version = '0.1.0'
+
+ [dependencies]
+ a = { path = 'a' }
+
+ [profile.dev]
+ panic = 'abort'
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ #[test]
+ fn foo() {
+ a::foo();
+ }
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_lib_manifest("a"))
+ .file("a/src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ p.cargo("test -Z panic-abort-tests -v")
+ .masquerade_as_nightly_cargo(&["panic-abort-tests"])
+ .with_status(0)
+ .run();
+}
+
+#[cargo_test]
+fn bin_env_for_test() {
+ // Test for the `CARGO_BIN_EXE_` environment variables for tests.
+ //
+ // Note: The Unicode binary uses a `[[bin]]` definition because different
+ // filesystems normalize utf-8 in different ways. For example, HFS uses
+ // "gru\u{308}ßen" and APFS uses "gr\u{fc}ßen". Defining it in TOML forces
+ // one form to be used.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ edition = "2018"
+
+ [[bin]]
+ name = 'grüßen'
+ path = 'src/bin/grussen.rs'
+ "#,
+ )
+ .file("src/bin/foo.rs", "fn main() {}")
+ .file("src/bin/with-dash.rs", "fn main() {}")
+ .file("src/bin/grussen.rs", "fn main() {}")
+ .build();
+
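+    // Double any backslashes so Windows paths survive being spliced into the
+    // string literals of the generated test below.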
+ let bin_path = |name| p.bin(name).to_string_lossy().replace("\\", "\\\\");
+ p.change_file(
+ "tests/check_env.rs",
+ &r#"
+ #[test]
+ fn run_bins() {
+ assert_eq!(env!("CARGO_BIN_EXE_foo"), "<FOO_PATH>");
+ assert_eq!(env!("CARGO_BIN_EXE_with-dash"), "<WITH_DASH_PATH>");
+ assert_eq!(env!("CARGO_BIN_EXE_grüßen"), "<GRÜSSEN_PATH>");
+ }
+ "#
+ .replace("<FOO_PATH>", &bin_path("foo"))
+ .replace("<WITH_DASH_PATH>", &bin_path("with-dash"))
+ .replace("<GRÜSSEN_PATH>", &bin_path("grüßen")),
+ );
+
+ p.cargo("test --test check_env").run();
+ p.cargo("check --test check_env").run();
+}
+
+#[cargo_test]
+fn test_workspaces_cwd() {
+ // This tests that all the different test types are executed from the
+ // crate directory (manifest_dir), and not from the workspace root.
+
+ let make_lib_file = |expected| {
+ format!(
+ r#"
+ //! ```
+ //! assert_eq!("{expected}", std::fs::read_to_string("file.txt").unwrap());
+ //! assert_eq!("{expected}", include_str!("../file.txt"));
+ //! assert_eq!(
+ //! std::path::PathBuf::from(std::env!("CARGO_MANIFEST_DIR")),
+ //! std::env::current_dir().unwrap(),
+ //! );
+ //! ```
+
+ #[test]
+ fn test_unit_{expected}_cwd() {{
+ assert_eq!("{expected}", std::fs::read_to_string("file.txt").unwrap());
+ assert_eq!("{expected}", include_str!("../file.txt"));
+ assert_eq!(
+ std::path::PathBuf::from(std::env!("CARGO_MANIFEST_DIR")),
+ std::env::current_dir().unwrap(),
+ );
+ }}
+ "#,
+ expected = expected
+ )
+ };
+ let make_test_file = |expected| {
+ format!(
+ r#"
+ #[test]
+ fn test_integration_{expected}_cwd() {{
+ assert_eq!("{expected}", std::fs::read_to_string("file.txt").unwrap());
+ assert_eq!("{expected}", include_str!("../file.txt"));
+ assert_eq!(
+ std::path::PathBuf::from(std::env!("CARGO_MANIFEST_DIR")),
+ std::env::current_dir().unwrap(),
+ );
+ }}
+ "#,
+ expected = expected
+ )
+ };
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "root-crate"
+ version = "0.0.0"
+
+ [workspace]
+ members = [".", "nested-crate", "very/deeply/nested/deep-crate"]
+ "#,
+ )
+ .file("file.txt", "root")
+ .file("src/lib.rs", &make_lib_file("root"))
+ .file("tests/integration.rs", &make_test_file("root"))
+ .file(
+ "nested-crate/Cargo.toml",
+ r#"
+ [package]
+ name = "nested-crate"
+ version = "0.0.0"
+ "#,
+ )
+ .file("nested-crate/file.txt", "nested")
+ .file("nested-crate/src/lib.rs", &make_lib_file("nested"))
+ .file(
+ "nested-crate/tests/integration.rs",
+ &make_test_file("nested"),
+ )
+ .file(
+ "very/deeply/nested/deep-crate/Cargo.toml",
+ r#"
+ [package]
+ name = "deep-crate"
+ version = "0.0.0"
+ "#,
+ )
+ .file("very/deeply/nested/deep-crate/file.txt", "deep")
+ .file(
+ "very/deeply/nested/deep-crate/src/lib.rs",
+ &make_lib_file("deep"),
+ )
+ .file(
+ "very/deeply/nested/deep-crate/tests/integration.rs",
+ &make_test_file("deep"),
+ )
+ .build();
+
+ p.cargo("test --workspace --all")
+ .with_stderr_contains("[DOCTEST] root-crate")
+ .with_stderr_contains("[DOCTEST] nested-crate")
+ .with_stderr_contains("[DOCTEST] deep-crate")
+ .with_stdout_contains("test test_unit_root_cwd ... ok")
+ .with_stdout_contains("test test_unit_nested_cwd ... ok")
+ .with_stdout_contains("test test_unit_deep_cwd ... ok")
+ .with_stdout_contains("test test_integration_root_cwd ... ok")
+ .with_stdout_contains("test test_integration_nested_cwd ... ok")
+ .with_stdout_contains("test test_integration_deep_cwd ... ok")
+ .run();
+
+ p.cargo("test -p root-crate --all")
+ .with_stderr_contains("[DOCTEST] root-crate")
+ .with_stdout_contains("test test_unit_root_cwd ... ok")
+ .with_stdout_contains("test test_integration_root_cwd ... ok")
+ .run();
+
+ p.cargo("test -p nested-crate --all")
+ .with_stderr_contains("[DOCTEST] nested-crate")
+ .with_stdout_contains("test test_unit_nested_cwd ... ok")
+ .with_stdout_contains("test test_integration_nested_cwd ... ok")
+ .run();
+
+ p.cargo("test -p deep-crate --all")
+ .with_stderr_contains("[DOCTEST] deep-crate")
+ .with_stdout_contains("test test_unit_deep_cwd ... ok")
+ .with_stdout_contains("test test_integration_deep_cwd ... ok")
+ .run();
+
+ p.cargo("test --all")
+ .cwd("nested-crate")
+ .with_stderr_contains("[DOCTEST] nested-crate")
+ .with_stdout_contains("test test_unit_nested_cwd ... ok")
+ .with_stdout_contains("test test_integration_nested_cwd ... ok")
+ .run();
+
+ p.cargo("test --all")
+ .cwd("very/deeply/nested/deep-crate")
+ .with_stderr_contains("[DOCTEST] deep-crate")
+ .with_stdout_contains("test test_unit_deep_cwd ... ok")
+ .with_stdout_contains("test test_integration_deep_cwd ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn execution_error() {
+ // Checks the behavior when a test fails to launch.
+ let p = project()
+ .file(
+ "tests/t1.rs",
+ r#"
+ #[test]
+ fn foo() {}
+ "#,
+ )
+ .build();
+ let key = format!("CARGO_TARGET_{}_RUNNER", rustc_host_env());
+ p.cargo("test")
+ .env(&key, "does_not_exist")
+ // The actual error is usually "no such file", but on Windows it has a
+ // custom message. Since matching against the error string produced by
+ // Rust is not very reliable, this just uses `[..]`.
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] test [..]
+[RUNNING] tests/t1.rs (target/debug/deps/t1[..])
+error: test failed, to rerun pass `--test t1`
+
+Caused by:
+ could not execute process `does_not_exist [ROOT]/foo/target/debug/deps/t1[..]` (never executed)
+
+Caused by:
+ [..]
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn nonzero_exit_status() {
+ // Tests for nonzero exit codes from tests.
+ let p = project()
+ .file(
+ "tests/t1.rs",
+ r#"
+ #[test]
+ fn t() { panic!("this is a normal error") }
+ "#,
+ )
+ .file(
+ "tests/t2.rs",
+ r#"
+ #[test]
+ fn t() { std::process::exit(4) }
+ "#,
+ )
+ .build();
+
+ p.cargo("test --test t1")
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+[FINISHED] test [..]
+[RUNNING] tests/t1.rs (target/debug/deps/t1[..])
+error: test failed, to rerun pass `--test t1`
+",
+ )
+ .with_stdout_contains("[..]this is a normal error[..]")
+ .with_status(101)
+ .run();
+
+ p.cargo("test --test t2")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] test [..]
+[RUNNING] tests/t2.rs (target/debug/deps/t2[..])
+error: test failed, to rerun pass `--test t2`
+
+Caused by:
+ process didn't exit successfully: `[ROOT]/foo/target/debug/deps/t2[..]` (exit [..]: 4)
+",
+ )
+ .with_status(4)
+ .run();
+
+    // With --no-fail-fast, the overall exit status is always 101, regardless of each test's exit code
+ p.cargo("test --no-fail-fast")
+ .with_stderr(
+ "\
+[FINISHED] test [..]
+[RUNNING] tests/t1.rs (target/debug/deps/t1[..])
+error: test failed, to rerun pass `--test t1`
+[RUNNING] tests/t2.rs (target/debug/deps/t2[..])
+error: test failed, to rerun pass `--test t2`
+
+Caused by:
+ process didn't exit successfully: `[ROOT]/foo/target/debug/deps/t2[..]` (exit [..]: 4)
+error: 2 targets failed:
+ `--test t1`
+ `--test t2`
+",
+ )
+ .with_status(101)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/timings.rs b/src/tools/cargo/tests/testsuite/timings.rs
new file mode 100644
index 000000000..8f06ac69b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/timings.rs
@@ -0,0 +1,53 @@
+//! Tests for --timings.
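+//! The `--timings` flag writes an HTML report under `target/cargo-timings/`.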
+
+use cargo_test_support::project;
+use cargo_test_support::registry::Package;
+
+#[cargo_test]
+fn timings_works() {
+ Package::new("dep", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .file("tests/t1.rs", "")
+ .file("examples/ex1.rs", "fn main() {}")
+ .build();
+
+ p.cargo("build --all-targets --timings")
+ .with_stderr_unordered(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep v0.1.0 [..]
+[COMPILING] dep v0.1.0
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+ Timing report saved to [..]/foo/target/cargo-timings/cargo-timing-[..].html
+",
+ )
+ .run();
+
+ p.cargo("clean").run();
+
+ p.cargo("test --timings").run();
+
+ p.cargo("clean").run();
+
+ p.cargo("check --timings").run();
+
+ p.cargo("clean").run();
+
+ p.cargo("doc --timings").run();
+}
diff --git a/src/tools/cargo/tests/testsuite/tool_paths.rs b/src/tools/cargo/tests/testsuite/tool_paths.rs
new file mode 100644
index 000000000..a211b5328
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/tool_paths.rs
@@ -0,0 +1,402 @@
+//! Tests for configuration values that point to programs.
+
+use cargo_test_support::{basic_lib_manifest, project, rustc_host, rustc_host_env};
+
+#[cargo_test]
+fn pathless_tools() {
+ let target = rustc_host();
+
+ let foo = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}]
+ linker = "nonexistent-linker"
+ "#,
+ target
+ ),
+ )
+ .build();
+
+ foo.cargo("build --verbose")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] -C linker=nonexistent-linker [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn absolute_tools() {
+ let target = rustc_host();
+
+ // Escaped as they appear within a TOML config file
+ let linker = if cfg!(windows) {
+ r#"C:\\bogus\\nonexistent-linker"#
+ } else {
+ r#"/bogus/nonexistent-linker"#
+ };
+
+ let foo = project()
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{target}]
+ linker = "{linker}"
+ "#,
+ target = target,
+ linker = linker
+ ),
+ )
+ .build();
+
+ foo.cargo("build --verbose")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] -C linker=[..]bogus/nonexistent-linker [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn relative_tools() {
+ let target = rustc_host();
+
+ // Escaped as they appear within a TOML config file
+ let linker = if cfg!(windows) {
+ r#".\\tools\\nonexistent-linker"#
+ } else {
+ r#"./tools/nonexistent-linker"#
+ };
+
+ // Funky directory structure to test that relative tool paths are made absolute
+ // by reference to the `.cargo/..` directory and not to (for example) the CWD.
+ let p = project()
+ .no_manifest()
+ .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+ .file("bar/src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{target}]
+ linker = "{linker}"
+ "#,
+ target = target,
+ linker = linker
+ ),
+ )
+ .build();
+
+ let prefix = p.root().into_os_string().into_string().unwrap();
+
+ p.cargo("build --verbose")
+ .cwd("bar")
+ .with_stderr(&format!(
+ "\
+[COMPILING] bar v0.5.0 ([CWD])
+[RUNNING] `rustc [..] -C linker={prefix}/./tools/nonexistent-linker [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ prefix = prefix,
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn custom_runner() {
+ let target = rustc_host();
+
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("tests/test.rs", "")
+ .file("benches/bench.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.{}]
+ runner = "nonexistent-runner -r"
+ "#,
+ target
+ ),
+ )
+ .build();
+
+ p.cargo("run -- --param")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param`
+",
+ )
+ .run();
+
+ p.cargo("test --test test --verbose -- --param")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `nonexistent-runner -r [..]/target/debug/deps/test-[..][EXE] --param`
+",
+ )
+ .run();
+
+ p.cargo("bench --bench bench --verbose -- --param")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..]`
+[RUNNING] `rustc [..]`
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] `nonexistent-runner -r [..]/target/release/deps/bench-[..][EXE] --param --bench`
+",
+ )
+ .run();
+}
+
+// A custom runner can be set via `target.'cfg(..)'.runner`.
+#[cargo_test]
+fn custom_runner_cfg() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [target.'cfg(not(target_os = "none"))']
+ runner = "nonexistent-runner -r"
+ "#,
+ )
+ .build();
+
+ p.cargo("run -- --param")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param`
+",
+ )
+ .run();
+}
+
+// A custom runner set via `target.$triple.runner` takes precedence over `target.'cfg(..)'.runner`.
+#[cargo_test]
+fn custom_runner_cfg_precedence() {
+ let target = rustc_host();
+
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [target.'cfg(not(target_os = "none"))']
+ runner = "ignored-runner"
+
+ [target.{}]
+ runner = "nonexistent-runner -r"
+ "#,
+ target
+ ),
+ )
+ .build();
+
+ p.cargo("run -- --param")
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_runner_cfg_collision() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config",
+ r#"
+ [target.'cfg(not(target_arch = "avr"))']
+ runner = "true"
+
+ [target.'cfg(not(target_os = "none"))']
+ runner = "false"
+ "#,
+ )
+ .build();
+
+ p.cargo("run -- --param")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] several matching instances of `target.'cfg(..)'.runner` in configurations
+first match `cfg(not(target_arch = \"avr\"))` located in [..]/foo/.cargo/config
+second match `cfg(not(target_os = \"none\"))` located in [..]/foo/.cargo/config
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn custom_runner_env() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
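+    // The runner can also be set through `CARGO_TARGET_<TRIPLE>_RUNNER`, where the
+    // triple is uppercased and dashes are replaced with underscores.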
+ let key = format!("CARGO_TARGET_{}_RUNNER", rustc_host_env());
+
+ p.cargo("run")
+ .env(&key, "nonexistent-runner --foo")
+ .with_status(101)
+ // FIXME: Update "Caused by" error message once rust/pull/87704 is merged.
+ // On Windows, changing to a custom executable resolver has changed the
+ // error messages.
+ .with_stderr(&format!(
+ "\
+[COMPILING] foo [..]
+[FINISHED] dev [..]
+[RUNNING] `nonexistent-runner --foo target/debug/foo[EXE]`
+[ERROR] could not execute process `nonexistent-runner --foo target/debug/foo[EXE]` (never executed)
+
+Caused by:
+ [..]
+"
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn custom_runner_env_overrides_config() {
+ let target = rustc_host();
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ ".cargo/config.toml",
+ &format!(
+ r#"
+ [target.{}]
+ runner = "should-not-run -r"
+ "#,
+ target
+ ),
+ )
+ .build();
+
+ let key = format!("CARGO_TARGET_{}_RUNNER", rustc_host_env());
+
+ p.cargo("run")
+ .env(&key, "should-run --foo")
+ .with_status(101)
+ .with_stderr_contains("[RUNNING] `should-run --foo target/debug/foo[EXE]`")
+ .run();
+}
+
+#[cargo_test]
+#[cfg(unix)] // Assumes `true` is in PATH.
+fn custom_runner_env_true() {
+ // Check for a bug where "true" was interpreted as a boolean instead of
+ // the executable.
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ let key = format!("CARGO_TARGET_{}_RUNNER", rustc_host_env());
+
+ p.cargo("run")
+ .env(&key, "true")
+ .with_stderr_contains("[RUNNING] `true target/debug/foo[EXE]`")
+ .run();
+}
+
+#[cargo_test]
+fn custom_linker_env() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ let key = format!("CARGO_TARGET_{}_LINKER", rustc_host_env());
+
+ p.cargo("build -v")
+ .env(&key, "nonexistent-linker")
+ .with_status(101)
+ .with_stderr_contains("[RUNNING] `rustc [..]-C linker=nonexistent-linker [..]")
+ .run();
+}
+
+#[cargo_test]
+fn target_in_environment_contains_lower_case() {
+ let p = project().file("src/main.rs", "fn main() {}").build();
+
+ let target = rustc_host();
+ let env_key = format!(
+ "CARGO_TARGET_{}_LINKER",
+ target.to_lowercase().replace('-', "_")
+ );
+
+ p.cargo("build -v --target")
+ .arg(target)
+ .env(&env_key, "nonexistent-linker")
+ .with_stderr_contains(format!(
+ "warning: Environment variables are expected to use uppercase \
+ letters and underscores, the variable `{}` will be ignored and \
+ have no effect",
+ env_key
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn cfg_ignored_fields() {
+ // Test for some ignored fields in [target.'cfg()'] tables.
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ # Try some empty tables.
+ [target.'cfg(not(foo))']
+ [target.'cfg(not(bar))'.somelib]
+
+ # A bunch of unused fields.
+ [target.'cfg(not(target_os = "none"))']
+ linker = 'false'
+ ar = 'false'
+ foo = {rustc-flags = "-l foo"}
+ invalid = 1
+ runner = 'false'
+ rustflags = ''
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[WARNING] unused key `somelib` in [target] config table `cfg(not(bar))`
+[WARNING] unused key `ar` in [target] config table `cfg(not(target_os = \"none\"))`
+[WARNING] unused key `foo` in [target] config table `cfg(not(target_os = \"none\"))`
+[WARNING] unused key `invalid` in [target] config table `cfg(not(target_os = \"none\"))`
+[WARNING] unused key `linker` in [target] config table `cfg(not(target_os = \"none\"))`
+[CHECKING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/tree.rs b/src/tools/cargo/tests/testsuite/tree.rs
new file mode 100644
index 000000000..c3c1ca6d3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/tree.rs
@@ -0,0 +1,2150 @@
+//! Tests for the `cargo tree` command.
+
+use super::features2::switch_to_resolver_2;
+use cargo_test_support::cross_compile::{self, alternate};
+use cargo_test_support::registry::{Dependency, Package};
+use cargo_test_support::{basic_manifest, git, project, rustc_host, Project};
+
+fn make_simple_proj() -> Project {
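+    // Registry graph: a -> b -> c, with bdep and devdep each depending on b.
+    // The project depends on a and c, on bdep as a build dependency, and on
+    // devdep as a dev dependency.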
+ Package::new("c", "1.0.0").publish();
+ Package::new("b", "1.0.0").dep("c", "1.0").publish();
+ Package::new("a", "1.0.0").dep("b", "1.0").publish();
+ Package::new("bdep", "1.0.0").dep("b", "1.0").publish();
+ Package::new("devdep", "1.0.0").dep("b", "1.0.0").publish();
+
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = "1.0"
+ c = "1.0"
+
+ [build-dependencies]
+ bdep = "1.0"
+
+ [dev-dependencies]
+ devdep = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build()
+}
+
+#[cargo_test]
+fn simple() {
+ // A simple test with a few different dependencies.
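+    // Subtrees that were already printed are de-duplicated and marked with `(*)`.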
+ let p = make_simple_proj();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── a v1.0.0
+│ └── b v1.0.0
+│ └── c v1.0.0
+└── c v1.0.0
+[build-dependencies]
+└── bdep v1.0.0
+ └── b v1.0.0 (*)
+[dev-dependencies]
+└── devdep v1.0.0
+ └── b v1.0.0 (*)
+",
+ )
+ .run();
+
+ p.cargo("tree -p bdep")
+ .with_stdout(
+ "\
+bdep v1.0.0
+└── b v1.0.0
+ └── c v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn virtual_workspace() {
+ // Multiple packages in a virtual workspace.
+ Package::new("somedep", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "baz", "c"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "1.0.0"))
+ .file("a/src/lib.rs", "")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+
+ [dependencies]
+ c = { path = "../c" }
+ somedep = "1.0"
+ "#,
+ )
+ .file("baz/src/lib.rs", "")
+ .file("c/Cargo.toml", &basic_manifest("c", "1.0.0"))
+ .file("c/src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+a v1.0.0 ([..]/foo/a)
+
+baz v0.1.0 ([..]/foo/baz)
+├── c v1.0.0 ([..]/foo/c)
+└── somedep v1.0.0
+
+c v1.0.0 ([..]/foo/c)
+",
+ )
+ .run();
+
+ p.cargo("tree -p a").with_stdout("a v1.0.0 [..]").run();
+
+ p.cargo("tree")
+ .cwd("baz")
+ .with_stdout(
+ "\
+baz v0.1.0 ([..]/foo/baz)
+├── c v1.0.0 ([..]/foo/c)
+└── somedep v1.0.0
+",
+ )
+ .run();
+
+ // exclude baz
+ p.cargo("tree --workspace --exclude baz")
+ .with_stdout(
+ "\
+a v1.0.0 ([..]/foo/a)
+
+c v1.0.0 ([..]/foo/c)
+",
+ )
+ .run();
+
+ // exclude glob '*z'
+ p.cargo("tree --workspace --exclude '*z'")
+ .with_stdout(
+ "\
+a v1.0.0 ([..]/foo/a)
+
+c v1.0.0 ([..]/foo/c)
+",
+ )
+ .run();
+
+ // include glob '*z'
+ p.cargo("tree -p '*z'")
+ .with_stdout(
+ "\
+baz v0.1.0 ([..]/foo/baz)
+├── c v1.0.0 ([..]/foo/c)
+└── somedep v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dedupe_edges() {
+ // Works around https://github.com/rust-lang/cargo/issues/7985
+ Package::new("bitflags", "1.0.0").publish();
+ Package::new("manyfeat", "1.0.0")
+ .feature("f1", &[])
+ .feature("f2", &[])
+ .feature("f3", &[])
+ .dep("bitflags", "1.0")
+ .publish();
+ Package::new("a", "1.0.0")
+ .feature_dep("manyfeat", "1.0", &["f1"])
+ .publish();
+ Package::new("b", "1.0.0")
+ .feature_dep("manyfeat", "1.0", &["f2"])
+ .publish();
+ Package::new("c", "1.0.0")
+ .feature_dep("manyfeat", "1.0", &["f3"])
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = "1.0"
+ b = "1.0"
+ c = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── a v1.0.0
+│ └── manyfeat v1.0.0
+│ └── bitflags v1.0.0
+├── b v1.0.0
+│ └── manyfeat v1.0.0 (*)
+└── c v1.0.0
+ └── manyfeat v1.0.0 (*)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn renamed_deps() {
+ // Handles renamed dependencies.
+ Package::new("one", "1.0.0").publish();
+ Package::new("two", "1.0.0").publish();
+ Package::new("bar", "1.0.0").dep("one", "1.0").publish();
+ Package::new("bar", "2.0.0").dep("two", "1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [dependencies]
+ bar1 = {version = "1.0", package="bar"}
+ bar2 = {version = "2.0", package="bar"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v1.0.0 ([..]/foo)
+├── bar v1.0.0
+│ └── one v1.0.0
+└── bar v2.0.0
+ └── two v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn source_kinds() {
+ // Handles git and path sources.
+ Package::new("regdep", "1.0.0").publish();
+ let git_project = git::new("gitdep", |p| {
+ p.file("Cargo.toml", &basic_manifest("gitdep", "1.0.0"))
+ .file("src/lib.rs", "")
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ regdep = "1.0"
+ pathdep = {{ path = "pathdep" }}
+ gitdep = {{ git = "{}" }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("pathdep/Cargo.toml", &basic_manifest("pathdep", "1.0.0"))
+ .file("pathdep/src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── gitdep v1.0.0 (file://[..]/gitdep#[..])
+├── pathdep v1.0.0 ([..]/foo/pathdep)
+└── regdep v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn features() {
+ // Exercises a variety of feature behaviors.
+ Package::new("optdep_default", "1.0.0").publish();
+ Package::new("optdep", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ optdep_default = { version = "1.0", optional = true }
+ optdep = { version = "1.0", optional = true }
+
+ [features]
+ default = ["optdep_default"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+a v0.1.0 ([..]/foo)
+└── optdep_default v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree --no-default-features")
+ .with_stdout(
+ "\
+a v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+
+ p.cargo("tree --all-features")
+ .with_stdout(
+ "\
+a v0.1.0 ([..]/foo)
+├── optdep v1.0.0
+└── optdep_default v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree --features optdep")
+ .with_stdout(
+ "\
+a v0.1.0 ([..]/foo)
+├── optdep v1.0.0
+└── optdep_default v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn filters_target() {
+ // --target flag
+ if cross_compile::disabled() {
+ return;
+ }
+ Package::new("targetdep", "1.0.0").publish();
+ Package::new("hostdep", "1.0.0").publish();
+ Package::new("devdep", "1.0.0").publish();
+ Package::new("build_target_dep", "1.0.0").publish();
+ Package::new("build_host_dep", "1.0.0")
+ .target_dep("targetdep", "1.0", alternate())
+ .target_dep("hostdep", "1.0", rustc_host())
+ .publish();
+ Package::new("pm_target", "1.0.0")
+ .proc_macro(true)
+ .publish();
+ Package::new("pm_host", "1.0.0").proc_macro(true).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [target.'{alt}'.dependencies]
+ targetdep = "1.0"
+ pm_target = "1.0"
+
+ [target.'{host}'.dependencies]
+ hostdep = "1.0"
+ pm_host = "1.0"
+
+ [target.'{alt}'.dev-dependencies]
+ devdep = "1.0"
+
+ [target.'{alt}'.build-dependencies]
+ build_target_dep = "1.0"
+
+ [target.'{host}'.build-dependencies]
+ build_host_dep = "1.0"
+ "#,
+ alt = alternate(),
+ host = rustc_host()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── hostdep v1.0.0
+└── pm_host v1.0.0 (proc-macro)
+[build-dependencies]
+└── build_host_dep v1.0.0
+ └── hostdep v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree --target")
+ .arg(alternate())
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── pm_target v1.0.0 (proc-macro)
+└── targetdep v1.0.0
+[build-dependencies]
+└── build_host_dep v1.0.0
+ └── hostdep v1.0.0
+[dev-dependencies]
+└── devdep v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree --target")
+ .arg(rustc_host())
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── hostdep v1.0.0
+└── pm_host v1.0.0 (proc-macro)
+[build-dependencies]
+└── build_host_dep v1.0.0
+ └── hostdep v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree --target=all")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── hostdep v1.0.0
+├── pm_host v1.0.0 (proc-macro)
+├── pm_target v1.0.0 (proc-macro)
+└── targetdep v1.0.0
+[build-dependencies]
+├── build_host_dep v1.0.0
+│ ├── hostdep v1.0.0
+│ └── targetdep v1.0.0
+└── build_target_dep v1.0.0
+[dev-dependencies]
+└── devdep v1.0.0
+",
+ )
+ .run();
+
+ // no-proc-macro
+ p.cargo("tree --target=all -e no-proc-macro")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── hostdep v1.0.0
+└── targetdep v1.0.0
+[build-dependencies]
+├── build_host_dep v1.0.0
+│ ├── hostdep v1.0.0
+│ └── targetdep v1.0.0
+└── build_target_dep v1.0.0
+[dev-dependencies]
+└── devdep v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_selected_target_dependency() {
+ // --target flag
+ if cross_compile::disabled() {
+ return;
+ }
+ Package::new("targetdep", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [target.'{alt}'.dependencies]
+ targetdep = "1.0"
+
+ "#,
+ alt = alternate(),
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+
+ p.cargo("tree -i targetdep")
+ .with_stderr(
+ "\
+[WARNING] nothing to print.
+
+To find dependencies that require specific target platforms, \
+try to use option `--target all` first, and then narrow your search scope accordingly.
+",
+ )
+ .run();
+ p.cargo("tree -i targetdep --target all")
+ .with_stdout(
+ "\
+targetdep v1.0.0
+└── foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dep_kinds() {
+ Package::new("inner-devdep", "1.0.0").publish();
+ Package::new("inner-builddep", "1.0.0").publish();
+ Package::new("inner-normal", "1.0.0").publish();
+ Package::new("inner-pm", "1.0.0").proc_macro(true).publish();
+ Package::new("inner-buildpm", "1.0.0")
+ .proc_macro(true)
+ .publish();
+ Package::new("normaldep", "1.0.0")
+ .dep("inner-normal", "1.0")
+ .dev_dep("inner-devdep", "1.0")
+ .build_dep("inner-builddep", "1.0")
+ .publish();
+ Package::new("devdep", "1.0.0")
+ .dep("inner-normal", "1.0")
+ .dep("inner-pm", "1.0")
+ .dev_dep("inner-devdep", "1.0")
+ .build_dep("inner-builddep", "1.0")
+ .build_dep("inner-buildpm", "1.0")
+ .publish();
+ Package::new("builddep", "1.0.0")
+ .dep("inner-normal", "1.0")
+ .dev_dep("inner-devdep", "1.0")
+ .build_dep("inner-builddep", "1.0")
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ normaldep = "1.0"
+
+ [dev-dependencies]
+ devdep = "1.0"
+
+ [build-dependencies]
+ builddep = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── normaldep v1.0.0
+ └── inner-normal v1.0.0
+ [build-dependencies]
+ └── inner-builddep v1.0.0
+[build-dependencies]
+└── builddep v1.0.0
+ └── inner-normal v1.0.0
+ [build-dependencies]
+ └── inner-builddep v1.0.0
+[dev-dependencies]
+└── devdep v1.0.0
+ ├── inner-normal v1.0.0
+ └── inner-pm v1.0.0 (proc-macro)
+ [build-dependencies]
+ ├── inner-builddep v1.0.0
+ └── inner-buildpm v1.0.0 (proc-macro)
+",
+ )
+ .run();
+
+ p.cargo("tree -e no-dev")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── normaldep v1.0.0
+ └── inner-normal v1.0.0
+ [build-dependencies]
+ └── inner-builddep v1.0.0
+[build-dependencies]
+└── builddep v1.0.0
+ └── inner-normal v1.0.0
+ [build-dependencies]
+ └── inner-builddep v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -e normal")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── normaldep v1.0.0
+ └── inner-normal v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -e dev,build")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+[build-dependencies]
+└── builddep v1.0.0
+ [build-dependencies]
+ └── inner-builddep v1.0.0
+[dev-dependencies]
+└── devdep v1.0.0
+ [build-dependencies]
+ ├── inner-builddep v1.0.0
+ └── inner-buildpm v1.0.0 (proc-macro)
+",
+ )
+ .run();
+
+ p.cargo("tree -e dev,build,no-proc-macro")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+[build-dependencies]
+└── builddep v1.0.0
+ [build-dependencies]
+ └── inner-builddep v1.0.0
+[dev-dependencies]
+└── devdep v1.0.0
+ [build-dependencies]
+ └── inner-builddep v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cyclic_dev_dep() {
+ // Cyclical dev-dependency and inverse flag.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dev-dependencies]
+ dev-dep = { path = "dev-dep" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "dev-dep/Cargo.toml",
+ r#"
+ [package]
+ name = "dev-dep"
+ version = "0.1.0"
+
+ [dependencies]
+ foo = { path=".." }
+ "#,
+ )
+ .file("dev-dep/src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+[dev-dependencies]
+└── dev-dep v0.1.0 ([..]/foo/dev-dep)
+ └── foo v0.1.0 ([..]/foo) (*)
+",
+ )
+ .run();
+
+ p.cargo("tree --invert foo")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── dev-dep v0.1.0 ([..]/foo/dev-dep)
+ [dev-dependencies]
+ └── foo v0.1.0 ([..]/foo) (*)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invert() {
+ Package::new("b1", "1.0.0").dep("c", "1.0").publish();
+ Package::new("b2", "1.0.0").dep("d", "1.0").publish();
+ Package::new("c", "1.0.0").publish();
+ Package::new("d", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ b1 = "1.0"
+ b2 = "1.0"
+ c = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── b1 v1.0.0
+│ └── c v1.0.0
+├── b2 v1.0.0
+│ └── d v1.0.0
+└── c v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree --invert c")
+ .with_stdout(
+ "\
+c v1.0.0
+├── b1 v1.0.0
+│ └── foo v0.1.0 ([..]/foo)
+└── foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invert_with_build_dep() {
+ // -i for a common dependency between normal and build deps.
+ Package::new("common", "1.0.0").publish();
+ Package::new("bdep", "1.0.0").dep("common", "1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ common = "1.0"
+
+ [build-dependencies]
+ bdep = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── common v1.0.0
+[build-dependencies]
+└── bdep v1.0.0
+ └── common v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -i common")
+ .with_stdout(
+ "\
+common v1.0.0
+├── bdep v1.0.0
+│ [build-dependencies]
+│ └── foo v0.1.0 ([..]/foo)
+└── foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_indent() {
+ let p = make_simple_proj();
+
+ p.cargo("tree --prefix=none")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+a v1.0.0
+b v1.0.0
+c v1.0.0
+c v1.0.0
+bdep v1.0.0
+b v1.0.0 (*)
+devdep v1.0.0
+b v1.0.0 (*)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn prefix_depth() {
+ let p = make_simple_proj();
+
+ p.cargo("tree --prefix=depth")
+ .with_stdout(
+ "\
+0foo v0.1.0 ([..]/foo)
+1a v1.0.0
+2b v1.0.0
+3c v1.0.0
+1c v1.0.0
+1bdep v1.0.0
+2b v1.0.0 (*)
+1devdep v1.0.0
+2b v1.0.0 (*)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_dedupe() {
+ let p = make_simple_proj();
+
+ p.cargo("tree --no-dedupe")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── a v1.0.0
+│ └── b v1.0.0
+│ └── c v1.0.0
+└── c v1.0.0
+[build-dependencies]
+└── bdep v1.0.0
+ └── b v1.0.0
+ └── c v1.0.0
+[dev-dependencies]
+└── devdep v1.0.0
+ └── b v1.0.0
+ └── c v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_dedupe_cycle() {
+ // --no-dedupe with a dependency cycle
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dev-dependencies]
+ bar = {path = "bar"}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ foo = {path=".."}
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+[dev-dependencies]
+└── bar v0.1.0 ([..]/foo/bar)
+ └── foo v0.1.0 ([..]/foo) (*)
+",
+ )
+ .run();
+
+ p.cargo("tree --no-dedupe")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+[dev-dependencies]
+└── bar v0.1.0 ([..]/foo/bar)
+ └── foo v0.1.0 ([..]/foo) (*)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn duplicates() {
+ Package::new("dog", "1.0.0").publish();
+ Package::new("dog", "2.0.0").publish();
+ Package::new("cat", "1.0.0").publish();
+ Package::new("cat", "2.0.0").publish();
+ Package::new("dep", "1.0.0")
+ .dep("dog", "1.0")
+ .dep("cat", "1.0")
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ dog1 = { version = "1.0", package = "dog" }
+ dog2 = { version = "2.0", package = "dog" }
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = "1.0"
+ cat = "2.0"
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -p a")
+ .with_stdout(
+ "\
+a v0.1.0 ([..]/foo/a)
+├── dog v1.0.0
+└── dog v2.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -p b")
+ .with_stdout(
+ "\
+b v0.1.0 ([..]/foo/b)
+├── cat v2.0.0
+└── dep v1.0.0
+ ├── cat v1.0.0
+ └── dog v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -p a -d")
+ .with_stdout(
+ "\
+dog v1.0.0
+└── a v0.1.0 ([..]/foo/a)
+
+dog v2.0.0
+└── a v0.1.0 ([..]/foo/a)
+",
+ )
+ .run();
+
+ p.cargo("tree -p b -d")
+ .with_stdout(
+ "\
+cat v1.0.0
+└── dep v1.0.0
+ └── b v0.1.0 ([..]/foo/b)
+
+cat v2.0.0
+└── b v0.1.0 ([..]/foo/b)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn duplicates_with_target() {
+ // --target flag
+ if cross_compile::disabled() {
+ return;
+ }
+ Package::new("a", "1.0.0").publish();
+ Package::new("dog", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = "1.0"
+ dog = "1.0"
+
+ [build-dependencies]
+ a = "1.0"
+ dog = "1.0"
+
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+ p.cargo("tree -d").with_stdout("").run();
+
+ p.cargo("tree -d --target")
+ .arg(alternate())
+ .with_stdout("")
+ .run();
+
+ p.cargo("tree -d --target")
+ .arg(rustc_host())
+ .with_stdout("")
+ .run();
+
+ p.cargo("tree -d --target=all").with_stdout("").run();
+}
+
+#[cargo_test]
+fn charset() {
+ let p = make_simple_proj();
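+    // `--charset ascii` replaces the Unicode box-drawing characters with ASCII.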
+ p.cargo("tree --charset ascii")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+|-- a v1.0.0
+| `-- b v1.0.0
+| `-- c v1.0.0
+`-- c v1.0.0
+[build-dependencies]
+`-- bdep v1.0.0
+ `-- b v1.0.0 (*)
+[dev-dependencies]
+`-- devdep v1.0.0
+ `-- b v1.0.0 (*)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn format() {
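+    // Placeholders exercised below: {p} package, {l} license, {r} repository,
+    // {f} enabled features, {lib} library name.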
+ Package::new("dep", "1.0.0").publish();
+ Package::new("other-dep", "1.0.0").publish();
+
+ Package::new("dep_that_is_awesome", "1.0.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "dep_that_is_awesome"
+ version = "1.0.0"
+
+ [lib]
+ name = "awesome_dep"
+ "#,
+ )
+ .file("src/lib.rs", "pub struct Straw;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ license = "MIT"
+ repository = "https://github.com/rust-lang/cargo"
+
+ [dependencies]
+ dep = {version="1.0", optional=true}
+ other-dep = {version="1.0", optional=true}
+ dep_that_is_awesome = {version="1.0", optional=true}
+
+
+ [features]
+ default = ["foo"]
+ foo = ["bar"]
+ bar = []
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("tree --format <<<{p}>>>")
+ .with_stdout("<<<foo v0.1.0 ([..]/foo)>>>")
+ .run();
+
+ p.cargo("tree --format {}")
+ .with_stderr(
+ "\
+[ERROR] tree format `{}` not valid
+
+Caused by:
+ unsupported pattern ``
+",
+ )
+ .with_status(101)
+ .run();
+
+ p.cargo("tree --format {p}-{{hello}}")
+ .with_stdout("foo v0.1.0 ([..]/foo)-{hello}")
+ .run();
+
+ p.cargo("tree --format")
+ .arg("{p} {l} {r}")
+ .with_stdout("foo v0.1.0 ([..]/foo) MIT https://github.com/rust-lang/cargo")
+ .run();
+
+ p.cargo("tree --format")
+ .arg("{p} {f}")
+ .with_stdout("foo v0.1.0 ([..]/foo) bar,default,foo")
+ .run();
+
+ p.cargo("tree --all-features --format")
+ .arg("{p} [{f}]")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo) [bar,default,dep,dep_that_is_awesome,foo,other-dep]
+├── dep v1.0.0 []
+├── dep_that_is_awesome v1.0.0 []
+└── other-dep v1.0.0 []
+",
+ )
+ .run();
+
+ p.cargo("tree")
+ .arg("--features=other-dep,dep_that_is_awesome")
+ .arg("--format={lib}")
+ .with_stdout(
+ "
+├── awesome_dep
+└── other_dep
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dev_dep_feature() {
+ // New feature resolver with optional dep
+ Package::new("optdep", "1.0.0").publish();
+ Package::new("bar", "1.0.0")
+ .add_dep(Dependency::new("optdep", "1.0").optional(true))
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dev-dependencies]
+ bar = { version = "1.0", features = ["optdep"] }
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Old behavior.
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── bar v1.0.0
+ └── optdep v1.0.0
+[dev-dependencies]
+└── bar v1.0.0 (*)
+",
+ )
+ .run();
+
+ p.cargo("tree -e normal")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── bar v1.0.0
+ └── optdep v1.0.0
+",
+ )
+ .run();
+
+ // New behavior.
+ switch_to_resolver_2(&p);
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── bar v1.0.0
+ └── optdep v1.0.0
+[dev-dependencies]
+└── bar v1.0.0 (*)
+",
+ )
+ .run();
+
+ p.cargo("tree -e normal")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── bar v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn host_dep_feature() {
+ // New feature resolver with optional build dep
+ Package::new("optdep", "1.0.0").publish();
+ Package::new("bar", "1.0.0")
+ .add_dep(Dependency::new("optdep", "1.0").optional(true))
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [build-dependencies]
+ bar = { version = "1.0", features = ["optdep"] }
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("build.rs", "fn main() {}")
+ .build();
+
+ // Old behavior
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── bar v1.0.0
+ └── optdep v1.0.0
+[build-dependencies]
+└── bar v1.0.0 (*)
+",
+ )
+ .run();
+
+ // -p
+ p.cargo("tree -p bar")
+ .with_stdout(
+ "\
+bar v1.0.0
+└── optdep v1.0.0
+",
+ )
+ .run();
+
+ // invert
+ p.cargo("tree -i optdep")
+ .with_stdout(
+ "\
+optdep v1.0.0
+└── bar v1.0.0
+ └── foo v0.1.0 ([..]/foo)
+ [build-dependencies]
+ └── foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+
+ // New behavior.
+ switch_to_resolver_2(&p);
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── bar v1.0.0
+[build-dependencies]
+└── bar v1.0.0
+ └── optdep v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -p bar")
+ .with_stdout(
+ "\
+bar v1.0.0
+
+bar v1.0.0
+└── optdep v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -i optdep")
+ .with_stdout(
+ "\
+optdep v1.0.0
+└── bar v1.0.0
+ [build-dependencies]
+ └── foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+
+ // Check that -d handles duplicates with features.
+ p.cargo("tree -d")
+ .with_stdout(
+ "\
+bar v1.0.0
+└── foo v0.1.0 ([..]/foo)
+
+bar v1.0.0
+[build-dependencies]
+└── foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn proc_macro_features() {
+ // New feature resolver with a proc-macro
+ Package::new("optdep", "1.0.0").publish();
+ Package::new("somedep", "1.0.0")
+ .add_dep(Dependency::new("optdep", "1.0").optional(true))
+ .publish();
+ Package::new("pm", "1.0.0")
+ .proc_macro(true)
+ .feature_dep("somedep", "1.0", &["optdep"])
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ pm = "1.0"
+ somedep = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Old behavior
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── pm v1.0.0 (proc-macro)
+│ └── somedep v1.0.0
+│ └── optdep v1.0.0
+└── somedep v1.0.0 (*)
+",
+ )
+ .run();
+
+ // Old behavior + no-proc-macro
+ p.cargo("tree -e no-proc-macro")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── somedep v1.0.0
+ └── optdep v1.0.0
+",
+ )
+ .run();
+
+ // -p
+ p.cargo("tree -p somedep")
+ .with_stdout(
+ "\
+somedep v1.0.0
+└── optdep v1.0.0
+",
+ )
+ .run();
+
+ // -p -e no-proc-macro
+ p.cargo("tree -p somedep -e no-proc-macro")
+ .with_stdout(
+ "\
+somedep v1.0.0
+└── optdep v1.0.0
+",
+ )
+ .run();
+
+ // invert
+ p.cargo("tree -i somedep")
+ .with_stdout(
+ "\
+somedep v1.0.0
+├── foo v0.1.0 ([..]/foo)
+└── pm v1.0.0 (proc-macro)
+ └── foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+
+ // invert + no-proc-macro
+ p.cargo("tree -i somedep -e no-proc-macro")
+ .with_stdout(
+ "\
+somedep v1.0.0
+└── foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+
+ // New behavior.
+ switch_to_resolver_2(&p);
+
+ // Note the missing (*)
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── pm v1.0.0 (proc-macro)
+│ └── somedep v1.0.0
+│ └── optdep v1.0.0
+└── somedep v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -e no-proc-macro")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── somedep v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -p somedep")
+ .with_stdout(
+ "\
+somedep v1.0.0
+
+somedep v1.0.0
+└── optdep v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -i somedep")
+ .with_stdout(
+ "\
+somedep v1.0.0
+└── foo v0.1.0 ([..]/foo)
+
+somedep v1.0.0
+└── pm v1.0.0 (proc-macro)
+ └── foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+
+ p.cargo("tree -i somedep -e no-proc-macro")
+ .with_stdout(
+ "\
+somedep v1.0.0
+└── foo v0.1.0 ([..]/foo)
+
+somedep v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn itarget_opt_dep() {
+ // New feature resolver with optional target dep
+ Package::new("optdep", "1.0.0").publish();
+ Package::new("common", "1.0.0")
+ .add_dep(Dependency::new("optdep", "1.0").optional(true))
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [dependencies]
+ common = "1.0"
+
+ [target.'cfg(whatever)'.dependencies]
+ common = { version = "1.0", features = ["optdep"] }
+
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Old behavior
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v1.0.0 ([..]/foo)
+└── common v1.0.0
+ └── optdep v1.0.0
+",
+ )
+ .run();
+
+ // New behavior.
+ switch_to_resolver_2(&p);
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+foo v1.0.0 ([..]/foo)
+└── common v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn ambiguous_name() {
+ // -p that is ambiguous.
+ Package::new("dep", "1.0.0").publish();
+ Package::new("dep", "2.0.0").publish();
+ Package::new("bar", "1.0.0").dep("dep", "2.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = "1.0"
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -p dep")
+ .with_stderr_contains(
+ "\
+error: There are multiple `dep` packages in your project, and the specification `dep` is ambiguous.
+Please re-run this command with `-p <spec>` where `<spec>` is one of the following:
+ dep@1.0.0
+ dep@2.0.0
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn workspace_features_are_local() {
+ // The features for workspace packages should be the same as `cargo build`
+ // (i.e., the features selected depend on the "current" package).
+ Package::new("optdep", "1.0.0").publish();
+ Package::new("somedep", "1.0.0")
+ .add_dep(Dependency::new("optdep", "1.0").optional(true))
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ somedep = {version="1.0", features=["optdep"]}
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [dependencies]
+ somedep = "1.0"
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("tree")
+ .with_stdout(
+ "\
+a v0.1.0 ([..]/foo/a)
+└── somedep v1.0.0
+ └── optdep v1.0.0
+
+b v0.1.0 ([..]/foo/b)
+└── somedep v1.0.0 (*)
+",
+ )
+ .run();
+
+ p.cargo("tree -p a")
+ .with_stdout(
+ "\
+a v0.1.0 ([..]/foo/a)
+└── somedep v1.0.0
+ └── optdep v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -p b")
+ .with_stdout(
+ "\
+b v0.1.0 ([..]/foo/b)
+└── somedep v1.0.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn unknown_edge_kind() {
+ let p = project()
+ .file("Cargo.toml", "")
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -e unknown")
+ .with_stderr(
+ "\
+[ERROR] unknown edge kind `unknown`, valid values are \
+\"normal\", \"build\", \"dev\", \
+\"no-normal\", \"no-build\", \"no-dev\", \"no-proc-macro\", \
+\"features\", or \"all\"
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn mixed_no_edge_kinds() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -e no-build,normal")
+ .with_stderr(
+ "\
+[ERROR] `normal` dependency kind cannot be mixed with \
+\"no-normal\", \"no-build\", or \"no-dev\" dependency kinds
+",
+ )
+ .with_status(101)
+ .run();
+
+ // `no-proc-macro` can be mixed with others
+ p.cargo("tree -e no-proc-macro,normal")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn depth_limit() {
+ let p = make_simple_proj();
+
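+    // `--depth N` limits how far below each root the tree is printed; the
+    // [build-dependencies]/[dev-dependencies] headers are still shown.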
+ p.cargo("tree --depth 0")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+[build-dependencies]
+[dev-dependencies]
+",
+ )
+ .run();
+
+ p.cargo("tree --depth 1")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── a v1.0.0
+└── c v1.0.0
+[build-dependencies]
+└── bdep v1.0.0
+[dev-dependencies]
+└── devdep v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree --depth 2")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── a v1.0.0
+│ └── b v1.0.0
+└── c v1.0.0
+[build-dependencies]
+└── bdep v1.0.0
+ └── b v1.0.0 (*)
+[dev-dependencies]
+└── devdep v1.0.0
+ └── b v1.0.0 (*)
+",
+ )
+ .run();
+
+ // specify a package
+ p.cargo("tree -p bdep --depth 1")
+ .with_stdout(
+ "\
+bdep v1.0.0
+└── b v1.0.0
+",
+ )
+ .run();
+
+ // different prefix
+ p.cargo("tree --depth 1 --prefix depth")
+ .with_stdout(
+ "\
+0foo v0.1.0 ([..]/foo)
+1a v1.0.0
+1c v1.0.0
+1bdep v1.0.0
+1devdep v1.0.0
+",
+ )
+ .run();
+
+ // with edge-kinds
+ p.cargo("tree --depth 1 -e no-dev")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── a v1.0.0
+└── c v1.0.0
+[build-dependencies]
+└── bdep v1.0.0
+",
+ )
+ .run();
+
+ // invert
+ p.cargo("tree --depth 1 --invert c")
+ .with_stdout(
+ "\
+c v1.0.0
+├── b v1.0.0
+└── foo v0.1.0 ([..]/foo)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn prune() {
+ let p = make_simple_proj();
+
+ p.cargo("tree --prune c")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── a v1.0.0
+ └── b v1.0.0
+[build-dependencies]
+└── bdep v1.0.0
+ └── b v1.0.0 (*)
+[dev-dependencies]
+└── devdep v1.0.0
+ └── b v1.0.0 (*)
+",
+ )
+ .run();
+
+ // multiple prune
+ p.cargo("tree --prune c --prune bdep")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── a v1.0.0
+ └── b v1.0.0
+[build-dependencies]
+[dev-dependencies]
+└── devdep v1.0.0
+ └── b v1.0.0 (*)
+",
+ )
+ .run();
+
+ // with edge-kinds
+ p.cargo("tree --prune c -e normal")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── a v1.0.0
+ └── b v1.0.0
+",
+ )
+ .run();
+
+ // pruning self does not work
+ p.cargo("tree --prune foo")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── a v1.0.0
+│ └── b v1.0.0
+│ └── c v1.0.0
+└── c v1.0.0
+[build-dependencies]
+└── bdep v1.0.0
+ └── b v1.0.0 (*)
+[dev-dependencies]
+└── devdep v1.0.0
+ └── b v1.0.0 (*)
+",
+ )
+ .run();
+
+ // prune a dep that does not exist
+ p.cargo("tree --prune no-dep")
+ .with_stderr(
+ "\
+[ERROR] package ID specification `no-dep` did not match any packages
+
+<tab>Did you mean `bdep`?
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn cyclic_features() {
+ // Check for stack overflow with cyclic features (oops!).
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [features]
+ a = ["b"]
+ b = ["a"]
+ default = ["a"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -e features")
+ .with_stdout("foo v1.0.0 ([ROOT]/foo)")
+ .run();
+
+ p.cargo("tree -e features -i foo")
+ .with_stdout(
+ "\
+foo v1.0.0 ([ROOT]/foo)
+├── foo feature \"a\"
+│ ├── foo feature \"b\"
+│ │ └── foo feature \"a\" (*)
+│ └── foo feature \"default\" (command-line)
+├── foo feature \"b\" (*)
+└── foo feature \"default\" (command-line)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dev_dep_cycle_with_feature() {
+ // Cycle with features and a dev-dependency.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [dev-dependencies]
+ bar = { path = "bar" }
+
+ [features]
+ a = ["bar/feat1"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "1.0.0"
+
+ [dependencies]
+ foo = { path = ".." }
+
+ [features]
+ feat1 = ["foo/a"]
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -e features --features a")
+ .with_stdout(
+ "\
+foo v1.0.0 ([ROOT]/foo)
+[dev-dependencies]
+└── bar feature \"default\"
+ └── bar v1.0.0 ([ROOT]/foo/bar)
+ └── foo feature \"default\" (command-line)
+ └── foo v1.0.0 ([ROOT]/foo) (*)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --features a -i foo")
+ .with_stdout(
+ "\
+foo v1.0.0 ([ROOT]/foo)
+├── foo feature \"a\" (command-line)
+│ └── bar feature \"feat1\"
+│ └── foo feature \"a\" (command-line) (*)
+└── foo feature \"default\" (command-line)
+ └── bar v1.0.0 ([ROOT]/foo/bar)
+ ├── bar feature \"default\"
+ │ [dev-dependencies]
+ │ └── foo v1.0.0 ([ROOT]/foo) (*)
+ └── bar feature \"feat1\" (*)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn dev_dep_cycle_with_feature_nested() {
+ // Checks for an issue where a cyclic dev dependency tries to activate a
+ // feature on its parent that tries to activate the feature back on the
+ // dev-dependency.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+
+ [dev-dependencies]
+ bar = { path = "bar" }
+
+ [features]
+ a = ["bar/feat1"]
+ b = ["a"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "1.0.0"
+
+ [dependencies]
+ foo = { path = ".." }
+
+ [features]
+ feat1 = ["foo/b"]
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -e features")
+ .with_stdout(
+ "\
+foo v1.0.0 ([ROOT]/foo)
+[dev-dependencies]
+└── bar feature \"default\"
+ └── bar v1.0.0 ([ROOT]/foo/bar)
+ └── foo feature \"default\" (command-line)
+ └── foo v1.0.0 ([ROOT]/foo) (*)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --features a -i foo")
+ .with_stdout(
+ "\
+foo v1.0.0 ([ROOT]/foo)
+├── foo feature \"a\" (command-line)
+│ └── foo feature \"b\"
+│ └── bar feature \"feat1\"
+│ └── foo feature \"a\" (command-line) (*)
+├── foo feature \"b\" (*)
+└── foo feature \"default\" (command-line)
+ └── bar v1.0.0 ([ROOT]/foo/bar)
+ ├── bar feature \"default\"
+ │ [dev-dependencies]
+ │ └── foo v1.0.0 ([ROOT]/foo) (*)
+ └── bar feature \"feat1\" (*)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --features b -i foo")
+ .with_stdout(
+ "\
+foo v1.0.0 ([ROOT]/foo)
+├── foo feature \"a\"
+│ └── foo feature \"b\" (command-line)
+│ └── bar feature \"feat1\"
+│ └── foo feature \"a\" (*)
+├── foo feature \"b\" (command-line) (*)
+└── foo feature \"default\" (command-line)
+ └── bar v1.0.0 ([ROOT]/foo/bar)
+ ├── bar feature \"default\"
+ │ [dev-dependencies]
+ │ └── foo v1.0.0 ([ROOT]/foo) (*)
+ └── bar feature \"feat1\" (*)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --features bar/feat1 -i foo")
+ .with_stdout(
+ "\
+foo v1.0.0 ([ROOT]/foo)
+├── foo feature \"a\"
+│ └── foo feature \"b\"
+│ └── bar feature \"feat1\" (command-line)
+│ └── foo feature \"a\" (*)
+├── foo feature \"b\" (*)
+└── foo feature \"default\" (command-line)
+ └── bar v1.0.0 ([ROOT]/foo/bar)
+ ├── bar feature \"default\"
+ │ [dev-dependencies]
+ │ └── foo v1.0.0 ([ROOT]/foo) (*)
+ └── bar feature \"feat1\" (command-line) (*)
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/tree_graph_features.rs b/src/tools/cargo/tests/testsuite/tree_graph_features.rs
new file mode 100644
index 000000000..48d654c06
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/tree_graph_features.rs
@@ -0,0 +1,362 @@
+//! Tests for the `cargo tree` command with the `-e features` option.
+
+use cargo_test_support::project;
+use cargo_test_support::registry::{Dependency, Package};
+
+#[cargo_test]
+fn dep_feature_various() {
+ // Checks different ways of setting features via dependencies.
+ Package::new("optdep", "1.0.0")
+ .feature("default", &["cat"])
+ .feature("cat", &[])
+ .publish();
+ Package::new("defaultdep", "1.0.0")
+ .feature("default", &["f1"])
+ .feature("f1", &["optdep"])
+ .add_dep(Dependency::new("optdep", "1.0").optional(true))
+ .publish();
+ Package::new("nodefaultdep", "1.0.0")
+ .feature("default", &["f1"])
+ .feature("f1", &[])
+ .publish();
+ Package::new("nameddep", "1.0.0")
+ .add_dep(Dependency::new("serde", "1.0").optional(true))
+ .feature("default", &["serde-stuff"])
+ .feature("serde-stuff", &["serde/derive"])
+ .feature("vehicle", &["car"])
+ .feature("car", &[])
+ .publish();
+ Package::new("serde_derive", "1.0.0").publish();
+ Package::new("serde", "1.0.0")
+ .feature("derive", &["serde_derive"])
+ .add_dep(Dependency::new("serde_derive", "1.0").optional(true))
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ defaultdep = "1.0"
+ nodefaultdep = {version="1.0", default-features = false}
+ nameddep = {version="1.0", features = ["vehicle", "serde"]}
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -e features")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── nodefaultdep v1.0.0
+├── defaultdep feature \"default\"
+│ ├── defaultdep v1.0.0
+│ │ └── optdep feature \"default\"
+│ │ ├── optdep v1.0.0
+│ │ └── optdep feature \"cat\"
+│ │ └── optdep v1.0.0
+│ └── defaultdep feature \"f1\"
+│ ├── defaultdep v1.0.0 (*)
+│ └── defaultdep feature \"optdep\"
+│ └── defaultdep v1.0.0 (*)
+├── nameddep feature \"default\"
+│ ├── nameddep v1.0.0
+│ │ └── serde feature \"default\"
+│ │ └── serde v1.0.0
+│ │ └── serde_derive feature \"default\"
+│ │ └── serde_derive v1.0.0
+│ └── nameddep feature \"serde-stuff\"
+│ ├── nameddep v1.0.0 (*)
+│ ├── nameddep feature \"serde\"
+│ │ └── nameddep v1.0.0 (*)
+│ └── serde feature \"derive\"
+│ ├── serde v1.0.0 (*)
+│ └── serde feature \"serde_derive\"
+│ └── serde v1.0.0 (*)
+├── nameddep feature \"serde\" (*)
+└── nameddep feature \"vehicle\"
+ ├── nameddep v1.0.0 (*)
+ └── nameddep feature \"car\"
+ └── nameddep v1.0.0 (*)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn graph_features_ws_interdependent() {
+ // A workspace with interdependent crates.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a", "b"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ b = {path="../b", features=["feat2"]}
+
+ [features]
+ default = ["a1"]
+ a1 = []
+ a2 = []
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .file(
+ "b/Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [features]
+ default = ["feat1"]
+ feat1 = []
+ feat2 = []
+ "#,
+ )
+ .file("b/src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -e features")
+ .with_stdout(
+ "\
+a v0.1.0 ([..]/foo/a)
+├── b feature \"default\" (command-line)
+│ ├── b v0.1.0 ([..]/foo/b)
+│ └── b feature \"feat1\"
+│ └── b v0.1.0 ([..]/foo/b)
+└── b feature \"feat2\"
+ └── b v0.1.0 ([..]/foo/b)
+
+b v0.1.0 ([..]/foo/b)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features -i a -i b")
+ .with_stdout(
+ "\
+a v0.1.0 ([..]/foo/a)
+├── a feature \"a1\"
+│ └── a feature \"default\" (command-line)
+└── a feature \"default\" (command-line)
+
+b v0.1.0 ([..]/foo/b)
+├── b feature \"default\" (command-line)
+│ └── a v0.1.0 ([..]/foo/a) (*)
+├── b feature \"feat1\"
+│ └── b feature \"default\" (command-line) (*)
+└── b feature \"feat2\"
+ └── a v0.1.0 ([..]/foo/a) (*)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn slash_feature_name() {
+ // dep_name/feat_name syntax
+ Package::new("opt", "1.0.0").feature("feat1", &[]).publish();
+ Package::new("notopt", "1.0.0")
+ .feature("cat", &[])
+ .feature("animal", &["cat"])
+ .publish();
+ Package::new("opt2", "1.0.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ opt = {version = "1.0", optional=true}
+ opt2 = {version = "1.0", optional=true}
+ notopt = "1.0"
+
+ [features]
+ f1 = ["opt/feat1", "notopt/animal"]
+ f2 = ["f1"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("tree -e features --features f1")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── notopt feature \"default\"
+│ └── notopt v1.0.0
+└── opt feature \"default\"
+ └── opt v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --features f1 -i foo")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── foo feature \"default\" (command-line)
+├── foo feature \"f1\" (command-line)
+└── foo feature \"opt\"
+ └── foo feature \"f1\" (command-line)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --features f1 -i notopt")
+ .with_stdout(
+ "\
+notopt v1.0.0
+├── notopt feature \"animal\"
+│ └── foo feature \"f1\" (command-line)
+├── notopt feature \"cat\"
+│ └── notopt feature \"animal\" (*)
+└── notopt feature \"default\"
+ └── foo v0.1.0 ([..]/foo)
+ ├── foo feature \"default\" (command-line)
+ ├── foo feature \"f1\" (command-line)
+ └── foo feature \"opt\"
+ └── foo feature \"f1\" (command-line)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --features notopt/animal -i notopt")
+ .with_stdout(
+ "\
+notopt v1.0.0
+├── notopt feature \"animal\" (command-line)
+├── notopt feature \"cat\"
+│ └── notopt feature \"animal\" (command-line)
+└── notopt feature \"default\"
+ └── foo v0.1.0 ([..]/foo)
+ └── foo feature \"default\" (command-line)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --all-features")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── notopt feature \"default\"
+│ └── notopt v1.0.0
+├── opt feature \"default\"
+│ └── opt v1.0.0
+└── opt2 feature \"default\"
+ └── opt2 v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --all-features -i opt2")
+ .with_stdout(
+ "\
+opt2 v1.0.0
+└── opt2 feature \"default\"
+ └── foo v0.1.0 ([..]/foo)
+ ├── foo feature \"default\" (command-line)
+ ├── foo feature \"f1\" (command-line)
+ │ └── foo feature \"f2\" (command-line)
+ ├── foo feature \"f2\" (command-line)
+ ├── foo feature \"opt\" (command-line)
+ │ └── foo feature \"f1\" (command-line) (*)
+ └── foo feature \"opt2\" (command-line)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn features_enables_inactive_target() {
+ // Features that enable things on targets that are not enabled.
+ Package::new("optdep", "1.0.0")
+ .feature("feat1", &[])
+ .publish();
+ Package::new("dep1", "1.0.0")
+ .feature("somefeat", &[])
+ .publish();
+ Package::new("dep2", "1.0.0")
+ .add_dep(
+ Dependency::new("optdep", "1.0.0")
+ .optional(true)
+ .target("cfg(whatever)"),
+ )
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [target.'cfg(whatever)'.dependencies]
+ optdep = {version="1.0", optional=true}
+ dep1 = "1.0"
+
+ [dependencies]
+ dep2 = "1.0"
+
+ [features]
+ f1 = ["optdep"]
+ f2 = ["optdep/feat1"]
+ f3 = ["dep1/somefeat"]
+ f4 = ["dep2/optdep"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("tree -e features")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── dep2 feature \"default\"
+ └── dep2 v1.0.0
+",
+ )
+ .run();
+ p.cargo("tree -e features --all-features")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+└── dep2 feature \"default\"
+ └── dep2 v1.0.0
+",
+ )
+ .run();
+ p.cargo("tree -e features --all-features --target=all")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo)
+├── dep1 feature \"default\"
+│ └── dep1 v1.0.0
+├── dep2 feature \"default\"
+│ └── dep2 v1.0.0
+│ └── optdep feature \"default\"
+│ └── optdep v1.0.0
+└── optdep feature \"default\" (*)
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/unit_graph.rs b/src/tools/cargo/tests/testsuite/unit_graph.rs
new file mode 100644
index 000000000..91451177a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/unit_graph.rs
@@ -0,0 +1,233 @@
+//! Tests for the `--unit-graph` option.
+
+use cargo_test_support::project;
+use cargo_test_support::registry::Package;
+
+#[cargo_test]
+fn gated() {
+ let p = project().file("src/lib.rs", "").build();
+ p.cargo("build --unit-graph")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] the `--unit-graph` flag is unstable[..]
+See [..]
+See [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn simple() {
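+ // Verifies the full `--unit-graph` JSON for a small registry chain
+ // foo -> a -> b -> c, with features propagated along the chain.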
+ Package::new("a", "1.0.0")
+ .dep("b", "1.0")
+ .feature("feata", &["b/featb"])
+ .publish();
+ Package::new("b", "1.0.0")
+ .dep("c", "1.0")
+ .feature("featb", &["c/featc"])
+ .publish();
+ Package::new("c", "1.0.0").feature("featc", &[]).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("build --features a/feata --unit-graph -Zunstable-options")
+ .masquerade_as_nightly_cargo(&["unit-graph"])
+ .with_json(
+ r#"{
+ "roots": [
+ 3
+ ],
+ "units": [
+ {
+ "dependencies": [
+ {
+ "extern_crate_name": "b",
+ "index": 1,
+ "noprelude": false,
+ "public": false
+ }
+ ],
+ "features": [
+ "feata"
+ ],
+ "mode": "build",
+ "pkg_id": "a 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "platform": null,
+ "profile": {
+ "codegen_backend": null,
+ "codegen_units": null,
+ "debug_assertions": true,
+ "debuginfo": 2,
+ "incremental": false,
+ "lto": "false",
+ "name": "dev",
+ "opt_level": "0",
+ "overflow_checks": true,
+ "panic": "unwind",
+ "rpath": false,
+ "split_debuginfo": "{...}",
+ "strip": "none"
+ },
+ "target": {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "a",
+ "src_path": "[..]/a-1.0.0/src/lib.rs",
+ "test": true
+ }
+ },
+ {
+ "dependencies": [
+ {
+ "extern_crate_name": "c",
+ "index": 2,
+ "noprelude": false,
+ "public": false
+ }
+ ],
+ "features": [
+ "featb"
+ ],
+ "mode": "build",
+ "pkg_id": "b 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "platform": null,
+ "profile": {
+ "codegen_backend": null,
+ "codegen_units": null,
+ "debug_assertions": true,
+ "debuginfo": 2,
+ "incremental": false,
+ "lto": "false",
+ "name": "dev",
+ "opt_level": "0",
+ "overflow_checks": true,
+ "panic": "unwind",
+ "rpath": false,
+ "split_debuginfo": "{...}",
+ "strip": "none"
+ },
+ "target": {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "b",
+ "src_path": "[..]/b-1.0.0/src/lib.rs",
+ "test": true
+ }
+ },
+ {
+ "dependencies": [],
+ "features": [
+ "featc"
+ ],
+ "mode": "build",
+ "pkg_id": "c 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "platform": null,
+ "profile": {
+ "codegen_backend": null,
+ "codegen_units": null,
+ "debug_assertions": true,
+ "debuginfo": 2,
+ "incremental": false,
+ "lto": "false",
+ "name": "dev",
+ "opt_level": "0",
+ "overflow_checks": true,
+ "panic": "unwind",
+ "rpath": false,
+ "split_debuginfo": "{...}",
+ "strip": "none"
+ },
+ "target": {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "c",
+ "src_path": "[..]/c-1.0.0/src/lib.rs",
+ "test": true
+ }
+ },
+ {
+ "dependencies": [
+ {
+ "extern_crate_name": "a",
+ "index": 0,
+ "noprelude": false,
+ "public": false
+ }
+ ],
+ "features": [],
+ "mode": "build",
+ "pkg_id": "foo 0.1.0 (path+file://[..]/foo)",
+ "platform": null,
+ "profile": {
+ "codegen_backend": null,
+ "codegen_units": null,
+ "debug_assertions": true,
+ "debuginfo": 2,
+ "incremental": false,
+ "lto": "false",
+ "name": "dev",
+ "opt_level": "0",
+ "overflow_checks": true,
+ "panic": "unwind",
+ "rpath": false,
+ "split_debuginfo": "{...}",
+ "strip": "none"
+ },
+ "target": {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "foo",
+ "src_path": "[..]/foo/src/lib.rs",
+ "test": true
+ }
+ }
+ ],
+ "version": 1
+ }
+ "#,
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/update.rs b/src/tools/cargo/tests/testsuite/update.rs
new file mode 100644
index 000000000..057c8fca4
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/update.rs
@@ -0,0 +1,832 @@
+//! Tests for the `cargo update` command.
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_manifest, project};
+
+#[cargo_test]
+fn minor_update_two_places() {
+ Package::new("log", "0.1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ log = "0.1"
+ foo = { path = "foo" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ log = "0.1"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+ Package::new("log", "0.1.1").publish();
+
+ p.change_file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ log = "0.1.1"
+ "#,
+ );
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn transitive_minor_update() {
+ Package::new("log", "0.1.0").publish();
+ Package::new("serde", "0.1.0").dep("log", "0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ serde = "0.1"
+ log = "0.1"
+ foo = { path = "foo" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ serde = "0.1"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ Package::new("log", "0.1.1").publish();
+ Package::new("serde", "0.1.1").dep("log", "0.1.1").publish();
+
+ // Note that `serde` isn't actually updated here! The default behavior for
+ // `update` right now is to attempt to satisfy an update as conservatively as
+ // possible. In this case we previously locked the dependency graph to `log
+ // 0.1.0`, but nothing on the command line says we're allowed to update
+ // that. As a result the update of `serde` here shouldn't update to `serde
+ // 0.1.1` as that would also force an update to `log 0.1.1`.
+ //
+ // Also note that this is probably counterintuitive and weird. We may wish
+ // to change this one day.
+ p.cargo("update -p serde")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn conservative() {
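+ // `update -p serde` bumps only `serde`; `log` stays at 0.1.0 because
+ // serde 0.1.1 still accepts it.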
+ Package::new("log", "0.1.0").publish();
+ Package::new("serde", "0.1.0").dep("log", "0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ serde = "0.1"
+ log = "0.1"
+ foo = { path = "foo" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ serde = "0.1"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ Package::new("log", "0.1.1").publish();
+ Package::new("serde", "0.1.1").dep("log", "0.1").publish();
+
+ p.cargo("update -p serde")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] serde v0.1.0 -> v0.1.1
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_via_new_dep() {
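+ // Adding a new dependency whose requirement (`log = "0.1.1"`) is newer than
+ // the locked version should update `log` in the lockfile.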
+ Package::new("log", "0.1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ log = "0.1"
+ # foo = { path = "foo" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ log = "0.1.1"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+ Package::new("log", "0.1.1").publish();
+
+ p.uncomment_root_manifest();
+ p.cargo("check").env("CARGO_LOG", "cargo=trace").run();
+}
+
+#[cargo_test]
+fn update_via_new_member() {
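+ // Same idea as above, but the stricter requirement comes from a newly added
+ // workspace member.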
+ Package::new("log", "0.1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [workspace]
+ # members = [ "foo" ]
+
+ [dependencies]
+ log = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ log = "0.1.1"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+ Package::new("log", "0.1.1").publish();
+
+ p.uncomment_root_manifest();
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn add_dep_deep_new_requirement() {
+ Package::new("log", "0.1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ log = "0.1"
+ # bar = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ Package::new("log", "0.1.1").publish();
+ Package::new("bar", "0.1.0").dep("log", "0.1.1").publish();
+
+ p.uncomment_root_manifest();
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn everything_real_deep() {
+ Package::new("log", "0.1.0").publish();
+ Package::new("foo", "0.1.0").dep("log", "0.1").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ foo = "0.1"
+ # bar = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ Package::new("log", "0.1.1").publish();
+ Package::new("bar", "0.1.0").dep("log", "0.1.1").publish();
+
+ p.uncomment_root_manifest();
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn change_package_version() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a-foo"
+ version = "0.2.0-alpha"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar", version = "0.2.0-alpha" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0-alpha"))
+ .file("bar/src/lib.rs", "")
+ .file(
+ "Cargo.lock",
+ r#"
+ [[package]]
+ name = "foo"
+ version = "0.2.0"
+ dependencies = ["bar 0.2.0"]
+
+ [[package]]
+ name = "bar"
+ version = "0.2.0"
+ "#,
+ )
+ .build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn update_precise() {
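+ // `--precise` pins to an exact version, here downgrading serde from 0.2.1 to 0.2.0.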
+ Package::new("serde", "0.1.0").publish();
+ Package::new("serde", "0.2.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ serde = "0.2"
+ foo = { path = "foo" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ serde = "0.1"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ Package::new("serde", "0.2.0").publish();
+
+ p.cargo("update -p serde:0.2.1 --precise 0.2.0")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNGRADING] serde v0.2.1 -> v0.2.0
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_precise_do_not_force_update_deps() {
+ Package::new("log", "0.1.0").publish();
+ Package::new("serde", "0.2.1").dep("log", "0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ serde = "0.2"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ Package::new("log", "0.1.1").publish();
+ Package::new("serde", "0.2.2").dep("log", "0.1").publish();
+
+ p.cargo("update -p serde:0.2.1 --precise 0.2.2")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] serde v0.2.1 -> v0.2.2
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn update_aggressive() {
+ Package::new("log", "0.1.0").publish();
+ Package::new("serde", "0.2.1").dep("log", "0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ serde = "0.2"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ Package::new("log", "0.1.1").publish();
+ Package::new("serde", "0.2.2").dep("log", "0.1").publish();
+
+ p.cargo("update -p serde:0.2.1 --aggressive")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] log v0.1.0 -> v0.1.1
+[UPDATING] serde v0.2.1 -> v0.2.2
+",
+ )
+ .run();
+}
+
+// cargo update should respect its arguments even without a lockfile.
+// See issue "Running cargo update without a Cargo.lock ignores arguments"
+// at <https://github.com/rust-lang/cargo/issues/6872>.
+#[cargo_test]
+fn update_precise_first_run() {
+ Package::new("serde", "0.1.0").publish();
+ Package::new("serde", "0.2.0").publish();
+ Package::new("serde", "0.2.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+
+ [dependencies]
+ serde = "0.2"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("update -p serde --precise 0.2.0")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNGRADING] serde v0.2.1 -> v0.2.0
+",
+ )
+ .run();
+
+ // Assert `cargo metadata` shows serde 0.2.0
+ p.cargo("metadata")
+ .with_json(
+ r#"{
+ "packages": [
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [
+ {
+ "features": [],
+ "kind": null,
+ "name": "serde",
+ "optional": false,
+ "registry": null,
+ "rename": null,
+ "req": "^0.2",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "target": null,
+ "uses_default_features": true
+ }
+ ],
+ "description": null,
+ "documentation": null,
+ "edition": "2015",
+ "features": {},
+ "homepage": null,
+ "id": "bar 0.0.1 (path+file://[..]/foo)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]/foo/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "name": "bar",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "source": null,
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "test": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "bar",
+ "src_path": "[..]/foo/src/lib.rs"
+ }
+ ],
+ "version": "0.0.1"
+ },
+ {
+ "authors": [],
+ "categories": [],
+ "default_run": null,
+ "dependencies": [],
+ "description": null,
+ "documentation": null,
+ "edition": "2015",
+ "features": {},
+ "homepage": null,
+ "id": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "keywords": [],
+ "license": null,
+ "license_file": null,
+ "links": null,
+ "manifest_path": "[..]/home/.cargo/registry/src/-[..]/serde-0.2.0/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "name": "serde",
+ "readme": null,
+ "repository": null,
+ "rust_version": null,
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "targets": [
+ {
+ "crate_types": [
+ "lib"
+ ],
+ "doc": true,
+ "doctest": true,
+ "edition": "2015",
+ "kind": [
+ "lib"
+ ],
+ "name": "serde",
+ "src_path": "[..]/home/.cargo/registry/src/-[..]/serde-0.2.0/src/lib.rs",
+ "test": true
+ }
+ ],
+ "version": "0.2.0"
+ }
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "dependencies": [
+ "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ "deps": [
+ {
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ],
+ "name": "serde",
+ "pkg": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)"
+ }
+ ],
+ "features": [],
+ "id": "bar 0.0.1 (path+file://[..]/foo)"
+ },
+ {
+ "dependencies": [],
+ "deps": [],
+ "features": [],
+ "id": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)"
+ }
+ ],
+ "root": "bar 0.0.1 (path+file://[..]/foo)"
+ },
+ "target_directory": "[..]/foo/target",
+ "version": 1,
+ "workspace_members": [
+ "bar 0.0.1 (path+file://[..]/foo)"
+ ],
+ "workspace_root": "[..]/foo",
+ "metadata": null
+}"#,
+ )
+ .run();
+
+ p.cargo("update -p serde --precise 0.2.0")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn preserve_top_comment() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("update").run();
+
+ let lockfile = p.read_lockfile();
+ assert!(lockfile.starts_with("# This file is automatically @generated by Cargo.\n# It is not intended for manual editing.\n"));
+
+ let mut lines = lockfile.lines().collect::<Vec<_>>();
+ lines.insert(2, "# some other comment");
+ let mut lockfile = lines.join("\n");
+ lockfile.push('\n'); // .lines/.join loses the last newline
+ println!("saving Cargo.lock contents:\n{}", lockfile);
+
+ p.change_file("Cargo.lock", &lockfile);
+
+ p.cargo("update").run();
+
+ let lockfile2 = p.read_lockfile();
+ println!("loaded Cargo.lock contents:\n{}", lockfile2);
+
+ assert_eq!(lockfile, lockfile2);
+}
+
+#[cargo_test]
+fn dry_run_update() {
+ Package::new("log", "0.1.0").publish();
+ Package::new("serde", "0.1.0").dep("log", "0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ serde = "0.1"
+ log = "0.1"
+ foo = { path = "foo" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ serde = "0.1"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+ let old_lockfile = p.read_lockfile();
+
+ Package::new("log", "0.1.1").publish();
+ Package::new("serde", "0.1.1").dep("log", "0.1").publish();
+
+ p.cargo("update -p serde --dry-run")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[UPDATING] serde v0.1.0 -> v0.1.1
+[WARNING] not updating lockfile due to dry run
+",
+ )
+ .run();
+ let new_lockfile = p.read_lockfile();
+ assert_eq!(old_lockfile, new_lockfile)
+}
+
+#[cargo_test]
+fn workspace_only() {
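+ // `update --workspace` re-syncs only the workspace members' lockfile entries
+ // (here the package's own bumped version).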
+ let p = project().file("src/main.rs", "fn main() {}").build();
+ p.cargo("generate-lockfile").run();
+ let lock1 = p.read_lockfile();
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.2"
+ "#,
+ );
+ p.cargo("update --workspace").run();
+ let lock2 = p.read_lockfile();
+
+ assert_ne!(lock1, lock2);
+ assert!(lock1.contains("0.0.1"));
+ assert!(lock2.contains("0.0.2"));
+ assert!(!lock1.contains("0.0.2"));
+ assert!(!lock2.contains("0.0.1"));
+}
+
+#[cargo_test]
+fn precise_with_build_metadata() {
+ // Build metadata (the `+foo` suffix) shouldn't be necessary with --precise
+ Package::new("bar", "0.1.0+extra-stuff.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("generate-lockfile").run();
+ Package::new("bar", "0.1.1+extra-stuff.1").publish();
+ Package::new("bar", "0.1.2+extra-stuff.2").publish();
+
+ p.cargo("update -p bar --precise 0.1")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: invalid version format for precise version `0.1`
+
+Caused by:
+ unexpected end of input while parsing minor version number
+",
+ )
+ .run();
+
+ p.cargo("update -p bar --precise 0.1.1+does-not-match")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+error: no matching package named `bar` found
+location searched: registry `crates-io`
+required by package `foo v0.1.0 ([ROOT]/foo)`
+",
+ )
+ .run();
+
+ p.cargo("update -p bar --precise 0.1.1")
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[UPDATING] bar v0.1.0+extra-stuff.0 -> v0.1.1+extra-stuff.1
+",
+ )
+ .run();
+
+ Package::new("bar", "0.1.3").publish();
+ p.cargo("update -p bar --precise 0.1.3+foo")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+error: no matching package named `bar` found
+location searched: registry `crates-io`
+required by package `foo v0.1.0 ([ROOT]/foo)`
+",
+ )
+ .run();
+
+ p.cargo("update -p bar --precise 0.1.3")
+ .with_stderr(
+ "\
+[UPDATING] [..] index
+[UPDATING] bar v0.1.1+extra-stuff.1 -> v0.1.3
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/vendor.rs b/src/tools/cargo/tests/testsuite/vendor.rs
new file mode 100644
index 000000000..21a1c097c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/vendor.rs
@@ -0,0 +1,1152 @@
+//! Tests for the `cargo vendor` command.
+//!
+//! Note that every test here uses `--respect-source-config` so that the
+//! "fake" crates.io is used. Otherwise `vendor` would download the crates.io
+//! index from the network.
+
+use std::fs;
+
+use cargo_test_support::git;
+use cargo_test_support::registry::{self, Package, RegistryBuilder};
+use cargo_test_support::{basic_lib_manifest, basic_manifest, paths, project, Project};
+
+#[cargo_test]
+fn vendor_simple() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ log = "0.3.5"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("log", "0.3.5").publish();
+
+ p.cargo("vendor --respect-source-config").run();
+ let lock = p.read_file("vendor/log/Cargo.toml");
+ assert!(lock.contains("version = \"0.3.5\""));
+
+ add_vendor_config(&p);
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn vendor_sample_config() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ log = "0.3.5"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("log", "0.3.5").publish();
+
+ p.cargo("vendor --respect-source-config")
+ .with_stdout(
+ r#"[source.crates-io]
+replace-with = "vendored-sources"
+
+[source.vendored-sources]
+directory = "vendor"
+"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn vendor_sample_config_alt_registry() {
+ let registry = RegistryBuilder::new().alternative().http_index().build();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ log = { version = "0.3.5", registry = "alternative" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("log", "0.3.5").alternative(true).publish();
+
+ p.cargo("vendor --respect-source-config")
+ .with_stdout(format!(
+ r#"[source."{0}"]
+registry = "{0}"
+replace-with = "vendored-sources"
+
+[source.vendored-sources]
+directory = "vendor"
+"#,
+ registry.index_url()
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn vendor_path_specified() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ log = "0.3.5"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("log", "0.3.5").publish();
+
+ let path = if cfg!(windows) {
+ r#"deps\.vendor"#
+ } else {
+ "deps/.vendor"
+ };
+
+ let output = p
+ .cargo("vendor --respect-source-config")
+ .arg(path)
+ .exec_with_output()
+ .unwrap();
+ // Assert against original output to ensure that
+ // path is normalized by `ops::vendor` on Windows.
+ assert_eq!(
+ &String::from_utf8(output.stdout).unwrap(),
+ r#"[source.crates-io]
+replace-with = "vendored-sources"
+
+[source.vendored-sources]
+directory = "deps/.vendor"
+"#
+ );
+
+ let lock = p.read_file("deps/.vendor/log/Cargo.toml");
+ assert!(lock.contains("version = \"0.3.5\""));
+}
+
+fn add_vendor_config(p: &Project) {
+ p.change_file(
+ ".cargo/config",
+ r#"
+ [source.crates-io]
+ replace-with = 'vendor'
+
+ [source.vendor]
+ directory = 'vendor'
+ "#,
+ );
+}
+
+#[cargo_test]
+fn package_exclude() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("bar", "0.1.0")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ exclude = [".*", "!.include", "!.dotdir/include"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(".exclude", "")
+ .file(".include", "")
+ .file(".dotdir/exclude", "")
+ .file(".dotdir/include", "")
+ .publish();
+
+ p.cargo("vendor --respect-source-config").run();
+ let csum = p.read_file("vendor/bar/.cargo-checksum.json");
+ assert!(csum.contains(".include"));
+ assert!(!csum.contains(".exclude"));
+ assert!(!csum.contains(".dotdir/exclude"));
+ // Gitignore doesn't re-include a file in an excluded parent directory,
+ // even if negating it explicitly.
+ assert!(!csum.contains(".dotdir/include"));
+}
+
+#[cargo_test]
+fn two_versions() {
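+ // When two versions of a crate are vendored, the newest keeps the bare
+ // directory name and the older one gets a version-suffixed directory.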
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "0.8.0"
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "0.7.0"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ Package::new("bitflags", "0.7.0").publish();
+ Package::new("bitflags", "0.8.0").publish();
+
+ p.cargo("vendor --respect-source-config").run();
+
+ let lock = p.read_file("vendor/bitflags/Cargo.toml");
+ assert!(lock.contains("version = \"0.8.0\""));
+ let lock = p.read_file("vendor/bitflags-0.7.0/Cargo.toml");
+ assert!(lock.contains("version = \"0.7.0\""));
+
+ add_vendor_config(&p);
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn two_explicit_versions() {
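+ // With `--versioned-dirs`, every vendored crate gets a version-suffixed directory.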
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "0.8.0"
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "0.7.0"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ Package::new("bitflags", "0.7.0").publish();
+ Package::new("bitflags", "0.8.0").publish();
+
+ p.cargo("vendor --respect-source-config --versioned-dirs")
+ .run();
+
+ let lock = p.read_file("vendor/bitflags-0.8.0/Cargo.toml");
+ assert!(lock.contains("version = \"0.8.0\""));
+ let lock = p.read_file("vendor/bitflags-0.7.0/Cargo.toml");
+ assert!(lock.contains("version = \"0.7.0\""));
+
+ add_vendor_config(&p);
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn help() {
+ let p = project().build();
+ p.cargo("vendor -h").run();
+}
+
+#[cargo_test]
+fn update_versions() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "0.7.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("bitflags", "0.7.0").publish();
+ Package::new("bitflags", "0.8.0").publish();
+
+ p.cargo("vendor --respect-source-config").run();
+
+ let lock = p.read_file("vendor/bitflags/Cargo.toml");
+ assert!(lock.contains("version = \"0.7.0\""));
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "0.8.0"
+ "#,
+ );
+ p.cargo("vendor --respect-source-config").run();
+
+ let lock = p.read_file("vendor/bitflags/Cargo.toml");
+ assert!(lock.contains("version = \"0.8.0\""));
+}
+
+#[cargo_test]
+fn two_lockfiles() {
+ let p = project()
+ .no_manifest()
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "=0.7.0"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "=0.8.0"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ Package::new("bitflags", "0.7.0").publish();
+ Package::new("bitflags", "0.8.0").publish();
+
+ p.cargo("vendor --respect-source-config -s bar/Cargo.toml --manifest-path foo/Cargo.toml")
+ .run();
+
+ let lock = p.read_file("vendor/bitflags/Cargo.toml");
+ assert!(lock.contains("version = \"0.8.0\""));
+ let lock = p.read_file("vendor/bitflags-0.7.0/Cargo.toml");
+ assert!(lock.contains("version = \"0.7.0\""));
+
+ add_vendor_config(&p);
+ p.cargo("check").cwd("foo").run();
+ p.cargo("check").cwd("bar").run();
+}
+
+#[cargo_test]
+fn test_sync_argument() {
+ let p = project()
+ .no_manifest()
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "=0.7.0"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "=0.8.0"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "=0.8.0"
+ "#,
+ )
+ .file("baz/src/lib.rs", "")
+ .build();
+
+ Package::new("bitflags", "0.7.0").publish();
+ Package::new("bitflags", "0.8.0").publish();
+
+ p.cargo("vendor --respect-source-config --manifest-path foo/Cargo.toml -s bar/Cargo.toml baz/Cargo.toml test_vendor")
+ .with_stderr("\
+error: unexpected argument 'test_vendor' found
+
+Usage: cargo[EXE] vendor [OPTIONS] [path]
+
+For more information, try '--help'.",
+ )
+ .with_status(1)
+ .run();
+
+ p.cargo("vendor --respect-source-config --manifest-path foo/Cargo.toml -s bar/Cargo.toml -s baz/Cargo.toml test_vendor")
+ .run();
+
+ let lock = p.read_file("test_vendor/bitflags/Cargo.toml");
+ assert!(lock.contains("version = \"0.8.0\""));
+ let lock = p.read_file("test_vendor/bitflags-0.7.0/Cargo.toml");
+ assert!(lock.contains("version = \"0.7.0\""));
+}
+
+#[cargo_test]
+fn delete_old_crates() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bitflags = "=0.7.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("bitflags", "0.7.0").publish();
+ Package::new("log", "0.3.5").publish();
+
+ p.cargo("vendor --respect-source-config").run();
+ p.read_file("vendor/bitflags/Cargo.toml");
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ log = "=0.3.5"
+ "#,
+ );
+
+ p.cargo("vendor --respect-source-config").run();
+ let lock = p.read_file("vendor/log/Cargo.toml");
+ assert!(lock.contains("version = \"0.3.5\""));
+ assert!(!p.root().join("vendor/bitflags/Cargo.toml").exists());
+}
+
+#[cargo_test]
+fn ignore_files() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ url = "1.4.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("url", "1.4.1")
+ .file("src/lib.rs", "")
+ .file("foo.orig", "")
+ .file(".gitignore", "")
+ .file(".gitattributes", "")
+ .file("foo.rej", "")
+ .publish();
+
+ p.cargo("vendor --respect-source-config").run();
+ let csum = p.read_file("vendor/url/.cargo-checksum.json");
+ assert!(!csum.contains("foo.orig"));
+ assert!(!csum.contains(".gitignore"));
+ assert!(!csum.contains(".gitattributes"));
+ assert!(!csum.contains(".cargo-ok"));
+ assert!(!csum.contains("foo.rej"));
+}
+
+#[cargo_test]
+fn included_files_only() {
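+ // Files excluded by the git dependency's `.gitignore` (here `a/b.md`) should
+ // not end up in the vendored copy.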
+ let git = git::new("a", |p| {
+ p.file("Cargo.toml", &basic_lib_manifest("a"))
+ .file("src/lib.rs", "")
+ .file(".gitignore", "a")
+ .file("a/b.md", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = {{ git = '{}' }}
+ "#,
+ git.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("vendor --respect-source-config").run();
+ let csum = p.read_file("vendor/a/.cargo-checksum.json");
+ assert!(!csum.contains("a/b.md"));
+}
+
+#[cargo_test]
+fn dependent_crates_in_crates() {
+ let git = git::new("a", |p| {
+ p.file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ b = { path = 'b' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("b/Cargo.toml", &basic_lib_manifest("b"))
+ .file("b/src/lib.rs", "")
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = {{ git = '{}' }}
+ "#,
+ git.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("vendor --respect-source-config").run();
+ p.read_file("vendor/a/.cargo-checksum.json");
+ p.read_file("vendor/b/.cargo-checksum.json");
+}
+
+#[cargo_test]
+fn vendoring_git_crates() {
+ let git = git::new("git", |p| {
+ p.file("Cargo.toml", &basic_lib_manifest("serde_derive"))
+ .file("src/lib.rs", "")
+ .file("src/wut.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies.serde]
+ version = "0.5.0"
+
+ [dependencies.serde_derive]
+ version = "0.5.0"
+
+ [patch.crates-io]
+ serde_derive = {{ git = '{}' }}
+ "#,
+ git.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ Package::new("serde", "0.5.0")
+ .dep("serde_derive", "0.5")
+ .publish();
+ Package::new("serde_derive", "0.5.0").publish();
+
+ p.cargo("vendor --respect-source-config").run();
+ p.read_file("vendor/serde_derive/src/wut.rs");
+
+ add_vendor_config(&p);
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn git_simple() {
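+ // A vendored git dependency gets a `.cargo-checksum.json` with a null
+ // `package` checksum.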
+ let git = git::new("git", |p| {
+ p.file("Cargo.toml", &basic_lib_manifest("a"))
+ .file("src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = {{ git = '{}' }}
+ "#,
+ git.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("vendor --respect-source-config").run();
+ let csum = p.read_file("vendor/a/.cargo-checksum.json");
+ assert!(csum.contains("\"package\":null"));
+}
+
+#[cargo_test]
+fn git_diff_rev() {
+ let (git_project, git_repo) = git::new_repo("git", |p| {
+ p.file("Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("src/lib.rs", "")
+ });
+ let url = git_project.url();
+ let ref_1 = "v0.1.0";
+ let ref_2 = "v0.2.0";
+
+ git::tag(&git_repo, ref_1);
+
+ git_project.change_file("Cargo.toml", &basic_manifest("a", "0.2.0"));
+ git::add(&git_repo);
+ git::commit(&git_repo);
+ git::tag(&git_repo, ref_2);
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a_1 = {{ package = "a", git = '{url}', rev = '{ref_1}' }}
+ a_2 = {{ package = "a", git = '{url}', rev = '{ref_2}' }}
+ "#
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("vendor --respect-source-config")
+ .with_stdout(
+ r#"[source."git+file://[..]/git?rev=v0.1.0"]
+git = [..]
+rev = "v0.1.0"
+replace-with = "vendored-sources"
+
+[source."git+file://[..]/git?rev=v0.2.0"]
+git = [..]
+rev = "v0.2.0"
+replace-with = "vendored-sources"
+
+[source.vendored-sources]
+directory = "vendor"
+"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn git_duplicate() {
+ let git = git::new("a", |p| {
+ p.file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ b = { path = 'b' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("b/Cargo.toml", &basic_lib_manifest("b"))
+ .file("b/src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = {{ git = '{}' }}
+ b = '0.5.0'
+
+ "#,
+ git.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ Package::new("b", "0.5.0").publish();
+
+ p.cargo("vendor --respect-source-config")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[UPDATING] [..]
+[DOWNLOADING] [..]
+[DOWNLOADED] [..]
+error: failed to sync
+
+Caused by:
+ found duplicate version of package `b v0.5.0` vendored from two sources:
+
+ <tab>source 1: [..]
+ <tab>source 2: [..]
+",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn git_complex() {
+ let git_b = git::new("git_b", |p| {
+ p.file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "b"
+ version = "0.1.0"
+
+ [dependencies]
+ dep_b = { path = 'dep_b' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("dep_b/Cargo.toml", &basic_lib_manifest("dep_b"))
+ .file("dep_b/src/lib.rs", "")
+ });
+
+ let git_a = git::new("git_a", |p| {
+ p.file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ [dependencies]
+ b = {{ git = '{}' }}
+ dep_a = {{ path = 'dep_a' }}
+ "#,
+ git_b.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("dep_a/Cargo.toml", &basic_lib_manifest("dep_a"))
+ .file("dep_a/src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = {{ git = '{}' }}
+ "#,
+ git_a.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let output = p
+ .cargo("vendor --respect-source-config")
+ .exec_with_output()
+ .unwrap();
+ let output = String::from_utf8(output.stdout).unwrap();
+ p.change_file(".cargo/config", &output);
+
+ p.cargo("check -v")
+ .with_stderr_contains("[..]foo/vendor/a/src/lib.rs[..]")
+ .with_stderr_contains("[..]foo/vendor/dep_a/src/lib.rs[..]")
+ .with_stderr_contains("[..]foo/vendor/b/src/lib.rs[..]")
+ .with_stderr_contains("[..]foo/vendor/dep_b/src/lib.rs[..]")
+ .run();
+}
+
+#[cargo_test]
+fn depend_on_vendor_dir_not_deleted() {
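+ // Re-vendoring must not delete `vendor/libc` when the project itself patches
+ // `libc` to that vendored path.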
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ libc = "0.2.30"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ Package::new("libc", "0.2.30").publish();
+
+ p.cargo("vendor --respect-source-config").run();
+ assert!(p.root().join("vendor/libc").is_dir());
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ libc = "0.2.30"
+
+ [patch.crates-io]
+ libc = { path = 'vendor/libc' }
+ "#,
+ );
+
+ p.cargo("vendor --respect-source-config").run();
+ assert!(p.root().join("vendor/libc").is_dir());
+}
+
+#[cargo_test]
+fn ignore_hidden() {
+ // Don't delete files starting with `.`
+ Package::new("bar", "0.1.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "1.0.0"
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("vendor --respect-source-config").run();
+ // Add a `.git` directory.
+ let repo = git::init(&p.root().join("vendor"));
+ git::add(&repo);
+ git::commit(&repo);
+ assert!(p.root().join("vendor/.git").exists());
+ // Vendor again, shouldn't change anything.
+ p.cargo("vendor --respect-source-config").run();
+ // .git should not be removed.
+ assert!(p.root().join("vendor/.git").exists());
+ // And just for good measure, make sure no files changed.
+ let mut opts = git2::StatusOptions::new();
+ assert!(repo
+ .statuses(Some(&mut opts))
+ .unwrap()
+ .iter()
+ .all(|status| status.status() == git2::Status::CURRENT));
+}
+
+#[cargo_test]
+fn config_instructions_works() {
+ // Check that the config instructions work for all dependency kinds.
+ registry::alt_init();
+ Package::new("dep", "0.1.0").publish();
+ Package::new("altdep", "0.1.0").alternative(true).publish();
+ let git_project = git::new("gitdep", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("gitdep"))
+ .file("src/lib.rs", "")
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = "0.1"
+ altdep = {{version="0.1", registry="alternative"}}
+ gitdep = {{git='{}'}}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ let output = p
+ .cargo("vendor --respect-source-config")
+ .exec_with_output()
+ .unwrap();
+ let output = String::from_utf8(output.stdout).unwrap();
+ p.change_file(".cargo/config", &output);
+
+ p.cargo("check -v")
+ .with_stderr_contains("[..]foo/vendor/dep/src/lib.rs[..]")
+ .with_stderr_contains("[..]foo/vendor/altdep/src/lib.rs[..]")
+ .with_stderr_contains("[..]foo/vendor/gitdep/src/lib.rs[..]")
+ .run();
+}
+
+#[cargo_test]
+fn git_crlf_preservation() {
+ // Check that newlines don't get changed when you vendor
+ // (will only fail if your system is set up with core.autocrlf=true on Windows)
+ let input = "hello \nthere\nmy newline\nfriends";
+ let git_project = git::new("git", |p| {
+ p.file("Cargo.toml", &basic_lib_manifest("a"))
+ .file("src/lib.rs", input)
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ a = {{ git = '{}' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ fs::write(
+ paths::home().join(".gitconfig"),
+ r#"
+ [core]
+ autocrlf = true
+ "#,
+ )
+ .unwrap();
+
+ p.cargo("vendor --respect-source-config").run();
+ let output = p.read_file("vendor/a/src/lib.rs");
+ assert_eq!(input, output);
+}
+
+#[cargo_test]
+#[cfg(unix)]
+fn vendor_preserves_permissions() {
+ use std::os::unix::fs::MetadataExt;
+
+ Package::new("bar", "1.0.0")
+ .file_with_mode("example.sh", 0o755, "#!/bin/sh")
+ .file("src/lib.rs", "")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("vendor --respect-source-config").run();
+
+ let metadata = fs::metadata(p.root().join("vendor/bar/src/lib.rs")).unwrap();
+ assert_eq!(metadata.mode() & 0o777, 0o644);
+ let metadata = fs::metadata(p.root().join("vendor/bar/example.sh")).unwrap();
+ assert_eq!(metadata.mode() & 0o777, 0o755);
+}
+
+#[cargo_test]
+fn no_remote_dependency_no_vendor() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ [dependencies]
+ bar = { path = "bar" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("vendor")
+ .with_stderr("There is no dependency to vendor in this project.")
+ .run();
+ assert!(!p.root().join("vendor").exists());
+}
+
+#[cargo_test]
+fn vendor_crate_with_ws_inherit() {
+ let git = git::new("ws", |p| {
+ p.file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ [workspace.package]
+ version = "0.1.0"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version.workspace = true
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {{ git = '{}' }}
+ "#,
+ git.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("vendor --respect-source-config").run();
+ p.change_file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [source."{}"]
+ git = "{}"
+ replace-with = "vendor"
+
+ [source.vendor]
+ directory = "vendor"
+ "#,
+ git.url(),
+ git.url()
+ ),
+ );
+
+ p.cargo("check -v")
+ .with_stderr_contains("[..]foo/vendor/bar/src/lib.rs[..]")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/verify_project.rs b/src/tools/cargo/tests/testsuite/verify_project.rs
new file mode 100644
index 000000000..216808fb5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/verify_project.rs
@@ -0,0 +1,73 @@
+//! Tests for the `cargo verify-project` command.
+
+use cargo_test_support::{basic_bin_manifest, main_file, project};
+
+fn verify_project_success_output() -> String {
+ r#"{"success":"true"}"#.into()
+}
+
+#[cargo_test]
+fn cargo_verify_project_path_to_cargo_toml_relative() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("verify-project --manifest-path foo/Cargo.toml")
+ .cwd(p.root().parent().unwrap())
+ .with_stdout(verify_project_success_output())
+ .run();
+}
+
+#[cargo_test]
+fn cargo_verify_project_path_to_cargo_toml_absolute() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("verify-project --manifest-path")
+ .arg(p.root().join("Cargo.toml"))
+ .cwd(p.root().parent().unwrap())
+ .with_stdout(verify_project_success_output())
+ .run();
+}
+
+#[cargo_test]
+fn cargo_verify_project_cwd() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .build();
+
+ p.cargo("verify-project")
+ .with_stdout(verify_project_success_output())
+ .run();
+}
+
+#[cargo_test]
+fn cargo_verify_project_honours_unstable_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["test-dummy-unstable"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("verify-project")
+ .masquerade_as_nightly_cargo(&["test-dummy-unstable"])
+ .with_stdout(verify_project_success_output())
+ .run();
+
+ p.cargo("verify-project")
+ .with_status(1)
+ .with_json(r#"{"invalid":"failed to parse manifest at `[CWD]/Cargo.toml`"}"#)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/version.rs b/src/tools/cargo/tests/testsuite/version.rs
new file mode 100644
index 000000000..f880c75a6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/version.rs
@@ -0,0 +1,54 @@
+//! Tests for displaying the cargo version.
+
+use cargo_test_support::{cargo_process, project};
+
+#[cargo_test]
+fn simple() {
+ let p = project().build();
+
+ p.cargo("version")
+ .with_stdout(&format!("cargo {}\n", cargo::version()))
+ .run();
+
+ p.cargo("--version")
+ .with_stdout(&format!("cargo {}\n", cargo::version()))
+ .run();
+}
+
+#[cargo_test]
+fn version_works_without_rustc() {
+ let p = project().build();
+ p.cargo("version").env("PATH", "").run();
+}
+
+#[cargo_test]
+fn version_works_with_bad_config() {
+ let p = project().file(".cargo/config", "this is not toml").build();
+ p.cargo("version").run();
+}
+
+#[cargo_test]
+fn version_works_with_bad_target_dir() {
+ let p = project()
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ target-dir = 4
+ "#,
+ )
+ .build();
+ p.cargo("version").run();
+}
+
+#[cargo_test]
+fn verbose() {
+ // This is mainly to check that it doesn't explode.
+ cargo_process("-vV")
+ .with_stdout_contains(&format!("cargo {}", cargo::version()))
+ .with_stdout_contains("host: [..]")
+ .with_stdout_contains("libgit2: [..]")
+ .with_stdout_contains("libcurl: [..]")
+ .with_stdout_contains("os: [..]")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/warn_on_failure.rs b/src/tools/cargo/tests/testsuite/warn_on_failure.rs
new file mode 100644
index 000000000..19cb01813
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/warn_on_failure.rs
@@ -0,0 +1,111 @@
+//! Tests for whether or not warnings are displayed for build scripts.
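+//!
+//! A build script emits a warning by printing `cargo:warning=<message>` to
+//! stdout. For a dependency pulled from a registry, cargo normally keeps
+//! those warnings (and the script's other stdout/stderr output) hidden; the
+//! tests below check that they only surface when that dependency itself
+//! fails to build.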
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{project, Project};
+
+static WARNING1: &str = "Hello! I'm a warning. :)";
+static WARNING2: &str = "And one more!";
+
+fn make_lib(lib_src: &str) {
+ Package::new("bar", "0.0.1")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ authors = []
+ version = "0.0.1"
+ build = "build.rs"
+ "#,
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
+ fn main() {{
+ use std::io::Write;
+ println!("cargo:warning={{}}", "{}");
+ println!("hidden stdout");
+ write!(&mut ::std::io::stderr(), "hidden stderr");
+ println!("cargo:warning={{}}", "{}");
+ }}
+ "#,
+ WARNING1, WARNING2
+ ),
+ )
+ .file("src/lib.rs", &format!("fn f() {{ {} }}", lib_src))
+ .publish();
+}
+
+fn make_upstream(main_src: &str) -> Project {
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/main.rs", &format!("fn main() {{ {} }}", main_src))
+ .build()
+}
+
+#[cargo_test]
+fn no_warning_on_success() {
+ make_lib("");
+ let upstream = make_upstream("");
+ upstream
+ .cargo("build")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 ([..])
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn no_warning_on_bin_failure() {
+ make_lib("");
+ let upstream = make_upstream("hi()");
+ upstream
+ .cargo("build")
+ .with_status(101)
+ .with_stdout_does_not_contain("hidden stdout")
+ .with_stderr_does_not_contain("hidden stderr")
+ .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING1))
+ .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING2))
+ .with_stderr_contains("[UPDATING] `[..]` index")
+ .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])")
+ .with_stderr_contains("[COMPILING] bar v0.0.1")
+ .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])")
+ .run();
+}
+
+#[cargo_test]
+fn warning_on_lib_failure() {
+ make_lib("err()");
+ let upstream = make_upstream("");
+ upstream
+ .cargo("build")
+ .with_status(101)
+ .with_stdout_does_not_contain("hidden stdout")
+ .with_stderr_does_not_contain("hidden stderr")
+ .with_stderr_does_not_contain("[COMPILING] foo v0.0.1 ([..])")
+ .with_stderr_contains("[UPDATING] `[..]` index")
+ .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])")
+ .with_stderr_contains("[COMPILING] bar v0.0.1")
+ .with_stderr_contains(&format!("[WARNING] {}", WARNING1))
+ .with_stderr_contains(&format!("[WARNING] {}", WARNING2))
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/weak_dep_features.rs b/src/tools/cargo/tests/testsuite/weak_dep_features.rs
new file mode 100644
index 000000000..ee91114df
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/weak_dep_features.rs
@@ -0,0 +1,632 @@
+//! Tests for weak-dep-features.
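+//!
+//! A "weak" dependency feature of the form `"dep?/feat"` enables `feat` on
+//! `dep` only if `dep` is activated by something else; unlike `"dep/feat"`,
+//! it does not itself turn the optional dependency on. The manifests in
+//! these tests follow this minimal shape:
+//!
+//! ```toml
+//! [dependencies]
+//! bar = { version = "1.0", optional = true }
+//!
+//! [features]
+//! f1 = ["bar?/feat"]   # `f1` alone does not enable `bar`
+//! ```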
+
+use super::features2::switch_to_resolver_2;
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::registry::{Dependency, Package, RegistryBuilder};
+use cargo_test_support::{project, publish};
+use std::fmt::Write;
+
+// Helper to create lib.rs files that check features.
+fn require(enabled_features: &[&str], disabled_features: &[&str]) -> String {
+ let mut s = String::new();
+ for feature in enabled_features {
+ writeln!(s, "#[cfg(not(feature=\"{feature}\"))] compile_error!(\"expected feature {feature} to be enabled\");",
+ feature=feature).unwrap();
+ }
+ for feature in disabled_features {
+ writeln!(s, "#[cfg(feature=\"{feature}\")] compile_error!(\"did not expect feature {feature} to be enabled\");",
+ feature=feature).unwrap();
+ }
+ s
+}
+
+#[cargo_test]
+fn simple() {
+ Package::new("bar", "1.0.0")
+ .feature("feat", &[])
+ .file("src/lib.rs", &require(&["feat"], &[]))
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", optional = true }
+
+ [features]
+ f1 = ["bar?/feat"]
+ "#,
+ )
+ .file("src/lib.rs", &require(&["f1"], &[]))
+ .build();
+
+ // It's a bit unfortunate that this has to download `bar`, but avoiding
+ // that is extremely difficult.
+ p.cargo("check --features f1")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.0 [..]
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("check --features f1,bar")
+ .with_stderr(
+ "\
+[CHECKING] bar v1.0.0
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn deferred() {
+ // A complex chain in which applying the weak `bar?/feat` requirement has to
+ // be deferred until another dependency enables `bar`.
+ Package::new("bar", "1.0.0")
+ .feature("feat", &[])
+ .file("src/lib.rs", &require(&["feat"], &[]))
+ .publish();
+ Package::new("dep", "1.0.0")
+ .add_dep(Dependency::new("bar", "1.0").optional(true))
+ .feature("feat", &["bar?/feat"])
+ .publish();
+ Package::new("bar_activator", "1.0.0")
+ .feature_dep("dep", "1.0", &["bar"])
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = { version = "1.0", features = ["feat"] }
+ bar_activator = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep v1.0.0 [..]
+[DOWNLOADED] bar_activator v1.0.0 [..]
+[DOWNLOADED] bar v1.0.0 [..]
+[CHECKING] bar v1.0.0
+[CHECKING] dep v1.0.0
+[CHECKING] bar_activator v1.0.0
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn not_optional_dep() {
+ // Attempt to use dep_name?/feat where dep_name is not optional.
+ Package::new("dep", "1.0.0").feature("feat", &[]).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep = "1.0"
+
+ [features]
+ feat = ["dep?/feat"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr("\
+error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
+
+Caused by:
+ feature `feat` includes `dep?/feat` with a `?`, but `dep` is not an optional dependency
+ A non-optional dependency of the same name is defined; consider removing the `?` or changing the dependency to be optional
+")
+ .run();
+}
+
+#[cargo_test]
+fn optional_cli_syntax() {
+ // --features bar?/feat
+ Package::new("bar", "1.0.0")
+ .feature("feat", &[])
+ .file("src/lib.rs", &require(&["feat"], &[]))
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", optional = true }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Does not build bar.
+ p.cargo("check --features bar?/feat")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.0 [..]
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // Builds bar.
+ p.cargo("check --features bar?/feat,bar")
+ .with_stderr(
+ "\
+[CHECKING] bar v1.0.0
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ eprintln!("check V2 resolver");
+ switch_to_resolver_2(&p);
+ p.build_dir().rm_rf();
+ // Does not build bar.
+ p.cargo("check --features bar?/feat")
+ .with_stderr(
+ "\
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ // Builds bar.
+ p.cargo("check --features bar?/feat,bar")
+ .with_stderr(
+ "\
+[CHECKING] bar v1.0.0
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn required_features() {
+ // required-features doesn't allow ?
+ Package::new("bar", "1.0.0").feature("feat", &[]).publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", optional = true }
+
+ [[bin]]
+ name = "foo"
+ required-features = ["bar?/feat"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] invalid feature `bar?/feat` in required-features of target `foo`: \
+optional dependency with `?` is not allowed in required-features
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn weak_with_host_decouple() {
+ // weak-dep-features with new resolver
+ //
+ // foo v0.1.0
+ // └── common v1.0.0
+ // └── bar v1.0.0 <-- does not have `feat` enabled
+ // [build-dependencies]
+ // └── bar_activator v1.0.0
+ // └── common v1.0.0
+ // └── bar v1.0.0 <-- does have `feat` enabled
+ Package::new("bar", "1.0.0")
+ .feature("feat", &[])
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn feat() -> bool {
+ cfg!(feature = "feat")
+ }
+ "#,
+ )
+ .publish();
+
+ Package::new("common", "1.0.0")
+ .add_dep(Dependency::new("bar", "1.0").optional(true))
+ .feature("feat", &["bar?/feat"])
+ .file(
+ "src/lib.rs",
+ r#"
+ #[cfg(feature = "bar")]
+ pub fn feat() -> bool { bar::feat() }
+ #[cfg(not(feature = "bar"))]
+ pub fn feat() -> bool { false }
+ "#,
+ )
+ .publish();
+
+ Package::new("bar_activator", "1.0.0")
+ .feature_dep("common", "1.0", &["bar", "feat"])
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn feat() -> bool {
+ common::feat()
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ resolver = "2"
+
+ [dependencies]
+ common = { version = "1.0", features = ["feat"] }
+
+ [build-dependencies]
+ bar_activator = "1.0"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ assert!(!common::feat());
+ }
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ assert!(bar_activator::feat());
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("run")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[DOWNLOADED] [..]
+[DOWNLOADED] [..]
+[COMPILING] bar v1.0.0
+[COMPILING] common v1.0.0
+[COMPILING] bar_activator v1.0.0
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn weak_namespaced() {
+ // Behavior when combined with a namespaced `dep:` feature.
+ Package::new("bar", "1.0.0")
+ .feature("feat", &[])
+ .file("src/lib.rs", &require(&["feat"], &[]))
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", optional = true }
+
+ [features]
+ f1 = ["bar?/feat"]
+ f2 = ["dep:bar"]
+ "#,
+ )
+ .file("src/lib.rs", &require(&["f1"], &["f2", "bar"]))
+ .build();
+
+ p.cargo("check --features f1")
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.0 [..]
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+
+ p.cargo("tree -f")
+ .arg("{p} feats:{f}")
+ .with_stdout("foo v0.1.0 ([ROOT]/foo) feats:")
+ .run();
+
+ p.cargo("tree --features f1 -f")
+ .arg("{p} feats:{f}")
+ .with_stdout("foo v0.1.0 ([ROOT]/foo) feats:f1")
+ .run();
+
+ p.cargo("tree --features f1,f2 -f")
+ .arg("{p} feats:{f}")
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo) feats:f1,f2
+└── bar v1.0.0 feats:feat
+",
+ )
+ .run();
+
+ // "bar" remains not-a-feature
+ p.change_file("src/lib.rs", &require(&["f1", "f2"], &["bar"]));
+
+ p.cargo("check --features f1,f2")
+ .with_stderr(
+ "\
+[CHECKING] bar v1.0.0
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn tree() {
+ Package::new("bar", "1.0.0")
+ .feature("feat", &[])
+ .file("src/lib.rs", &require(&["feat"], &[]))
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { version = "1.0", optional = true }
+
+ [features]
+ f1 = ["bar?/feat"]
+ "#,
+ )
+ .file("src/lib.rs", &require(&["f1"], &[]))
+ .build();
+
+ p.cargo("tree --features f1")
+ .with_stdout("foo v0.1.0 ([ROOT]/foo)")
+ .run();
+
+ p.cargo("tree --features f1,bar")
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo)
+└── bar v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree --features f1,bar -e features")
+ .with_stdout(
+ "\
+foo v0.1.0 ([ROOT]/foo)
+└── bar feature \"default\"
+ └── bar v1.0.0
+",
+ )
+ .run();
+
+ p.cargo("tree --features f1,bar -e features -i bar")
+ .with_stdout(
+ "\
+bar v1.0.0
+├── bar feature \"default\"
+│ └── foo v0.1.0 ([ROOT]/foo)
+│ ├── foo feature \"bar\" (command-line)
+│ ├── foo feature \"default\" (command-line)
+│ └── foo feature \"f1\" (command-line)
+└── bar feature \"feat\"
+ └── foo feature \"f1\" (command-line)
+",
+ )
+ .run();
+
+ p.cargo("tree -e features --features bar?/feat")
+ .with_stdout("foo v0.1.0 ([ROOT]/foo)")
+ .run();
+
+ // This is a little strange in that it produces no output.
+ // Maybe `cargo tree` should print a note about why?
+ p.cargo("tree -e features -i bar --features bar?/feat")
+ .with_stdout("")
+ .run();
+
+ p.cargo("tree -e features -i bar --features bar?/feat,bar")
+ .with_stdout(
+ "\
+bar v1.0.0
+├── bar feature \"default\"
+│ └── foo v0.1.0 ([ROOT]/foo)
+│ ├── foo feature \"bar\" (command-line)
+│ └── foo feature \"default\" (command-line)
+└── bar feature \"feat\" (command-line)
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn publish() {
+ let registry = RegistryBuilder::new().http_api().http_index().build();
+
+ // Publish behavior with the `?/` weak dependency feature syntax.
+ Package::new("bar", "1.0.0").feature("feat", &[]).publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ description = "foo"
+ license = "MIT"
+ homepage = "https://example.com/"
+
+ [dependencies]
+ bar = { version = "1.0", optional = true }
+
+ [features]
+ feat1 = []
+ feat2 = ["bar?/feat"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish")
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[PACKAGING] foo v0.1.0 [..]
+[VERIFYING] foo v0.1.0 [..]
+[UPDATING] [..]
+[COMPILING] foo v0.1.0 [..]
+[FINISHED] [..]
+[PACKAGED] [..]
+[UPLOADING] foo v0.1.0 [..]
+[UPLOADED] foo v0.1.0 to registry `crates-io`
+note: Waiting for `foo v0.1.0` to be available at registry `crates-io`.
+You may press ctrl-c to skip waiting; the crate should be available shortly.
+[PUBLISHED] foo v0.1.0 at registry `crates-io`
+",
+ )
+ .run();
+
+ publish::validate_upload_with_contents(
+ r#"
+ {
+ "authors": [],
+ "badges": {},
+ "categories": [],
+ "deps": [
+ {
+ "default_features": true,
+ "features": [],
+ "kind": "normal",
+ "name": "bar",
+ "optional": true,
+ "target": null,
+ "version_req": "^1.0"
+ }
+ ],
+ "description": "foo",
+ "documentation": null,
+ "features": {
+ "feat1": [],
+ "feat2": ["bar?/feat"]
+ },
+ "homepage": "https://example.com/",
+ "keywords": [],
+ "license": "MIT",
+ "license_file": null,
+ "links": null,
+ "name": "foo",
+ "readme": null,
+ "readme_file": null,
+ "repository": null,
+ "vers": "0.1.0"
+ }
+ "#,
+ "foo-0.1.0.crate",
+ &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
+ &[(
+ "Cargo.toml",
+ &format!(
+ r#"{}
+[package]
+name = "foo"
+version = "0.1.0"
+description = "foo"
+homepage = "https://example.com/"
+license = "MIT"
+
+[dependencies.bar]
+version = "1.0"
+optional = true
+
+[features]
+feat1 = []
+feat2 = ["bar?/feat"]
+"#,
+ cargo::core::package::MANIFEST_PREAMBLE
+ ),
+ )],
+ );
+}
diff --git a/src/tools/cargo/tests/testsuite/workspaces.rs b/src/tools/cargo/tests/testsuite/workspaces.rs
new file mode 100644
index 000000000..c6698f76a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/workspaces.rs
@@ -0,0 +1,2531 @@
+//! Tests for workspaces.
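+//!
+//! Two manifest knobs drive most of these tests: the root's `[workspace]`
+//! table (with an optional `members` list), and a member's `workspace = ".."`
+//! key pointing back at that root. Members share the root's `Cargo.lock` and
+//! `target` directory, which is what the assertions below repeatedly check.
+//! A minimal pair of manifests, in the shape these fixtures use:
+//!
+//! ```toml
+//! # root Cargo.toml
+//! [workspace]
+//! members = ["bar"]
+//!
+//! # bar/Cargo.toml
+//! [package]
+//! name = "bar"
+//! version = "0.1.0"
+//! workspace = ".."
+//! ```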
+
+use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_lib_manifest, basic_manifest, git, project, sleep_ms};
+use std::env;
+use std::fs;
+
+#[cargo_test]
+fn simple_explicit() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+ assert!(!p.bin("bar").is_file());
+
+ p.cargo("build").cwd("bar").run();
+ assert!(p.bin("foo").is_file());
+ assert!(p.bin("bar").is_file());
+
+ assert!(p.root().join("Cargo.lock").is_file());
+ assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+#[cargo_test]
+fn simple_explicit_default_members() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ default-members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("build").run();
+ assert!(p.bin("bar").is_file());
+ assert!(!p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn non_virtual_default_members_build_other_member() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = [".", "bar", "baz"]
+ default-members = ["baz"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "[CHECKING] baz v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+
+ p.cargo("check --manifest-path bar/Cargo.toml")
+ .with_stderr(
+ "[CHECKING] bar v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn non_virtual_default_members_build_root_project() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ default-members = ["."]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(
+ "[CHECKING] foo v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn inferred_root() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+ assert!(!p.bin("bar").is_file());
+
+ p.cargo("build").cwd("bar").run();
+ assert!(p.bin("foo").is_file());
+ assert!(p.bin("bar").is_file());
+
+ assert!(p.root().join("Cargo.lock").is_file());
+ assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+#[cargo_test]
+fn inferred_path_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+ assert!(!p.bin("bar").is_file());
+
+ p.cargo("build").cwd("bar").run();
+ assert!(p.bin("foo").is_file());
+ assert!(p.bin("bar").is_file());
+
+ assert!(p.root().join("Cargo.lock").is_file());
+ assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+#[cargo_test]
+fn transitive_path_dep() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ baz = { path = "../baz" }
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/main.rs", "fn main() {}")
+ .file("baz/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+ assert!(!p.bin("bar").is_file());
+ assert!(!p.bin("baz").is_file());
+
+ p.cargo("build").cwd("bar").run();
+ assert!(p.bin("foo").is_file());
+ assert!(p.bin("bar").is_file());
+ assert!(!p.bin("baz").is_file());
+
+ p.cargo("build").cwd("baz").run();
+ assert!(p.bin("foo").is_file());
+ assert!(p.bin("bar").is_file());
+ assert!(p.bin("baz").is_file());
+
+ assert!(p.root().join("Cargo.lock").is_file());
+ assert!(!p.root().join("bar/Cargo.lock").is_file());
+ assert!(!p.root().join("baz/Cargo.lock").is_file());
+}
+
+#[cargo_test]
+fn parent_pointer_works() {
+ let p = project()
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "../bar" }
+
+ [workspace]
+ "#,
+ )
+ .file("foo/src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = "../foo"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("build").cwd("foo").run();
+ p.cargo("build").cwd("bar").run();
+ assert!(p.root().join("foo/Cargo.lock").is_file());
+ assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+#[cargo_test]
+fn same_names_in_workspace() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: two packages named `foo` in this workspace:
+- [..]Cargo.toml
+- [..]Cargo.toml
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn parent_doesnt_point_to_child() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check")
+ .cwd("bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: current package believes it's in a workspace when it's not:
+current: [..]Cargo.toml
+workspace: [..]Cargo.toml
+
+this may be fixable [..]
+[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_parent_pointer() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ workspace = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to read `[..]Cargo.toml`
+
+Caused by:
+ [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_members() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["foo"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to load manifest for workspace member `[..]/foo`
+
+Caused by:
+ failed to read `[..]foo/foo/Cargo.toml`
+
+Caused by:
+ [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn bare_workspace_ok() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn two_roots() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = [".."]
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: multiple workspace roots found in the same workspace:
+ [..]
+ [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn workspace_isnt_root() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ workspace = "bar"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr("error: root of a workspace inferred but wasn't a root: [..]")
+ .run();
+}
+
+#[cargo_test]
+fn dangling_member() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = "../baz"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+ workspace = "../baz"
+ "#,
+ )
+ .file("baz/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: package `[..]` is a member of the wrong workspace
+expected: [..]
+actual: [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cycle() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ workspace = "bar"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] root of a workspace inferred but wasn't a root: [..]/foo/bar/Cargo.toml",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn share_dependencies() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ dep1 = "0.1"
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ dep1 = "< 0.1.5"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ Package::new("dep1", "0.1.3").publish();
+ Package::new("dep1", "0.1.8").publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep1 v0.1.3 ([..])
+[CHECKING] dep1 v0.1.3
+[CHECKING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn fetch_fetches_all() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ dep1 = "*"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ Package::new("dep1", "0.1.3").publish();
+
+ p.cargo("fetch")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep1 v0.1.3 ([..])
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn lock_works_for_everyone() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ dep2 = "0.1"
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ dep1 = "0.1"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ Package::new("dep1", "0.1.0").publish();
+ Package::new("dep2", "0.1.0").publish();
+
+ p.cargo("generate-lockfile")
+ .with_stderr("[UPDATING] `[..]` index")
+ .run();
+
+ Package::new("dep1", "0.1.1").publish();
+ Package::new("dep2", "0.1.1").publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep2 v0.1.0 ([..])
+[CHECKING] dep2 v0.1.0
+[CHECKING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ p.cargo("check")
+ .cwd("bar")
+ .with_stderr(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep1 v0.1.0 ([..])
+[CHECKING] dep1 v0.1.0
+[CHECKING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn virtual_works() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+ p.cargo("build").cwd("bar").run();
+ assert!(p.root().join("Cargo.lock").is_file());
+ assert!(p.bin("bar").is_file());
+ assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+#[cargo_test]
+fn explicit_package_argument_works_with_virtual_manifest() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+ p.cargo("build --package bar").run();
+ assert!(p.root().join("Cargo.lock").is_file());
+ assert!(p.bin("bar").is_file());
+ assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+#[cargo_test]
+fn virtual_misconfigure() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+ p.cargo("check")
+ .cwd("bar")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: current package believes it's in a workspace when it's not:
+current: [CWD]/Cargo.toml
+workspace: [..]Cargo.toml
+
+this may be fixable by adding `bar` to the `workspace.members` array of the \
+manifest located at: [..]
+[..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn virtual_build_all_implied() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn virtual_default_members() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ default-members = ["bar"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("baz/src/main.rs", "fn main() {}");
+ let p = p.build();
+ p.cargo("build").run();
+ assert!(p.bin("bar").is_file());
+ assert!(!p.bin("baz").is_file());
+}
+
+#[cargo_test]
+fn virtual_default_member_is_not_a_member() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar"]
+ default-members = ["something-else"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: package `[..]something-else` is listed in workspace’s default-members \
+but is not a member.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn virtual_default_members_build_other_member() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["bar", "baz"]
+ default-members = ["baz"]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("baz/src/lib.rs", "pub fn baz() {}")
+ .build();
+
+ p.cargo("check --manifest-path bar/Cargo.toml")
+ .with_stderr(
+ "[CHECKING] bar v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn virtual_build_no_members() {
+ let p = project().file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ "#,
+ );
+ let p = p.build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: manifest path `[..]` contains no package: The manifest is virtual, \
+and the workspace has no members.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn include_virtual() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [workspace]
+ "#,
+ );
+ let p = p.build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: multiple workspace roots found in the same workspace:
+ [..]
+ [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn members_include_path_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["p1"]
+
+ [dependencies]
+ p3 = { path = "p3" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "p1/Cargo.toml",
+ r#"
+ [package]
+ name = "p1"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ p2 = { path = "../p2" }
+ "#,
+ )
+ .file("p1/src/lib.rs", "")
+ .file("p2/Cargo.toml", &basic_manifest("p2", "0.1.0"))
+ .file("p2/src/lib.rs", "")
+ .file("p3/Cargo.toml", &basic_manifest("p3", "0.1.0"))
+ .file("p3/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("check").cwd("p1").run();
+ p.cargo("check").cwd("p2").run();
+ p.cargo("check").cwd("p3").run();
+ p.cargo("check").run();
+
+ assert!(p.root().join("target").is_dir());
+ assert!(!p.root().join("p1/target").is_dir());
+ assert!(!p.root().join("p2/target").is_dir());
+ assert!(!p.root().join("p3/target").is_dir());
+}
+
+#[cargo_test]
+fn new_warns_you_this_will_not_work() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ "#,
+ )
+ .file("src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("new --lib bar")
+ .with_stderr(
+ "\
+warning: compiling this new package may not work due to invalid workspace configuration
+
+current package believes it's in a workspace when it's not:
+current: [..]
+workspace: [..]
+
+this may be fixable by ensuring that this crate is depended on by the workspace \
+root: [..]
+[..]
+[CREATED] library `bar` package
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn new_warning_with_corrupt_ws() {
+ let p = project().file("Cargo.toml", "asdf").build();
+ p.cargo("new bar")
+ .with_stderr(
+ "\
+[WARNING] compiling this new package may not work due to invalid workspace configuration
+
+failed to parse manifest at `[..]foo/Cargo.toml`
+
+Caused by:
+ could not parse input as TOML
+
+Caused by:
+ TOML parse error at line 1, column 5
+ |
+ 1 | asdf
+ | ^
+ expected `.`, `=`
+ Created binary (application) `bar` package
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn lock_doesnt_change_depending_on_crate() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ['baz']
+
+ [dependencies]
+ foo = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("baz/src/lib.rs", "");
+ let p = p.build();
+
+ Package::new("foo", "1.0.0").publish();
+ Package::new("bar", "1.0.0").publish();
+
+ p.cargo("check").run();
+
+ let lockfile = p.read_lockfile();
+
+ p.cargo("check").cwd("baz").run();
+
+ let lockfile2 = p.read_lockfile();
+
+ assert_eq!(lockfile, lockfile2);
+}
+
+#[cargo_test]
+fn rebuild_please() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ['lib', 'bin']
+ "#,
+ )
+ .file("lib/Cargo.toml", &basic_manifest("lib", "0.1.0"))
+ .file(
+ "lib/src/lib.rs",
+ r#"
+ pub fn foo() -> u32 { 0 }
+ "#,
+ )
+ .file(
+ "bin/Cargo.toml",
+ r#"
+ [package]
+ name = "bin"
+ version = "0.1.0"
+
+ [dependencies]
+ lib = { path = "../lib" }
+ "#,
+ )
+ .file(
+ "bin/src/main.rs",
+ r#"
+ extern crate lib;
+
+ fn main() {
+ assert_eq!(lib::foo(), 0);
+ }
+ "#,
+ );
+ let p = p.build();
+
+ p.cargo("run").cwd("bin").run();
+
+ sleep_ms(1000);
+
+ p.change_file("lib/src/lib.rs", "pub fn foo() -> u32 { 1 }");
+
+ p.cargo("build").cwd("lib").run();
+
+ p.cargo("run")
+ .cwd("bin")
+ .with_status(101)
+ .with_stderr_contains("[..]assertion[..]")
+ .run();
+}
+
+#[cargo_test]
+fn workspace_in_git() {
+ let git_project = git::new("dep1", |project| {
+ project
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo"]
+ "#,
+ )
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "")
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "lib"
+ version = "0.1.0"
+
+ [dependencies.foo]
+ git = '{}'
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ pub fn foo() -> u32 { 0 }
+ "#,
+ );
+ let p = p.build();
+
+ p.cargo("check").run();
+}
+
+#[cargo_test]
+fn lockfile_can_specify_nonexistent_members() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("a/src/main.rs", "fn main() {}")
+ .file(
+ "Cargo.lock",
+ r#"
+ [[package]]
+ name = "a"
+ version = "0.1.0"
+
+ [[package]]
+ name = "b"
+ version = "0.1.0"
+ "#,
+ );
+
+ let p = p.build();
+
+ p.cargo("check").cwd("a").run();
+}
+
+#[cargo_test]
+fn you_cannot_generate_lockfile_for_empty_workspaces() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ "#,
+ )
+ .file("bar/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("update")
+ .with_status(101)
+ .with_stderr("error: you can't generate a lockfile for an empty workspace.")
+ .run();
+}
+
+#[cargo_test]
+fn workspace_with_transitive_dev_deps() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.5.0"
+ authors = ["mbrubeck@example.com"]
+
+ [dependencies.bar]
+ path = "bar"
+
+ [workspace]
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() {}"#)
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = ["mbrubeck@example.com"]
+
+ [dev-dependencies.baz]
+ path = "../baz"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
+ pub fn init() {}
+
+ #[cfg(test)]
+
+ #[test]
+ fn test() {
+ extern crate baz;
+ baz::do_stuff();
+ }
+ "#,
+ )
+ .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0"))
+ .file("baz/src/lib.rs", r#"pub fn do_stuff() {}"#);
+ let p = p.build();
+
+ p.cargo("test -p bar").run();
+}
+
+#[cargo_test]
+fn error_if_parent_cargo_toml_is_invalid() {
+ let p = project()
+ .file("Cargo.toml", "Totally not a TOML file")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check")
+ .cwd("bar")
+ .with_status(101)
+ .with_stderr_contains("[ERROR] failed to parse manifest at `[..]`")
+ .run();
+}
+
+#[cargo_test]
+fn relative_path_for_member_works() {
+ let p = project()
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["../bar"]
+ "#,
+ )
+ .file("foo/src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = "../foo"
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check").cwd("foo").run();
+ p.cargo("check").cwd("bar").run();
+}
+
+#[cargo_test]
+fn relative_path_for_root_works() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+
+ [dependencies]
+ subproj = { path = "./subproj" }
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("subproj/Cargo.toml", &basic_manifest("subproj", "0.1.0"))
+ .file("subproj/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check --manifest-path ./Cargo.toml").run();
+
+ p.cargo("check --manifest-path ../Cargo.toml")
+ .cwd("subproj")
+ .run();
+}
+
+#[cargo_test]
+fn path_dep_outside_workspace_is_not_member() {
+ let p = project()
+ .no_manifest()
+ .file(
+ "ws/Cargo.toml",
+ r#"
+ [package]
+ name = "ws"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = { path = "../foo" }
+
+ [workspace]
+ "#,
+ )
+ .file("ws/src/lib.rs", "extern crate foo;")
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("check").cwd("ws").run();
+}
+
+#[cargo_test]
+fn test_in_and_out_of_workspace() {
+ let p = project()
+ .no_manifest()
+ .file(
+ "ws/Cargo.toml",
+ r#"
+ [package]
+ name = "ws"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = { path = "../foo" }
+
+ [workspace]
+ members = [ "../bar" ]
+ "#,
+ )
+ .file("ws/src/lib.rs", "extern crate foo; pub fn f() { foo::f() }")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "../bar" }
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ "extern crate bar; pub fn f() { bar::f() }",
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ workspace = "../ws"
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file("bar/src/lib.rs", "pub fn f() { }");
+ let p = p.build();
+
+ p.cargo("check").cwd("ws").run();
+
+ assert!(p.root().join("ws/Cargo.lock").is_file());
+ assert!(p.root().join("ws/target").is_dir());
+ assert!(!p.root().join("foo/Cargo.lock").is_file());
+ assert!(!p.root().join("foo/target").is_dir());
+ assert!(!p.root().join("bar/Cargo.lock").is_file());
+ assert!(!p.root().join("bar/target").is_dir());
+
+ p.cargo("check").cwd("foo").run();
+ assert!(p.root().join("foo/Cargo.lock").is_file());
+ assert!(p.root().join("foo/target").is_dir());
+ assert!(!p.root().join("bar/Cargo.lock").is_file());
+ assert!(!p.root().join("bar/target").is_dir());
+}
+
+#[cargo_test]
+fn test_path_dependency_under_member() {
+ let p = project()
+ .file(
+ "ws/Cargo.toml",
+ r#"
+ [package]
+ name = "ws"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ foo = { path = "../foo" }
+
+ [workspace]
+ "#,
+ )
+ .file("ws/src/lib.rs", "extern crate foo; pub fn f() { foo::f() }")
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ workspace = "../ws"
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "./bar" }
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ "extern crate bar; pub fn f() { bar::f() }",
+ )
+ .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("foo/bar/src/lib.rs", "pub fn f() { }");
+ let p = p.build();
+
+ p.cargo("check").cwd("ws").run();
+
+ assert!(!p.root().join("foo/bar/Cargo.lock").is_file());
+ assert!(!p.root().join("foo/bar/target").is_dir());
+
+ p.cargo("check").cwd("foo/bar").run();
+
+ assert!(!p.root().join("foo/bar/Cargo.lock").is_file());
+ assert!(!p.root().join("foo/bar/target").is_dir());
+}
+
+#[cargo_test]
+fn excluded_simple() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "ws"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ exclude = ["foo"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("check").run();
+ assert!(p.root().join("target").is_dir());
+ p.cargo("check").cwd("foo").run();
+ assert!(p.root().join("foo/target").is_dir());
+}
+
+#[cargo_test]
+fn exclude_members_preferred() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "ws"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["foo/bar"]
+ exclude = ["foo"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "")
+ .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("foo/bar/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("check").run();
+ assert!(p.root().join("target").is_dir());
+ p.cargo("check").cwd("foo").run();
+ assert!(p.root().join("foo/target").is_dir());
+ p.cargo("check").cwd("foo/bar").run();
+ assert!(!p.root().join("foo/bar/target").is_dir());
+}
+
+#[cargo_test]
+fn exclude_but_also_depend() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "ws"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "foo/bar" }
+
+ [workspace]
+ exclude = ["foo"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "")
+ .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("foo/bar/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("check").run();
+ assert!(p.root().join("target").is_dir());
+ p.cargo("check").cwd("foo").run();
+ assert!(p.root().join("foo/target").is_dir());
+ p.cargo("check").cwd("foo/bar").run();
+ assert!(p.root().join("foo/bar/target").is_dir());
+}
+
+#[cargo_test]
+fn excluded_default_members_still_must_be_members() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo"]
+ default-members = ["foo", "bar"]
+ exclude = ["bar"]
+ "#,
+ )
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "")
+ .file("bar/something.txt", "");
+ let p = p.build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: package `[..]bar` is listed in workspace’s default-members \
+but is not a member.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn excluded_default_members_crate_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "bar/*"]
+ default-members = ["bar/*"]
+ exclude = ["bar/quux"]
+ "#,
+ )
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/main.rs", "fn main() {}")
+ .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("bar/baz/src/main.rs", "fn main() {}")
+ .file("bar/quux/Cargo.toml", &basic_manifest("quux", "0.1.0"))
+ .file("bar/quux/src/main.rs", "fn main() {}");
+
+ let p = p.build();
+ p.cargo("build").run();
+
+ assert!(p.root().join("target").is_dir());
+ assert!(!p.bin("foo").is_file());
+ assert!(p.bin("baz").is_file());
+ assert!(!p.bin("quux").exists());
+
+ p.cargo("build --workspace").run();
+ assert!(p.root().join("target").is_dir());
+ assert!(p.bin("foo").is_file());
+ assert!(!p.bin("quux").exists());
+
+ p.cargo("build").cwd("bar/quux").run();
+ assert!(p.root().join("bar/quux/target").is_dir());
+}
+
+#[cargo_test]
+fn excluded_default_members_not_crate_glob() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "bar/*"]
+ default-members = ["bar/*"]
+ exclude = ["bar/docs"]
+ "#,
+ )
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/main.rs", "fn main() {}")
+ .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+ .file("bar/baz/src/main.rs", "fn main() {}")
+ .file("bar/docs/readme.txt", "This folder is not a crate!");
+
+ let p = p.build();
+ p.cargo("build").run();
+
+ assert!(!p.bin("foo").is_file());
+ assert!(p.bin("baz").is_file());
+ p.cargo("build --workspace").run();
+ assert!(p.bin("foo").is_file());
+}
+
+#[cargo_test]
+fn glob_syntax() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["crates/*"]
+ exclude = ["crates/qux"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "crates/bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = "../.."
+ "#,
+ )
+ .file("crates/bar/src/main.rs", "fn main() {}")
+ .file(
+ "crates/baz/Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+ workspace = "../.."
+ "#,
+ )
+ .file("crates/baz/src/main.rs", "fn main() {}")
+ .file(
+ "crates/qux/Cargo.toml",
+ r#"
+ [package]
+ name = "qux"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file("crates/qux/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+ assert!(!p.bin("bar").is_file());
+ assert!(!p.bin("baz").is_file());
+
+ p.cargo("build").cwd("crates/bar").run();
+ assert!(p.bin("foo").is_file());
+ assert!(p.bin("bar").is_file());
+
+ p.cargo("build").cwd("crates/baz").run();
+ assert!(p.bin("foo").is_file());
+ assert!(p.bin("baz").is_file());
+
+ p.cargo("build").cwd("crates/qux").run();
+ assert!(!p.bin("qux").is_file());
+
+ assert!(p.root().join("Cargo.lock").is_file());
+ assert!(!p.root().join("crates/bar/Cargo.lock").is_file());
+ assert!(!p.root().join("crates/baz/Cargo.lock").is_file());
+ assert!(p.root().join("crates/qux/Cargo.lock").is_file());
+}
+
+/* FIXME: This fails because of how workspace.exclude and workspace.members interact.
+#[cargo_test]
+fn glob_syntax_2() {
+ let p = project()
+ .file("Cargo.toml", r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["crates/b*"]
+ exclude = ["crates/q*"]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("crates/bar/Cargo.toml", r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = "../.."
+ "#)
+ .file("crates/bar/src/main.rs", "fn main() {}")
+ .file("crates/baz/Cargo.toml", r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+ workspace = "../.."
+ "#)
+ .file("crates/baz/src/main.rs", "fn main() {}")
+ .file("crates/qux/Cargo.toml", r#"
+ [package]
+ name = "qux"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("crates/qux/src/main.rs", "fn main() {}");
+ p.build();
+
+ p.cargo("build").run();
+ assert!(p.bin("foo").is_file());
+ assert!(!p.bin("bar").is_file());
+ assert!(!p.bin("baz").is_file());
+
+ p.cargo("build").cwd("crates/bar").run();
+ assert!(p.bin("foo").is_file());
+ assert!(p.bin("bar").is_file());
+
+ p.cargo("build").cwd("crates/baz").run();
+ assert!(p.bin("foo").is_file());
+ assert!(p.bin("baz").is_file());
+
+ p.cargo("build").cwd("crates/qux").run();
+ assert!(!p.bin("qux").is_file());
+
+ assert!(p.root().join("Cargo.lock").is_file());
+ assert!(!p.root().join("crates/bar/Cargo.lock").is_file());
+ assert!(!p.root().join("crates/baz/Cargo.lock").is_file());
+ assert!(p.root().join("crates/qux/Cargo.lock").is_file());
+}
+*/
+
+#[cargo_test]
+fn glob_syntax_invalid_members() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["crates/*"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file("crates/bar/src/main.rs", "fn main() {}");
+ let p = p.build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to load manifest for workspace member `[..]/crates/bar`
+
+Caused by:
+ failed to read `[..]foo/crates/bar/Cargo.toml`
+
+Caused by:
+ [..]
+",
+ )
+ .run();
+}
+
+/// This is a freshness test for feature use with workspaces.
+///
+/// `feat_lib` is used by `caller1` and `caller2`, but with different features enabled.
+/// This test ensures that alternately building `caller1` and `caller2` doesn't force
+/// a recompile of `feat_lib`.
+///
+/// Ideally, once rust-lang/cargo#3620 is solved, a single Cargo build at the top level
+/// will be enough.
+#[cargo_test]
+fn dep_used_with_separate_features() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["feat_lib", "caller1", "caller2"]
+ "#,
+ )
+ .file(
+ "feat_lib/Cargo.toml",
+ r#"
+ [package]
+ name = "feat_lib"
+ version = "0.1.0"
+ authors = []
+
+ [features]
+ myfeature = []
+ "#,
+ )
+ .file("feat_lib/src/lib.rs", "")
+ .file(
+ "caller1/Cargo.toml",
+ r#"
+ [package]
+ name = "caller1"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ feat_lib = { path = "../feat_lib" }
+ "#,
+ )
+ .file("caller1/src/main.rs", "fn main() {}")
+ .file("caller1/src/lib.rs", "")
+ .file(
+ "caller2/Cargo.toml",
+ r#"
+ [package]
+ name = "caller2"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ feat_lib = { path = "../feat_lib", features = ["myfeature"] }
+ caller1 = { path = "../caller1" }
+ "#,
+ )
+ .file("caller2/src/main.rs", "fn main() {}")
+ .file("caller2/src/lib.rs", "");
+ let p = p.build();
+
+ // Build the entire workspace.
+ p.cargo("build --workspace")
+ .with_stderr(
+ "\
+[..]Compiling feat_lib v0.1.0 ([..])
+[..]Compiling caller1 v0.1.0 ([..])
+[..]Compiling caller2 v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+ assert!(p.bin("caller1").is_file());
+ assert!(p.bin("caller2").is_file());
+
+ // Build `caller1`. Should build the dep library. Because the features
+ // are different from those of the full workspace, it rebuilds.
+ // Ideally, once rust-lang/cargo#3620 is solved, a single Cargo build at the
+ // top level will be enough.
+ p.cargo("build")
+ .cwd("caller1")
+ .with_stderr(
+ "\
+[..]Compiling feat_lib v0.1.0 ([..])
+[..]Compiling caller1 v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run();
+
+ // Alternate building `caller2`/`caller1` a few times, just to make sure
+ // features are being built separately. Should not rebuild anything.
+ p.cargo("build")
+ .cwd("caller2")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+ p.cargo("build")
+ .cwd("caller1")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+ p.cargo("build")
+ .cwd("caller2")
+ .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+ .run();
+}
+
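+/// A git dependency is checked out under `CARGO_HOME`, and its build script runs
+/// `cargo metadata` on its own manifest; workspace discovery must not walk up out of
+/// `CARGO_HOME` and treat the checkout as a member of the enclosing workspace.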
+#[cargo_test]
+fn dont_recurse_out_of_cargo_home() {
+ let git_project = git::new("dep", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep", "0.1.0"))
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ use std::env;
+ use std::path::Path;
+ use std::process::{self, Command};
+
+ fn main() {
+ let cargo = env::var_os("CARGO").unwrap();
+ let cargo_manifest_dir = env::var_os("CARGO_MANIFEST_DIR").unwrap();
+ let output = Command::new(cargo)
+ .args(&["metadata", "--format-version", "1", "--manifest-path"])
+ .arg(&Path::new(&cargo_manifest_dir).join("Cargo.toml"))
+ .output()
+ .unwrap();
+ if !output.status.success() {
+ eprintln!("{}", String::from_utf8(output.stderr).unwrap());
+ process::exit(1);
+ }
+ }
+ "#,
+ )
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies.dep]
+ git = "{}"
+
+ [workspace]
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("check")
+ .env("CARGO_HOME", p.root().join(".cargo"))
+ .run();
+}
+
+// FIXME: this fails because of how workspace.exclude and workspace.members are working.
+/*
+#[cargo_test]
+fn include_and_exclude() {
+ let p = project()
+ .file("Cargo.toml", r#"
+ [workspace]
+ members = ["foo"]
+ exclude = ["foo/bar"]
+ "#)
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", "")
+ .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("foo/bar/src/lib.rs", "");
+ p.build();
+
+ p.cargo("build").cwd("foo").run();
+ assert!(p.root().join("target").is_dir());
+ assert!(!p.root().join("foo/target").is_dir());
+ p.cargo("build").cwd("foo/bar").run();
+ assert!(p.root().join("foo/bar/target").is_dir());
+}
+*/
+
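+/// Setting `CARGO_HOME` to the workspace root itself must not break workspace
+/// discovery or an offline (`--frozen`) check.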
+#[cargo_test]
+fn cargo_home_at_root_works() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["a"]
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+ .file("a/src/lib.rs", "");
+ let p = p.build();
+
+ p.cargo("check").run();
+ p.cargo("check --frozen").env("CARGO_HOME", p.root()).run();
+}
+
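+/// `RUSTC` may be a path relative to the invocation directory (here `./foo`).
+/// Build a small wrapper that forwards its arguments to the real `rustc`, copy it
+/// into a second project, and make sure the relative path still works while that
+/// project and its registry dependency are built.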
+#[cargo_test]
+fn relative_rustc() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ r#"
+ use std::process::Command;
+ use std::env;
+
+ fn main() {
+ let mut cmd = Command::new("rustc");
+ for arg in env::args_os().skip(1) {
+ cmd.arg(arg);
+ }
+ std::process::exit(cmd.status().unwrap().code().unwrap());
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build").run();
+
+ let src = p
+ .root()
+ .join("target/debug/foo")
+ .with_extension(env::consts::EXE_EXTENSION);
+
+ Package::new("a", "0.1.0").publish();
+
+ let p = project()
+ .at("lib")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "lib"
+ version = "0.1.0"
+
+ [dependencies]
+ a = "0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ fs::copy(&src, p.root().join(src.file_name().unwrap())).unwrap();
+
+ let file = format!("./foo{}", env::consts::EXE_SUFFIX);
+ p.cargo("build").env("RUSTC", &file).run();
+}
+
+#[cargo_test]
+fn ws_rustc_err() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a"]
+ "#,
+ )
+ .file("a/Cargo.toml", &basic_lib_manifest("a"))
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("rustc")
+ .with_status(101)
+ .with_stderr("[ERROR] [..]against an actual package[..]")
+ .run();
+
+ p.cargo("rustdoc")
+ .with_status(101)
+ .with_stderr("[ERROR] [..]against an actual package[..]")
+ .run();
+}
+
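+/// A virtual manifest may only contain workspace-level tables; each package-only
+/// section below must be rejected with an error.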
+#[cargo_test]
+fn ws_err_unused() {
+ for key in &[
+ "[lib]",
+ "[[bin]]",
+ "[[example]]",
+ "[[test]]",
+ "[[bench]]",
+ "[dependencies]",
+ "[dev-dependencies]",
+ "[build-dependencies]",
+ "[features]",
+ "[target]",
+ "[badges]",
+ ] {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [workspace]
+ members = ["a"]
+
+ {}
+ "#,
+ key
+ ),
+ )
+ .file("a/Cargo.toml", &basic_lib_manifest("a"))
+ .file("a/src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(&format!(
+ "\
+[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+ this virtual manifest specifies a {} section, which is not allowed
+",
+ key
+ ))
+ .run();
+ }
+}
+
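+/// `profile`, `replace`, and `patch` tables in a non-root member manifest are
+/// ignored, with a warning pointing at the workspace root where they belong.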
+#[cargo_test]
+fn ws_warn_unused() {
+ for (key, name) in &[
+ ("[profile.dev]\nopt-level = 1", "profiles"),
+ ("[replace]\n\"bar:0.1.0\" = { path = \"bar\" }", "replace"),
+ ("[patch.crates-io]\nbar = { path = \"bar\" }", "patch"),
+ ] {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+
+ {}
+ "#,
+ key
+ ),
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .with_stderr_contains(&format!(
+ "\
+[WARNING] {} for the non root package will be ignored, specify {} at the workspace root:
+package: [..]/foo/a/Cargo.toml
+workspace: [..]/foo/Cargo.toml
+",
+ name, name
+ ))
+ .run();
+ }
+}
+
+#[cargo_test]
+fn ws_warn_path() {
+ // Warnings include path to manifest.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["a"]
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ cargo-features = ["edition"]
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ p.cargo("check")
+ .with_stderr_contains("[WARNING] [..]/foo/a/Cargo.toml: the cargo feature `edition`[..]")
+ .run();
+}
+
+#[cargo_test]
+fn invalid_missing() {
+ // Make sure errors are not suppressed with -q.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ x = { path = 'x' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check -q")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to get `x` as a dependency of package `foo v0.1.0 [..]`
+
+Caused by:
+ failed to load source for dependency `x`
+
+Caused by:
+ Unable to update [..]/foo/x
+
+Caused by:
+ failed to read `[..]foo/x/Cargo.toml`
+
+Caused by:
+ [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn member_dep_missing() {
+ // Make sure errors are not suppressed with -q.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ baz = { path = "baz" }
+ "#,
+ )
+ .file("bar/src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("check -q")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to load manifest for workspace member `[..]/bar`
+
+Caused by:
+ failed to load manifest for dependency `baz`
+
+Caused by:
+ failed to read `[..]foo/bar/baz/Cargo.toml`
+
+Caused by:
+ [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn simple_primary_package_env_var() {
+ let is_primary_package = r#"
+ #[test]
+ fn verify_primary_package() {{
+ assert!(option_env!("CARGO_PRIMARY_PACKAGE").is_some());
+ }}
+ "#;
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#,
+ )
+ .file("src/lib.rs", is_primary_package)
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#,
+ )
+ .file("bar/src/lib.rs", is_primary_package);
+ let p = p.build();
+
+ p.cargo("test").run();
+
+ // Again, this time selecting a specific crate
+ p.cargo("clean").run();
+ p.cargo("test -p bar").run();
+
+ // Again, this time selecting all crates
+ p.cargo("clean").run();
+ p.cargo("test --all").run();
+}
+
+#[cargo_test]
+fn virtual_primary_package_env_var() {
+ let is_primary_package = r#"
+ #[test]
+ fn verify_primary_package() {{
+ assert!(option_env!("CARGO_PRIMARY_PACKAGE").is_some());
+ }}
+ "#;
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "bar"]
+ "#,
+ )
+ .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("foo/src/lib.rs", is_primary_package)
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("bar/src/lib.rs", is_primary_package);
+ let p = p.build();
+
+ p.cargo("test").run();
+
+ // Again, this time selecting a specific crate
+ p.cargo("clean").run();
+ p.cargo("test -p foo").run();
+}
+
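+/// `sub/` is a nested workspace whose member `foo` path-depends on the package at the
+/// project root; `cargo tree` run from `sub/foo` must resolve against the nested
+/// workspace rather than the outer one.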
+#[cargo_test]
+fn ensure_correct_workspace_when_nested() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "sub/Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo"]
+ "#,
+ )
+ .file(
+ "sub/foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "../.."}
+ "#,
+ )
+ .file("sub/foo/src/main.rs", "fn main() {}");
+ let p = p.build();
+ p.cargo("tree")
+ .cwd("sub/foo")
+ .with_stdout(
+ "\
+foo v0.1.0 ([..]/foo/sub/foo)
+└── bar v0.1.0 ([..]/foo)\
+ ",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/yank.rs b/src/tools/cargo/tests/testsuite/yank.rs
new file mode 100644
index 000000000..684a04508
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/yank.rs
@@ -0,0 +1,202 @@
+//! Tests for the `cargo yank` command.
+
+use std::fs;
+
+use cargo_test_support::paths::CargoPathExt;
+use cargo_test_support::project;
+use cargo_test_support::registry;
+
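+/// Stub the test registry's yank endpoint with a canned `{"ok": true}` response for
+/// the given crate version; without it the yank requests below would fail.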
+fn setup(name: &str, version: &str) {
+ let dir = registry::api_path().join(format!("api/v1/crates/{}/{}", name, version));
+ dir.mkdir_p();
+ fs::write(dir.join("yank"), r#"{"ok": true}"#).unwrap();
+}
+
+#[cargo_test]
+fn explicit_version() {
+ let registry = registry::init();
+ setup("foo", "0.0.1");
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("yank --version 0.0.1")
+ .replace_crates_io(registry.index_url())
+ .run();
+
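+ // Only the yank response was stubbed by `setup`, so the undo request gets no JSON
+ // body back and fails while parsing the empty response.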
+ p.cargo("yank --undo --version 0.0.1")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ " Updating crates.io index
+ Unyank foo@0.0.1
+error: failed to undo a yank from the registry at file:///[..]
+
+Caused by:
+ EOF while parsing a value at line 1 column 0",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn explicit_version_with_asymmetric() {
+ let registry = registry::RegistryBuilder::new()
+ .http_api()
+ .token(cargo_test_support::registry::Token::rfc_key())
+ .build();
+ setup("foo", "0.0.1");
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ // The http_api server will check that the authorization is correct.
+ // If the authorization was not sent then we would get an unauthorized error.
+ p.cargo("yank --version 0.0.1")
+ .arg("-Zregistry-auth")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .replace_crates_io(registry.index_url())
+ .run();
+
+ p.cargo("yank --undo --version 0.0.1")
+ .arg("-Zregistry-auth")
+ .masquerade_as_nightly_cargo(&["registry-auth"])
+ .replace_crates_io(registry.index_url())
+ .run();
+}
+
+#[cargo_test]
+fn inline_version() {
+ let registry = registry::init();
+ setup("foo", "0.0.1");
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("yank foo@0.0.1")
+ .replace_crates_io(registry.index_url())
+ .run();
+
+ p.cargo("yank --undo foo@0.0.1")
+ .replace_crates_io(registry.index_url())
+ .with_status(101)
+ .with_stderr(
+ " Updating crates.io index
+ Unyank foo@0.0.1
+error: failed to undo a yank from the registry at file:///[..]
+
+Caused by:
+ EOF while parsing a value at line 1 column 0",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn version_required() {
+ setup("foo", "0.0.1");
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("yank foo")
+ .with_status(101)
+ .with_stderr("error: `--version` is required")
+ .run();
+}
+
+#[cargo_test]
+fn inline_version_without_name() {
+ setup("foo", "0.0.1");
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("yank @0.0.1")
+ .with_status(101)
+ .with_stderr("error: missing crate name for `@0.0.1`")
+ .run();
+}
+
+#[cargo_test]
+fn inline_and_explicit_version() {
+ setup("foo", "0.0.1");
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ p.cargo("yank foo@0.0.1 --version 0.0.1")
+ .with_status(101)
+ .with_stderr("error: cannot specify both `@0.0.1` and `--version`")
+ .run();
+}
diff --git a/src/tools/cargo/triagebot.toml b/src/tools/cargo/triagebot.toml
new file mode 100644
index 000000000..192859537
--- /dev/null
+++ b/src/tools/cargo/triagebot.toml
@@ -0,0 +1,320 @@
+[relabel]
+allow-unauthenticated = [
+ "A-*",
+ "C-*",
+ "Command-*",
+ "E-*",
+ "I-*",
+ "O-*",
+ "S-*",
+ "Z-*",
+ "beta-nominated",
+ "needs-mcve",
+ "regression-*",
+ "relnotes",
+]
+
+[ping.windows]
+message = """\
+Hey Windows Group! This bug has been identified as a good "Windows candidate".
+In case it's useful, here are some [instructions] for tackling these sorts of
+bugs. Maybe take a look?
+Thanks! <3
+
+[instructions]: https://rustc-dev-guide.rust-lang.org/notification-groups/windows.html
+"""
+label = "O-windows"
+
+[shortcut]
+
+[autolabel."S-waiting-on-review"]
+new_pr = true
+
+[assign]
+contributing_url = "https://rust-lang.github.io/cargo/contrib/"
+warn_non_default_branch = true
+
+[assign.owners]
+"*" = ["@ehuss", "@epage", "@weihanglo"]
+
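+# Each `autolabel.<label>` entry below applies its label to pull requests that touch
+# any of the listed `trigger_files` paths.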
+[autolabel."A-build-execution"]
+trigger_files = [
+ "src/cargo/core/compiler/compilation.rs",
+ "src/cargo/core/compiler/job_queue/",
+ "src/cargo/core/compiler/mod.rs",
+]
+
+[autolabel."A-build-scripts"]
+trigger_files = ["src/cargo/core/compiler/custom_build.rs"]
+
+[autolabel."A-cache-messages"]
+trigger_files = ["src/cargo/util/rustc.rs"]
+
+[autolabel."A-cargo-targets"]
+trigger_files = [
+ "src/cargo/ops/cargo_compile/compile_filter.rs",
+ "src/cargo/ops/cargo_compile/unit_generator.rs",
+]
+
+[autolabel."A-cfg-expr"]
+trigger_files = [
+ "crates/cargo-platform/",
+ "src/cargo/core/compiler/build_context/target_info.rs",
+]
+
+[autolabel."A-cli"]
+trigger_files = ["src/bin/", "src/cargo/util/command_prelude.rs"]
+
+[autolabel."A-cli-help"]
+trigger_files = ["crates/mdman/", "src/etc/man/"]
+
+[autolabel."A-completions"]
+trigger_files = ["src/etc/_cargo", "src/etc/cargo.bashcomp.sh"]
+
+[autolabel."A-configuration"]
+trigger_files = ["src/cargo/util/config/mod.rs"]
+
+[autolabel."A-console-output"]
+trigger_files = [
+ "src/cargo/core/shell.rs",
+ "src/cargo/util/machine_message.rs",
+ "src/cargo/util/progress.rs",
+]
+
+[autolabel."A-crate-dependencies"]
+trigger_files = ["src/cargo/core/dependency.rs"]
+
+[autolabel."A-crate-types"]
+trigger_files = ["src/cargo/core/compiler/crate_type.rs"]
+
+[autolabel."A-dep-info"]
+trigger_files = ["src/cargo/core/compiler/output_depinfo.rs"]
+
+[autolabel."A-dependency-resolution"]
+trigger_files = [
+ "benches/benchsuite/benches/resolve.rs",
+ "crates/resolver-tests/",
+ "src/cargo/core/resolver/",
+]
+
+[autolabel."A-directory-source"]
+trigger_files = ["src/cargo/sources/directory.rs"]
+
+[autolabel."A-documenting-cargo-itself"]
+trigger_files = ["src/doc/"]
+
+[autolabel."A-environment-variables"]
+trigger_files = ["crates/home/"]
+
+[autolabel."A-errors"]
+trigger_files = ["src/cargo/util/diagnostic_server.rs"]
+
+[autolabel."A-features2"]
+trigger_files = ["src/cargo/core/resolver/features.rs"]
+
+[autolabel."A-filesystem"]
+trigger_files = ["src/cargo/util/flock.rs", "src/cargo/util/important_paths.rs"]
+
+[autolabel."A-future-incompat"]
+trigger_files = ["src/cargo/core/compiler/future_incompat.rs"]
+
+[autolabel."A-git"]
+trigger_files = ["src/cargo/sources/git/"]
+
+[autolabel."A-interacts-with-crates.io"]
+trigger_files = ["crates/crates-io/", "src/cargo/ops/registry.rs"]
+
+[autolabel."A-layout"]
+trigger_files = [
+ "src/cargo/core/compiler/context/compilation_files.rs",
+ "src/cargo/core/compiler/layout.rs",
+]
+
+[autolabel."A-links"]
+trigger_files = ["src/cargo/core/compiler/links.rs"]
+
+[autolabel."A-local-registry-source"]
+trigger_files = ["src/cargo/sources/registry/local.rs"]
+
+[autolabel."A-lockfile"]
+trigger_files = ["src/cargo/ops/lockfile.rs", "src/cargo/core/resolver/encode.rs"]
+
+[autolabel."A-lto"]
+trigger_files = ["src/cargo/core/compiler/lto.rs"]
+
+[autolabel."A-manifest"]
+trigger_files = [
+ "src/cargo/core/manifest.rs",
+ "src/cargo/util/toml/mod.rs",
+ "src/cargo/util/toml_mut/",
+]
+
+[autolabel."A-networking"]
+trigger_files = ["src/cargo/util/network.rs"]
+
+[autolabel."A-overrides"]
+trigger_files = ["src/cargo/sources/replaced.rs"]
+
+[autolabel."A-profiles"]
+trigger_files = ["src/cargo/core/profiles.rs"]
+
+[autolabel."A-rebuild-detection"]
+trigger_files = ["src/cargo/core/compiler/fingerprint/"]
+
+[autolabel."A-registries"]
+trigger_files = ["src/cargo/sources/registry/", "src/cargo/core/registry.rs"]
+
+[autolabel."A-registry-authentication"]
+trigger_files = ["src/cargo/util/auth.rs", "crates/credential/"]
+
+[autolabel."A-semver"]
+trigger_files = [
+ "src/cargo/util/semver_ext.rs",
+ "src/cargo/util/to_semver.rs",
+ "src/doc/semver-check/",
+]
+
+[autolabel."A-source-replacement"]
+trigger_files = ["src/cargo/sources/replaced.rs"]
+
+[autolabel."A-sparse-registry"]
+trigger_files = ["src/cargo/sources/registry/http_remote.rs"]
+
+[autolabel."A-testing-cargo-itself"]
+trigger_files = [
+ "benches/",
+ "crates/cargo-test-macro/",
+ "crates/cargo-test-support/",
+]
+
+[autolabel."A-timings"]
+trigger_files = [
+ "src/cargo/core/compiler/timings.js",
+ "src/cargo/core/compiler/timings.rs",
+ "src/cargo/util/cpu.rs",
+]
+
+[autolabel."A-unstable"]
+trigger_files = ["src/cargo/core/features.rs"]
+
+[autolabel."A-vcs"]
+trigger_files = ["src/cargo/util/vcs.rs"]
+
+[autolabel."A-workspaces"]
+trigger_files = [
+ "benches/benchsuite/benches/workspace_initialization.rs",
+ "src/cargo/core/workspace.rs",
+ "src/cargo/util/workspace.rs"
+]
+
+[autolabel."Command-add"]
+trigger_files = ["src/bin/cargo/commands/add.rs", "src/cargo/ops/cargo_add/"]
+
+[autolabel."Command-bench"]
+trigger_files = ["src/bin/cargo/commands/bench.rs"]
+
+[autolabel."Command-build"]
+trigger_files = ["src/bin/cargo/commands/build.rs"]
+
+[autolabel."Command-check"]
+trigger_files = ["src/bin/cargo/commands/check.rs"]
+
+[autolabel."Command-clean"]
+trigger_files = ["src/bin/cargo/commands/clean.rs", "src/cargo/ops/cargo_clean.rs"]
+
+[autolabel."Command-doc"]
+trigger_files = ["src/bin/cargo/commands/doc.rs", "src/cargo/ops/cargo_doc.rs"]
+
+[autolabel."Command-fetch"]
+trigger_files = ["src/bin/cargo/commands/fetch.rs", "src/cargo/ops/cargo_fetch.rs"]
+
+[autolabel."Command-fix"]
+trigger_files = [
+ "src/bin/cargo/commands/fix.rs",
+ "src/cargo/ops/fix.rs",
+ "src/cargo/util/lockserver.rs",
+]
+
+[autolabel."Command-generate-lockfile"]
+trigger_files = ["src/bin/cargo/commands/generate_lockfile.rs", "src/cargo/ops/cargo_generate_lockfile.rs"]
+
+[autolabel."Command-git-checkout"]
+trigger_files = ["src/bin/cargo/commands/git_checkout.rs"]
+
+[autolabel."Command-init"]
+trigger_files = ["src/bin/cargo/commands/init.rs"]
+
+[autolabel."Command-install"]
+trigger_files = ["src/bin/cargo/commands/install.rs", "src/cargo/ops/cargo_install.rs"]
+
+[autolabel."Command-locate-project"]
+trigger_files = ["src/bin/cargo/commands/locate_project.rs"]
+
+[autolabel."Command-login"]
+trigger_files = ["src/bin/cargo/commands/login.rs"]
+
+[autolabel."Command-logout"]
+trigger_files = ["src/bin/cargo/commands/logout.rs"]
+
+[autolabel."Command-metadata"]
+trigger_files = ["src/bin/cargo/commands/metadata.rs", "src/cargo/ops/cargo_output_metadata.rs"]
+
+[autolabel."Command-new"]
+trigger_files = ["src/bin/cargo/commands/new.rs", "src/cargo/ops/cargo_new.rs"]
+
+[autolabel."Command-owner"]
+trigger_files = ["src/bin/cargo/commands/owner.rs"]
+
+[autolabel."Command-package"]
+trigger_files = ["src/bin/cargo/commands/package.rs", "src/cargo/ops/cargo_package.rs"]
+
+[autolabel."Command-pkgid"]
+trigger_files = ["src/bin/cargo/commands/pkgid.rs", "src/cargo/ops/cargo_pkgid.rs"]
+
+[autolabel."Command-publish"]
+trigger_files = ["src/bin/cargo/commands/publish.rs"]
+
+[autolabel."Command-read-manifest"]
+trigger_files = ["src/bin/cargo/commands/read_manifest.rs", "src/cargo/ops/cargo_read_manifest.rs"]
+
+[autolabel."Command-remove"]
+trigger_files = ["src/bin/cargo/commands/remove.rs", "src/cargo/ops/cargo_remove.rs"]
+
+[autolabel."Command-report"]
+trigger_files = ["src/bin/cargo/commands/report.rs"]
+
+[autolabel."Command-run"]
+trigger_files = ["src/bin/cargo/commands/run.rs", "src/cargo/ops/cargo_run.rs"]
+
+[autolabel."Command-rustc"]
+trigger_files = ["src/bin/cargo/commands/rustc.rs"]
+
+[autolabel."Command-rustdoc"]
+trigger_files = ["src/bin/cargo/commands/rustdoc.rs"]
+
+[autolabel."Command-search"]
+trigger_files = ["src/bin/cargo/commands/search.rs"]
+
+[autolabel."Command-test"]
+trigger_files = ["src/bin/cargo/commands/test.rs", "src/cargo/ops/cargo_test.rs"]
+
+[autolabel."Command-tree"]
+trigger_files = ["src/bin/cargo/commands/tree.rs", "src/cargo/ops/tree/"]
+
+[autolabel."Command-uninstall"]
+trigger_files = ["src/bin/cargo/commands/uninstall.rs", "src/cargo/ops/cargo_uninstall.rs"]
+
+[autolabel."Command-update"]
+trigger_files = ["src/bin/cargo/commands/update.rs"]
+
+[autolabel."Command-vendor"]
+trigger_files = ["src/bin/cargo/commands/vendor.rs", "src/cargo/ops/vendor.rs"]
+
+[autolabel."Command-verify-project"]
+trigger_files = ["src/bin/cargo/commands/verify_project.rs"]
+
+[autolabel."Command-version"]
+trigger_files = ["src/bin/cargo/commands/version.rs"]
+
+[autolabel."Command-yank"]
+trigger_files = ["src/bin/cargo/commands/yank.rs"]